diff --git a/.gitattributes b/.gitattributes
index a6344aac8c09253b3b630fb776ae94478aa0275b..a6f0260516b93f4e03d2ed9e663d80b544f2af0a 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -33,3 +33,10 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
*.zip filter=lfs diff=lfs merge=lfs -text
*.zst filter=lfs diff=lfs merge=lfs -text
*tfevents* filter=lfs diff=lfs merge=lfs -text
+checkpoint-1276/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+checkpoint-1595/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+checkpoint-1914/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+checkpoint-319/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+checkpoint-638/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+checkpoint-957/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+tokenizer.json filter=lfs diff=lfs merge=lfs -text
diff --git a/README.md b/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..a8325c13a455ba067f3f3892fc751081d947ceeb
--- /dev/null
+++ b/README.md
@@ -0,0 +1,146 @@
+---
+library_name: peft
+license: other
+base_model: nvidia/Llama-3_3-Nemotron-Super-49B-v1
+tags:
+- generated_from_trainer
+datasets:
+- ugaoo/subset_each5k_multimedqa
+model-index:
+- name: out/subset_each5k_multimedqa
+ results: []
+---
+
+
+
+[
](https://github.com/axolotl-ai-cloud/axolotl)
+See axolotl config
+
+axolotl version: `0.8.0.dev0`
+```yaml
+base_model: nvidia/Llama-3_3-Nemotron-Super-49B-v1
+model_type: AutoModelForCausalLM
+tokenizer_type: AutoTokenizer
+trust_remote_code: true
+
+load_in_8bit: false
+load_in_4bit: true
+strict: false
+
+datasets:
+ - path: ugaoo/subset_each5k_multimedqa
+ type: alpaca
+val_set_size: 0
+output_dir: ./out/subset_each5k_multimedqa
+
+sequence_len: 4000
+sample_packing: true
+pad_to_sequence_len: true
+
+adapter: qlora
+lora_r: 256
+lora_alpha: 512
+lora_dropout: 0.05
+lora_target_linear: true
+lora_target_modules:
+ - q_proj
+ - k_proj
+ - v_proj
+ - o_proj
+ - up_proj
+ - down_proj
+ - gate_proj
+lora_modules_to_save:
+ - embed_tokens
+ - lm_head
+
+wandb_project: cosmosearch
+wandb_entity:
+wandb_watch:
+wandb_name: subset_each5k_multimedqa_Super-49B
+wandb_log_model:
+
+gradient_accumulation_steps: 3
+micro_batch_size: 4
+num_epochs: 6
+optimizer: adamw_torch
+lr_scheduler: cosine
+learning_rate: 5e-6
+
+train_on_inputs: false
+group_by_length: false
+bf16: auto
+fp16: false
+tf32: false
+
+gradient_checkpointing: true
+early_stopping_patience:
+resume_from_checkpoint:
+logging_steps: 1
+xformers_attention:
+flash_attention: true
+
+warmup_steps: 100
+evals_per_epoch: 6
+eval_table_size:
+saves_per_epoch: 1
+debug:
+deepspeed:
+weight_decay: 0.0
+fsdp:
+fsdp_config:
+save_total_limit: 6
+special_tokens:
+ pad_token: <|end_of_text|>
+
+```
+
+
+
+# out/subset_each5k_multimedqa
+
+This model is a fine-tuned version of [nvidia/Llama-3_3-Nemotron-Super-49B-v1](https://huggingface.co/nvidia/Llama-3_3-Nemotron-Super-49B-v1) on the ugaoo/subset_each5k_multimedqa dataset.
+
+## Model description
+
+More information needed
+
+## Intended uses & limitations
+
+More information needed
+
+## Training and evaluation data
+
+More information needed
+
+## Training procedure
+
+### Training hyperparameters
+
+The following hyperparameters were used during training:
+- learning_rate: 5e-06
+- train_batch_size: 4
+- eval_batch_size: 4
+- seed: 42
+- distributed_type: multi-GPU
+- num_devices: 2
+- gradient_accumulation_steps: 3
+- total_train_batch_size: 24
+- total_eval_batch_size: 8
+- optimizer: Use OptimizerNames.ADAMW_TORCH with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments
+- lr_scheduler_type: cosine
+- lr_scheduler_warmup_steps: 100
+- num_epochs: 6.0
+
+### Training results
+
+
+
+### Framework versions
+
+- PEFT 0.15.0
+- Transformers 4.49.0
+- Pytorch 2.5.1+cu124
+- Datasets 3.4.1
+- Tokenizers 0.21.1
\ No newline at end of file
diff --git a/adapter_config.json b/adapter_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..1edb33780e2306c6b19fd727be8e9b8b35f237c4
--- /dev/null
+++ b/adapter_config.json
@@ -0,0 +1,42 @@
+{
+ "alpha_pattern": {},
+ "auto_mapping": null,
+ "base_model_name_or_path": "nvidia/Llama-3_3-Nemotron-Super-49B-v1",
+ "bias": "none",
+ "corda_config": null,
+ "eva_config": null,
+ "exclude_modules": null,
+ "fan_in_fan_out": null,
+ "inference_mode": true,
+ "init_lora_weights": true,
+ "layer_replication": null,
+ "layers_pattern": null,
+ "layers_to_transform": null,
+ "loftq_config": {},
+ "lora_alpha": 512,
+ "lora_bias": false,
+ "lora_dropout": 0.05,
+ "megatron_config": null,
+ "megatron_core": "megatron.core",
+ "modules_to_save": [
+ "embed_tokens",
+ "lm_head"
+ ],
+ "peft_type": "LORA",
+ "r": 256,
+ "rank_pattern": {},
+ "revision": null,
+ "target_modules": [
+ "gate_proj",
+ "k_proj",
+ "down_proj",
+ "o_proj",
+ "v_proj",
+ "up_proj",
+ "q_proj"
+ ],
+ "task_type": "CAUSAL_LM",
+ "trainable_token_indices": null,
+ "use_dora": false,
+ "use_rslora": false
+}
\ No newline at end of file
diff --git a/adapter_model.safetensors b/adapter_model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..7d16d638ba5f1f0d6e17a6a0445d8d989c8ed984
--- /dev/null
+++ b/adapter_model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a8cb98185ecb57fad2b2cd35f0c47cae364129464746319ff1be6d8581c7a3a9
+size 9016826528
diff --git a/checkpoint-1276/README.md b/checkpoint-1276/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..f4a3934800eeb082a0cb833d7b6af4f68eed3615
--- /dev/null
+++ b/checkpoint-1276/README.md
@@ -0,0 +1,202 @@
+---
+base_model: nvidia/Llama-3_3-Nemotron-Super-49B-v1
+library_name: peft
+---
+
+# Model Card for Model ID
+
+
+
+
+
+## Model Details
+
+### Model Description
+
+
+
+
+
+- **Developed by:** [More Information Needed]
+- **Funded by [optional]:** [More Information Needed]
+- **Shared by [optional]:** [More Information Needed]
+- **Model type:** [More Information Needed]
+- **Language(s) (NLP):** [More Information Needed]
+- **License:** [More Information Needed]
+- **Finetuned from model [optional]:** [More Information Needed]
+
+### Model Sources [optional]
+
+
+
+- **Repository:** [More Information Needed]
+- **Paper [optional]:** [More Information Needed]
+- **Demo [optional]:** [More Information Needed]
+
+## Uses
+
+
+
+### Direct Use
+
+
+
+[More Information Needed]
+
+### Downstream Use [optional]
+
+
+
+[More Information Needed]
+
+### Out-of-Scope Use
+
+
+
+[More Information Needed]
+
+## Bias, Risks, and Limitations
+
+
+
+[More Information Needed]
+
+### Recommendations
+
+
+
+Users (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations.
+
+## How to Get Started with the Model
+
+Use the code below to get started with the model.
+
+[More Information Needed]
+
+## Training Details
+
+### Training Data
+
+
+
+[More Information Needed]
+
+### Training Procedure
+
+
+
+#### Preprocessing [optional]
+
+[More Information Needed]
+
+
+#### Training Hyperparameters
+
+- **Training regime:** [More Information Needed]
+
+#### Speeds, Sizes, Times [optional]
+
+
+
+[More Information Needed]
+
+## Evaluation
+
+
+
+### Testing Data, Factors & Metrics
+
+#### Testing Data
+
+
+
+[More Information Needed]
+
+#### Factors
+
+
+
+[More Information Needed]
+
+#### Metrics
+
+
+
+[More Information Needed]
+
+### Results
+
+[More Information Needed]
+
+#### Summary
+
+
+
+## Model Examination [optional]
+
+
+
+[More Information Needed]
+
+## Environmental Impact
+
+
+
+Carbon emissions can be estimated using the [Machine Learning Impact calculator](https://mlco2.github.io/impact#compute) presented in [Lacoste et al. (2019)](https://arxiv.org/abs/1910.09700).
+
+- **Hardware Type:** [More Information Needed]
+- **Hours used:** [More Information Needed]
+- **Cloud Provider:** [More Information Needed]
+- **Compute Region:** [More Information Needed]
+- **Carbon Emitted:** [More Information Needed]
+
+## Technical Specifications [optional]
+
+### Model Architecture and Objective
+
+[More Information Needed]
+
+### Compute Infrastructure
+
+[More Information Needed]
+
+#### Hardware
+
+[More Information Needed]
+
+#### Software
+
+[More Information Needed]
+
+## Citation [optional]
+
+
+
+**BibTeX:**
+
+[More Information Needed]
+
+**APA:**
+
+[More Information Needed]
+
+## Glossary [optional]
+
+
+
+[More Information Needed]
+
+## More Information [optional]
+
+[More Information Needed]
+
+## Model Card Authors [optional]
+
+[More Information Needed]
+
+## Model Card Contact
+
+[More Information Needed]
+### Framework versions
+
+- PEFT 0.15.0
\ No newline at end of file
diff --git a/checkpoint-1276/adapter_config.json b/checkpoint-1276/adapter_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..1edb33780e2306c6b19fd727be8e9b8b35f237c4
--- /dev/null
+++ b/checkpoint-1276/adapter_config.json
@@ -0,0 +1,42 @@
+{
+ "alpha_pattern": {},
+ "auto_mapping": null,
+ "base_model_name_or_path": "nvidia/Llama-3_3-Nemotron-Super-49B-v1",
+ "bias": "none",
+ "corda_config": null,
+ "eva_config": null,
+ "exclude_modules": null,
+ "fan_in_fan_out": null,
+ "inference_mode": true,
+ "init_lora_weights": true,
+ "layer_replication": null,
+ "layers_pattern": null,
+ "layers_to_transform": null,
+ "loftq_config": {},
+ "lora_alpha": 512,
+ "lora_bias": false,
+ "lora_dropout": 0.05,
+ "megatron_config": null,
+ "megatron_core": "megatron.core",
+ "modules_to_save": [
+ "embed_tokens",
+ "lm_head"
+ ],
+ "peft_type": "LORA",
+ "r": 256,
+ "rank_pattern": {},
+ "revision": null,
+ "target_modules": [
+ "gate_proj",
+ "k_proj",
+ "down_proj",
+ "o_proj",
+ "v_proj",
+ "up_proj",
+ "q_proj"
+ ],
+ "task_type": "CAUSAL_LM",
+ "trainable_token_indices": null,
+ "use_dora": false,
+ "use_rslora": false
+}
\ No newline at end of file
diff --git a/checkpoint-1276/adapter_model.safetensors b/checkpoint-1276/adapter_model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..4b8be07df270fd4e87735aa91354c7f51137f5d6
--- /dev/null
+++ b/checkpoint-1276/adapter_model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d7bde0757e769babf9dc6c014a0ce0896f1088d912615723a24277598b70c49d
+size 9016826528
diff --git a/checkpoint-1276/global_step1274/bf16_zero_pp_rank_0_mp_rank_00_optim_states.pt b/checkpoint-1276/global_step1274/bf16_zero_pp_rank_0_mp_rank_00_optim_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..07cf87de2769e0bd9e822fc60bf88d57b3ae7a7b
--- /dev/null
+++ b/checkpoint-1276/global_step1274/bf16_zero_pp_rank_0_mp_rank_00_optim_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:222bc048054940785017ef071226858bec2645b03f930461055002b65ba56e4f
+size 27050164444
diff --git a/checkpoint-1276/global_step1274/bf16_zero_pp_rank_1_mp_rank_00_optim_states.pt b/checkpoint-1276/global_step1274/bf16_zero_pp_rank_1_mp_rank_00_optim_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..db37057b05cf3281fbe85b3adf2d1cf698afb0f7
--- /dev/null
+++ b/checkpoint-1276/global_step1274/bf16_zero_pp_rank_1_mp_rank_00_optim_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5fdc005932a6d5446bc0f918fc68de05979fd84a3d4b1bda5efb335c34ececda
+size 27050169884
diff --git a/checkpoint-1276/global_step1274/mp_rank_00_model_states.pt b/checkpoint-1276/global_step1274/mp_rank_00_model_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..0cb6e95641a13a1b52784e3eeafc59fb42aea0df
--- /dev/null
+++ b/checkpoint-1276/global_step1274/mp_rank_00_model_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:545433629b5dbe126a1b377a8dc22685cf8ad4ec5132093787c83469d43c9015
+size 9776788601
diff --git a/checkpoint-1276/latest b/checkpoint-1276/latest
new file mode 100644
index 0000000000000000000000000000000000000000..d33d77b6ed79554e8b85a17b6b59c2f172ea9f9e
--- /dev/null
+++ b/checkpoint-1276/latest
@@ -0,0 +1 @@
+global_step1274
\ No newline at end of file
diff --git a/checkpoint-1276/rng_state_0.pth b/checkpoint-1276/rng_state_0.pth
new file mode 100644
index 0000000000000000000000000000000000000000..0d88332a38b69b81b1016c4fa69dbe65f599099a
--- /dev/null
+++ b/checkpoint-1276/rng_state_0.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:73180648d25cd446f1429dc27f4aa551fb5f30d042b0bc24192e218ac88eca4f
+size 14512
diff --git a/checkpoint-1276/rng_state_1.pth b/checkpoint-1276/rng_state_1.pth
new file mode 100644
index 0000000000000000000000000000000000000000..eb29cd6ceb687a018b46728f9e475936553186c4
--- /dev/null
+++ b/checkpoint-1276/rng_state_1.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0a036b9448290510bd0a8a9d696995f9fa767f1e306257df7a231be1150d26a2
+size 14512
diff --git a/checkpoint-1276/scheduler.pt b/checkpoint-1276/scheduler.pt
new file mode 100644
index 0000000000000000000000000000000000000000..809cdd5871eb1dfef67f89f3b3d929c507653f8a
--- /dev/null
+++ b/checkpoint-1276/scheduler.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:561b6ce97e25a940d610e551fa7945082006574c3b6c0ab9444224cd88cf7e29
+size 1064
diff --git a/checkpoint-1276/special_tokens_map.json b/checkpoint-1276/special_tokens_map.json
new file mode 100644
index 0000000000000000000000000000000000000000..278b7f0f84be865c4687700ee7b3c63d89a51e18
--- /dev/null
+++ b/checkpoint-1276/special_tokens_map.json
@@ -0,0 +1,23 @@
+{
+ "bos_token": {
+ "content": "<|begin_of_text|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "<|eot_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": {
+ "content": "<|end_of_text|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+}
diff --git a/checkpoint-1276/tokenizer.json b/checkpoint-1276/tokenizer.json
new file mode 100644
index 0000000000000000000000000000000000000000..1c1d8d5c9024994f1d3b00f9662b8dd89ca13cf2
--- /dev/null
+++ b/checkpoint-1276/tokenizer.json
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6b9e4e7fb171f92fd137b777cc2714bf87d11576700a1dcd7a399e7bbe39537b
+size 17209920
diff --git a/checkpoint-1276/tokenizer_config.json b/checkpoint-1276/tokenizer_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..edd01b980c1db496ea102a51c972ee8f5d1a2c74
--- /dev/null
+++ b/checkpoint-1276/tokenizer_config.json
@@ -0,0 +1,2064 @@
+{
+ "added_tokens_decoder": {
+ "128000": {
+ "content": "<|begin_of_text|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128001": {
+ "content": "<|end_of_text|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128002": {
+ "content": "<|reserved_special_token_0|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128003": {
+ "content": "<|reserved_special_token_1|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128004": {
+ "content": "<|finetune_right_pad_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128005": {
+ "content": "<|reserved_special_token_2|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128006": {
+ "content": "<|start_header_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128007": {
+ "content": "<|end_header_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128008": {
+ "content": "<|eom_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128009": {
+ "content": "<|eot_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128010": {
+ "content": "<|python_tag|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128011": {
+ "content": "<|reserved_special_token_3|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128012": {
+ "content": "<|reserved_special_token_4|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128013": {
+ "content": "<|reserved_special_token_5|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128014": {
+ "content": "<|reserved_special_token_6|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128015": {
+ "content": "<|reserved_special_token_7|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128016": {
+ "content": "<|reserved_special_token_8|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128017": {
+ "content": "<|reserved_special_token_9|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128018": {
+ "content": "<|reserved_special_token_10|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128019": {
+ "content": "<|reserved_special_token_11|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128020": {
+ "content": "<|reserved_special_token_12|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128021": {
+ "content": "<|reserved_special_token_13|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128022": {
+ "content": "<|reserved_special_token_14|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128023": {
+ "content": "<|reserved_special_token_15|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128024": {
+ "content": "<|reserved_special_token_16|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128025": {
+ "content": "<|reserved_special_token_17|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128026": {
+ "content": "<|reserved_special_token_18|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128027": {
+ "content": "<|reserved_special_token_19|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128028": {
+ "content": "<|reserved_special_token_20|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128029": {
+ "content": "<|reserved_special_token_21|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128030": {
+ "content": "<|reserved_special_token_22|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128031": {
+ "content": "<|reserved_special_token_23|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128032": {
+ "content": "<|reserved_special_token_24|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128033": {
+ "content": "<|reserved_special_token_25|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128034": {
+ "content": "<|reserved_special_token_26|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128035": {
+ "content": "<|reserved_special_token_27|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128036": {
+ "content": "<|reserved_special_token_28|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128037": {
+ "content": "<|reserved_special_token_29|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128038": {
+ "content": "<|reserved_special_token_30|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128039": {
+ "content": "<|reserved_special_token_31|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128040": {
+ "content": "<|reserved_special_token_32|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128041": {
+ "content": "<|reserved_special_token_33|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128042": {
+ "content": "<|reserved_special_token_34|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128043": {
+ "content": "<|reserved_special_token_35|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128044": {
+ "content": "<|reserved_special_token_36|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128045": {
+ "content": "<|reserved_special_token_37|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128046": {
+ "content": "<|reserved_special_token_38|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128047": {
+ "content": "<|reserved_special_token_39|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128048": {
+ "content": "<|reserved_special_token_40|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128049": {
+ "content": "<|reserved_special_token_41|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128050": {
+ "content": "<|reserved_special_token_42|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128051": {
+ "content": "<|reserved_special_token_43|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128052": {
+ "content": "<|reserved_special_token_44|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128053": {
+ "content": "<|reserved_special_token_45|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128054": {
+ "content": "<|reserved_special_token_46|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128055": {
+ "content": "<|reserved_special_token_47|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128056": {
+ "content": "<|reserved_special_token_48|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128057": {
+ "content": "<|reserved_special_token_49|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128058": {
+ "content": "<|reserved_special_token_50|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128059": {
+ "content": "<|reserved_special_token_51|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128060": {
+ "content": "<|reserved_special_token_52|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128061": {
+ "content": "<|reserved_special_token_53|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128062": {
+ "content": "<|reserved_special_token_54|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128063": {
+ "content": "<|reserved_special_token_55|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128064": {
+ "content": "<|reserved_special_token_56|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128065": {
+ "content": "<|reserved_special_token_57|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128066": {
+ "content": "<|reserved_special_token_58|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128067": {
+ "content": "<|reserved_special_token_59|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128068": {
+ "content": "<|reserved_special_token_60|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128069": {
+ "content": "<|reserved_special_token_61|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128070": {
+ "content": "<|reserved_special_token_62|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128071": {
+ "content": "<|reserved_special_token_63|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128072": {
+ "content": "<|reserved_special_token_64|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128073": {
+ "content": "<|reserved_special_token_65|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128074": {
+ "content": "<|reserved_special_token_66|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128075": {
+ "content": "<|reserved_special_token_67|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128076": {
+ "content": "<|reserved_special_token_68|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128077": {
+ "content": "<|reserved_special_token_69|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128078": {
+ "content": "<|reserved_special_token_70|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128079": {
+ "content": "<|reserved_special_token_71|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128080": {
+ "content": "<|reserved_special_token_72|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128081": {
+ "content": "<|reserved_special_token_73|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128082": {
+ "content": "<|reserved_special_token_74|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128083": {
+ "content": "<|reserved_special_token_75|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128084": {
+ "content": "<|reserved_special_token_76|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128085": {
+ "content": "<|reserved_special_token_77|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128086": {
+ "content": "<|reserved_special_token_78|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128087": {
+ "content": "<|reserved_special_token_79|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128088": {
+ "content": "<|reserved_special_token_80|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128089": {
+ "content": "<|reserved_special_token_81|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128090": {
+ "content": "<|reserved_special_token_82|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128091": {
+ "content": "<|reserved_special_token_83|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128092": {
+ "content": "<|reserved_special_token_84|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128093": {
+ "content": "<|reserved_special_token_85|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128094": {
+ "content": "<|reserved_special_token_86|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128095": {
+ "content": "<|reserved_special_token_87|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128096": {
+ "content": "<|reserved_special_token_88|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128097": {
+ "content": "<|reserved_special_token_89|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128098": {
+ "content": "<|reserved_special_token_90|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128099": {
+ "content": "<|reserved_special_token_91|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128100": {
+ "content": "<|reserved_special_token_92|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128101": {
+ "content": "<|reserved_special_token_93|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128102": {
+ "content": "<|reserved_special_token_94|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128103": {
+ "content": "<|reserved_special_token_95|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128104": {
+ "content": "<|reserved_special_token_96|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128105": {
+ "content": "<|reserved_special_token_97|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128106": {
+ "content": "<|reserved_special_token_98|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128107": {
+ "content": "<|reserved_special_token_99|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128108": {
+ "content": "<|reserved_special_token_100|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128109": {
+ "content": "<|reserved_special_token_101|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128110": {
+ "content": "<|reserved_special_token_102|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128111": {
+ "content": "<|reserved_special_token_103|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128112": {
+ "content": "<|reserved_special_token_104|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128113": {
+ "content": "<|reserved_special_token_105|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128114": {
+ "content": "<|reserved_special_token_106|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128115": {
+ "content": "<|reserved_special_token_107|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128116": {
+ "content": "<|reserved_special_token_108|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128117": {
+ "content": "<|reserved_special_token_109|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128118": {
+ "content": "<|reserved_special_token_110|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128119": {
+ "content": "<|reserved_special_token_111|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128120": {
+ "content": "<|reserved_special_token_112|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128121": {
+ "content": "<|reserved_special_token_113|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128122": {
+ "content": "<|reserved_special_token_114|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128123": {
+ "content": "<|reserved_special_token_115|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128124": {
+ "content": "<|reserved_special_token_116|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128125": {
+ "content": "<|reserved_special_token_117|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128126": {
+ "content": "<|reserved_special_token_118|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128127": {
+ "content": "<|reserved_special_token_119|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128128": {
+ "content": "<|reserved_special_token_120|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128129": {
+ "content": "<|reserved_special_token_121|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128130": {
+ "content": "<|reserved_special_token_122|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128131": {
+ "content": "<|reserved_special_token_123|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128132": {
+ "content": "<|reserved_special_token_124|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128133": {
+ "content": "<|reserved_special_token_125|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128134": {
+ "content": "<|reserved_special_token_126|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128135": {
+ "content": "<|reserved_special_token_127|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128136": {
+ "content": "<|reserved_special_token_128|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128137": {
+ "content": "<|reserved_special_token_129|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128138": {
+ "content": "<|reserved_special_token_130|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128139": {
+ "content": "<|reserved_special_token_131|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128140": {
+ "content": "<|reserved_special_token_132|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128141": {
+ "content": "<|reserved_special_token_133|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128142": {
+ "content": "<|reserved_special_token_134|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128143": {
+ "content": "<|reserved_special_token_135|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128144": {
+ "content": "<|reserved_special_token_136|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128145": {
+ "content": "<|reserved_special_token_137|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128146": {
+ "content": "<|reserved_special_token_138|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128147": {
+ "content": "<|reserved_special_token_139|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128148": {
+ "content": "<|reserved_special_token_140|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128149": {
+ "content": "<|reserved_special_token_141|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128150": {
+ "content": "<|reserved_special_token_142|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128151": {
+ "content": "<|reserved_special_token_143|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128152": {
+ "content": "<|reserved_special_token_144|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128153": {
+ "content": "<|reserved_special_token_145|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128154": {
+ "content": "<|reserved_special_token_146|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128155": {
+ "content": "<|reserved_special_token_147|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128156": {
+ "content": "<|reserved_special_token_148|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128157": {
+ "content": "<|reserved_special_token_149|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128158": {
+ "content": "<|reserved_special_token_150|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128159": {
+ "content": "<|reserved_special_token_151|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128160": {
+ "content": "<|reserved_special_token_152|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128161": {
+ "content": "<|reserved_special_token_153|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128162": {
+ "content": "<|reserved_special_token_154|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128163": {
+ "content": "<|reserved_special_token_155|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128164": {
+ "content": "<|reserved_special_token_156|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128165": {
+ "content": "<|reserved_special_token_157|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128166": {
+ "content": "<|reserved_special_token_158|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128167": {
+ "content": "<|reserved_special_token_159|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128168": {
+ "content": "<|reserved_special_token_160|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128169": {
+ "content": "<|reserved_special_token_161|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128170": {
+ "content": "<|reserved_special_token_162|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128171": {
+ "content": "<|reserved_special_token_163|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128172": {
+ "content": "<|reserved_special_token_164|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128173": {
+ "content": "<|reserved_special_token_165|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128174": {
+ "content": "<|reserved_special_token_166|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128175": {
+ "content": "<|reserved_special_token_167|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128176": {
+ "content": "<|reserved_special_token_168|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128177": {
+ "content": "<|reserved_special_token_169|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128178": {
+ "content": "<|reserved_special_token_170|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128179": {
+ "content": "<|reserved_special_token_171|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128180": {
+ "content": "<|reserved_special_token_172|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128181": {
+ "content": "<|reserved_special_token_173|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128182": {
+ "content": "<|reserved_special_token_174|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128183": {
+ "content": "<|reserved_special_token_175|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128184": {
+ "content": "<|reserved_special_token_176|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128185": {
+ "content": "<|reserved_special_token_177|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128186": {
+ "content": "<|reserved_special_token_178|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128187": {
+ "content": "<|reserved_special_token_179|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128188": {
+ "content": "<|reserved_special_token_180|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128189": {
+ "content": "<|reserved_special_token_181|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128190": {
+ "content": "<|reserved_special_token_182|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128191": {
+ "content": "<|reserved_special_token_183|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128192": {
+ "content": "<|reserved_special_token_184|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128193": {
+ "content": "<|reserved_special_token_185|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128194": {
+ "content": "<|reserved_special_token_186|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128195": {
+ "content": "<|reserved_special_token_187|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128196": {
+ "content": "<|reserved_special_token_188|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128197": {
+ "content": "<|reserved_special_token_189|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128198": {
+ "content": "<|reserved_special_token_190|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128199": {
+ "content": "<|reserved_special_token_191|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128200": {
+ "content": "<|reserved_special_token_192|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128201": {
+ "content": "<|reserved_special_token_193|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128202": {
+ "content": "<|reserved_special_token_194|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128203": {
+ "content": "<|reserved_special_token_195|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128204": {
+ "content": "<|reserved_special_token_196|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128205": {
+ "content": "<|reserved_special_token_197|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128206": {
+ "content": "<|reserved_special_token_198|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128207": {
+ "content": "<|reserved_special_token_199|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128208": {
+ "content": "<|reserved_special_token_200|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128209": {
+ "content": "<|reserved_special_token_201|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128210": {
+ "content": "<|reserved_special_token_202|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128211": {
+ "content": "<|reserved_special_token_203|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128212": {
+ "content": "<|reserved_special_token_204|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128213": {
+ "content": "<|reserved_special_token_205|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128214": {
+ "content": "<|reserved_special_token_206|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128215": {
+ "content": "<|reserved_special_token_207|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128216": {
+ "content": "<|reserved_special_token_208|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128217": {
+ "content": "<|reserved_special_token_209|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128218": {
+ "content": "<|reserved_special_token_210|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128219": {
+ "content": "<|reserved_special_token_211|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128220": {
+ "content": "<|reserved_special_token_212|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128221": {
+ "content": "<|reserved_special_token_213|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128222": {
+ "content": "<|reserved_special_token_214|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128223": {
+ "content": "<|reserved_special_token_215|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128224": {
+ "content": "<|reserved_special_token_216|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128225": {
+ "content": "<|reserved_special_token_217|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128226": {
+ "content": "<|reserved_special_token_218|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128227": {
+ "content": "<|reserved_special_token_219|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128228": {
+ "content": "<|reserved_special_token_220|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128229": {
+ "content": "<|reserved_special_token_221|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128230": {
+ "content": "<|reserved_special_token_222|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128231": {
+ "content": "<|reserved_special_token_223|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128232": {
+ "content": "<|reserved_special_token_224|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128233": {
+ "content": "<|reserved_special_token_225|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128234": {
+ "content": "<|reserved_special_token_226|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128235": {
+ "content": "<|reserved_special_token_227|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128236": {
+ "content": "<|reserved_special_token_228|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128237": {
+ "content": "<|reserved_special_token_229|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128238": {
+ "content": "<|reserved_special_token_230|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128239": {
+ "content": "<|reserved_special_token_231|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128240": {
+ "content": "<|reserved_special_token_232|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128241": {
+ "content": "<|reserved_special_token_233|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128242": {
+ "content": "<|reserved_special_token_234|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128243": {
+ "content": "<|reserved_special_token_235|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128244": {
+ "content": "<|reserved_special_token_236|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128245": {
+ "content": "<|reserved_special_token_237|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128246": {
+ "content": "<|reserved_special_token_238|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128247": {
+ "content": "<|reserved_special_token_239|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128248": {
+ "content": "<|reserved_special_token_240|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128249": {
+ "content": "<|reserved_special_token_241|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128250": {
+ "content": "<|reserved_special_token_242|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128251": {
+ "content": "<|reserved_special_token_243|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128252": {
+ "content": "<|reserved_special_token_244|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128253": {
+ "content": "<|reserved_special_token_245|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128254": {
+ "content": "<|reserved_special_token_246|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128255": {
+ "content": "<|reserved_special_token_247|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ }
+ },
+ "bos_token": "<|begin_of_text|>",
+ "chat_template": "{{- bos_token }}{%- if messages[0]['role'] == 'system' %}{%- set system_message = messages[0]['content']|trim %}{%- set messages = messages[1:] %}{%- else %}{%- set system_message = \"\" %}{%- endif %}{{- \"<|start_header_id|>system<|end_header_id|>\\n\\n\" }}{{- system_message }}{{- \"<|eot_id|>\" }}{%- for message in messages %}{%- if message['role'] == 'assistant' and '</think>' in message['content'] %}{%- set content = message['content'].split('</think>')[-1].lstrip() %}{%- else %}{%- set content = message['content'] %}{%- endif %}{{- '<|start_header_id|>' + message['role'] + '<|end_header_id|>\\n\\n' + content | trim + '<|eot_id|>' }}{%- endfor %}{%- if add_generation_prompt %}{{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' }}{%- endif %}",
+ "clean_up_tokenization_spaces": true,
+ "eos_token": "<|eot_id|>",
+ "extra_special_tokens": {},
+ "model_input_names": [
+ "input_ids",
+ "attention_mask"
+ ],
+ "model_max_length": 131072,
+ "pad_token": "<|end_of_text|>",
+ "tokenizer_class": "PreTrainedTokenizer"
+}
diff --git a/checkpoint-1276/trainer_state.json b/checkpoint-1276/trainer_state.json
new file mode 100644
index 0000000000000000000000000000000000000000..41253ad9b2e2c904758595f32b00239fc143f3ed
--- /dev/null
+++ b/checkpoint-1276/trainer_state.json
@@ -0,0 +1,8965 @@
+{
+ "best_metric": null,
+ "best_model_checkpoint": null,
+ "epoch": 3.989561586638831,
+ "eval_steps": 500,
+ "global_step": 1276,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ {
+ "epoch": 0.003131524008350731,
+ "grad_norm": 13.917898178100586,
+ "learning_rate": 5.0000000000000004e-08,
+ "loss": 4.1051,
+ "step": 1
+ },
+ {
+ "epoch": 0.006263048016701462,
+ "grad_norm": 17.327869415283203,
+ "learning_rate": 1.0000000000000001e-07,
+ "loss": 4.1048,
+ "step": 2
+ },
+ {
+ "epoch": 0.009394572025052192,
+ "grad_norm": 14.063946723937988,
+ "learning_rate": 1.5000000000000002e-07,
+ "loss": 4.0741,
+ "step": 3
+ },
+ {
+ "epoch": 0.012526096033402923,
+ "grad_norm": 16.817699432373047,
+ "learning_rate": 2.0000000000000002e-07,
+ "loss": 4.2002,
+ "step": 4
+ },
+ {
+ "epoch": 0.015657620041753653,
+ "grad_norm": 14.47036361694336,
+ "learning_rate": 2.5000000000000004e-07,
+ "loss": 4.2652,
+ "step": 5
+ },
+ {
+ "epoch": 0.018789144050104383,
+ "grad_norm": 14.474193572998047,
+ "learning_rate": 3.0000000000000004e-07,
+ "loss": 4.0888,
+ "step": 6
+ },
+ {
+ "epoch": 0.021920668058455117,
+ "grad_norm": 14.865458488464355,
+ "learning_rate": 3.5000000000000004e-07,
+ "loss": 4.0014,
+ "step": 7
+ },
+ {
+ "epoch": 0.025052192066805846,
+ "grad_norm": 15.338888168334961,
+ "learning_rate": 4.0000000000000003e-07,
+ "loss": 4.13,
+ "step": 8
+ },
+ {
+ "epoch": 0.028183716075156576,
+ "grad_norm": 15.154336929321289,
+ "learning_rate": 4.5000000000000003e-07,
+ "loss": 4.2493,
+ "step": 9
+ },
+ {
+ "epoch": 0.031315240083507306,
+ "grad_norm": 15.919597625732422,
+ "learning_rate": 5.000000000000001e-07,
+ "loss": 4.0535,
+ "step": 10
+ },
+ {
+ "epoch": 0.03444676409185804,
+ "grad_norm": 14.981926918029785,
+ "learning_rate": 5.5e-07,
+ "loss": 3.9064,
+ "step": 11
+ },
+ {
+ "epoch": 0.037578288100208766,
+ "grad_norm": 13.36101245880127,
+ "learning_rate": 6.000000000000001e-07,
+ "loss": 4.1939,
+ "step": 12
+ },
+ {
+ "epoch": 0.0407098121085595,
+ "grad_norm": 15.58773422241211,
+ "learning_rate": 6.5e-07,
+ "loss": 4.18,
+ "step": 13
+ },
+ {
+ "epoch": 0.04384133611691023,
+ "grad_norm": 13.560139656066895,
+ "learning_rate": 7.000000000000001e-07,
+ "loss": 3.9414,
+ "step": 14
+ },
+ {
+ "epoch": 0.04697286012526096,
+ "grad_norm": 12.307971954345703,
+ "learning_rate": 7.5e-07,
+ "loss": 3.8836,
+ "step": 15
+ },
+ {
+ "epoch": 0.05010438413361169,
+ "grad_norm": 14.533182144165039,
+ "learning_rate": 8.000000000000001e-07,
+ "loss": 4.1551,
+ "step": 16
+ },
+ {
+ "epoch": 0.05323590814196242,
+ "grad_norm": 13.453729629516602,
+ "learning_rate": 8.500000000000001e-07,
+ "loss": 4.0048,
+ "step": 17
+ },
+ {
+ "epoch": 0.05636743215031315,
+ "grad_norm": 13.45992374420166,
+ "learning_rate": 9.000000000000001e-07,
+ "loss": 4.0745,
+ "step": 18
+ },
+ {
+ "epoch": 0.059498956158663886,
+ "grad_norm": 11.857145309448242,
+ "learning_rate": 9.500000000000001e-07,
+ "loss": 3.9871,
+ "step": 19
+ },
+ {
+ "epoch": 0.06263048016701461,
+ "grad_norm": 11.872294425964355,
+ "learning_rate": 1.0000000000000002e-06,
+ "loss": 3.8959,
+ "step": 20
+ },
+ {
+ "epoch": 0.06576200417536535,
+ "grad_norm": 12.969825744628906,
+ "learning_rate": 1.0500000000000001e-06,
+ "loss": 4.0308,
+ "step": 21
+ },
+ {
+ "epoch": 0.06889352818371608,
+ "grad_norm": 12.33769416809082,
+ "learning_rate": 1.1e-06,
+ "loss": 3.9341,
+ "step": 22
+ },
+ {
+ "epoch": 0.0720250521920668,
+ "grad_norm": 12.669405937194824,
+ "learning_rate": 1.1500000000000002e-06,
+ "loss": 3.8511,
+ "step": 23
+ },
+ {
+ "epoch": 0.07515657620041753,
+ "grad_norm": 10.677213668823242,
+ "learning_rate": 1.2000000000000002e-06,
+ "loss": 3.7764,
+ "step": 24
+ },
+ {
+ "epoch": 0.07828810020876827,
+ "grad_norm": 10.366402626037598,
+ "learning_rate": 1.25e-06,
+ "loss": 3.5291,
+ "step": 25
+ },
+ {
+ "epoch": 0.081419624217119,
+ "grad_norm": 11.211421012878418,
+ "learning_rate": 1.3e-06,
+ "loss": 3.5765,
+ "step": 26
+ },
+ {
+ "epoch": 0.08455114822546973,
+ "grad_norm": 11.313716888427734,
+ "learning_rate": 1.3500000000000002e-06,
+ "loss": 3.4849,
+ "step": 27
+ },
+ {
+ "epoch": 0.08768267223382047,
+ "grad_norm": 10.41294002532959,
+ "learning_rate": 1.4000000000000001e-06,
+ "loss": 3.2653,
+ "step": 28
+ },
+ {
+ "epoch": 0.09081419624217119,
+ "grad_norm": 10.40064525604248,
+ "learning_rate": 1.45e-06,
+ "loss": 3.3384,
+ "step": 29
+ },
+ {
+ "epoch": 0.09394572025052192,
+ "grad_norm": 10.05427074432373,
+ "learning_rate": 1.5e-06,
+ "loss": 3.2257,
+ "step": 30
+ },
+ {
+ "epoch": 0.09707724425887265,
+ "grad_norm": 9.583163261413574,
+ "learning_rate": 1.5500000000000002e-06,
+ "loss": 3.1371,
+ "step": 31
+ },
+ {
+ "epoch": 0.10020876826722339,
+ "grad_norm": 10.09977912902832,
+ "learning_rate": 1.6000000000000001e-06,
+ "loss": 3.0658,
+ "step": 32
+ },
+ {
+ "epoch": 0.10334029227557412,
+ "grad_norm": 9.271486282348633,
+ "learning_rate": 1.6500000000000003e-06,
+ "loss": 2.9693,
+ "step": 33
+ },
+ {
+ "epoch": 0.10647181628392484,
+ "grad_norm": 10.687992095947266,
+ "learning_rate": 1.7000000000000002e-06,
+ "loss": 2.95,
+ "step": 34
+ },
+ {
+ "epoch": 0.10960334029227557,
+ "grad_norm": 8.762290000915527,
+ "learning_rate": 1.75e-06,
+ "loss": 2.8286,
+ "step": 35
+ },
+ {
+ "epoch": 0.1127348643006263,
+ "grad_norm": 10.13785171508789,
+ "learning_rate": 1.8000000000000001e-06,
+ "loss": 2.3664,
+ "step": 36
+ },
+ {
+ "epoch": 0.11586638830897704,
+ "grad_norm": 18.301353454589844,
+ "learning_rate": 1.85e-06,
+ "loss": 2.5533,
+ "step": 37
+ },
+ {
+ "epoch": 0.11899791231732777,
+ "grad_norm": 11.490377426147461,
+ "learning_rate": 1.9000000000000002e-06,
+ "loss": 2.6133,
+ "step": 38
+ },
+ {
+ "epoch": 0.12212943632567849,
+ "grad_norm": 15.614163398742676,
+ "learning_rate": 1.9500000000000004e-06,
+ "loss": 2.3596,
+ "step": 39
+ },
+ {
+ "epoch": 0.12526096033402923,
+ "grad_norm": 17.757442474365234,
+ "learning_rate": 2.0000000000000003e-06,
+ "loss": 2.3491,
+ "step": 40
+ },
+ {
+ "epoch": 0.12839248434237996,
+ "grad_norm": 17.18431854248047,
+ "learning_rate": 2.05e-06,
+ "loss": 2.2361,
+ "step": 41
+ },
+ {
+ "epoch": 0.1315240083507307,
+ "grad_norm": 16.149789810180664,
+ "learning_rate": 2.1000000000000002e-06,
+ "loss": 2.1457,
+ "step": 42
+ },
+ {
+ "epoch": 0.13465553235908143,
+ "grad_norm": 15.256914138793945,
+ "learning_rate": 2.15e-06,
+ "loss": 2.12,
+ "step": 43
+ },
+ {
+ "epoch": 0.13778705636743216,
+ "grad_norm": 15.537406921386719,
+ "learning_rate": 2.2e-06,
+ "loss": 2.1877,
+ "step": 44
+ },
+ {
+ "epoch": 0.1409185803757829,
+ "grad_norm": 7.947713851928711,
+ "learning_rate": 2.25e-06,
+ "loss": 2.1648,
+ "step": 45
+ },
+ {
+ "epoch": 0.1440501043841336,
+ "grad_norm": 8.818676948547363,
+ "learning_rate": 2.3000000000000004e-06,
+ "loss": 2.134,
+ "step": 46
+ },
+ {
+ "epoch": 0.14718162839248433,
+ "grad_norm": 5.175768852233887,
+ "learning_rate": 2.35e-06,
+ "loss": 2.0796,
+ "step": 47
+ },
+ {
+ "epoch": 0.15031315240083507,
+ "grad_norm": 6.750611305236816,
+ "learning_rate": 2.4000000000000003e-06,
+ "loss": 1.9174,
+ "step": 48
+ },
+ {
+ "epoch": 0.1534446764091858,
+ "grad_norm": 6.2147979736328125,
+ "learning_rate": 2.4500000000000003e-06,
+ "loss": 1.8065,
+ "step": 49
+ },
+ {
+ "epoch": 0.15657620041753653,
+ "grad_norm": 13.291611671447754,
+ "learning_rate": 2.5e-06,
+ "loss": 1.7061,
+ "step": 50
+ },
+ {
+ "epoch": 0.15970772442588727,
+ "grad_norm": 7.251201629638672,
+ "learning_rate": 2.55e-06,
+ "loss": 1.7924,
+ "step": 51
+ },
+ {
+ "epoch": 0.162839248434238,
+ "grad_norm": 5.2126054763793945,
+ "learning_rate": 2.6e-06,
+ "loss": 1.6735,
+ "step": 52
+ },
+ {
+ "epoch": 0.16597077244258873,
+ "grad_norm": 5.435528755187988,
+ "learning_rate": 2.6500000000000005e-06,
+ "loss": 1.6265,
+ "step": 53
+ },
+ {
+ "epoch": 0.16910229645093947,
+ "grad_norm": 4.505807399749756,
+ "learning_rate": 2.7000000000000004e-06,
+ "loss": 1.4851,
+ "step": 54
+ },
+ {
+ "epoch": 0.1722338204592902,
+ "grad_norm": 5.128388404846191,
+ "learning_rate": 2.7500000000000004e-06,
+ "loss": 1.5832,
+ "step": 55
+ },
+ {
+ "epoch": 0.17536534446764093,
+ "grad_norm": 16.935827255249023,
+ "learning_rate": 2.8000000000000003e-06,
+ "loss": 1.6553,
+ "step": 56
+ },
+ {
+ "epoch": 0.17849686847599164,
+ "grad_norm": 3.664458990097046,
+ "learning_rate": 2.85e-06,
+ "loss": 1.5,
+ "step": 57
+ },
+ {
+ "epoch": 0.18162839248434237,
+ "grad_norm": 7.763802528381348,
+ "learning_rate": 2.9e-06,
+ "loss": 1.367,
+ "step": 58
+ },
+ {
+ "epoch": 0.1847599164926931,
+ "grad_norm": 3.2216155529022217,
+ "learning_rate": 2.95e-06,
+ "loss": 1.3863,
+ "step": 59
+ },
+ {
+ "epoch": 0.18789144050104384,
+ "grad_norm": 4.384445667266846,
+ "learning_rate": 3e-06,
+ "loss": 1.4247,
+ "step": 60
+ },
+ {
+ "epoch": 0.19102296450939457,
+ "grad_norm": 4.8080878257751465,
+ "learning_rate": 3.05e-06,
+ "loss": 1.3257,
+ "step": 61
+ },
+ {
+ "epoch": 0.1941544885177453,
+ "grad_norm": 4.154761791229248,
+ "learning_rate": 3.1000000000000004e-06,
+ "loss": 1.321,
+ "step": 62
+ },
+ {
+ "epoch": 0.19728601252609604,
+ "grad_norm": 6.4742112159729,
+ "learning_rate": 3.1500000000000003e-06,
+ "loss": 1.2823,
+ "step": 63
+ },
+ {
+ "epoch": 0.20041753653444677,
+ "grad_norm": 2.583422899246216,
+ "learning_rate": 3.2000000000000003e-06,
+ "loss": 1.2136,
+ "step": 64
+ },
+ {
+ "epoch": 0.2035490605427975,
+ "grad_norm": 4.1933488845825195,
+ "learning_rate": 3.2500000000000002e-06,
+ "loss": 1.1855,
+ "step": 65
+ },
+ {
+ "epoch": 0.20668058455114824,
+ "grad_norm": 4.11049747467041,
+ "learning_rate": 3.3000000000000006e-06,
+ "loss": 1.2389,
+ "step": 66
+ },
+ {
+ "epoch": 0.20981210855949894,
+ "grad_norm": 2.264458417892456,
+ "learning_rate": 3.3500000000000005e-06,
+ "loss": 1.0651,
+ "step": 67
+ },
+ {
+ "epoch": 0.21294363256784968,
+ "grad_norm": 2.5408174991607666,
+ "learning_rate": 3.4000000000000005e-06,
+ "loss": 1.1389,
+ "step": 68
+ },
+ {
+ "epoch": 0.2160751565762004,
+ "grad_norm": 7.82421350479126,
+ "learning_rate": 3.45e-06,
+ "loss": 1.0956,
+ "step": 69
+ },
+ {
+ "epoch": 0.21920668058455114,
+ "grad_norm": 3.070939064025879,
+ "learning_rate": 3.5e-06,
+ "loss": 1.0451,
+ "step": 70
+ },
+ {
+ "epoch": 0.22233820459290188,
+ "grad_norm": 2.6310527324676514,
+ "learning_rate": 3.5500000000000003e-06,
+ "loss": 1.0538,
+ "step": 71
+ },
+ {
+ "epoch": 0.2254697286012526,
+ "grad_norm": 7.630155563354492,
+ "learning_rate": 3.6000000000000003e-06,
+ "loss": 1.0052,
+ "step": 72
+ },
+ {
+ "epoch": 0.22860125260960334,
+ "grad_norm": 6.950636863708496,
+ "learning_rate": 3.65e-06,
+ "loss": 1.0473,
+ "step": 73
+ },
+ {
+ "epoch": 0.23173277661795408,
+ "grad_norm": 2.2703945636749268,
+ "learning_rate": 3.7e-06,
+ "loss": 1.0576,
+ "step": 74
+ },
+ {
+ "epoch": 0.2348643006263048,
+ "grad_norm": 3.3817710876464844,
+ "learning_rate": 3.7500000000000005e-06,
+ "loss": 1.0177,
+ "step": 75
+ },
+ {
+ "epoch": 0.23799582463465555,
+ "grad_norm": 7.266414642333984,
+ "learning_rate": 3.8000000000000005e-06,
+ "loss": 1.0645,
+ "step": 76
+ },
+ {
+ "epoch": 0.24112734864300625,
+ "grad_norm": 5.782608509063721,
+ "learning_rate": 3.85e-06,
+ "loss": 1.0162,
+ "step": 77
+ },
+ {
+ "epoch": 0.24425887265135698,
+ "grad_norm": 2.7938575744628906,
+ "learning_rate": 3.900000000000001e-06,
+ "loss": 0.9664,
+ "step": 78
+ },
+ {
+ "epoch": 0.24739039665970772,
+ "grad_norm": 6.681935787200928,
+ "learning_rate": 3.95e-06,
+ "loss": 0.953,
+ "step": 79
+ },
+ {
+ "epoch": 0.25052192066805845,
+ "grad_norm": 2.253279209136963,
+ "learning_rate": 4.000000000000001e-06,
+ "loss": 0.9568,
+ "step": 80
+ },
+ {
+ "epoch": 0.2536534446764092,
+ "grad_norm": 1.4875826835632324,
+ "learning_rate": 4.05e-06,
+ "loss": 0.9448,
+ "step": 81
+ },
+ {
+ "epoch": 0.2567849686847599,
+ "grad_norm": 2.4987940788269043,
+ "learning_rate": 4.1e-06,
+ "loss": 0.9393,
+ "step": 82
+ },
+ {
+ "epoch": 0.2599164926931106,
+ "grad_norm": 4.712948322296143,
+ "learning_rate": 4.15e-06,
+ "loss": 0.9532,
+ "step": 83
+ },
+ {
+ "epoch": 0.2630480167014614,
+ "grad_norm": 6.9030632972717285,
+ "learning_rate": 4.2000000000000004e-06,
+ "loss": 0.96,
+ "step": 84
+ },
+ {
+ "epoch": 0.2661795407098121,
+ "grad_norm": 3.4780967235565186,
+ "learning_rate": 4.25e-06,
+ "loss": 0.8993,
+ "step": 85
+ },
+ {
+ "epoch": 0.26931106471816285,
+ "grad_norm": 1.526064395904541,
+ "learning_rate": 4.3e-06,
+ "loss": 0.9021,
+ "step": 86
+ },
+ {
+ "epoch": 0.27244258872651356,
+ "grad_norm": 10.727686882019043,
+ "learning_rate": 4.350000000000001e-06,
+ "loss": 0.856,
+ "step": 87
+ },
+ {
+ "epoch": 0.2755741127348643,
+ "grad_norm": 12.483160972595215,
+ "learning_rate": 4.4e-06,
+ "loss": 0.9357,
+ "step": 88
+ },
+ {
+ "epoch": 0.278705636743215,
+ "grad_norm": 6.544492244720459,
+ "learning_rate": 4.450000000000001e-06,
+ "loss": 0.9168,
+ "step": 89
+ },
+ {
+ "epoch": 0.2818371607515658,
+ "grad_norm": 1.178139567375183,
+ "learning_rate": 4.5e-06,
+ "loss": 0.8748,
+ "step": 90
+ },
+ {
+ "epoch": 0.2849686847599165,
+ "grad_norm": 1.711506962776184,
+ "learning_rate": 4.5500000000000005e-06,
+ "loss": 0.8425,
+ "step": 91
+ },
+ {
+ "epoch": 0.2881002087682672,
+ "grad_norm": 3.281747341156006,
+ "learning_rate": 4.600000000000001e-06,
+ "loss": 0.8491,
+ "step": 92
+ },
+ {
+ "epoch": 0.29123173277661796,
+ "grad_norm": 2.2964377403259277,
+ "learning_rate": 4.65e-06,
+ "loss": 0.8038,
+ "step": 93
+ },
+ {
+ "epoch": 0.29436325678496866,
+ "grad_norm": 1.959700345993042,
+ "learning_rate": 4.7e-06,
+ "loss": 0.8439,
+ "step": 94
+ },
+ {
+ "epoch": 0.2974947807933194,
+ "grad_norm": 3.979384183883667,
+ "learning_rate": 4.75e-06,
+ "loss": 0.8839,
+ "step": 95
+ },
+ {
+ "epoch": 0.30062630480167013,
+ "grad_norm": 1.4721262454986572,
+ "learning_rate": 4.800000000000001e-06,
+ "loss": 0.845,
+ "step": 96
+ },
+ {
+ "epoch": 0.3037578288100209,
+ "grad_norm": 2.862248659133911,
+ "learning_rate": 4.85e-06,
+ "loss": 0.7748,
+ "step": 97
+ },
+ {
+ "epoch": 0.3068893528183716,
+ "grad_norm": 3.7439088821411133,
+ "learning_rate": 4.9000000000000005e-06,
+ "loss": 0.8145,
+ "step": 98
+ },
+ {
+ "epoch": 0.31002087682672236,
+ "grad_norm": 1.6654618978500366,
+ "learning_rate": 4.95e-06,
+ "loss": 0.8326,
+ "step": 99
+ },
+ {
+ "epoch": 0.31315240083507306,
+ "grad_norm": 7.8437581062316895,
+ "learning_rate": 5e-06,
+ "loss": 0.8666,
+ "step": 100
+ },
+ {
+ "epoch": 0.3162839248434238,
+ "grad_norm": 6.429738521575928,
+ "learning_rate": 4.999996250830422e-06,
+ "loss": 0.836,
+ "step": 101
+ },
+ {
+ "epoch": 0.31941544885177453,
+ "grad_norm": 2.6017794609069824,
+ "learning_rate": 4.9999850033329326e-06,
+ "loss": 0.7785,
+ "step": 102
+ },
+ {
+ "epoch": 0.32254697286012524,
+ "grad_norm": 1.0575449466705322,
+ "learning_rate": 4.999966257541265e-06,
+ "loss": 0.7639,
+ "step": 103
+ },
+ {
+ "epoch": 0.325678496868476,
+ "grad_norm": 2.6932010650634766,
+ "learning_rate": 4.999940013511647e-06,
+ "loss": 0.8214,
+ "step": 104
+ },
+ {
+ "epoch": 0.3288100208768267,
+ "grad_norm": 2.925288438796997,
+ "learning_rate": 4.999906271322792e-06,
+ "loss": 0.8797,
+ "step": 105
+ },
+ {
+ "epoch": 0.33194154488517746,
+ "grad_norm": 1.3570607900619507,
+ "learning_rate": 4.9998650310759035e-06,
+ "loss": 0.792,
+ "step": 106
+ },
+ {
+ "epoch": 0.33507306889352817,
+ "grad_norm": 5.126713752746582,
+ "learning_rate": 4.999816292894676e-06,
+ "loss": 0.8352,
+ "step": 107
+ },
+ {
+ "epoch": 0.33820459290187893,
+ "grad_norm": 1.8966432809829712,
+ "learning_rate": 4.99976005692529e-06,
+ "loss": 0.7663,
+ "step": 108
+ },
+ {
+ "epoch": 0.34133611691022964,
+ "grad_norm": 1.3100829124450684,
+ "learning_rate": 4.999696323336418e-06,
+ "loss": 0.771,
+ "step": 109
+ },
+ {
+ "epoch": 0.3444676409185804,
+ "grad_norm": 2.4025354385375977,
+ "learning_rate": 4.999625092319218e-06,
+ "loss": 0.7618,
+ "step": 110
+ },
+ {
+ "epoch": 0.3475991649269311,
+ "grad_norm": 1.130232810974121,
+ "learning_rate": 4.999546364087334e-06,
+ "loss": 0.7705,
+ "step": 111
+ },
+ {
+ "epoch": 0.35073068893528186,
+ "grad_norm": 3.430262327194214,
+ "learning_rate": 4.999460138876901e-06,
+ "loss": 0.77,
+ "step": 112
+ },
+ {
+ "epoch": 0.35386221294363257,
+ "grad_norm": 1.1272103786468506,
+ "learning_rate": 4.999366416946536e-06,
+ "loss": 0.7133,
+ "step": 113
+ },
+ {
+ "epoch": 0.3569937369519833,
+ "grad_norm": 1.1740471124649048,
+ "learning_rate": 4.999265198577342e-06,
+ "loss": 0.7684,
+ "step": 114
+ },
+ {
+ "epoch": 0.36012526096033404,
+ "grad_norm": 1.3138248920440674,
+ "learning_rate": 4.999156484072907e-06,
+ "loss": 0.7888,
+ "step": 115
+ },
+ {
+ "epoch": 0.36325678496868474,
+ "grad_norm": 1.061711311340332,
+ "learning_rate": 4.999040273759304e-06,
+ "loss": 0.7484,
+ "step": 116
+ },
+ {
+ "epoch": 0.3663883089770355,
+ "grad_norm": 1.4682390689849854,
+ "learning_rate": 4.998916567985083e-06,
+ "loss": 0.7296,
+ "step": 117
+ },
+ {
+ "epoch": 0.3695198329853862,
+ "grad_norm": 2.884068250656128,
+ "learning_rate": 4.998785367121284e-06,
+ "loss": 0.7662,
+ "step": 118
+ },
+ {
+ "epoch": 0.37265135699373697,
+ "grad_norm": 0.9812761545181274,
+ "learning_rate": 4.9986466715614205e-06,
+ "loss": 0.7307,
+ "step": 119
+ },
+ {
+ "epoch": 0.3757828810020877,
+ "grad_norm": 2.2237496376037598,
+ "learning_rate": 4.998500481721484e-06,
+ "loss": 0.6761,
+ "step": 120
+ },
+ {
+ "epoch": 0.37891440501043844,
+ "grad_norm": 1.4004178047180176,
+ "learning_rate": 4.998346798039952e-06,
+ "loss": 0.7505,
+ "step": 121
+ },
+ {
+ "epoch": 0.38204592901878914,
+ "grad_norm": 5.54975700378418,
+ "learning_rate": 4.99818562097777e-06,
+ "loss": 0.7615,
+ "step": 122
+ },
+ {
+ "epoch": 0.38517745302713985,
+ "grad_norm": 6.17140531539917,
+ "learning_rate": 4.9980169510183624e-06,
+ "loss": 0.7002,
+ "step": 123
+ },
+ {
+ "epoch": 0.3883089770354906,
+ "grad_norm": 4.974380016326904,
+ "learning_rate": 4.997840788667628e-06,
+ "loss": 0.7449,
+ "step": 124
+ },
+ {
+ "epoch": 0.3914405010438413,
+ "grad_norm": 1.4133399724960327,
+ "learning_rate": 4.997657134453937e-06,
+ "loss": 0.7442,
+ "step": 125
+ },
+ {
+ "epoch": 0.3945720250521921,
+ "grad_norm": 1.868915319442749,
+ "learning_rate": 4.9974659889281295e-06,
+ "loss": 0.7104,
+ "step": 126
+ },
+ {
+ "epoch": 0.3977035490605428,
+ "grad_norm": 1.2599350214004517,
+ "learning_rate": 4.997267352663514e-06,
+ "loss": 0.7385,
+ "step": 127
+ },
+ {
+ "epoch": 0.40083507306889354,
+ "grad_norm": 1.4353641271591187,
+ "learning_rate": 4.997061226255869e-06,
+ "loss": 0.7081,
+ "step": 128
+ },
+ {
+ "epoch": 0.40396659707724425,
+ "grad_norm": 3.2492141723632812,
+ "learning_rate": 4.996847610323437e-06,
+ "loss": 0.7859,
+ "step": 129
+ },
+ {
+ "epoch": 0.407098121085595,
+ "grad_norm": 9.599719047546387,
+ "learning_rate": 4.996626505506923e-06,
+ "loss": 0.7241,
+ "step": 130
+ },
+ {
+ "epoch": 0.4102296450939457,
+ "grad_norm": 10.053650856018066,
+ "learning_rate": 4.996397912469494e-06,
+ "loss": 0.6841,
+ "step": 131
+ },
+ {
+ "epoch": 0.4133611691022965,
+ "grad_norm": 1.323876976966858,
+ "learning_rate": 4.996161831896777e-06,
+ "loss": 0.7317,
+ "step": 132
+ },
+ {
+ "epoch": 0.4164926931106472,
+ "grad_norm": 1.4180598258972168,
+ "learning_rate": 4.9959182644968594e-06,
+ "loss": 0.692,
+ "step": 133
+ },
+ {
+ "epoch": 0.4196242171189979,
+ "grad_norm": 1.2194396257400513,
+ "learning_rate": 4.99566721100028e-06,
+ "loss": 0.7068,
+ "step": 134
+ },
+ {
+ "epoch": 0.42275574112734865,
+ "grad_norm": 1.0984960794448853,
+ "learning_rate": 4.995408672160031e-06,
+ "loss": 0.6946,
+ "step": 135
+ },
+ {
+ "epoch": 0.42588726513569936,
+ "grad_norm": 1.9341071844100952,
+ "learning_rate": 4.995142648751561e-06,
+ "loss": 0.7467,
+ "step": 136
+ },
+ {
+ "epoch": 0.4290187891440501,
+ "grad_norm": 1.9960932731628418,
+ "learning_rate": 4.9948691415727594e-06,
+ "loss": 0.7379,
+ "step": 137
+ },
+ {
+ "epoch": 0.4321503131524008,
+ "grad_norm": 0.8743917942047119,
+ "learning_rate": 4.994588151443968e-06,
+ "loss": 0.66,
+ "step": 138
+ },
+ {
+ "epoch": 0.4352818371607516,
+ "grad_norm": 0.8655261993408203,
+ "learning_rate": 4.99429967920797e-06,
+ "loss": 0.6646,
+ "step": 139
+ },
+ {
+ "epoch": 0.4384133611691023,
+ "grad_norm": 5.462070941925049,
+ "learning_rate": 4.994003725729992e-06,
+ "loss": 0.643,
+ "step": 140
+ },
+ {
+ "epoch": 0.44154488517745305,
+ "grad_norm": 2.1401469707489014,
+ "learning_rate": 4.993700291897695e-06,
+ "loss": 0.6639,
+ "step": 141
+ },
+ {
+ "epoch": 0.44467640918580376,
+ "grad_norm": 1.8219833374023438,
+ "learning_rate": 4.9933893786211815e-06,
+ "loss": 0.6673,
+ "step": 142
+ },
+ {
+ "epoch": 0.44780793319415446,
+ "grad_norm": 1.641079306602478,
+ "learning_rate": 4.993070986832984e-06,
+ "loss": 0.658,
+ "step": 143
+ },
+ {
+ "epoch": 0.4509394572025052,
+ "grad_norm": 1.1739819049835205,
+ "learning_rate": 4.992745117488066e-06,
+ "loss": 0.6826,
+ "step": 144
+ },
+ {
+ "epoch": 0.45407098121085593,
+ "grad_norm": 2.309185743331909,
+ "learning_rate": 4.9924117715638185e-06,
+ "loss": 0.6536,
+ "step": 145
+ },
+ {
+ "epoch": 0.4572025052192067,
+ "grad_norm": 1.09304940700531,
+ "learning_rate": 4.99207095006006e-06,
+ "loss": 0.721,
+ "step": 146
+ },
+ {
+ "epoch": 0.4603340292275574,
+ "grad_norm": 0.9056984186172485,
+ "learning_rate": 4.991722653999025e-06,
+ "loss": 0.7019,
+ "step": 147
+ },
+ {
+ "epoch": 0.46346555323590816,
+ "grad_norm": 1.8440625667572021,
+ "learning_rate": 4.991366884425374e-06,
+ "loss": 0.707,
+ "step": 148
+ },
+ {
+ "epoch": 0.46659707724425886,
+ "grad_norm": 1.2244676351547241,
+ "learning_rate": 4.991003642406177e-06,
+ "loss": 0.6407,
+ "step": 149
+ },
+ {
+ "epoch": 0.4697286012526096,
+ "grad_norm": 0.9258589744567871,
+ "learning_rate": 4.99063292903092e-06,
+ "loss": 0.6954,
+ "step": 150
+ },
+ {
+ "epoch": 0.47286012526096033,
+ "grad_norm": 4.176390647888184,
+ "learning_rate": 4.990254745411496e-06,
+ "loss": 0.6812,
+ "step": 151
+ },
+ {
+ "epoch": 0.4759916492693111,
+ "grad_norm": 1.4322530031204224,
+ "learning_rate": 4.989869092682205e-06,
+ "loss": 0.6808,
+ "step": 152
+ },
+ {
+ "epoch": 0.4791231732776618,
+ "grad_norm": 0.8017717003822327,
+ "learning_rate": 4.989475971999748e-06,
+ "loss": 0.687,
+ "step": 153
+ },
+ {
+ "epoch": 0.4822546972860125,
+ "grad_norm": 1.5641374588012695,
+ "learning_rate": 4.989075384543228e-06,
+ "loss": 0.6599,
+ "step": 154
+ },
+ {
+ "epoch": 0.48538622129436326,
+ "grad_norm": 1.1522141695022583,
+ "learning_rate": 4.98866733151414e-06,
+ "loss": 0.6546,
+ "step": 155
+ },
+ {
+ "epoch": 0.48851774530271397,
+ "grad_norm": 0.8593171238899231,
+ "learning_rate": 4.988251814136372e-06,
+ "loss": 0.6857,
+ "step": 156
+ },
+ {
+ "epoch": 0.49164926931106473,
+ "grad_norm": 2.668159246444702,
+ "learning_rate": 4.9878288336562e-06,
+ "loss": 0.661,
+ "step": 157
+ },
+ {
+ "epoch": 0.49478079331941544,
+ "grad_norm": 0.9953671097755432,
+ "learning_rate": 4.987398391342285e-06,
+ "loss": 0.6512,
+ "step": 158
+ },
+ {
+ "epoch": 0.4979123173277662,
+ "grad_norm": 1.042872667312622,
+ "learning_rate": 4.986960488485667e-06,
+ "loss": 0.6311,
+ "step": 159
+ },
+ {
+ "epoch": 0.5010438413361169,
+ "grad_norm": 0.9070663452148438,
+ "learning_rate": 4.9865151263997645e-06,
+ "loss": 0.675,
+ "step": 160
+ },
+ {
+ "epoch": 0.5041753653444676,
+ "grad_norm": 0.8460433483123779,
+ "learning_rate": 4.986062306420367e-06,
+ "loss": 0.6635,
+ "step": 161
+ },
+ {
+ "epoch": 0.5073068893528184,
+ "grad_norm": 1.2639834880828857,
+ "learning_rate": 4.985602029905635e-06,
+ "loss": 0.6327,
+ "step": 162
+ },
+ {
+ "epoch": 0.5104384133611691,
+ "grad_norm": 0.8775074481964111,
+ "learning_rate": 4.985134298236091e-06,
+ "loss": 0.644,
+ "step": 163
+ },
+ {
+ "epoch": 0.5135699373695198,
+ "grad_norm": 1.2031961679458618,
+ "learning_rate": 4.98465911281462e-06,
+ "loss": 0.6254,
+ "step": 164
+ },
+ {
+ "epoch": 0.5167014613778705,
+ "grad_norm": 0.892494797706604,
+ "learning_rate": 4.984176475066463e-06,
+ "loss": 0.7122,
+ "step": 165
+ },
+ {
+ "epoch": 0.5198329853862212,
+ "grad_norm": 2.7122485637664795,
+ "learning_rate": 4.983686386439212e-06,
+ "loss": 0.6679,
+ "step": 166
+ },
+ {
+ "epoch": 0.5229645093945721,
+ "grad_norm": 0.9344426989555359,
+ "learning_rate": 4.983188848402806e-06,
+ "loss": 0.6319,
+ "step": 167
+ },
+ {
+ "epoch": 0.5260960334029228,
+ "grad_norm": 1.4093577861785889,
+ "learning_rate": 4.982683862449531e-06,
+ "loss": 0.6425,
+ "step": 168
+ },
+ {
+ "epoch": 0.5292275574112735,
+ "grad_norm": 1.1285009384155273,
+ "learning_rate": 4.982171430094007e-06,
+ "loss": 0.6298,
+ "step": 169
+ },
+ {
+ "epoch": 0.5323590814196242,
+ "grad_norm": 1.952778935432434,
+ "learning_rate": 4.981651552873193e-06,
+ "loss": 0.7066,
+ "step": 170
+ },
+ {
+ "epoch": 0.535490605427975,
+ "grad_norm": 5.133765697479248,
+ "learning_rate": 4.981124232346374e-06,
+ "loss": 0.6634,
+ "step": 171
+ },
+ {
+ "epoch": 0.5386221294363257,
+ "grad_norm": 0.9770542979240417,
+ "learning_rate": 4.980589470095161e-06,
+ "loss": 0.7121,
+ "step": 172
+ },
+ {
+ "epoch": 0.5417536534446764,
+ "grad_norm": 0.8414323925971985,
+ "learning_rate": 4.980047267723487e-06,
+ "loss": 0.6397,
+ "step": 173
+ },
+ {
+ "epoch": 0.5448851774530271,
+ "grad_norm": 1.9173879623413086,
+ "learning_rate": 4.979497626857596e-06,
+ "loss": 0.6228,
+ "step": 174
+ },
+ {
+ "epoch": 0.5480167014613778,
+ "grad_norm": 1.0823363065719604,
+ "learning_rate": 4.978940549146048e-06,
+ "loss": 0.6475,
+ "step": 175
+ },
+ {
+ "epoch": 0.5511482254697286,
+ "grad_norm": 3.715353488922119,
+ "learning_rate": 4.978376036259706e-06,
+ "loss": 0.7127,
+ "step": 176
+ },
+ {
+ "epoch": 0.5542797494780793,
+ "grad_norm": 0.981584370136261,
+ "learning_rate": 4.9778040898917325e-06,
+ "loss": 0.6468,
+ "step": 177
+ },
+ {
+ "epoch": 0.55741127348643,
+ "grad_norm": 1.70566987991333,
+ "learning_rate": 4.977224711757587e-06,
+ "loss": 0.6476,
+ "step": 178
+ },
+ {
+ "epoch": 0.5605427974947808,
+ "grad_norm": 0.9217923283576965,
+ "learning_rate": 4.976637903595019e-06,
+ "loss": 0.6731,
+ "step": 179
+ },
+ {
+ "epoch": 0.5636743215031316,
+ "grad_norm": 0.8994677662849426,
+ "learning_rate": 4.976043667164063e-06,
+ "loss": 0.6562,
+ "step": 180
+ },
+ {
+ "epoch": 0.5668058455114823,
+ "grad_norm": 1.1613017320632935,
+ "learning_rate": 4.975442004247034e-06,
+ "loss": 0.6417,
+ "step": 181
+ },
+ {
+ "epoch": 0.569937369519833,
+ "grad_norm": 1.6041977405548096,
+ "learning_rate": 4.974832916648521e-06,
+ "loss": 0.6029,
+ "step": 182
+ },
+ {
+ "epoch": 0.5730688935281837,
+ "grad_norm": 1.7978405952453613,
+ "learning_rate": 4.974216406195383e-06,
+ "loss": 0.6269,
+ "step": 183
+ },
+ {
+ "epoch": 0.5762004175365344,
+ "grad_norm": 1.6021920442581177,
+ "learning_rate": 4.973592474736739e-06,
+ "loss": 0.6149,
+ "step": 184
+ },
+ {
+ "epoch": 0.5793319415448852,
+ "grad_norm": 0.8973568677902222,
+ "learning_rate": 4.972961124143971e-06,
+ "loss": 0.6648,
+ "step": 185
+ },
+ {
+ "epoch": 0.5824634655532359,
+ "grad_norm": 1.9432591199874878,
+ "learning_rate": 4.972322356310711e-06,
+ "loss": 0.6299,
+ "step": 186
+ },
+ {
+ "epoch": 0.5855949895615866,
+ "grad_norm": 4.457028388977051,
+ "learning_rate": 4.971676173152839e-06,
+ "loss": 0.656,
+ "step": 187
+ },
+ {
+ "epoch": 0.5887265135699373,
+ "grad_norm": 2.0989716053009033,
+ "learning_rate": 4.971022576608473e-06,
+ "loss": 0.6539,
+ "step": 188
+ },
+ {
+ "epoch": 0.5918580375782881,
+ "grad_norm": 1.0646967887878418,
+ "learning_rate": 4.97036156863797e-06,
+ "loss": 0.6727,
+ "step": 189
+ },
+ {
+ "epoch": 0.5949895615866388,
+ "grad_norm": 1.6522265672683716,
+ "learning_rate": 4.969693151223914e-06,
+ "loss": 0.6643,
+ "step": 190
+ },
+ {
+ "epoch": 0.5981210855949896,
+ "grad_norm": 1.7503505945205688,
+ "learning_rate": 4.969017326371115e-06,
+ "loss": 0.6402,
+ "step": 191
+ },
+ {
+ "epoch": 0.6012526096033403,
+ "grad_norm": 1.2341989278793335,
+ "learning_rate": 4.968334096106597e-06,
+ "loss": 0.6413,
+ "step": 192
+ },
+ {
+ "epoch": 0.6043841336116911,
+ "grad_norm": 3.089054584503174,
+ "learning_rate": 4.967643462479597e-06,
+ "loss": 0.6825,
+ "step": 193
+ },
+ {
+ "epoch": 0.6075156576200418,
+ "grad_norm": 2.711623430252075,
+ "learning_rate": 4.966945427561557e-06,
+ "loss": 0.65,
+ "step": 194
+ },
+ {
+ "epoch": 0.6106471816283925,
+ "grad_norm": 4.641184329986572,
+ "learning_rate": 4.966239993446118e-06,
+ "loss": 0.6229,
+ "step": 195
+ },
+ {
+ "epoch": 0.6137787056367432,
+ "grad_norm": 1.7984074354171753,
+ "learning_rate": 4.965527162249114e-06,
+ "loss": 0.6473,
+ "step": 196
+ },
+ {
+ "epoch": 0.6169102296450939,
+ "grad_norm": 1.1643115282058716,
+ "learning_rate": 4.964806936108566e-06,
+ "loss": 0.6404,
+ "step": 197
+ },
+ {
+ "epoch": 0.6200417536534447,
+ "grad_norm": 2.1877920627593994,
+ "learning_rate": 4.9640793171846725e-06,
+ "loss": 0.6185,
+ "step": 198
+ },
+ {
+ "epoch": 0.6231732776617954,
+ "grad_norm": 1.7970566749572754,
+ "learning_rate": 4.963344307659807e-06,
+ "loss": 0.634,
+ "step": 199
+ },
+ {
+ "epoch": 0.6263048016701461,
+ "grad_norm": 1.6014361381530762,
+ "learning_rate": 4.96260190973851e-06,
+ "loss": 0.6562,
+ "step": 200
+ },
+ {
+ "epoch": 0.6294363256784968,
+ "grad_norm": 0.8743320107460022,
+ "learning_rate": 4.961852125647482e-06,
+ "loss": 0.6133,
+ "step": 201
+ },
+ {
+ "epoch": 0.6325678496868476,
+ "grad_norm": 1.9526551961898804,
+ "learning_rate": 4.961094957635578e-06,
+ "loss": 0.6451,
+ "step": 202
+ },
+ {
+ "epoch": 0.6356993736951984,
+ "grad_norm": 3.6597347259521484,
+ "learning_rate": 4.960330407973798e-06,
+ "loss": 0.6386,
+ "step": 203
+ },
+ {
+ "epoch": 0.6388308977035491,
+ "grad_norm": 1.7180207967758179,
+ "learning_rate": 4.959558478955283e-06,
+ "loss": 0.6688,
+ "step": 204
+ },
+ {
+ "epoch": 0.6419624217118998,
+ "grad_norm": 0.9058470129966736,
+ "learning_rate": 4.958779172895308e-06,
+ "loss": 0.6161,
+ "step": 205
+ },
+ {
+ "epoch": 0.6450939457202505,
+ "grad_norm": 1.0031033754348755,
+ "learning_rate": 4.957992492131274e-06,
+ "loss": 0.6437,
+ "step": 206
+ },
+ {
+ "epoch": 0.6482254697286013,
+ "grad_norm": 1.5846725702285767,
+ "learning_rate": 4.9571984390226985e-06,
+ "loss": 0.6332,
+ "step": 207
+ },
+ {
+ "epoch": 0.651356993736952,
+ "grad_norm": 1.9951609373092651,
+ "learning_rate": 4.956397015951215e-06,
+ "loss": 0.636,
+ "step": 208
+ },
+ {
+ "epoch": 0.6544885177453027,
+ "grad_norm": 1.4122583866119385,
+ "learning_rate": 4.95558822532056e-06,
+ "loss": 0.6586,
+ "step": 209
+ },
+ {
+ "epoch": 0.6576200417536534,
+ "grad_norm": 1.2243481874465942,
+ "learning_rate": 4.954772069556568e-06,
+ "loss": 0.6313,
+ "step": 210
+ },
+ {
+ "epoch": 0.6607515657620042,
+ "grad_norm": 0.8756356835365295,
+ "learning_rate": 4.953948551107164e-06,
+ "loss": 0.6406,
+ "step": 211
+ },
+ {
+ "epoch": 0.6638830897703549,
+ "grad_norm": 2.9979734420776367,
+ "learning_rate": 4.953117672442356e-06,
+ "loss": 0.5803,
+ "step": 212
+ },
+ {
+ "epoch": 0.6670146137787056,
+ "grad_norm": 2.1859359741210938,
+ "learning_rate": 4.952279436054229e-06,
+ "loss": 0.6607,
+ "step": 213
+ },
+ {
+ "epoch": 0.6701461377870563,
+ "grad_norm": 0.6929755806922913,
+ "learning_rate": 4.9514338444569346e-06,
+ "loss": 0.5989,
+ "step": 214
+ },
+ {
+ "epoch": 0.673277661795407,
+ "grad_norm": 1.0361783504486084,
+ "learning_rate": 4.950580900186685e-06,
+ "loss": 0.6654,
+ "step": 215
+ },
+ {
+ "epoch": 0.6764091858037579,
+ "grad_norm": 1.210898518562317,
+ "learning_rate": 4.9497206058017475e-06,
+ "loss": 0.6213,
+ "step": 216
+ },
+ {
+ "epoch": 0.6795407098121086,
+ "grad_norm": 1.200990080833435,
+ "learning_rate": 4.948852963882434e-06,
+ "loss": 0.6654,
+ "step": 217
+ },
+ {
+ "epoch": 0.6826722338204593,
+ "grad_norm": 1.481831669807434,
+ "learning_rate": 4.947977977031093e-06,
+ "loss": 0.6474,
+ "step": 218
+ },
+ {
+ "epoch": 0.68580375782881,
+ "grad_norm": 0.9883334636688232,
+ "learning_rate": 4.947095647872103e-06,
+ "loss": 0.6735,
+ "step": 219
+ },
+ {
+ "epoch": 0.6889352818371608,
+ "grad_norm": 0.7436536550521851,
+ "learning_rate": 4.946205979051868e-06,
+ "loss": 0.6456,
+ "step": 220
+ },
+ {
+ "epoch": 0.6920668058455115,
+ "grad_norm": 0.9057570099830627,
+ "learning_rate": 4.945308973238802e-06,
+ "loss": 0.6228,
+ "step": 221
+ },
+ {
+ "epoch": 0.6951983298538622,
+ "grad_norm": 1.341081142425537,
+ "learning_rate": 4.944404633123324e-06,
+ "loss": 0.6417,
+ "step": 222
+ },
+ {
+ "epoch": 0.6983298538622129,
+ "grad_norm": 0.7958157062530518,
+ "learning_rate": 4.943492961417859e-06,
+ "loss": 0.6494,
+ "step": 223
+ },
+ {
+ "epoch": 0.7014613778705637,
+ "grad_norm": 1.216025471687317,
+ "learning_rate": 4.9425739608568106e-06,
+ "loss": 0.6566,
+ "step": 224
+ },
+ {
+ "epoch": 0.7045929018789144,
+ "grad_norm": 0.9774854779243469,
+ "learning_rate": 4.9416476341965735e-06,
+ "loss": 0.6171,
+ "step": 225
+ },
+ {
+ "epoch": 0.7077244258872651,
+ "grad_norm": 2.1562681198120117,
+ "learning_rate": 4.940713984215512e-06,
+ "loss": 0.629,
+ "step": 226
+ },
+ {
+ "epoch": 0.7108559498956158,
+ "grad_norm": 1.9521286487579346,
+ "learning_rate": 4.9397730137139556e-06,
+ "loss": 0.6475,
+ "step": 227
+ },
+ {
+ "epoch": 0.7139874739039666,
+ "grad_norm": 1.5749104022979736,
+ "learning_rate": 4.9388247255141895e-06,
+ "loss": 0.6053,
+ "step": 228
+ },
+ {
+ "epoch": 0.7171189979123174,
+ "grad_norm": 1.2008254528045654,
+ "learning_rate": 4.937869122460449e-06,
+ "loss": 0.6052,
+ "step": 229
+ },
+ {
+ "epoch": 0.7202505219206681,
+ "grad_norm": 1.0774102210998535,
+ "learning_rate": 4.93690620741891e-06,
+ "loss": 0.6099,
+ "step": 230
+ },
+ {
+ "epoch": 0.7233820459290188,
+ "grad_norm": 1.0929996967315674,
+ "learning_rate": 4.935935983277675e-06,
+ "loss": 0.6363,
+ "step": 231
+ },
+ {
+ "epoch": 0.7265135699373695,
+ "grad_norm": 0.8830653429031372,
+ "learning_rate": 4.934958452946774e-06,
+ "loss": 0.6136,
+ "step": 232
+ },
+ {
+ "epoch": 0.7296450939457203,
+ "grad_norm": 3.591218948364258,
+ "learning_rate": 4.933973619358147e-06,
+ "loss": 0.5962,
+ "step": 233
+ },
+ {
+ "epoch": 0.732776617954071,
+ "grad_norm": 2.5797672271728516,
+ "learning_rate": 4.932981485465643e-06,
+ "loss": 0.6405,
+ "step": 234
+ },
+ {
+ "epoch": 0.7359081419624217,
+ "grad_norm": 1.0467664003372192,
+ "learning_rate": 4.9319820542450025e-06,
+ "loss": 0.6155,
+ "step": 235
+ },
+ {
+ "epoch": 0.7390396659707724,
+ "grad_norm": 0.8099795579910278,
+ "learning_rate": 4.930975328693856e-06,
+ "loss": 0.5615,
+ "step": 236
+ },
+ {
+ "epoch": 0.7421711899791231,
+ "grad_norm": 0.8906702995300293,
+ "learning_rate": 4.92996131183171e-06,
+ "loss": 0.6501,
+ "step": 237
+ },
+ {
+ "epoch": 0.7453027139874739,
+ "grad_norm": 1.0871416330337524,
+ "learning_rate": 4.928940006699944e-06,
+ "loss": 0.6282,
+ "step": 238
+ },
+ {
+ "epoch": 0.7484342379958246,
+ "grad_norm": 1.3209614753723145,
+ "learning_rate": 4.927911416361792e-06,
+ "loss": 0.598,
+ "step": 239
+ },
+ {
+ "epoch": 0.7515657620041754,
+ "grad_norm": 1.2252682447433472,
+ "learning_rate": 4.926875543902344e-06,
+ "loss": 0.6433,
+ "step": 240
+ },
+ {
+ "epoch": 0.7546972860125261,
+ "grad_norm": 1.0569007396697998,
+ "learning_rate": 4.9258323924285285e-06,
+ "loss": 0.5927,
+ "step": 241
+ },
+ {
+ "epoch": 0.7578288100208769,
+ "grad_norm": 0.9309014081954956,
+ "learning_rate": 4.924781965069106e-06,
+ "loss": 0.5927,
+ "step": 242
+ },
+ {
+ "epoch": 0.7609603340292276,
+ "grad_norm": 1.0200378894805908,
+ "learning_rate": 4.923724264974662e-06,
+ "loss": 0.6064,
+ "step": 243
+ },
+ {
+ "epoch": 0.7640918580375783,
+ "grad_norm": 1.0533075332641602,
+ "learning_rate": 4.922659295317593e-06,
+ "loss": 0.6373,
+ "step": 244
+ },
+ {
+ "epoch": 0.767223382045929,
+ "grad_norm": 0.7889382839202881,
+ "learning_rate": 4.921587059292102e-06,
+ "loss": 0.5887,
+ "step": 245
+ },
+ {
+ "epoch": 0.7703549060542797,
+ "grad_norm": 0.7943588495254517,
+ "learning_rate": 4.920507560114183e-06,
+ "loss": 0.593,
+ "step": 246
+ },
+ {
+ "epoch": 0.7734864300626305,
+ "grad_norm": 0.8247205018997192,
+ "learning_rate": 4.919420801021617e-06,
+ "loss": 0.6151,
+ "step": 247
+ },
+ {
+ "epoch": 0.7766179540709812,
+ "grad_norm": 0.9979158043861389,
+ "learning_rate": 4.91832678527396e-06,
+ "loss": 0.6019,
+ "step": 248
+ },
+ {
+ "epoch": 0.7797494780793319,
+ "grad_norm": 0.9346868991851807,
+ "learning_rate": 4.917225516152532e-06,
+ "loss": 0.6098,
+ "step": 249
+ },
+ {
+ "epoch": 0.7828810020876826,
+ "grad_norm": 0.7487881183624268,
+ "learning_rate": 4.916116996960408e-06,
+ "loss": 0.5965,
+ "step": 250
+ },
+ {
+ "epoch": 0.7860125260960334,
+ "grad_norm": 0.821576714515686,
+ "learning_rate": 4.915001231022411e-06,
+ "loss": 0.6483,
+ "step": 251
+ },
+ {
+ "epoch": 0.7891440501043842,
+ "grad_norm": 1.0413196086883545,
+ "learning_rate": 4.913878221685096e-06,
+ "loss": 0.6108,
+ "step": 252
+ },
+ {
+ "epoch": 0.7922755741127349,
+ "grad_norm": 0.9560331702232361,
+ "learning_rate": 4.912747972316745e-06,
+ "loss": 0.5758,
+ "step": 253
+ },
+ {
+ "epoch": 0.7954070981210856,
+ "grad_norm": 0.8964638113975525,
+ "learning_rate": 4.911610486307356e-06,
+ "loss": 0.6432,
+ "step": 254
+ },
+ {
+ "epoch": 0.7985386221294363,
+ "grad_norm": 0.8418346047401428,
+ "learning_rate": 4.910465767068631e-06,
+ "loss": 0.6027,
+ "step": 255
+ },
+ {
+ "epoch": 0.8016701461377871,
+ "grad_norm": 1.792371153831482,
+ "learning_rate": 4.909313818033966e-06,
+ "loss": 0.6198,
+ "step": 256
+ },
+ {
+ "epoch": 0.8048016701461378,
+ "grad_norm": 1.036665439605713,
+ "learning_rate": 4.908154642658446e-06,
+ "loss": 0.6255,
+ "step": 257
+ },
+ {
+ "epoch": 0.8079331941544885,
+ "grad_norm": 0.7592151165008545,
+ "learning_rate": 4.906988244418823e-06,
+ "loss": 0.6035,
+ "step": 258
+ },
+ {
+ "epoch": 0.8110647181628392,
+ "grad_norm": 0.8843073844909668,
+ "learning_rate": 4.90581462681352e-06,
+ "loss": 0.6299,
+ "step": 259
+ },
+ {
+ "epoch": 0.81419624217119,
+ "grad_norm": 0.9489964246749878,
+ "learning_rate": 4.9046337933626086e-06,
+ "loss": 0.5869,
+ "step": 260
+ },
+ {
+ "epoch": 0.8173277661795407,
+ "grad_norm": 0.851691722869873,
+ "learning_rate": 4.903445747607806e-06,
+ "loss": 0.603,
+ "step": 261
+ },
+ {
+ "epoch": 0.8204592901878914,
+ "grad_norm": 1.3722106218338013,
+ "learning_rate": 4.902250493112458e-06,
+ "loss": 0.5939,
+ "step": 262
+ },
+ {
+ "epoch": 0.8235908141962421,
+ "grad_norm": 1.1002827882766724,
+ "learning_rate": 4.901048033461537e-06,
+ "loss": 0.6452,
+ "step": 263
+ },
+ {
+ "epoch": 0.826722338204593,
+ "grad_norm": 0.8428632020950317,
+ "learning_rate": 4.89983837226162e-06,
+ "loss": 0.5956,
+ "step": 264
+ },
+ {
+ "epoch": 0.8298538622129437,
+ "grad_norm": 0.7666584849357605,
+ "learning_rate": 4.898621513140889e-06,
+ "loss": 0.6067,
+ "step": 265
+ },
+ {
+ "epoch": 0.8329853862212944,
+ "grad_norm": 0.8413611054420471,
+ "learning_rate": 4.897397459749113e-06,
+ "loss": 0.5985,
+ "step": 266
+ },
+ {
+ "epoch": 0.8361169102296451,
+ "grad_norm": 2.3374335765838623,
+ "learning_rate": 4.896166215757638e-06,
+ "loss": 0.5885,
+ "step": 267
+ },
+ {
+ "epoch": 0.8392484342379958,
+ "grad_norm": 2.236640214920044,
+ "learning_rate": 4.894927784859377e-06,
+ "loss": 0.6408,
+ "step": 268
+ },
+ {
+ "epoch": 0.8423799582463466,
+ "grad_norm": 0.9715856313705444,
+ "learning_rate": 4.893682170768802e-06,
+ "loss": 0.5954,
+ "step": 269
+ },
+ {
+ "epoch": 0.8455114822546973,
+ "grad_norm": 1.0249912738800049,
+ "learning_rate": 4.892429377221928e-06,
+ "loss": 0.6186,
+ "step": 270
+ },
+ {
+ "epoch": 0.848643006263048,
+ "grad_norm": 1.255426049232483,
+ "learning_rate": 4.891169407976302e-06,
+ "loss": 0.6351,
+ "step": 271
+ },
+ {
+ "epoch": 0.8517745302713987,
+ "grad_norm": 0.9339559674263,
+ "learning_rate": 4.889902266810995e-06,
+ "loss": 0.5944,
+ "step": 272
+ },
+ {
+ "epoch": 0.8549060542797495,
+ "grad_norm": 1.2473429441452026,
+ "learning_rate": 4.888627957526589e-06,
+ "loss": 0.544,
+ "step": 273
+ },
+ {
+ "epoch": 0.8580375782881002,
+ "grad_norm": 1.0589442253112793,
+ "learning_rate": 4.887346483945166e-06,
+ "loss": 0.5543,
+ "step": 274
+ },
+ {
+ "epoch": 0.8611691022964509,
+ "grad_norm": 0.9844024777412415,
+ "learning_rate": 4.886057849910294e-06,
+ "loss": 0.5941,
+ "step": 275
+ },
+ {
+ "epoch": 0.8643006263048016,
+ "grad_norm": 2.88578200340271,
+ "learning_rate": 4.8847620592870196e-06,
+ "loss": 0.6124,
+ "step": 276
+ },
+ {
+ "epoch": 0.8674321503131524,
+ "grad_norm": 0.7496054172515869,
+ "learning_rate": 4.8834591159618524e-06,
+ "loss": 0.6006,
+ "step": 277
+ },
+ {
+ "epoch": 0.8705636743215032,
+ "grad_norm": 0.7403052449226379,
+ "learning_rate": 4.88214902384276e-06,
+ "loss": 0.5911,
+ "step": 278
+ },
+ {
+ "epoch": 0.8736951983298539,
+ "grad_norm": 0.9003771543502808,
+ "learning_rate": 4.880831786859146e-06,
+ "loss": 0.6347,
+ "step": 279
+ },
+ {
+ "epoch": 0.8768267223382046,
+ "grad_norm": 1.0345501899719238,
+ "learning_rate": 4.879507408961847e-06,
+ "loss": 0.6111,
+ "step": 280
+ },
+ {
+ "epoch": 0.8799582463465553,
+ "grad_norm": 1.4385879039764404,
+ "learning_rate": 4.878175894123116e-06,
+ "loss": 0.6454,
+ "step": 281
+ },
+ {
+ "epoch": 0.8830897703549061,
+ "grad_norm": 0.8469482064247131,
+ "learning_rate": 4.8768372463366145e-06,
+ "loss": 0.6163,
+ "step": 282
+ },
+ {
+ "epoch": 0.8862212943632568,
+ "grad_norm": 0.8859589695930481,
+ "learning_rate": 4.875491469617395e-06,
+ "loss": 0.6144,
+ "step": 283
+ },
+ {
+ "epoch": 0.8893528183716075,
+ "grad_norm": 1.8436834812164307,
+ "learning_rate": 4.874138568001895e-06,
+ "loss": 0.6275,
+ "step": 284
+ },
+ {
+ "epoch": 0.8924843423799582,
+ "grad_norm": 0.6646101474761963,
+ "learning_rate": 4.87277854554792e-06,
+ "loss": 0.615,
+ "step": 285
+ },
+ {
+ "epoch": 0.8956158663883089,
+ "grad_norm": 1.0070925951004028,
+ "learning_rate": 4.871411406334633e-06,
+ "loss": 0.5898,
+ "step": 286
+ },
+ {
+ "epoch": 0.8987473903966597,
+ "grad_norm": 0.9785194993019104,
+ "learning_rate": 4.870037154462545e-06,
+ "loss": 0.5992,
+ "step": 287
+ },
+ {
+ "epoch": 0.9018789144050104,
+ "grad_norm": 0.7244889736175537,
+ "learning_rate": 4.868655794053497e-06,
+ "loss": 0.6078,
+ "step": 288
+ },
+ {
+ "epoch": 0.9050104384133612,
+ "grad_norm": 1.4496444463729858,
+ "learning_rate": 4.8672673292506535e-06,
+ "loss": 0.5855,
+ "step": 289
+ },
+ {
+ "epoch": 0.9081419624217119,
+ "grad_norm": 1.8514957427978516,
+ "learning_rate": 4.865871764218486e-06,
+ "loss": 0.5707,
+ "step": 290
+ },
+ {
+ "epoch": 0.9112734864300627,
+ "grad_norm": 0.8439773321151733,
+ "learning_rate": 4.864469103142763e-06,
+ "loss": 0.5562,
+ "step": 291
+ },
+ {
+ "epoch": 0.9144050104384134,
+ "grad_norm": 0.8146086931228638,
+ "learning_rate": 4.8630593502305355e-06,
+ "loss": 0.6161,
+ "step": 292
+ },
+ {
+ "epoch": 0.9175365344467641,
+ "grad_norm": 0.8920315504074097,
+ "learning_rate": 4.861642509710126e-06,
+ "loss": 0.6139,
+ "step": 293
+ },
+ {
+ "epoch": 0.9206680584551148,
+ "grad_norm": 1.4980088472366333,
+ "learning_rate": 4.860218585831116e-06,
+ "loss": 0.6187,
+ "step": 294
+ },
+ {
+ "epoch": 0.9237995824634656,
+ "grad_norm": 0.9910127520561218,
+ "learning_rate": 4.8587875828643285e-06,
+ "loss": 0.5852,
+ "step": 295
+ },
+ {
+ "epoch": 0.9269311064718163,
+ "grad_norm": 0.819600522518158,
+ "learning_rate": 4.857349505101823e-06,
+ "loss": 0.6172,
+ "step": 296
+ },
+ {
+ "epoch": 0.930062630480167,
+ "grad_norm": 1.1059772968292236,
+ "learning_rate": 4.855904356856878e-06,
+ "loss": 0.5868,
+ "step": 297
+ },
+ {
+ "epoch": 0.9331941544885177,
+ "grad_norm": 1.2362196445465088,
+ "learning_rate": 4.854452142463977e-06,
+ "loss": 0.625,
+ "step": 298
+ },
+ {
+ "epoch": 0.9363256784968684,
+ "grad_norm": 0.9956470727920532,
+ "learning_rate": 4.852992866278799e-06,
+ "loss": 0.5923,
+ "step": 299
+ },
+ {
+ "epoch": 0.9394572025052192,
+ "grad_norm": 0.864109218120575,
+ "learning_rate": 4.851526532678203e-06,
+ "loss": 0.6315,
+ "step": 300
+ },
+ {
+ "epoch": 0.94258872651357,
+ "grad_norm": 0.8900614380836487,
+ "learning_rate": 4.850053146060217e-06,
+ "loss": 0.6128,
+ "step": 301
+ },
+ {
+ "epoch": 0.9457202505219207,
+ "grad_norm": 0.927254855632782,
+ "learning_rate": 4.84857271084402e-06,
+ "loss": 0.5955,
+ "step": 302
+ },
+ {
+ "epoch": 0.9488517745302714,
+ "grad_norm": 1.0046517848968506,
+ "learning_rate": 4.847085231469935e-06,
+ "loss": 0.6134,
+ "step": 303
+ },
+ {
+ "epoch": 0.9519832985386222,
+ "grad_norm": 0.734597384929657,
+ "learning_rate": 4.8455907123994125e-06,
+ "loss": 0.5927,
+ "step": 304
+ },
+ {
+ "epoch": 0.9551148225469729,
+ "grad_norm": 0.7338348031044006,
+ "learning_rate": 4.844089158115016e-06,
+ "loss": 0.5897,
+ "step": 305
+ },
+ {
+ "epoch": 0.9582463465553236,
+ "grad_norm": 0.9163988828659058,
+ "learning_rate": 4.8425805731204106e-06,
+ "loss": 0.6051,
+ "step": 306
+ },
+ {
+ "epoch": 0.9613778705636743,
+ "grad_norm": 1.050246238708496,
+ "learning_rate": 4.84106496194035e-06,
+ "loss": 0.5751,
+ "step": 307
+ },
+ {
+ "epoch": 0.964509394572025,
+ "grad_norm": 0.7637603878974915,
+ "learning_rate": 4.83954232912066e-06,
+ "loss": 0.5677,
+ "step": 308
+ },
+ {
+ "epoch": 0.9676409185803758,
+ "grad_norm": 0.7110525965690613,
+ "learning_rate": 4.838012679228229e-06,
+ "loss": 0.6051,
+ "step": 309
+ },
+ {
+ "epoch": 0.9707724425887265,
+ "grad_norm": 0.7662068605422974,
+ "learning_rate": 4.836476016850988e-06,
+ "loss": 0.59,
+ "step": 310
+ },
+ {
+ "epoch": 0.9739039665970772,
+ "grad_norm": 0.8907375335693359,
+ "learning_rate": 4.834932346597906e-06,
+ "loss": 0.5792,
+ "step": 311
+ },
+ {
+ "epoch": 0.9770354906054279,
+ "grad_norm": 0.8939849138259888,
+ "learning_rate": 4.833381673098966e-06,
+ "loss": 0.6062,
+ "step": 312
+ },
+ {
+ "epoch": 0.9801670146137788,
+ "grad_norm": 0.8878788948059082,
+ "learning_rate": 4.8318240010051595e-06,
+ "loss": 0.5694,
+ "step": 313
+ },
+ {
+ "epoch": 0.9832985386221295,
+ "grad_norm": 1.2523870468139648,
+ "learning_rate": 4.830259334988468e-06,
+ "loss": 0.5809,
+ "step": 314
+ },
+ {
+ "epoch": 0.9864300626304802,
+ "grad_norm": 1.0836797952651978,
+ "learning_rate": 4.82868767974185e-06,
+ "loss": 0.5949,
+ "step": 315
+ },
+ {
+ "epoch": 0.9895615866388309,
+ "grad_norm": 0.7985473871231079,
+ "learning_rate": 4.827109039979226e-06,
+ "loss": 0.6057,
+ "step": 316
+ },
+ {
+ "epoch": 0.9926931106471816,
+ "grad_norm": 1.042951226234436,
+ "learning_rate": 4.825523420435469e-06,
+ "loss": 0.6004,
+ "step": 317
+ },
+ {
+ "epoch": 0.9958246346555324,
+ "grad_norm": 0.7845115661621094,
+ "learning_rate": 4.823930825866381e-06,
+ "loss": 0.6161,
+ "step": 318
+ },
+ {
+ "epoch": 0.9989561586638831,
+ "grad_norm": 0.931854784488678,
+ "learning_rate": 4.82233126104869e-06,
+ "loss": 0.5912,
+ "step": 319
+ },
+ {
+ "epoch": 1.0,
+ "grad_norm": 0.931854784488678,
+ "learning_rate": 4.8207247307800275e-06,
+ "loss": 0.1914,
+ "step": 320
+ },
+ {
+ "epoch": 1.0031315240083507,
+ "grad_norm": 0.751028835773468,
+ "learning_rate": 4.819111239878916e-06,
+ "loss": 0.5802,
+ "step": 321
+ },
+ {
+ "epoch": 1.0062630480167014,
+ "grad_norm": 1.4943569898605347,
+ "learning_rate": 4.817490793184758e-06,
+ "loss": 0.613,
+ "step": 322
+ },
+ {
+ "epoch": 1.0093945720250521,
+ "grad_norm": 2.296318531036377,
+ "learning_rate": 4.815863395557816e-06,
+ "loss": 0.5453,
+ "step": 323
+ },
+ {
+ "epoch": 1.0125260960334028,
+ "grad_norm": 0.760101318359375,
+ "learning_rate": 4.814229051879202e-06,
+ "loss": 0.5302,
+ "step": 324
+ },
+ {
+ "epoch": 1.0156576200417538,
+ "grad_norm": 0.8145846128463745,
+ "learning_rate": 4.812587767050861e-06,
+ "loss": 0.5831,
+ "step": 325
+ },
+ {
+ "epoch": 1.0187891440501045,
+ "grad_norm": 0.9169796109199524,
+ "learning_rate": 4.8109395459955565e-06,
+ "loss": 0.5756,
+ "step": 326
+ },
+ {
+ "epoch": 1.0219206680584552,
+ "grad_norm": 0.8791524171829224,
+ "learning_rate": 4.809284393656858e-06,
+ "loss": 0.5988,
+ "step": 327
+ },
+ {
+ "epoch": 1.0250521920668059,
+ "grad_norm": 1.0184170007705688,
+ "learning_rate": 4.807622314999122e-06,
+ "loss": 0.5476,
+ "step": 328
+ },
+ {
+ "epoch": 1.0281837160751566,
+ "grad_norm": 0.8095184564590454,
+ "learning_rate": 4.8059533150074805e-06,
+ "loss": 0.5723,
+ "step": 329
+ },
+ {
+ "epoch": 1.0313152400835073,
+ "grad_norm": 0.7621930241584778,
+ "learning_rate": 4.804277398687826e-06,
+ "loss": 0.5841,
+ "step": 330
+ },
+ {
+ "epoch": 1.034446764091858,
+ "grad_norm": 3.729628324508667,
+ "learning_rate": 4.802594571066791e-06,
+ "loss": 0.5639,
+ "step": 331
+ },
+ {
+ "epoch": 1.0375782881002087,
+ "grad_norm": 1.6502974033355713,
+ "learning_rate": 4.800904837191743e-06,
+ "loss": 0.6024,
+ "step": 332
+ },
+ {
+ "epoch": 1.0407098121085594,
+ "grad_norm": 0.8031198978424072,
+ "learning_rate": 4.799208202130762e-06,
+ "loss": 0.5305,
+ "step": 333
+ },
+ {
+ "epoch": 1.0438413361169103,
+ "grad_norm": 0.939644992351532,
+ "learning_rate": 4.797504670972623e-06,
+ "loss": 0.5446,
+ "step": 334
+ },
+ {
+ "epoch": 1.046972860125261,
+ "grad_norm": 1.0589954853057861,
+ "learning_rate": 4.795794248826789e-06,
+ "loss": 0.5366,
+ "step": 335
+ },
+ {
+ "epoch": 1.0501043841336117,
+ "grad_norm": 0.9089614748954773,
+ "learning_rate": 4.794076940823391e-06,
+ "loss": 0.5795,
+ "step": 336
+ },
+ {
+ "epoch": 1.0532359081419624,
+ "grad_norm": 0.7732561230659485,
+ "learning_rate": 4.792352752113212e-06,
+ "loss": 0.5765,
+ "step": 337
+ },
+ {
+ "epoch": 1.0563674321503131,
+ "grad_norm": 1.811553955078125,
+ "learning_rate": 4.790621687867672e-06,
+ "loss": 0.561,
+ "step": 338
+ },
+ {
+ "epoch": 1.0594989561586639,
+ "grad_norm": 1.1930758953094482,
+ "learning_rate": 4.788883753278813e-06,
+ "loss": 0.5,
+ "step": 339
+ },
+ {
+ "epoch": 1.0626304801670146,
+ "grad_norm": 0.9551813006401062,
+ "learning_rate": 4.787138953559285e-06,
+ "loss": 0.5228,
+ "step": 340
+ },
+ {
+ "epoch": 1.0657620041753653,
+ "grad_norm": 0.9609586596488953,
+ "learning_rate": 4.785387293942329e-06,
+ "loss": 0.5827,
+ "step": 341
+ },
+ {
+ "epoch": 1.068893528183716,
+ "grad_norm": 0.8403449654579163,
+ "learning_rate": 4.783628779681759e-06,
+ "loss": 0.5585,
+ "step": 342
+ },
+ {
+ "epoch": 1.072025052192067,
+ "grad_norm": 0.9108251929283142,
+ "learning_rate": 4.7818634160519496e-06,
+ "loss": 0.6077,
+ "step": 343
+ },
+ {
+ "epoch": 1.0751565762004176,
+ "grad_norm": 0.9476898908615112,
+ "learning_rate": 4.780091208347819e-06,
+ "loss": 0.5493,
+ "step": 344
+ },
+ {
+ "epoch": 1.0782881002087683,
+ "grad_norm": 1.1943707466125488,
+ "learning_rate": 4.778312161884813e-06,
+ "loss": 0.5736,
+ "step": 345
+ },
+ {
+ "epoch": 1.081419624217119,
+ "grad_norm": 3.1342639923095703,
+ "learning_rate": 4.77652628199889e-06,
+ "loss": 0.5765,
+ "step": 346
+ },
+ {
+ "epoch": 1.0845511482254697,
+ "grad_norm": 2.7982125282287598,
+ "learning_rate": 4.7747335740465015e-06,
+ "loss": 0.6003,
+ "step": 347
+ },
+ {
+ "epoch": 1.0876826722338204,
+ "grad_norm": 1.5068914890289307,
+ "learning_rate": 4.7729340434045815e-06,
+ "loss": 0.5033,
+ "step": 348
+ },
+ {
+ "epoch": 1.0908141962421711,
+ "grad_norm": 0.8273429274559021,
+ "learning_rate": 4.771127695470527e-06,
+ "loss": 0.5309,
+ "step": 349
+ },
+ {
+ "epoch": 1.0939457202505218,
+ "grad_norm": 1.104974389076233,
+ "learning_rate": 4.76931453566218e-06,
+ "loss": 0.5244,
+ "step": 350
+ },
+ {
+ "epoch": 1.0970772442588728,
+ "grad_norm": 1.096509337425232,
+ "learning_rate": 4.7674945694178166e-06,
+ "loss": 0.5585,
+ "step": 351
+ },
+ {
+ "epoch": 1.1002087682672235,
+ "grad_norm": 1.0238200426101685,
+ "learning_rate": 4.765667802196127e-06,
+ "loss": 0.5589,
+ "step": 352
+ },
+ {
+ "epoch": 1.1033402922755742,
+ "grad_norm": 0.7515526413917542,
+ "learning_rate": 4.763834239476197e-06,
+ "loss": 0.5304,
+ "step": 353
+ },
+ {
+ "epoch": 1.1064718162839249,
+ "grad_norm": 1.0282566547393799,
+ "learning_rate": 4.761993886757499e-06,
+ "loss": 0.5476,
+ "step": 354
+ },
+ {
+ "epoch": 1.1096033402922756,
+ "grad_norm": 0.9962708950042725,
+ "learning_rate": 4.760146749559868e-06,
+ "loss": 0.5117,
+ "step": 355
+ },
+ {
+ "epoch": 1.1127348643006263,
+ "grad_norm": 0.7851671576499939,
+ "learning_rate": 4.758292833423488e-06,
+ "loss": 0.5542,
+ "step": 356
+ },
+ {
+ "epoch": 1.115866388308977,
+ "grad_norm": 0.8857759237289429,
+ "learning_rate": 4.756432143908876e-06,
+ "loss": 0.544,
+ "step": 357
+ },
+ {
+ "epoch": 1.1189979123173277,
+ "grad_norm": 0.9402740597724915,
+ "learning_rate": 4.7545646865968645e-06,
+ "loss": 0.5656,
+ "step": 358
+ },
+ {
+ "epoch": 1.1221294363256784,
+ "grad_norm": 0.8210407495498657,
+ "learning_rate": 4.752690467088584e-06,
+ "loss": 0.5733,
+ "step": 359
+ },
+ {
+ "epoch": 1.1252609603340291,
+ "grad_norm": 0.795684278011322,
+ "learning_rate": 4.750809491005449e-06,
+ "loss": 0.5678,
+ "step": 360
+ },
+ {
+ "epoch": 1.12839248434238,
+ "grad_norm": 0.8712463974952698,
+ "learning_rate": 4.748921763989139e-06,
+ "loss": 0.5777,
+ "step": 361
+ },
+ {
+ "epoch": 1.1315240083507307,
+ "grad_norm": 0.9810119867324829,
+ "learning_rate": 4.747027291701578e-06,
+ "loss": 0.5511,
+ "step": 362
+ },
+ {
+ "epoch": 1.1346555323590815,
+ "grad_norm": 0.81117844581604,
+ "learning_rate": 4.745126079824926e-06,
+ "loss": 0.5038,
+ "step": 363
+ },
+ {
+ "epoch": 1.1377870563674322,
+ "grad_norm": 0.7631494402885437,
+ "learning_rate": 4.743218134061556e-06,
+ "loss": 0.6272,
+ "step": 364
+ },
+ {
+ "epoch": 1.1409185803757829,
+ "grad_norm": 0.7601696252822876,
+ "learning_rate": 4.741303460134038e-06,
+ "loss": 0.571,
+ "step": 365
+ },
+ {
+ "epoch": 1.1440501043841336,
+ "grad_norm": 1.7977744340896606,
+ "learning_rate": 4.7393820637851205e-06,
+ "loss": 0.538,
+ "step": 366
+ },
+ {
+ "epoch": 1.1471816283924843,
+ "grad_norm": 2.022578001022339,
+ "learning_rate": 4.737453950777718e-06,
+ "loss": 0.5822,
+ "step": 367
+ },
+ {
+ "epoch": 1.150313152400835,
+ "grad_norm": 0.7586764693260193,
+ "learning_rate": 4.735519126894885e-06,
+ "loss": 0.5986,
+ "step": 368
+ },
+ {
+ "epoch": 1.153444676409186,
+ "grad_norm": 0.8970286846160889,
+ "learning_rate": 4.733577597939812e-06,
+ "loss": 0.542,
+ "step": 369
+ },
+ {
+ "epoch": 1.1565762004175366,
+ "grad_norm": 0.8546352982521057,
+ "learning_rate": 4.731629369735793e-06,
+ "loss": 0.5832,
+ "step": 370
+ },
+ {
+ "epoch": 1.1597077244258873,
+ "grad_norm": 0.9266164898872375,
+ "learning_rate": 4.72967444812622e-06,
+ "loss": 0.551,
+ "step": 371
+ },
+ {
+ "epoch": 1.162839248434238,
+ "grad_norm": 1.0413658618927002,
+ "learning_rate": 4.7277128389745595e-06,
+ "loss": 0.5866,
+ "step": 372
+ },
+ {
+ "epoch": 1.1659707724425887,
+ "grad_norm": 0.9312199950218201,
+ "learning_rate": 4.7257445481643334e-06,
+ "loss": 0.5723,
+ "step": 373
+ },
+ {
+ "epoch": 1.1691022964509394,
+ "grad_norm": 0.7389806509017944,
+ "learning_rate": 4.723769581599109e-06,
+ "loss": 0.5209,
+ "step": 374
+ },
+ {
+ "epoch": 1.1722338204592901,
+ "grad_norm": 3.053169012069702,
+ "learning_rate": 4.721787945202472e-06,
+ "loss": 0.6094,
+ "step": 375
+ },
+ {
+ "epoch": 1.1753653444676408,
+ "grad_norm": 1.288589596748352,
+ "learning_rate": 4.719799644918017e-06,
+ "loss": 0.5616,
+ "step": 376
+ },
+ {
+ "epoch": 1.1784968684759916,
+ "grad_norm": 0.7675042152404785,
+ "learning_rate": 4.717804686709323e-06,
+ "loss": 0.4963,
+ "step": 377
+ },
+ {
+ "epoch": 1.1816283924843423,
+ "grad_norm": 0.7246491312980652,
+ "learning_rate": 4.715803076559938e-06,
+ "loss": 0.5273,
+ "step": 378
+ },
+ {
+ "epoch": 1.1847599164926932,
+ "grad_norm": 0.8193361759185791,
+ "learning_rate": 4.713794820473366e-06,
+ "loss": 0.6107,
+ "step": 379
+ },
+ {
+ "epoch": 1.187891440501044,
+ "grad_norm": 0.9498510360717773,
+ "learning_rate": 4.711779924473037e-06,
+ "loss": 0.5421,
+ "step": 380
+ },
+ {
+ "epoch": 1.1910229645093946,
+ "grad_norm": 1.0479756593704224,
+ "learning_rate": 4.709758394602305e-06,
+ "loss": 0.5257,
+ "step": 381
+ },
+ {
+ "epoch": 1.1941544885177453,
+ "grad_norm": 0.907866895198822,
+ "learning_rate": 4.707730236924413e-06,
+ "loss": 0.5289,
+ "step": 382
+ },
+ {
+ "epoch": 1.197286012526096,
+ "grad_norm": 0.8861165642738342,
+ "learning_rate": 4.705695457522488e-06,
+ "loss": 0.5727,
+ "step": 383
+ },
+ {
+ "epoch": 1.2004175365344467,
+ "grad_norm": 0.7467761039733887,
+ "learning_rate": 4.703654062499516e-06,
+ "loss": 0.5602,
+ "step": 384
+ },
+ {
+ "epoch": 1.2035490605427974,
+ "grad_norm": 0.7456198334693909,
+ "learning_rate": 4.701606057978325e-06,
+ "loss": 0.5345,
+ "step": 385
+ },
+ {
+ "epoch": 1.2066805845511483,
+ "grad_norm": 1.9976060390472412,
+ "learning_rate": 4.699551450101571e-06,
+ "loss": 0.5504,
+ "step": 386
+ },
+ {
+ "epoch": 1.209812108559499,
+ "grad_norm": 1.5253807306289673,
+ "learning_rate": 4.697490245031709e-06,
+ "loss": 0.5568,
+ "step": 387
+ },
+ {
+ "epoch": 1.2129436325678498,
+ "grad_norm": 1.0786075592041016,
+ "learning_rate": 4.6954224489509885e-06,
+ "loss": 0.5564,
+ "step": 388
+ },
+ {
+ "epoch": 1.2160751565762005,
+ "grad_norm": 0.8385995030403137,
+ "learning_rate": 4.693348068061422e-06,
+ "loss": 0.5341,
+ "step": 389
+ },
+ {
+ "epoch": 1.2192066805845512,
+ "grad_norm": 0.8184949159622192,
+ "learning_rate": 4.691267108584774e-06,
+ "loss": 0.5614,
+ "step": 390
+ },
+ {
+ "epoch": 1.2223382045929019,
+ "grad_norm": 0.9964898824691772,
+ "learning_rate": 4.68917957676254e-06,
+ "loss": 0.5589,
+ "step": 391
+ },
+ {
+ "epoch": 1.2254697286012526,
+ "grad_norm": 1.0168914794921875,
+ "learning_rate": 4.687085478855931e-06,
+ "loss": 0.5892,
+ "step": 392
+ },
+ {
+ "epoch": 1.2286012526096033,
+ "grad_norm": 0.8841140866279602,
+ "learning_rate": 4.684984821145846e-06,
+ "loss": 0.5327,
+ "step": 393
+ },
+ {
+ "epoch": 1.231732776617954,
+ "grad_norm": 0.834431529045105,
+ "learning_rate": 4.682877609932866e-06,
+ "loss": 0.5594,
+ "step": 394
+ },
+ {
+ "epoch": 1.2348643006263047,
+ "grad_norm": 0.7256641983985901,
+ "learning_rate": 4.6807638515372234e-06,
+ "loss": 0.5443,
+ "step": 395
+ },
+ {
+ "epoch": 1.2379958246346556,
+ "grad_norm": 0.765096127986908,
+ "learning_rate": 4.678643552298788e-06,
+ "loss": 0.5439,
+ "step": 396
+ },
+ {
+ "epoch": 1.2411273486430063,
+ "grad_norm": 0.8760455846786499,
+ "learning_rate": 4.676516718577051e-06,
+ "loss": 0.5485,
+ "step": 397
+ },
+ {
+ "epoch": 1.244258872651357,
+ "grad_norm": 2.7111501693725586,
+ "learning_rate": 4.674383356751099e-06,
+ "loss": 0.5696,
+ "step": 398
+ },
+ {
+ "epoch": 1.2473903966597077,
+ "grad_norm": 1.0521738529205322,
+ "learning_rate": 4.672243473219601e-06,
+ "loss": 0.5503,
+ "step": 399
+ },
+ {
+ "epoch": 1.2505219206680585,
+ "grad_norm": 0.8909669518470764,
+ "learning_rate": 4.670097074400785e-06,
+ "loss": 0.5183,
+ "step": 400
+ },
+ {
+ "epoch": 1.2536534446764092,
+ "grad_norm": 0.7483847737312317,
+ "learning_rate": 4.667944166732424e-06,
+ "loss": 0.5669,
+ "step": 401
+ },
+ {
+ "epoch": 1.2567849686847599,
+ "grad_norm": 1.146997094154358,
+ "learning_rate": 4.665784756671808e-06,
+ "loss": 0.5464,
+ "step": 402
+ },
+ {
+ "epoch": 1.2599164926931106,
+ "grad_norm": 0.8998096585273743,
+ "learning_rate": 4.663618850695733e-06,
+ "loss": 0.5502,
+ "step": 403
+ },
+ {
+ "epoch": 1.2630480167014615,
+ "grad_norm": 0.8882688283920288,
+ "learning_rate": 4.6614464553004795e-06,
+ "loss": 0.5507,
+ "step": 404
+ },
+ {
+ "epoch": 1.2661795407098122,
+ "grad_norm": 0.8310684561729431,
+ "learning_rate": 4.659267577001789e-06,
+ "loss": 0.5164,
+ "step": 405
+ },
+ {
+ "epoch": 1.269311064718163,
+ "grad_norm": 0.9286114573478699,
+ "learning_rate": 4.657082222334851e-06,
+ "loss": 0.4813,
+ "step": 406
+ },
+ {
+ "epoch": 1.2724425887265136,
+ "grad_norm": 1.2394906282424927,
+ "learning_rate": 4.654890397854275e-06,
+ "loss": 0.5837,
+ "step": 407
+ },
+ {
+ "epoch": 1.2755741127348643,
+ "grad_norm": 4.00585412979126,
+ "learning_rate": 4.652692110134079e-06,
+ "loss": 0.5453,
+ "step": 408
+ },
+ {
+ "epoch": 1.278705636743215,
+ "grad_norm": 1.1667803525924683,
+ "learning_rate": 4.650487365767667e-06,
+ "loss": 0.5652,
+ "step": 409
+ },
+ {
+ "epoch": 1.2818371607515657,
+ "grad_norm": 0.9351289868354797,
+ "learning_rate": 4.648276171367807e-06,
+ "loss": 0.5576,
+ "step": 410
+ },
+ {
+ "epoch": 1.2849686847599164,
+ "grad_norm": 0.8107728958129883,
+ "learning_rate": 4.646058533566614e-06,
+ "loss": 0.5821,
+ "step": 411
+ },
+ {
+ "epoch": 1.2881002087682671,
+ "grad_norm": 0.7293011546134949,
+ "learning_rate": 4.643834459015525e-06,
+ "loss": 0.5363,
+ "step": 412
+ },
+ {
+ "epoch": 1.2912317327766178,
+ "grad_norm": 0.7550690770149231,
+ "learning_rate": 4.641603954385289e-06,
+ "loss": 0.53,
+ "step": 413
+ },
+ {
+ "epoch": 1.2943632567849686,
+ "grad_norm": 0.7626177072525024,
+ "learning_rate": 4.639367026365938e-06,
+ "loss": 0.5307,
+ "step": 414
+ },
+ {
+ "epoch": 1.2974947807933195,
+ "grad_norm": 1.0841096639633179,
+ "learning_rate": 4.637123681666769e-06,
+ "loss": 0.5162,
+ "step": 415
+ },
+ {
+ "epoch": 1.3006263048016702,
+ "grad_norm": 0.8814271092414856,
+ "learning_rate": 4.634873927016326e-06,
+ "loss": 0.5369,
+ "step": 416
+ },
+ {
+ "epoch": 1.303757828810021,
+ "grad_norm": 0.7402971982955933,
+ "learning_rate": 4.632617769162378e-06,
+ "loss": 0.5846,
+ "step": 417
+ },
+ {
+ "epoch": 1.3068893528183716,
+ "grad_norm": 0.8106061220169067,
+ "learning_rate": 4.6303552148719e-06,
+ "loss": 0.5289,
+ "step": 418
+ },
+ {
+ "epoch": 1.3100208768267223,
+ "grad_norm": 0.9241361618041992,
+ "learning_rate": 4.628086270931053e-06,
+ "loss": 0.5714,
+ "step": 419
+ },
+ {
+ "epoch": 1.313152400835073,
+ "grad_norm": 0.950332522392273,
+ "learning_rate": 4.625810944145159e-06,
+ "loss": 0.5817,
+ "step": 420
+ },
+ {
+ "epoch": 1.316283924843424,
+ "grad_norm": 0.9037718772888184,
+ "learning_rate": 4.623529241338689e-06,
+ "loss": 0.5902,
+ "step": 421
+ },
+ {
+ "epoch": 1.3194154488517746,
+ "grad_norm": 1.2110658884048462,
+ "learning_rate": 4.621241169355234e-06,
+ "loss": 0.561,
+ "step": 422
+ },
+ {
+ "epoch": 1.3225469728601253,
+ "grad_norm": 0.8582742214202881,
+ "learning_rate": 4.618946735057491e-06,
+ "loss": 0.5003,
+ "step": 423
+ },
+ {
+ "epoch": 1.325678496868476,
+ "grad_norm": 0.9203405976295471,
+ "learning_rate": 4.6166459453272386e-06,
+ "loss": 0.5639,
+ "step": 424
+ },
+ {
+ "epoch": 1.3288100208768268,
+ "grad_norm": 0.933721125125885,
+ "learning_rate": 4.614338807065317e-06,
+ "loss": 0.5766,
+ "step": 425
+ },
+ {
+ "epoch": 1.3319415448851775,
+ "grad_norm": 0.8435131311416626,
+ "learning_rate": 4.612025327191608e-06,
+ "loss": 0.5656,
+ "step": 426
+ },
+ {
+ "epoch": 1.3350730688935282,
+ "grad_norm": 0.795796811580658,
+ "learning_rate": 4.609705512645015e-06,
+ "loss": 0.4996,
+ "step": 427
+ },
+ {
+ "epoch": 1.3382045929018789,
+ "grad_norm": 0.8168228268623352,
+ "learning_rate": 4.6073793703834404e-06,
+ "loss": 0.5465,
+ "step": 428
+ },
+ {
+ "epoch": 1.3413361169102296,
+ "grad_norm": 0.8795569539070129,
+ "learning_rate": 4.605046907383765e-06,
+ "loss": 0.5407,
+ "step": 429
+ },
+ {
+ "epoch": 1.3444676409185803,
+ "grad_norm": 0.8504094481468201,
+ "learning_rate": 4.6027081306418295e-06,
+ "loss": 0.5589,
+ "step": 430
+ },
+ {
+ "epoch": 1.347599164926931,
+ "grad_norm": 1.485202431678772,
+ "learning_rate": 4.600363047172409e-06,
+ "loss": 0.5515,
+ "step": 431
+ },
+ {
+ "epoch": 1.350730688935282,
+ "grad_norm": 1.1156851053237915,
+ "learning_rate": 4.598011664009197e-06,
+ "loss": 0.5681,
+ "step": 432
+ },
+ {
+ "epoch": 1.3538622129436326,
+ "grad_norm": 0.8666876554489136,
+ "learning_rate": 4.595653988204779e-06,
+ "loss": 0.5451,
+ "step": 433
+ },
+ {
+ "epoch": 1.3569937369519833,
+ "grad_norm": 0.8192381858825684,
+ "learning_rate": 4.593290026830619e-06,
+ "loss": 0.5632,
+ "step": 434
+ },
+ {
+ "epoch": 1.360125260960334,
+ "grad_norm": 0.7994804978370667,
+ "learning_rate": 4.590919786977029e-06,
+ "loss": 0.5181,
+ "step": 435
+ },
+ {
+ "epoch": 1.3632567849686847,
+ "grad_norm": 0.8038607835769653,
+ "learning_rate": 4.5885432757531535e-06,
+ "loss": 0.5385,
+ "step": 436
+ },
+ {
+ "epoch": 1.3663883089770354,
+ "grad_norm": 0.7677503824234009,
+ "learning_rate": 4.586160500286948e-06,
+ "loss": 0.5455,
+ "step": 437
+ },
+ {
+ "epoch": 1.3695198329853862,
+ "grad_norm": 0.8293285369873047,
+ "learning_rate": 4.583771467725157e-06,
+ "loss": 0.5401,
+ "step": 438
+ },
+ {
+ "epoch": 1.372651356993737,
+ "grad_norm": 0.8607680797576904,
+ "learning_rate": 4.581376185233289e-06,
+ "loss": 0.5782,
+ "step": 439
+ },
+ {
+ "epoch": 1.3757828810020878,
+ "grad_norm": 0.8847081065177917,
+ "learning_rate": 4.578974659995601e-06,
+ "loss": 0.572,
+ "step": 440
+ },
+ {
+ "epoch": 1.3789144050104385,
+ "grad_norm": 0.7669641971588135,
+ "learning_rate": 4.576566899215075e-06,
+ "loss": 0.5655,
+ "step": 441
+ },
+ {
+ "epoch": 1.3820459290187892,
+ "grad_norm": 0.8514629006385803,
+ "learning_rate": 4.5741529101133904e-06,
+ "loss": 0.5218,
+ "step": 442
+ },
+ {
+ "epoch": 1.38517745302714,
+ "grad_norm": 0.8719842433929443,
+ "learning_rate": 4.5717326999309145e-06,
+ "loss": 0.5579,
+ "step": 443
+ },
+ {
+ "epoch": 1.3883089770354906,
+ "grad_norm": 1.1142809391021729,
+ "learning_rate": 4.569306275926667e-06,
+ "loss": 0.5535,
+ "step": 444
+ },
+ {
+ "epoch": 1.3914405010438413,
+ "grad_norm": 0.7392387986183167,
+ "learning_rate": 4.566873645378309e-06,
+ "loss": 0.5335,
+ "step": 445
+ },
+ {
+ "epoch": 1.394572025052192,
+ "grad_norm": 0.9066658616065979,
+ "learning_rate": 4.564434815582117e-06,
+ "loss": 0.5286,
+ "step": 446
+ },
+ {
+ "epoch": 1.3977035490605427,
+ "grad_norm": 0.8648932576179504,
+ "learning_rate": 4.561989793852959e-06,
+ "loss": 0.5008,
+ "step": 447
+ },
+ {
+ "epoch": 1.4008350730688934,
+ "grad_norm": 0.7768712043762207,
+ "learning_rate": 4.559538587524276e-06,
+ "loss": 0.5727,
+ "step": 448
+ },
+ {
+ "epoch": 1.4039665970772441,
+ "grad_norm": 0.7851182222366333,
+ "learning_rate": 4.557081203948059e-06,
+ "loss": 0.5731,
+ "step": 449
+ },
+ {
+ "epoch": 1.407098121085595,
+ "grad_norm": 0.8959861397743225,
+ "learning_rate": 4.5546176504948255e-06,
+ "loss": 0.5587,
+ "step": 450
+ },
+ {
+ "epoch": 1.4102296450939458,
+ "grad_norm": 1.0538026094436646,
+ "learning_rate": 4.552147934553601e-06,
+ "loss": 0.5808,
+ "step": 451
+ },
+ {
+ "epoch": 1.4133611691022965,
+ "grad_norm": 0.9887629151344299,
+ "learning_rate": 4.54967206353189e-06,
+ "loss": 0.5658,
+ "step": 452
+ },
+ {
+ "epoch": 1.4164926931106472,
+ "grad_norm": 0.9579302072525024,
+ "learning_rate": 4.547190044855663e-06,
+ "loss": 0.5092,
+ "step": 453
+ },
+ {
+ "epoch": 1.4196242171189979,
+ "grad_norm": 0.6993522047996521,
+ "learning_rate": 4.544701885969326e-06,
+ "loss": 0.5233,
+ "step": 454
+ },
+ {
+ "epoch": 1.4227557411273486,
+ "grad_norm": 0.8197568655014038,
+ "learning_rate": 4.542207594335703e-06,
+ "loss": 0.553,
+ "step": 455
+ },
+ {
+ "epoch": 1.4258872651356993,
+ "grad_norm": 2.921947717666626,
+ "learning_rate": 4.53970717743601e-06,
+ "loss": 0.4857,
+ "step": 456
+ },
+ {
+ "epoch": 1.4290187891440502,
+ "grad_norm": 1.3547242879867554,
+ "learning_rate": 4.53720064276984e-06,
+ "loss": 0.5676,
+ "step": 457
+ },
+ {
+ "epoch": 1.432150313152401,
+ "grad_norm": 1.4175567626953125,
+ "learning_rate": 4.534687997855131e-06,
+ "loss": 0.5164,
+ "step": 458
+ },
+ {
+ "epoch": 1.4352818371607516,
+ "grad_norm": 1.378146767616272,
+ "learning_rate": 4.532169250228145e-06,
+ "loss": 0.5429,
+ "step": 459
+ },
+ {
+ "epoch": 1.4384133611691023,
+ "grad_norm": 0.7811698317527771,
+ "learning_rate": 4.529644407443456e-06,
+ "loss": 0.524,
+ "step": 460
+ },
+ {
+ "epoch": 1.441544885177453,
+ "grad_norm": 1.1481678485870361,
+ "learning_rate": 4.527113477073914e-06,
+ "loss": 0.5513,
+ "step": 461
+ },
+ {
+ "epoch": 1.4446764091858038,
+ "grad_norm": 0.8450161218643188,
+ "learning_rate": 4.5245764667106266e-06,
+ "loss": 0.5632,
+ "step": 462
+ },
+ {
+ "epoch": 1.4478079331941545,
+ "grad_norm": 1.1582145690917969,
+ "learning_rate": 4.522033383962941e-06,
+ "loss": 0.5834,
+ "step": 463
+ },
+ {
+ "epoch": 1.4509394572025052,
+ "grad_norm": 1.0403447151184082,
+ "learning_rate": 4.519484236458416e-06,
+ "loss": 0.506,
+ "step": 464
+ },
+ {
+ "epoch": 1.4540709812108559,
+ "grad_norm": 0.7894920706748962,
+ "learning_rate": 4.516929031842799e-06,
+ "loss": 0.5526,
+ "step": 465
+ },
+ {
+ "epoch": 1.4572025052192066,
+ "grad_norm": 0.8092262744903564,
+ "learning_rate": 4.51436777778001e-06,
+ "loss": 0.5619,
+ "step": 466
+ },
+ {
+ "epoch": 1.4603340292275573,
+ "grad_norm": 0.9773806929588318,
+ "learning_rate": 4.511800481952106e-06,
+ "loss": 0.5179,
+ "step": 467
+ },
+ {
+ "epoch": 1.4634655532359082,
+ "grad_norm": 1.018676519393921,
+ "learning_rate": 4.509227152059271e-06,
+ "loss": 0.5415,
+ "step": 468
+ },
+ {
+ "epoch": 1.466597077244259,
+ "grad_norm": 0.7457838654518127,
+ "learning_rate": 4.506647795819784e-06,
+ "loss": 0.5473,
+ "step": 469
+ },
+ {
+ "epoch": 1.4697286012526096,
+ "grad_norm": 0.7826436161994934,
+ "learning_rate": 4.50406242097e-06,
+ "loss": 0.5526,
+ "step": 470
+ },
+ {
+ "epoch": 1.4728601252609603,
+ "grad_norm": 0.9492483139038086,
+ "learning_rate": 4.501471035264328e-06,
+ "loss": 0.5179,
+ "step": 471
+ },
+ {
+ "epoch": 1.475991649269311,
+ "grad_norm": 0.93398517370224,
+ "learning_rate": 4.4988736464752005e-06,
+ "loss": 0.5195,
+ "step": 472
+ },
+ {
+ "epoch": 1.4791231732776617,
+ "grad_norm": 0.8396487832069397,
+ "learning_rate": 4.496270262393061e-06,
+ "loss": 0.5447,
+ "step": 473
+ },
+ {
+ "epoch": 1.4822546972860124,
+ "grad_norm": 0.7450584173202515,
+ "learning_rate": 4.4936608908263315e-06,
+ "loss": 0.5207,
+ "step": 474
+ },
+ {
+ "epoch": 1.4853862212943634,
+ "grad_norm": 0.7887717485427856,
+ "learning_rate": 4.491045539601392e-06,
+ "loss": 0.523,
+ "step": 475
+ },
+ {
+ "epoch": 1.488517745302714,
+ "grad_norm": 1.2051388025283813,
+ "learning_rate": 4.48842421656256e-06,
+ "loss": 0.5402,
+ "step": 476
+ },
+ {
+ "epoch": 1.4916492693110648,
+ "grad_norm": 2.3103389739990234,
+ "learning_rate": 4.485796929572063e-06,
+ "loss": 0.5588,
+ "step": 477
+ },
+ {
+ "epoch": 1.4947807933194155,
+ "grad_norm": 0.7473112344741821,
+ "learning_rate": 4.483163686510016e-06,
+ "loss": 0.5731,
+ "step": 478
+ },
+ {
+ "epoch": 1.4979123173277662,
+ "grad_norm": 0.7545126676559448,
+ "learning_rate": 4.480524495274399e-06,
+ "loss": 0.5536,
+ "step": 479
+ },
+ {
+ "epoch": 1.501043841336117,
+ "grad_norm": 0.7801297903060913,
+ "learning_rate": 4.477879363781033e-06,
+ "loss": 0.5696,
+ "step": 480
+ },
+ {
+ "epoch": 1.5041753653444676,
+ "grad_norm": 0.7740563750267029,
+ "learning_rate": 4.475228299963554e-06,
+ "loss": 0.5526,
+ "step": 481
+ },
+ {
+ "epoch": 1.5073068893528183,
+ "grad_norm": 0.8600060343742371,
+ "learning_rate": 4.4725713117733936e-06,
+ "loss": 0.5051,
+ "step": 482
+ },
+ {
+ "epoch": 1.510438413361169,
+ "grad_norm": 0.6934283971786499,
+ "learning_rate": 4.46990840717975e-06,
+ "loss": 0.5564,
+ "step": 483
+ },
+ {
+ "epoch": 1.5135699373695197,
+ "grad_norm": 0.8927920460700989,
+ "learning_rate": 4.46723959416957e-06,
+ "loss": 0.5529,
+ "step": 484
+ },
+ {
+ "epoch": 1.5167014613778704,
+ "grad_norm": 0.9570988416671753,
+ "learning_rate": 4.464564880747517e-06,
+ "loss": 0.5661,
+ "step": 485
+ },
+ {
+ "epoch": 1.5198329853862211,
+ "grad_norm": 0.7229202389717102,
+ "learning_rate": 4.461884274935956e-06,
+ "loss": 0.5964,
+ "step": 486
+ },
+ {
+ "epoch": 1.522964509394572,
+ "grad_norm": 0.7367239594459534,
+ "learning_rate": 4.4591977847749225e-06,
+ "loss": 0.5455,
+ "step": 487
+ },
+ {
+ "epoch": 1.5260960334029228,
+ "grad_norm": 0.8062120676040649,
+ "learning_rate": 4.456505418322103e-06,
+ "loss": 0.5735,
+ "step": 488
+ },
+ {
+ "epoch": 1.5292275574112735,
+ "grad_norm": 0.8854482769966125,
+ "learning_rate": 4.453807183652808e-06,
+ "loss": 0.5421,
+ "step": 489
+ },
+ {
+ "epoch": 1.5323590814196242,
+ "grad_norm": 0.7518959045410156,
+ "learning_rate": 4.451103088859951e-06,
+ "loss": 0.5083,
+ "step": 490
+ },
+ {
+ "epoch": 1.535490605427975,
+ "grad_norm": 0.8621206879615784,
+ "learning_rate": 4.448393142054016e-06,
+ "loss": 0.4712,
+ "step": 491
+ },
+ {
+ "epoch": 1.5386221294363258,
+ "grad_norm": 1.0618741512298584,
+ "learning_rate": 4.445677351363046e-06,
+ "loss": 0.5808,
+ "step": 492
+ },
+ {
+ "epoch": 1.5417536534446765,
+ "grad_norm": 0.8261345028877258,
+ "learning_rate": 4.442955724932607e-06,
+ "loss": 0.5625,
+ "step": 493
+ },
+ {
+ "epoch": 1.5448851774530272,
+ "grad_norm": 0.7067139744758606,
+ "learning_rate": 4.440228270925772e-06,
+ "loss": 0.5661,
+ "step": 494
+ },
+ {
+ "epoch": 1.548016701461378,
+ "grad_norm": 0.9234416484832764,
+ "learning_rate": 4.437494997523091e-06,
+ "loss": 0.5428,
+ "step": 495
+ },
+ {
+ "epoch": 1.5511482254697286,
+ "grad_norm": 0.9273470044136047,
+ "learning_rate": 4.434755912922567e-06,
+ "loss": 0.5388,
+ "step": 496
+ },
+ {
+ "epoch": 1.5542797494780793,
+ "grad_norm": 1.0163263082504272,
+ "learning_rate": 4.4320110253396345e-06,
+ "loss": 0.5409,
+ "step": 497
+ },
+ {
+ "epoch": 1.55741127348643,
+ "grad_norm": 0.9542096853256226,
+ "learning_rate": 4.429260343007133e-06,
+ "loss": 0.5329,
+ "step": 498
+ },
+ {
+ "epoch": 1.5605427974947808,
+ "grad_norm": 0.8076801896095276,
+ "learning_rate": 4.426503874175283e-06,
+ "loss": 0.5616,
+ "step": 499
+ },
+ {
+ "epoch": 1.5636743215031315,
+ "grad_norm": 1.0063767433166504,
+ "learning_rate": 4.423741627111658e-06,
+ "loss": 0.5369,
+ "step": 500
+ },
+ {
+ "epoch": 1.5668058455114822,
+ "grad_norm": 1.040286898612976,
+ "learning_rate": 4.420973610101166e-06,
+ "loss": 0.5474,
+ "step": 501
+ },
+ {
+ "epoch": 1.5699373695198329,
+ "grad_norm": 0.7832860946655273,
+ "learning_rate": 4.4181998314460164e-06,
+ "loss": 0.5486,
+ "step": 502
+ },
+ {
+ "epoch": 1.5730688935281836,
+ "grad_norm": 0.8162257075309753,
+ "learning_rate": 4.415420299465706e-06,
+ "loss": 0.5054,
+ "step": 503
+ },
+ {
+ "epoch": 1.5762004175365343,
+ "grad_norm": 0.9108433127403259,
+ "learning_rate": 4.4126350224969814e-06,
+ "loss": 0.5399,
+ "step": 504
+ },
+ {
+ "epoch": 1.5793319415448852,
+ "grad_norm": 0.8002520799636841,
+ "learning_rate": 4.409844008893824e-06,
+ "loss": 0.5485,
+ "step": 505
+ },
+ {
+ "epoch": 1.582463465553236,
+ "grad_norm": 0.8543248772621155,
+ "learning_rate": 4.407047267027423e-06,
+ "loss": 0.4984,
+ "step": 506
+ },
+ {
+ "epoch": 1.5855949895615866,
+ "grad_norm": 0.7154155373573303,
+ "learning_rate": 4.404244805286141e-06,
+ "loss": 0.5392,
+ "step": 507
+ },
+ {
+ "epoch": 1.5887265135699373,
+ "grad_norm": 0.818553626537323,
+ "learning_rate": 4.401436632075504e-06,
+ "loss": 0.5178,
+ "step": 508
+ },
+ {
+ "epoch": 1.5918580375782883,
+ "grad_norm": 0.7535017728805542,
+ "learning_rate": 4.398622755818167e-06,
+ "loss": 0.5446,
+ "step": 509
+ },
+ {
+ "epoch": 1.594989561586639,
+ "grad_norm": 0.9328975677490234,
+ "learning_rate": 4.395803184953889e-06,
+ "loss": 0.5546,
+ "step": 510
+ },
+ {
+ "epoch": 1.5981210855949897,
+ "grad_norm": 0.7960026860237122,
+ "learning_rate": 4.392977927939508e-06,
+ "loss": 0.5451,
+ "step": 511
+ },
+ {
+ "epoch": 1.6012526096033404,
+ "grad_norm": 0.9686267971992493,
+ "learning_rate": 4.3901469932489195e-06,
+ "loss": 0.5198,
+ "step": 512
+ },
+ {
+ "epoch": 1.604384133611691,
+ "grad_norm": 0.903137743473053,
+ "learning_rate": 4.387310389373047e-06,
+ "loss": 0.5395,
+ "step": 513
+ },
+ {
+ "epoch": 1.6075156576200418,
+ "grad_norm": 1.0728516578674316,
+ "learning_rate": 4.384468124819816e-06,
+ "loss": 0.5843,
+ "step": 514
+ },
+ {
+ "epoch": 1.6106471816283925,
+ "grad_norm": 1.0245436429977417,
+ "learning_rate": 4.3816202081141345e-06,
+ "loss": 0.5672,
+ "step": 515
+ },
+ {
+ "epoch": 1.6137787056367432,
+ "grad_norm": 0.9672732353210449,
+ "learning_rate": 4.378766647797858e-06,
+ "loss": 0.5369,
+ "step": 516
+ },
+ {
+ "epoch": 1.616910229645094,
+ "grad_norm": 0.9149513840675354,
+ "learning_rate": 4.375907452429774e-06,
+ "loss": 0.4628,
+ "step": 517
+ },
+ {
+ "epoch": 1.6200417536534446,
+ "grad_norm": 0.7543843984603882,
+ "learning_rate": 4.373042630585567e-06,
+ "loss": 0.5344,
+ "step": 518
+ },
+ {
+ "epoch": 1.6231732776617953,
+ "grad_norm": 0.7589017152786255,
+ "learning_rate": 4.370172190857801e-06,
+ "loss": 0.5672,
+ "step": 519
+ },
+ {
+ "epoch": 1.626304801670146,
+ "grad_norm": 0.803040623664856,
+ "learning_rate": 4.367296141855887e-06,
+ "loss": 0.5313,
+ "step": 520
+ },
+ {
+ "epoch": 1.6294363256784967,
+ "grad_norm": 0.8305794596672058,
+ "learning_rate": 4.3644144922060625e-06,
+ "loss": 0.5754,
+ "step": 521
+ },
+ {
+ "epoch": 1.6325678496868476,
+ "grad_norm": 1.0086486339569092,
+ "learning_rate": 4.361527250551361e-06,
+ "loss": 0.5433,
+ "step": 522
+ },
+ {
+ "epoch": 1.6356993736951984,
+ "grad_norm": 0.7217550277709961,
+ "learning_rate": 4.35863442555159e-06,
+ "loss": 0.524,
+ "step": 523
+ },
+ {
+ "epoch": 1.638830897703549,
+ "grad_norm": 0.7788524627685547,
+ "learning_rate": 4.355736025883303e-06,
+ "loss": 0.536,
+ "step": 524
+ },
+ {
+ "epoch": 1.6419624217118998,
+ "grad_norm": 0.8460550904273987,
+ "learning_rate": 4.352832060239774e-06,
+ "loss": 0.5381,
+ "step": 525
+ },
+ {
+ "epoch": 1.6450939457202505,
+ "grad_norm": 0.7571215033531189,
+ "learning_rate": 4.3499225373309675e-06,
+ "loss": 0.541,
+ "step": 526
+ },
+ {
+ "epoch": 1.6482254697286014,
+ "grad_norm": 0.7343226671218872,
+ "learning_rate": 4.347007465883523e-06,
+ "loss": 0.5147,
+ "step": 527
+ },
+ {
+ "epoch": 1.651356993736952,
+ "grad_norm": 0.7271892428398132,
+ "learning_rate": 4.3440868546407165e-06,
+ "loss": 0.5311,
+ "step": 528
+ },
+ {
+ "epoch": 1.6544885177453028,
+ "grad_norm": 0.8166136741638184,
+ "learning_rate": 4.341160712362442e-06,
+ "loss": 0.5379,
+ "step": 529
+ },
+ {
+ "epoch": 1.6576200417536535,
+ "grad_norm": 1.5985233783721924,
+ "learning_rate": 4.338229047825182e-06,
+ "loss": 0.5782,
+ "step": 530
+ },
+ {
+ "epoch": 1.6607515657620042,
+ "grad_norm": 0.7835702896118164,
+ "learning_rate": 4.3352918698219835e-06,
+ "loss": 0.525,
+ "step": 531
+ },
+ {
+ "epoch": 1.663883089770355,
+ "grad_norm": 0.7278687953948975,
+ "learning_rate": 4.332349187162428e-06,
+ "loss": 0.5266,
+ "step": 532
+ },
+ {
+ "epoch": 1.6670146137787056,
+ "grad_norm": 0.8240190148353577,
+ "learning_rate": 4.329401008672608e-06,
+ "loss": 0.5515,
+ "step": 533
+ },
+ {
+ "epoch": 1.6701461377870563,
+ "grad_norm": 0.9447080492973328,
+ "learning_rate": 4.326447343195102e-06,
+ "loss": 0.5596,
+ "step": 534
+ },
+ {
+ "epoch": 1.673277661795407,
+ "grad_norm": 0.7827372550964355,
+ "learning_rate": 4.323488199588944e-06,
+ "loss": 0.5466,
+ "step": 535
+ },
+ {
+ "epoch": 1.6764091858037578,
+ "grad_norm": 0.9252517223358154,
+ "learning_rate": 4.320523586729599e-06,
+ "loss": 0.5433,
+ "step": 536
+ },
+ {
+ "epoch": 1.6795407098121085,
+ "grad_norm": 0.9437504410743713,
+ "learning_rate": 4.317553513508934e-06,
+ "loss": 0.5552,
+ "step": 537
+ },
+ {
+ "epoch": 1.6826722338204592,
+ "grad_norm": 0.8972746133804321,
+ "learning_rate": 4.3145779888351986e-06,
+ "loss": 0.5259,
+ "step": 538
+ },
+ {
+ "epoch": 1.6858037578288099,
+ "grad_norm": 0.8017446994781494,
+ "learning_rate": 4.311597021632988e-06,
+ "loss": 0.5263,
+ "step": 539
+ },
+ {
+ "epoch": 1.6889352818371608,
+ "grad_norm": 0.7875497341156006,
+ "learning_rate": 4.3086106208432235e-06,
+ "loss": 0.5316,
+ "step": 540
+ },
+ {
+ "epoch": 1.6920668058455115,
+ "grad_norm": 0.8204905986785889,
+ "learning_rate": 4.305618795423125e-06,
+ "loss": 0.5506,
+ "step": 541
+ },
+ {
+ "epoch": 1.6951983298538622,
+ "grad_norm": 0.888359785079956,
+ "learning_rate": 4.30262155434618e-06,
+ "loss": 0.4825,
+ "step": 542
+ },
+ {
+ "epoch": 1.698329853862213,
+ "grad_norm": 1.1026058197021484,
+ "learning_rate": 4.29961890660212e-06,
+ "loss": 0.5321,
+ "step": 543
+ },
+ {
+ "epoch": 1.7014613778705638,
+ "grad_norm": 0.7662535905838013,
+ "learning_rate": 4.2966108611968945e-06,
+ "loss": 0.5432,
+ "step": 544
+ },
+ {
+ "epoch": 1.7045929018789145,
+ "grad_norm": 1.1951749324798584,
+ "learning_rate": 4.293597427152641e-06,
+ "loss": 0.5123,
+ "step": 545
+ },
+ {
+ "epoch": 1.7077244258872653,
+ "grad_norm": 1.303183913230896,
+ "learning_rate": 4.290578613507661e-06,
+ "loss": 0.5346,
+ "step": 546
+ },
+ {
+ "epoch": 1.710855949895616,
+ "grad_norm": 0.7653357982635498,
+ "learning_rate": 4.287554429316387e-06,
+ "loss": 0.5397,
+ "step": 547
+ },
+ {
+ "epoch": 1.7139874739039667,
+ "grad_norm": 0.796215295791626,
+ "learning_rate": 4.284524883649366e-06,
+ "loss": 0.5421,
+ "step": 548
+ },
+ {
+ "epoch": 1.7171189979123174,
+ "grad_norm": 0.7599332332611084,
+ "learning_rate": 4.281489985593219e-06,
+ "loss": 0.5289,
+ "step": 549
+ },
+ {
+ "epoch": 1.720250521920668,
+ "grad_norm": 0.8029115796089172,
+ "learning_rate": 4.2784497442506265e-06,
+ "loss": 0.5409,
+ "step": 550
+ },
+ {
+ "epoch": 1.7233820459290188,
+ "grad_norm": 0.7194099426269531,
+ "learning_rate": 4.275404168740291e-06,
+ "loss": 0.5327,
+ "step": 551
+ },
+ {
+ "epoch": 1.7265135699373695,
+ "grad_norm": 0.7960740923881531,
+ "learning_rate": 4.272353268196917e-06,
+ "loss": 0.4896,
+ "step": 552
+ },
+ {
+ "epoch": 1.7296450939457202,
+ "grad_norm": 0.9572116732597351,
+ "learning_rate": 4.269297051771178e-06,
+ "loss": 0.5402,
+ "step": 553
+ },
+ {
+ "epoch": 1.732776617954071,
+ "grad_norm": 1.3604938983917236,
+ "learning_rate": 4.266235528629695e-06,
+ "loss": 0.5792,
+ "step": 554
+ },
+ {
+ "epoch": 1.7359081419624216,
+ "grad_norm": 2.067286729812622,
+ "learning_rate": 4.263168707955002e-06,
+ "loss": 0.5033,
+ "step": 555
+ },
+ {
+ "epoch": 1.7390396659707723,
+ "grad_norm": 0.8031097054481506,
+ "learning_rate": 4.260096598945523e-06,
+ "loss": 0.5117,
+ "step": 556
+ },
+ {
+ "epoch": 1.742171189979123,
+ "grad_norm": 1.0241729021072388,
+ "learning_rate": 4.257019210815546e-06,
+ "loss": 0.5359,
+ "step": 557
+ },
+ {
+ "epoch": 1.745302713987474,
+ "grad_norm": 0.7625218629837036,
+ "learning_rate": 4.25393655279519e-06,
+ "loss": 0.5625,
+ "step": 558
+ },
+ {
+ "epoch": 1.7484342379958246,
+ "grad_norm": 0.8603503704071045,
+ "learning_rate": 4.250848634130381e-06,
+ "loss": 0.5043,
+ "step": 559
+ },
+ {
+ "epoch": 1.7515657620041754,
+ "grad_norm": 0.9543750286102295,
+ "learning_rate": 4.247755464082824e-06,
+ "loss": 0.5364,
+ "step": 560
+ },
+ {
+ "epoch": 1.754697286012526,
+ "grad_norm": 0.9707463979721069,
+ "learning_rate": 4.244657051929973e-06,
+ "loss": 0.5184,
+ "step": 561
+ },
+ {
+ "epoch": 1.757828810020877,
+ "grad_norm": 0.7491432428359985,
+ "learning_rate": 4.241553406965008e-06,
+ "loss": 0.559,
+ "step": 562
+ },
+ {
+ "epoch": 1.7609603340292277,
+ "grad_norm": 0.7444972991943359,
+ "learning_rate": 4.238444538496801e-06,
+ "loss": 0.5327,
+ "step": 563
+ },
+ {
+ "epoch": 1.7640918580375784,
+ "grad_norm": 2.7108678817749023,
+ "learning_rate": 4.235330455849892e-06,
+ "loss": 0.55,
+ "step": 564
+ },
+ {
+ "epoch": 1.767223382045929,
+ "grad_norm": 1.6716049909591675,
+ "learning_rate": 4.232211168364459e-06,
+ "loss": 0.5093,
+ "step": 565
+ },
+ {
+ "epoch": 1.7703549060542798,
+ "grad_norm": 0.7023475170135498,
+ "learning_rate": 4.229086685396295e-06,
+ "loss": 0.569,
+ "step": 566
+ },
+ {
+ "epoch": 1.7734864300626305,
+ "grad_norm": 0.8596265316009521,
+ "learning_rate": 4.225957016316771e-06,
+ "loss": 0.5128,
+ "step": 567
+ },
+ {
+ "epoch": 1.7766179540709812,
+ "grad_norm": 0.8110849857330322,
+ "learning_rate": 4.222822170512816e-06,
+ "loss": 0.5142,
+ "step": 568
+ },
+ {
+ "epoch": 1.779749478079332,
+ "grad_norm": 0.7583725452423096,
+ "learning_rate": 4.219682157386884e-06,
+ "loss": 0.5584,
+ "step": 569
+ },
+ {
+ "epoch": 1.7828810020876826,
+ "grad_norm": 0.787811279296875,
+ "learning_rate": 4.21653698635693e-06,
+ "loss": 0.5068,
+ "step": 570
+ },
+ {
+ "epoch": 1.7860125260960333,
+ "grad_norm": 0.8298993110656738,
+ "learning_rate": 4.213386666856375e-06,
+ "loss": 0.5496,
+ "step": 571
+ },
+ {
+ "epoch": 1.789144050104384,
+ "grad_norm": 0.8999841213226318,
+ "learning_rate": 4.210231208334087e-06,
+ "loss": 0.5454,
+ "step": 572
+ },
+ {
+ "epoch": 1.7922755741127347,
+ "grad_norm": 4.264521598815918,
+ "learning_rate": 4.207070620254345e-06,
+ "loss": 0.5486,
+ "step": 573
+ },
+ {
+ "epoch": 1.7954070981210855,
+ "grad_norm": 0.8517448306083679,
+ "learning_rate": 4.203904912096812e-06,
+ "loss": 0.5566,
+ "step": 574
+ },
+ {
+ "epoch": 1.7985386221294362,
+ "grad_norm": 0.9230182766914368,
+ "learning_rate": 4.200734093356511e-06,
+ "loss": 0.4964,
+ "step": 575
+ },
+ {
+ "epoch": 1.801670146137787,
+ "grad_norm": 1.224039912223816,
+ "learning_rate": 4.197558173543791e-06,
+ "loss": 0.5356,
+ "step": 576
+ },
+ {
+ "epoch": 1.8048016701461378,
+ "grad_norm": 0.9998573660850525,
+ "learning_rate": 4.194377162184301e-06,
+ "loss": 0.5334,
+ "step": 577
+ },
+ {
+ "epoch": 1.8079331941544885,
+ "grad_norm": 0.865521252155304,
+ "learning_rate": 4.191191068818963e-06,
+ "loss": 0.5036,
+ "step": 578
+ },
+ {
+ "epoch": 1.8110647181628392,
+ "grad_norm": 0.8048138618469238,
+ "learning_rate": 4.18799990300394e-06,
+ "loss": 0.4979,
+ "step": 579
+ },
+ {
+ "epoch": 1.8141962421711901,
+ "grad_norm": 0.717815637588501,
+ "learning_rate": 4.184803674310609e-06,
+ "loss": 0.5623,
+ "step": 580
+ },
+ {
+ "epoch": 1.8173277661795408,
+ "grad_norm": 0.8403327465057373,
+ "learning_rate": 4.1816023923255335e-06,
+ "loss": 0.5055,
+ "step": 581
+ },
+ {
+ "epoch": 1.8204592901878915,
+ "grad_norm": 0.7298995852470398,
+ "learning_rate": 4.178396066650432e-06,
+ "loss": 0.5641,
+ "step": 582
+ },
+ {
+ "epoch": 1.8235908141962422,
+ "grad_norm": 0.9469727873802185,
+ "learning_rate": 4.1751847069021516e-06,
+ "loss": 0.5557,
+ "step": 583
+ },
+ {
+ "epoch": 1.826722338204593,
+ "grad_norm": 0.8641784191131592,
+ "learning_rate": 4.1719683227126386e-06,
+ "loss": 0.5153,
+ "step": 584
+ },
+ {
+ "epoch": 1.8298538622129437,
+ "grad_norm": 0.7316668629646301,
+ "learning_rate": 4.168746923728908e-06,
+ "loss": 0.4988,
+ "step": 585
+ },
+ {
+ "epoch": 1.8329853862212944,
+ "grad_norm": 0.8795468807220459,
+ "learning_rate": 4.165520519613017e-06,
+ "loss": 0.5483,
+ "step": 586
+ },
+ {
+ "epoch": 1.836116910229645,
+ "grad_norm": 0.7323560118675232,
+ "learning_rate": 4.162289120042034e-06,
+ "loss": 0.5194,
+ "step": 587
+ },
+ {
+ "epoch": 1.8392484342379958,
+ "grad_norm": 0.8217021822929382,
+ "learning_rate": 4.159052734708013e-06,
+ "loss": 0.532,
+ "step": 588
+ },
+ {
+ "epoch": 1.8423799582463465,
+ "grad_norm": 0.7669674754142761,
+ "learning_rate": 4.155811373317958e-06,
+ "loss": 0.541,
+ "step": 589
+ },
+ {
+ "epoch": 1.8455114822546972,
+ "grad_norm": 0.8312156200408936,
+ "learning_rate": 4.152565045593801e-06,
+ "loss": 0.5298,
+ "step": 590
+ },
+ {
+ "epoch": 1.848643006263048,
+ "grad_norm": 0.8967565298080444,
+ "learning_rate": 4.1493137612723665e-06,
+ "loss": 0.51,
+ "step": 591
+ },
+ {
+ "epoch": 1.8517745302713986,
+ "grad_norm": 0.8706664443016052,
+ "learning_rate": 4.14605753010535e-06,
+ "loss": 0.4941,
+ "step": 592
+ },
+ {
+ "epoch": 1.8549060542797495,
+ "grad_norm": 0.7585753798484802,
+ "learning_rate": 4.14279636185928e-06,
+ "loss": 0.5161,
+ "step": 593
+ },
+ {
+ "epoch": 1.8580375782881002,
+ "grad_norm": 0.7495241165161133,
+ "learning_rate": 4.1395302663154954e-06,
+ "loss": 0.5388,
+ "step": 594
+ },
+ {
+ "epoch": 1.861169102296451,
+ "grad_norm": 1.0746862888336182,
+ "learning_rate": 4.136259253270114e-06,
+ "loss": 0.4976,
+ "step": 595
+ },
+ {
+ "epoch": 1.8643006263048016,
+ "grad_norm": 0.872309684753418,
+ "learning_rate": 4.132983332534e-06,
+ "loss": 0.559,
+ "step": 596
+ },
+ {
+ "epoch": 1.8674321503131524,
+ "grad_norm": 0.8759891986846924,
+ "learning_rate": 4.1297025139327405e-06,
+ "loss": 0.5436,
+ "step": 597
+ },
+ {
+ "epoch": 1.8705636743215033,
+ "grad_norm": 1.1044493913650513,
+ "learning_rate": 4.126416807306611e-06,
+ "loss": 0.5476,
+ "step": 598
+ },
+ {
+ "epoch": 1.873695198329854,
+ "grad_norm": 0.8340442180633545,
+ "learning_rate": 4.123126222510549e-06,
+ "loss": 0.4592,
+ "step": 599
+ },
+ {
+ "epoch": 1.8768267223382047,
+ "grad_norm": 0.8331449031829834,
+ "learning_rate": 4.119830769414123e-06,
+ "loss": 0.5219,
+ "step": 600
+ },
+ {
+ "epoch": 1.8799582463465554,
+ "grad_norm": 1.0862973928451538,
+ "learning_rate": 4.116530457901503e-06,
+ "loss": 0.5159,
+ "step": 601
+ },
+ {
+ "epoch": 1.883089770354906,
+ "grad_norm": 0.8524414300918579,
+ "learning_rate": 4.113225297871431e-06,
+ "loss": 0.5502,
+ "step": 602
+ },
+ {
+ "epoch": 1.8862212943632568,
+ "grad_norm": 1.4945416450500488,
+ "learning_rate": 4.10991529923719e-06,
+ "loss": 0.5627,
+ "step": 603
+ },
+ {
+ "epoch": 1.8893528183716075,
+ "grad_norm": 1.5518157482147217,
+ "learning_rate": 4.10660047192658e-06,
+ "loss": 0.5517,
+ "step": 604
+ },
+ {
+ "epoch": 1.8924843423799582,
+ "grad_norm": 2.56638765335083,
+ "learning_rate": 4.103280825881878e-06,
+ "loss": 0.5422,
+ "step": 605
+ },
+ {
+ "epoch": 1.895615866388309,
+ "grad_norm": 0.867254912853241,
+ "learning_rate": 4.099956371059817e-06,
+ "loss": 0.4991,
+ "step": 606
+ },
+ {
+ "epoch": 1.8987473903966596,
+ "grad_norm": 0.9555892944335938,
+ "learning_rate": 4.096627117431554e-06,
+ "loss": 0.5339,
+ "step": 607
+ },
+ {
+ "epoch": 1.9018789144050103,
+ "grad_norm": 0.7905483245849609,
+ "learning_rate": 4.093293074982638e-06,
+ "loss": 0.5168,
+ "step": 608
+ },
+ {
+ "epoch": 1.905010438413361,
+ "grad_norm": 0.7500227093696594,
+ "learning_rate": 4.089954253712981e-06,
+ "loss": 0.5096,
+ "step": 609
+ },
+ {
+ "epoch": 1.9081419624217117,
+ "grad_norm": 0.8458324074745178,
+ "learning_rate": 4.086610663636828e-06,
+ "loss": 0.5296,
+ "step": 610
+ },
+ {
+ "epoch": 1.9112734864300627,
+ "grad_norm": 0.7392706871032715,
+ "learning_rate": 4.08326231478273e-06,
+ "loss": 0.5305,
+ "step": 611
+ },
+ {
+ "epoch": 1.9144050104384134,
+ "grad_norm": 0.8113343715667725,
+ "learning_rate": 4.079909217193508e-06,
+ "loss": 0.5044,
+ "step": 612
+ },
+ {
+ "epoch": 1.917536534446764,
+ "grad_norm": 0.7637801766395569,
+ "learning_rate": 4.076551380926226e-06,
+ "loss": 0.5298,
+ "step": 613
+ },
+ {
+ "epoch": 1.9206680584551148,
+ "grad_norm": 1.0523375272750854,
+ "learning_rate": 4.073188816052164e-06,
+ "loss": 0.5111,
+ "step": 614
+ },
+ {
+ "epoch": 1.9237995824634657,
+ "grad_norm": 0.8224868774414062,
+ "learning_rate": 4.069821532656781e-06,
+ "loss": 0.5178,
+ "step": 615
+ },
+ {
+ "epoch": 1.9269311064718164,
+ "grad_norm": 0.7270777821540833,
+ "learning_rate": 4.066449540839693e-06,
+ "loss": 0.5307,
+ "step": 616
+ },
+ {
+ "epoch": 1.9300626304801671,
+ "grad_norm": 0.7214602828025818,
+ "learning_rate": 4.063072850714631e-06,
+ "loss": 0.5171,
+ "step": 617
+ },
+ {
+ "epoch": 1.9331941544885178,
+ "grad_norm": 0.7333671450614929,
+ "learning_rate": 4.059691472409426e-06,
+ "loss": 0.56,
+ "step": 618
+ },
+ {
+ "epoch": 1.9363256784968685,
+ "grad_norm": 0.9166824221611023,
+ "learning_rate": 4.056305416065964e-06,
+ "loss": 0.5388,
+ "step": 619
+ },
+ {
+ "epoch": 1.9394572025052192,
+ "grad_norm": 0.7743303775787354,
+ "learning_rate": 4.052914691840167e-06,
+ "loss": 0.5134,
+ "step": 620
+ },
+ {
+ "epoch": 1.94258872651357,
+ "grad_norm": 0.704097330570221,
+ "learning_rate": 4.0495193099019524e-06,
+ "loss": 0.4926,
+ "step": 621
+ },
+ {
+ "epoch": 1.9457202505219207,
+ "grad_norm": 0.8508503437042236,
+ "learning_rate": 4.046119280435212e-06,
+ "loss": 0.5008,
+ "step": 622
+ },
+ {
+ "epoch": 1.9488517745302714,
+ "grad_norm": 0.725933313369751,
+ "learning_rate": 4.042714613637775e-06,
+ "loss": 0.5549,
+ "step": 623
+ },
+ {
+ "epoch": 1.951983298538622,
+ "grad_norm": 0.8919175863265991,
+ "learning_rate": 4.039305319721381e-06,
+ "loss": 0.5183,
+ "step": 624
+ },
+ {
+ "epoch": 1.9551148225469728,
+ "grad_norm": 0.827919065952301,
+ "learning_rate": 4.035891408911644e-06,
+ "loss": 0.5624,
+ "step": 625
+ },
+ {
+ "epoch": 1.9582463465553235,
+ "grad_norm": 0.7415187358856201,
+ "learning_rate": 4.032472891448032e-06,
+ "loss": 0.5454,
+ "step": 626
+ },
+ {
+ "epoch": 1.9613778705636742,
+ "grad_norm": 0.7675788998603821,
+ "learning_rate": 4.029049777583824e-06,
+ "loss": 0.5361,
+ "step": 627
+ },
+ {
+ "epoch": 1.964509394572025,
+ "grad_norm": 0.8464030623435974,
+ "learning_rate": 4.025622077586088e-06,
+ "loss": 0.5295,
+ "step": 628
+ },
+ {
+ "epoch": 1.9676409185803758,
+ "grad_norm": 0.7641633749008179,
+ "learning_rate": 4.022189801735646e-06,
+ "loss": 0.55,
+ "step": 629
+ },
+ {
+ "epoch": 1.9707724425887265,
+ "grad_norm": 0.7813227772712708,
+ "learning_rate": 4.018752960327048e-06,
+ "loss": 0.5587,
+ "step": 630
+ },
+ {
+ "epoch": 1.9739039665970772,
+ "grad_norm": 0.7576701641082764,
+ "learning_rate": 4.015311563668533e-06,
+ "loss": 0.5413,
+ "step": 631
+ },
+ {
+ "epoch": 1.977035490605428,
+ "grad_norm": 0.6949650049209595,
+ "learning_rate": 4.011865622082004e-06,
+ "loss": 0.5344,
+ "step": 632
+ },
+ {
+ "epoch": 1.9801670146137789,
+ "grad_norm": 0.9009145498275757,
+ "learning_rate": 4.008415145902997e-06,
+ "loss": 0.5233,
+ "step": 633
+ },
+ {
+ "epoch": 1.9832985386221296,
+ "grad_norm": 0.7635822892189026,
+ "learning_rate": 4.004960145480651e-06,
+ "loss": 0.4981,
+ "step": 634
+ },
+ {
+ "epoch": 1.9864300626304803,
+ "grad_norm": 0.8916334509849548,
+ "learning_rate": 4.0015006311776685e-06,
+ "loss": 0.5311,
+ "step": 635
+ },
+ {
+ "epoch": 1.989561586638831,
+ "grad_norm": 0.7197673320770264,
+ "learning_rate": 3.998036613370295e-06,
+ "loss": 0.5361,
+ "step": 636
+ },
+ {
+ "epoch": 1.9926931106471817,
+ "grad_norm": 0.8391228914260864,
+ "learning_rate": 3.994568102448284e-06,
+ "loss": 0.5473,
+ "step": 637
+ },
+ {
+ "epoch": 1.9958246346555324,
+ "grad_norm": 0.9371750950813293,
+ "learning_rate": 3.991095108814862e-06,
+ "loss": 0.5303,
+ "step": 638
+ },
+ {
+ "epoch": 1.998956158663883,
+ "grad_norm": 0.8929619789123535,
+ "learning_rate": 3.9876176428867046e-06,
+ "loss": 0.533,
+ "step": 639
+ },
+ {
+ "epoch": 2.0,
+ "grad_norm": 0.8929619789123535,
+ "learning_rate": 3.9841357150938984e-06,
+ "loss": 0.1831,
+ "step": 640
+ },
+ {
+ "epoch": 2.0031315240083507,
+ "grad_norm": 0.8802503347396851,
+ "learning_rate": 3.9806493358799135e-06,
+ "loss": 0.493,
+ "step": 641
+ },
+ {
+ "epoch": 2.0062630480167014,
+ "grad_norm": 0.802759051322937,
+ "learning_rate": 3.977158515701571e-06,
+ "loss": 0.498,
+ "step": 642
+ },
+ {
+ "epoch": 2.009394572025052,
+ "grad_norm": 1.0235401391983032,
+ "learning_rate": 3.973663265029013e-06,
+ "loss": 0.4887,
+ "step": 643
+ },
+ {
+ "epoch": 2.012526096033403,
+ "grad_norm": 0.7219089865684509,
+ "learning_rate": 3.97016359434567e-06,
+ "loss": 0.4628,
+ "step": 644
+ },
+ {
+ "epoch": 2.0156576200417535,
+ "grad_norm": 0.7887073755264282,
+ "learning_rate": 3.966659514148229e-06,
+ "loss": 0.525,
+ "step": 645
+ },
+ {
+ "epoch": 2.0187891440501042,
+ "grad_norm": 0.7960914969444275,
+ "learning_rate": 3.963151034946602e-06,
+ "loss": 0.4643,
+ "step": 646
+ },
+ {
+ "epoch": 2.021920668058455,
+ "grad_norm": 0.7902271151542664,
+ "learning_rate": 3.959638167263895e-06,
+ "loss": 0.4922,
+ "step": 647
+ },
+ {
+ "epoch": 2.0250521920668056,
+ "grad_norm": 0.9501478672027588,
+ "learning_rate": 3.956120921636379e-06,
+ "loss": 0.5285,
+ "step": 648
+ },
+ {
+ "epoch": 2.028183716075157,
+ "grad_norm": 0.9510527849197388,
+ "learning_rate": 3.952599308613454e-06,
+ "loss": 0.4909,
+ "step": 649
+ },
+ {
+ "epoch": 2.0313152400835075,
+ "grad_norm": 0.9408219456672668,
+ "learning_rate": 3.949073338757619e-06,
+ "loss": 0.4912,
+ "step": 650
+ },
+ {
+ "epoch": 2.034446764091858,
+ "grad_norm": 0.7148041725158691,
+ "learning_rate": 3.945543022644441e-06,
+ "loss": 0.4792,
+ "step": 651
+ },
+ {
+ "epoch": 2.037578288100209,
+ "grad_norm": 0.7737464904785156,
+ "learning_rate": 3.942008370862522e-06,
+ "loss": 0.4694,
+ "step": 652
+ },
+ {
+ "epoch": 2.0407098121085596,
+ "grad_norm": 0.8405889868736267,
+ "learning_rate": 3.938469394013472e-06,
+ "loss": 0.5048,
+ "step": 653
+ },
+ {
+ "epoch": 2.0438413361169103,
+ "grad_norm": 0.7896456718444824,
+ "learning_rate": 3.934926102711869e-06,
+ "loss": 0.4882,
+ "step": 654
+ },
+ {
+ "epoch": 2.046972860125261,
+ "grad_norm": 0.9290387034416199,
+ "learning_rate": 3.931378507585231e-06,
+ "loss": 0.503,
+ "step": 655
+ },
+ {
+ "epoch": 2.0501043841336117,
+ "grad_norm": 0.7386118769645691,
+ "learning_rate": 3.927826619273991e-06,
+ "loss": 0.4918,
+ "step": 656
+ },
+ {
+ "epoch": 2.0532359081419624,
+ "grad_norm": 0.9878676533699036,
+ "learning_rate": 3.92427044843145e-06,
+ "loss": 0.4958,
+ "step": 657
+ },
+ {
+ "epoch": 2.056367432150313,
+ "grad_norm": 1.0111151933670044,
+ "learning_rate": 3.92071000572376e-06,
+ "loss": 0.4886,
+ "step": 658
+ },
+ {
+ "epoch": 2.059498956158664,
+ "grad_norm": 0.8612061738967896,
+ "learning_rate": 3.917145301829884e-06,
+ "loss": 0.5216,
+ "step": 659
+ },
+ {
+ "epoch": 2.0626304801670146,
+ "grad_norm": 0.7458518743515015,
+ "learning_rate": 3.913576347441564e-06,
+ "loss": 0.4807,
+ "step": 660
+ },
+ {
+ "epoch": 2.0657620041753653,
+ "grad_norm": 0.7775886058807373,
+ "learning_rate": 3.910003153263294e-06,
+ "loss": 0.4837,
+ "step": 661
+ },
+ {
+ "epoch": 2.068893528183716,
+ "grad_norm": 0.7144196629524231,
+ "learning_rate": 3.906425730012282e-06,
+ "loss": 0.5081,
+ "step": 662
+ },
+ {
+ "epoch": 2.0720250521920667,
+ "grad_norm": 0.844971776008606,
+ "learning_rate": 3.9028440884184215e-06,
+ "loss": 0.474,
+ "step": 663
+ },
+ {
+ "epoch": 2.0751565762004174,
+ "grad_norm": 0.9709283113479614,
+ "learning_rate": 3.899258239224257e-06,
+ "loss": 0.503,
+ "step": 664
+ },
+ {
+ "epoch": 2.078288100208768,
+ "grad_norm": 1.1325515508651733,
+ "learning_rate": 3.895668193184954e-06,
+ "loss": 0.5058,
+ "step": 665
+ },
+ {
+ "epoch": 2.081419624217119,
+ "grad_norm": 0.7211254239082336,
+ "learning_rate": 3.892073961068266e-06,
+ "loss": 0.4982,
+ "step": 666
+ },
+ {
+ "epoch": 2.08455114822547,
+ "grad_norm": 0.8975517153739929,
+ "learning_rate": 3.888475553654502e-06,
+ "loss": 0.4699,
+ "step": 667
+ },
+ {
+ "epoch": 2.0876826722338206,
+ "grad_norm": 0.8270771503448486,
+ "learning_rate": 3.884872981736493e-06,
+ "loss": 0.4586,
+ "step": 668
+ },
+ {
+ "epoch": 2.0908141962421714,
+ "grad_norm": 0.8606625199317932,
+ "learning_rate": 3.881266256119561e-06,
+ "loss": 0.5299,
+ "step": 669
+ },
+ {
+ "epoch": 2.093945720250522,
+ "grad_norm": 0.9013976454734802,
+ "learning_rate": 3.877655387621488e-06,
+ "loss": 0.4887,
+ "step": 670
+ },
+ {
+ "epoch": 2.0970772442588728,
+ "grad_norm": 0.7603903412818909,
+ "learning_rate": 3.8740403870724795e-06,
+ "loss": 0.4992,
+ "step": 671
+ },
+ {
+ "epoch": 2.1002087682672235,
+ "grad_norm": 1.0432350635528564,
+ "learning_rate": 3.870421265315137e-06,
+ "loss": 0.5035,
+ "step": 672
+ },
+ {
+ "epoch": 2.103340292275574,
+ "grad_norm": 0.7727136611938477,
+ "learning_rate": 3.8667980332044195e-06,
+ "loss": 0.5006,
+ "step": 673
+ },
+ {
+ "epoch": 2.106471816283925,
+ "grad_norm": 0.9764307141304016,
+ "learning_rate": 3.863170701607618e-06,
+ "loss": 0.5061,
+ "step": 674
+ },
+ {
+ "epoch": 2.1096033402922756,
+ "grad_norm": 0.747818648815155,
+ "learning_rate": 3.859539281404317e-06,
+ "loss": 0.4761,
+ "step": 675
+ },
+ {
+ "epoch": 2.1127348643006263,
+ "grad_norm": 0.7254915237426758,
+ "learning_rate": 3.855903783486364e-06,
+ "loss": 0.5166,
+ "step": 676
+ },
+ {
+ "epoch": 2.115866388308977,
+ "grad_norm": 0.7678592801094055,
+ "learning_rate": 3.852264218757839e-06,
+ "loss": 0.5122,
+ "step": 677
+ },
+ {
+ "epoch": 2.1189979123173277,
+ "grad_norm": 0.8140144348144531,
+ "learning_rate": 3.8486205981350165e-06,
+ "loss": 0.4551,
+ "step": 678
+ },
+ {
+ "epoch": 2.1221294363256784,
+ "grad_norm": 0.9417359232902527,
+ "learning_rate": 3.844972932546338e-06,
+ "loss": 0.4748,
+ "step": 679
+ },
+ {
+ "epoch": 2.125260960334029,
+ "grad_norm": 0.8035290241241455,
+ "learning_rate": 3.841321232932378e-06,
+ "loss": 0.5079,
+ "step": 680
+ },
+ {
+ "epoch": 2.12839248434238,
+ "grad_norm": 0.8300641775131226,
+ "learning_rate": 3.837665510245809e-06,
+ "loss": 0.5018,
+ "step": 681
+ },
+ {
+ "epoch": 2.1315240083507305,
+ "grad_norm": 0.8293547034263611,
+ "learning_rate": 3.8340057754513715e-06,
+ "loss": 0.5042,
+ "step": 682
+ },
+ {
+ "epoch": 2.1346555323590812,
+ "grad_norm": 0.7780007719993591,
+ "learning_rate": 3.8303420395258365e-06,
+ "loss": 0.5048,
+ "step": 683
+ },
+ {
+ "epoch": 2.137787056367432,
+ "grad_norm": 0.7519420385360718,
+ "learning_rate": 3.8266743134579785e-06,
+ "loss": 0.5108,
+ "step": 684
+ },
+ {
+ "epoch": 2.140918580375783,
+ "grad_norm": 0.7872384190559387,
+ "learning_rate": 3.8230026082485404e-06,
+ "loss": 0.4924,
+ "step": 685
+ },
+ {
+ "epoch": 2.144050104384134,
+ "grad_norm": 0.7479491829872131,
+ "learning_rate": 3.819326934910197e-06,
+ "loss": 0.5184,
+ "step": 686
+ },
+ {
+ "epoch": 2.1471816283924845,
+ "grad_norm": 0.8438299298286438,
+ "learning_rate": 3.815647304467527e-06,
+ "loss": 0.4791,
+ "step": 687
+ },
+ {
+ "epoch": 2.150313152400835,
+ "grad_norm": 0.7923721671104431,
+ "learning_rate": 3.8119637279569773e-06,
+ "loss": 0.5305,
+ "step": 688
+ },
+ {
+ "epoch": 2.153444676409186,
+ "grad_norm": 0.7856534719467163,
+ "learning_rate": 3.80827621642683e-06,
+ "loss": 0.5063,
+ "step": 689
+ },
+ {
+ "epoch": 2.1565762004175366,
+ "grad_norm": 0.8544500470161438,
+ "learning_rate": 3.8045847809371706e-06,
+ "loss": 0.4989,
+ "step": 690
+ },
+ {
+ "epoch": 2.1597077244258873,
+ "grad_norm": 0.865390956401825,
+ "learning_rate": 3.800889432559852e-06,
+ "loss": 0.4931,
+ "step": 691
+ },
+ {
+ "epoch": 2.162839248434238,
+ "grad_norm": 0.9809399247169495,
+ "learning_rate": 3.797190182378466e-06,
+ "loss": 0.4785,
+ "step": 692
+ },
+ {
+ "epoch": 2.1659707724425887,
+ "grad_norm": 0.7954536080360413,
+ "learning_rate": 3.793487041488304e-06,
+ "loss": 0.4847,
+ "step": 693
+ },
+ {
+ "epoch": 2.1691022964509394,
+ "grad_norm": 0.754704475402832,
+ "learning_rate": 3.7897800209963298e-06,
+ "loss": 0.5125,
+ "step": 694
+ },
+ {
+ "epoch": 2.17223382045929,
+ "grad_norm": 0.7319822311401367,
+ "learning_rate": 3.7860691320211414e-06,
+ "loss": 0.477,
+ "step": 695
+ },
+ {
+ "epoch": 2.175365344467641,
+ "grad_norm": 0.8198635578155518,
+ "learning_rate": 3.7823543856929403e-06,
+ "loss": 0.4764,
+ "step": 696
+ },
+ {
+ "epoch": 2.1784968684759916,
+ "grad_norm": 0.708933413028717,
+ "learning_rate": 3.7786357931534987e-06,
+ "loss": 0.4948,
+ "step": 697
+ },
+ {
+ "epoch": 2.1816283924843423,
+ "grad_norm": 0.8493193984031677,
+ "learning_rate": 3.774913365556123e-06,
+ "loss": 0.5271,
+ "step": 698
+ },
+ {
+ "epoch": 2.184759916492693,
+ "grad_norm": 0.7999475002288818,
+ "learning_rate": 3.771187114065622e-06,
+ "loss": 0.4804,
+ "step": 699
+ },
+ {
+ "epoch": 2.1878914405010437,
+ "grad_norm": 0.8366796970367432,
+ "learning_rate": 3.7674570498582776e-06,
+ "loss": 0.457,
+ "step": 700
+ },
+ {
+ "epoch": 2.1910229645093944,
+ "grad_norm": 0.7935530543327332,
+ "learning_rate": 3.7637231841218015e-06,
+ "loss": 0.5001,
+ "step": 701
+ },
+ {
+ "epoch": 2.1941544885177455,
+ "grad_norm": 0.7700153589248657,
+ "learning_rate": 3.7599855280553125e-06,
+ "loss": 0.5091,
+ "step": 702
+ },
+ {
+ "epoch": 2.1972860125260962,
+ "grad_norm": 0.7991652488708496,
+ "learning_rate": 3.756244092869294e-06,
+ "loss": 0.4955,
+ "step": 703
+ },
+ {
+ "epoch": 2.200417536534447,
+ "grad_norm": 0.720051646232605,
+ "learning_rate": 3.752498889785567e-06,
+ "loss": 0.4902,
+ "step": 704
+ },
+ {
+ "epoch": 2.2035490605427976,
+ "grad_norm": 0.7312369346618652,
+ "learning_rate": 3.748749930037252e-06,
+ "loss": 0.4935,
+ "step": 705
+ },
+ {
+ "epoch": 2.2066805845511483,
+ "grad_norm": 0.8418563008308411,
+ "learning_rate": 3.744997224868739e-06,
+ "loss": 0.5186,
+ "step": 706
+ },
+ {
+ "epoch": 2.209812108559499,
+ "grad_norm": 0.8324081301689148,
+ "learning_rate": 3.741240785535649e-06,
+ "loss": 0.482,
+ "step": 707
+ },
+ {
+ "epoch": 2.2129436325678498,
+ "grad_norm": 0.8051855564117432,
+ "learning_rate": 3.737480623304805e-06,
+ "loss": 0.4663,
+ "step": 708
+ },
+ {
+ "epoch": 2.2160751565762005,
+ "grad_norm": 0.9464184641838074,
+ "learning_rate": 3.7337167494541948e-06,
+ "loss": 0.451,
+ "step": 709
+ },
+ {
+ "epoch": 2.219206680584551,
+ "grad_norm": 1.0227075815200806,
+ "learning_rate": 3.729949175272942e-06,
+ "loss": 0.4428,
+ "step": 710
+ },
+ {
+ "epoch": 2.222338204592902,
+ "grad_norm": 0.7930364012718201,
+ "learning_rate": 3.7261779120612633e-06,
+ "loss": 0.5132,
+ "step": 711
+ },
+ {
+ "epoch": 2.2254697286012526,
+ "grad_norm": 0.9033688306808472,
+ "learning_rate": 3.7224029711304444e-06,
+ "loss": 0.476,
+ "step": 712
+ },
+ {
+ "epoch": 2.2286012526096033,
+ "grad_norm": 0.8026887774467468,
+ "learning_rate": 3.7186243638028007e-06,
+ "loss": 0.4959,
+ "step": 713
+ },
+ {
+ "epoch": 2.231732776617954,
+ "grad_norm": 0.9391745328903198,
+ "learning_rate": 3.714842101411642e-06,
+ "loss": 0.4962,
+ "step": 714
+ },
+ {
+ "epoch": 2.2348643006263047,
+ "grad_norm": 0.7774361371994019,
+ "learning_rate": 3.711056195301245e-06,
+ "loss": 0.4748,
+ "step": 715
+ },
+ {
+ "epoch": 2.2379958246346554,
+ "grad_norm": 0.9278722405433655,
+ "learning_rate": 3.7072666568268115e-06,
+ "loss": 0.5074,
+ "step": 716
+ },
+ {
+ "epoch": 2.241127348643006,
+ "grad_norm": 0.771423876285553,
+ "learning_rate": 3.7034734973544406e-06,
+ "loss": 0.5072,
+ "step": 717
+ },
+ {
+ "epoch": 2.244258872651357,
+ "grad_norm": 0.8707448244094849,
+ "learning_rate": 3.6996767282610892e-06,
+ "loss": 0.4851,
+ "step": 718
+ },
+ {
+ "epoch": 2.2473903966597075,
+ "grad_norm": 0.7641019821166992,
+ "learning_rate": 3.695876360934543e-06,
+ "loss": 0.4941,
+ "step": 719
+ },
+ {
+ "epoch": 2.2505219206680582,
+ "grad_norm": 0.7647167444229126,
+ "learning_rate": 3.69207240677338e-06,
+ "loss": 0.5225,
+ "step": 720
+ },
+ {
+ "epoch": 2.2536534446764094,
+ "grad_norm": 0.9108865261077881,
+ "learning_rate": 3.6882648771869345e-06,
+ "loss": 0.454,
+ "step": 721
+ },
+ {
+ "epoch": 2.25678496868476,
+ "grad_norm": 0.86728835105896,
+ "learning_rate": 3.6844537835952666e-06,
+ "loss": 0.4461,
+ "step": 722
+ },
+ {
+ "epoch": 2.259916492693111,
+ "grad_norm": 1.1055282354354858,
+ "learning_rate": 3.6806391374291238e-06,
+ "loss": 0.4618,
+ "step": 723
+ },
+ {
+ "epoch": 2.2630480167014615,
+ "grad_norm": 0.7591858506202698,
+ "learning_rate": 3.6768209501299116e-06,
+ "loss": 0.4901,
+ "step": 724
+ },
+ {
+ "epoch": 2.266179540709812,
+ "grad_norm": 0.7966359257698059,
+ "learning_rate": 3.6729992331496554e-06,
+ "loss": 0.5171,
+ "step": 725
+ },
+ {
+ "epoch": 2.269311064718163,
+ "grad_norm": 0.983161211013794,
+ "learning_rate": 3.6691739979509672e-06,
+ "loss": 0.4949,
+ "step": 726
+ },
+ {
+ "epoch": 2.2724425887265136,
+ "grad_norm": 0.9200037121772766,
+ "learning_rate": 3.6653452560070106e-06,
+ "loss": 0.5234,
+ "step": 727
+ },
+ {
+ "epoch": 2.2755741127348643,
+ "grad_norm": 1.0288461446762085,
+ "learning_rate": 3.6615130188014685e-06,
+ "loss": 0.4713,
+ "step": 728
+ },
+ {
+ "epoch": 2.278705636743215,
+ "grad_norm": 0.7325463891029358,
+ "learning_rate": 3.6576772978285065e-06,
+ "loss": 0.527,
+ "step": 729
+ },
+ {
+ "epoch": 2.2818371607515657,
+ "grad_norm": 1.0045446157455444,
+ "learning_rate": 3.6538381045927395e-06,
+ "loss": 0.5139,
+ "step": 730
+ },
+ {
+ "epoch": 2.2849686847599164,
+ "grad_norm": 0.7391849756240845,
+ "learning_rate": 3.6499954506091963e-06,
+ "loss": 0.4829,
+ "step": 731
+ },
+ {
+ "epoch": 2.288100208768267,
+ "grad_norm": 0.7808229923248291,
+ "learning_rate": 3.646149347403286e-06,
+ "loss": 0.4831,
+ "step": 732
+ },
+ {
+ "epoch": 2.291231732776618,
+ "grad_norm": 0.7056961059570312,
+ "learning_rate": 3.6422998065107628e-06,
+ "loss": 0.5066,
+ "step": 733
+ },
+ {
+ "epoch": 2.2943632567849686,
+ "grad_norm": 0.7498443126678467,
+ "learning_rate": 3.6384468394776935e-06,
+ "loss": 0.4724,
+ "step": 734
+ },
+ {
+ "epoch": 2.2974947807933193,
+ "grad_norm": 0.8511576056480408,
+ "learning_rate": 3.634590457860418e-06,
+ "loss": 0.5286,
+ "step": 735
+ },
+ {
+ "epoch": 2.30062630480167,
+ "grad_norm": 0.873635470867157,
+ "learning_rate": 3.63073067322552e-06,
+ "loss": 0.4751,
+ "step": 736
+ },
+ {
+ "epoch": 2.3037578288100207,
+ "grad_norm": 0.7427377104759216,
+ "learning_rate": 3.626867497149788e-06,
+ "loss": 0.475,
+ "step": 737
+ },
+ {
+ "epoch": 2.306889352818372,
+ "grad_norm": 1.0591017007827759,
+ "learning_rate": 3.623000941220186e-06,
+ "loss": 0.4591,
+ "step": 738
+ },
+ {
+ "epoch": 2.3100208768267225,
+ "grad_norm": 0.8767879009246826,
+ "learning_rate": 3.6191310170338114e-06,
+ "loss": 0.4673,
+ "step": 739
+ },
+ {
+ "epoch": 2.3131524008350732,
+ "grad_norm": 0.9156234860420227,
+ "learning_rate": 3.615257736197866e-06,
+ "loss": 0.4622,
+ "step": 740
+ },
+ {
+ "epoch": 2.316283924843424,
+ "grad_norm": 0.6743756532669067,
+ "learning_rate": 3.611381110329619e-06,
+ "loss": 0.4723,
+ "step": 741
+ },
+ {
+ "epoch": 2.3194154488517746,
+ "grad_norm": 0.8655558228492737,
+ "learning_rate": 3.6075011510563732e-06,
+ "loss": 0.471,
+ "step": 742
+ },
+ {
+ "epoch": 2.3225469728601253,
+ "grad_norm": 0.7652033567428589,
+ "learning_rate": 3.603617870015429e-06,
+ "loss": 0.5155,
+ "step": 743
+ },
+ {
+ "epoch": 2.325678496868476,
+ "grad_norm": 0.7970699667930603,
+ "learning_rate": 3.599731278854049e-06,
+ "loss": 0.4507,
+ "step": 744
+ },
+ {
+ "epoch": 2.3288100208768268,
+ "grad_norm": 0.7538278698921204,
+ "learning_rate": 3.5958413892294253e-06,
+ "loss": 0.5093,
+ "step": 745
+ },
+ {
+ "epoch": 2.3319415448851775,
+ "grad_norm": 0.735996663570404,
+ "learning_rate": 3.5919482128086414e-06,
+ "loss": 0.5008,
+ "step": 746
+ },
+ {
+ "epoch": 2.335073068893528,
+ "grad_norm": 0.7643904685974121,
+ "learning_rate": 3.588051761268642e-06,
+ "loss": 0.5072,
+ "step": 747
+ },
+ {
+ "epoch": 2.338204592901879,
+ "grad_norm": 0.7646260857582092,
+ "learning_rate": 3.584152046296191e-06,
+ "loss": 0.4578,
+ "step": 748
+ },
+ {
+ "epoch": 2.3413361169102296,
+ "grad_norm": 0.7873825430870056,
+ "learning_rate": 3.5802490795878446e-06,
+ "loss": 0.5249,
+ "step": 749
+ },
+ {
+ "epoch": 2.3444676409185803,
+ "grad_norm": 1.095333218574524,
+ "learning_rate": 3.5763428728499095e-06,
+ "loss": 0.4913,
+ "step": 750
+ },
+ {
+ "epoch": 2.347599164926931,
+ "grad_norm": 1.3425395488739014,
+ "learning_rate": 3.5724334377984107e-06,
+ "loss": 0.5317,
+ "step": 751
+ },
+ {
+ "epoch": 2.3507306889352817,
+ "grad_norm": 0.7151113748550415,
+ "learning_rate": 3.568520786159055e-06,
+ "loss": 0.5135,
+ "step": 752
+ },
+ {
+ "epoch": 2.3538622129436324,
+ "grad_norm": 0.8072878122329712,
+ "learning_rate": 3.5646049296672004e-06,
+ "loss": 0.4863,
+ "step": 753
+ },
+ {
+ "epoch": 2.356993736951983,
+ "grad_norm": 0.8040189743041992,
+ "learning_rate": 3.5606858800678123e-06,
+ "loss": 0.4668,
+ "step": 754
+ },
+ {
+ "epoch": 2.3601252609603343,
+ "grad_norm": 0.7749765515327454,
+ "learning_rate": 3.5567636491154385e-06,
+ "loss": 0.4681,
+ "step": 755
+ },
+ {
+ "epoch": 2.3632567849686845,
+ "grad_norm": 0.773013710975647,
+ "learning_rate": 3.5528382485741638e-06,
+ "loss": 0.5012,
+ "step": 756
+ },
+ {
+ "epoch": 2.3663883089770357,
+ "grad_norm": 0.7017714381217957,
+ "learning_rate": 3.5489096902175835e-06,
+ "loss": 0.5019,
+ "step": 757
+ },
+ {
+ "epoch": 2.3695198329853864,
+ "grad_norm": 1.132458209991455,
+ "learning_rate": 3.5449779858287625e-06,
+ "loss": 0.5131,
+ "step": 758
+ },
+ {
+ "epoch": 2.372651356993737,
+ "grad_norm": 0.7624574899673462,
+ "learning_rate": 3.541043147200202e-06,
+ "loss": 0.4856,
+ "step": 759
+ },
+ {
+ "epoch": 2.375782881002088,
+ "grad_norm": 0.9078478217124939,
+ "learning_rate": 3.5371051861338036e-06,
+ "loss": 0.4337,
+ "step": 760
+ },
+ {
+ "epoch": 2.3789144050104385,
+ "grad_norm": 0.8608354330062866,
+ "learning_rate": 3.5331641144408344e-06,
+ "loss": 0.5053,
+ "step": 761
+ },
+ {
+ "epoch": 2.382045929018789,
+ "grad_norm": 0.775047779083252,
+ "learning_rate": 3.529219943941892e-06,
+ "loss": 0.4779,
+ "step": 762
+ },
+ {
+ "epoch": 2.38517745302714,
+ "grad_norm": 0.7775866389274597,
+ "learning_rate": 3.525272686466866e-06,
+ "loss": 0.4979,
+ "step": 763
+ },
+ {
+ "epoch": 2.3883089770354906,
+ "grad_norm": 0.9386464357376099,
+ "learning_rate": 3.521322353854908e-06,
+ "loss": 0.5222,
+ "step": 764
+ },
+ {
+ "epoch": 2.3914405010438413,
+ "grad_norm": 0.874109148979187,
+ "learning_rate": 3.517368957954391e-06,
+ "loss": 0.4681,
+ "step": 765
+ },
+ {
+ "epoch": 2.394572025052192,
+ "grad_norm": 0.824588418006897,
+ "learning_rate": 3.5134125106228766e-06,
+ "loss": 0.4955,
+ "step": 766
+ },
+ {
+ "epoch": 2.3977035490605427,
+ "grad_norm": 0.8790764808654785,
+ "learning_rate": 3.5094530237270774e-06,
+ "loss": 0.4722,
+ "step": 767
+ },
+ {
+ "epoch": 2.4008350730688934,
+ "grad_norm": 1.1399786472320557,
+ "learning_rate": 3.5054905091428253e-06,
+ "loss": 0.4771,
+ "step": 768
+ },
+ {
+ "epoch": 2.403966597077244,
+ "grad_norm": 1.2586532831192017,
+ "learning_rate": 3.50152497875503e-06,
+ "loss": 0.4849,
+ "step": 769
+ },
+ {
+ "epoch": 2.407098121085595,
+ "grad_norm": 0.7706464529037476,
+ "learning_rate": 3.4975564444576487e-06,
+ "loss": 0.477,
+ "step": 770
+ },
+ {
+ "epoch": 2.4102296450939455,
+ "grad_norm": 0.7695909142494202,
+ "learning_rate": 3.4935849181536484e-06,
+ "loss": 0.4695,
+ "step": 771
+ },
+ {
+ "epoch": 2.4133611691022967,
+ "grad_norm": 0.7744433283805847,
+ "learning_rate": 3.489610411754969e-06,
+ "loss": 0.499,
+ "step": 772
+ },
+ {
+ "epoch": 2.416492693110647,
+ "grad_norm": 0.9265744686126709,
+ "learning_rate": 3.48563293718249e-06,
+ "loss": 0.481,
+ "step": 773
+ },
+ {
+ "epoch": 2.419624217118998,
+ "grad_norm": 1.0680506229400635,
+ "learning_rate": 3.481652506365992e-06,
+ "loss": 0.4898,
+ "step": 774
+ },
+ {
+ "epoch": 2.422755741127349,
+ "grad_norm": 0.721493661403656,
+ "learning_rate": 3.477669131244122e-06,
+ "loss": 0.4813,
+ "step": 775
+ },
+ {
+ "epoch": 2.4258872651356995,
+ "grad_norm": 0.7993559837341309,
+ "learning_rate": 3.4736828237643616e-06,
+ "loss": 0.5179,
+ "step": 776
+ },
+ {
+ "epoch": 2.4290187891440502,
+ "grad_norm": 0.8148090839385986,
+ "learning_rate": 3.4696935958829837e-06,
+ "loss": 0.4753,
+ "step": 777
+ },
+ {
+ "epoch": 2.432150313152401,
+ "grad_norm": 0.8006406426429749,
+ "learning_rate": 3.465701459565022e-06,
+ "loss": 0.501,
+ "step": 778
+ },
+ {
+ "epoch": 2.4352818371607516,
+ "grad_norm": 0.9307970404624939,
+ "learning_rate": 3.4617064267842327e-06,
+ "loss": 0.487,
+ "step": 779
+ },
+ {
+ "epoch": 2.4384133611691023,
+ "grad_norm": 0.7192814946174622,
+ "learning_rate": 3.45770850952306e-06,
+ "loss": 0.4769,
+ "step": 780
+ },
+ {
+ "epoch": 2.441544885177453,
+ "grad_norm": 0.7386271953582764,
+ "learning_rate": 3.4537077197726023e-06,
+ "loss": 0.4726,
+ "step": 781
+ },
+ {
+ "epoch": 2.4446764091858038,
+ "grad_norm": 0.8006314039230347,
+ "learning_rate": 3.449704069532567e-06,
+ "loss": 0.494,
+ "step": 782
+ },
+ {
+ "epoch": 2.4478079331941545,
+ "grad_norm": 0.7466752529144287,
+ "learning_rate": 3.4456975708112477e-06,
+ "loss": 0.4778,
+ "step": 783
+ },
+ {
+ "epoch": 2.450939457202505,
+ "grad_norm": 0.8348856568336487,
+ "learning_rate": 3.4416882356254777e-06,
+ "loss": 0.4766,
+ "step": 784
+ },
+ {
+ "epoch": 2.454070981210856,
+ "grad_norm": 0.754851758480072,
+ "learning_rate": 3.4376760760005994e-06,
+ "loss": 0.4673,
+ "step": 785
+ },
+ {
+ "epoch": 2.4572025052192066,
+ "grad_norm": 0.7854018807411194,
+ "learning_rate": 3.433661103970427e-06,
+ "loss": 0.4954,
+ "step": 786
+ },
+ {
+ "epoch": 2.4603340292275573,
+ "grad_norm": 0.7238256931304932,
+ "learning_rate": 3.4296433315772084e-06,
+ "loss": 0.496,
+ "step": 787
+ },
+ {
+ "epoch": 2.463465553235908,
+ "grad_norm": 0.7007659673690796,
+ "learning_rate": 3.4256227708715915e-06,
+ "loss": 0.4793,
+ "step": 788
+ },
+ {
+ "epoch": 2.4665970772442587,
+ "grad_norm": 0.7234371900558472,
+ "learning_rate": 3.421599433912588e-06,
+ "loss": 0.4935,
+ "step": 789
+ },
+ {
+ "epoch": 2.4697286012526094,
+ "grad_norm": 0.7537544965744019,
+ "learning_rate": 3.4175733327675355e-06,
+ "loss": 0.5194,
+ "step": 790
+ },
+ {
+ "epoch": 2.4728601252609606,
+ "grad_norm": 0.7608047127723694,
+ "learning_rate": 3.4135444795120633e-06,
+ "loss": 0.4793,
+ "step": 791
+ },
+ {
+ "epoch": 2.4759916492693113,
+ "grad_norm": 0.7847898006439209,
+ "learning_rate": 3.4095128862300542e-06,
+ "loss": 0.4877,
+ "step": 792
+ },
+ {
+ "epoch": 2.479123173277662,
+ "grad_norm": 0.8002011179924011,
+ "learning_rate": 3.405478565013609e-06,
+ "loss": 0.4927,
+ "step": 793
+ },
+ {
+ "epoch": 2.4822546972860127,
+ "grad_norm": 0.8200219869613647,
+ "learning_rate": 3.401441527963013e-06,
+ "loss": 0.4997,
+ "step": 794
+ },
+ {
+ "epoch": 2.4853862212943634,
+ "grad_norm": 0.7220162749290466,
+ "learning_rate": 3.3974017871866938e-06,
+ "loss": 0.4668,
+ "step": 795
+ },
+ {
+ "epoch": 2.488517745302714,
+ "grad_norm": 0.8022251129150391,
+ "learning_rate": 3.3933593548011912e-06,
+ "loss": 0.5179,
+ "step": 796
+ },
+ {
+ "epoch": 2.491649269311065,
+ "grad_norm": 0.7914465069770813,
+ "learning_rate": 3.389314242931115e-06,
+ "loss": 0.4943,
+ "step": 797
+ },
+ {
+ "epoch": 2.4947807933194155,
+ "grad_norm": 1.1399403810501099,
+ "learning_rate": 3.385266463709116e-06,
+ "loss": 0.4896,
+ "step": 798
+ },
+ {
+ "epoch": 2.497912317327766,
+ "grad_norm": 0.8098909854888916,
+ "learning_rate": 3.38121602927584e-06,
+ "loss": 0.4904,
+ "step": 799
+ },
+ {
+ "epoch": 2.501043841336117,
+ "grad_norm": 0.7434052228927612,
+ "learning_rate": 3.377162951779902e-06,
+ "loss": 0.4864,
+ "step": 800
+ },
+ {
+ "epoch": 2.5041753653444676,
+ "grad_norm": 0.7397809624671936,
+ "learning_rate": 3.3731072433778407e-06,
+ "loss": 0.486,
+ "step": 801
+ },
+ {
+ "epoch": 2.5073068893528183,
+ "grad_norm": 0.99027019739151,
+ "learning_rate": 3.3690489162340867e-06,
+ "loss": 0.5011,
+ "step": 802
+ },
+ {
+ "epoch": 2.510438413361169,
+ "grad_norm": 0.8443610668182373,
+ "learning_rate": 3.3649879825209246e-06,
+ "loss": 0.455,
+ "step": 803
+ },
+ {
+ "epoch": 2.5135699373695197,
+ "grad_norm": 0.755649983882904,
+ "learning_rate": 3.3609244544184604e-06,
+ "loss": 0.4563,
+ "step": 804
+ },
+ {
+ "epoch": 2.5167014613778704,
+ "grad_norm": 0.728018045425415,
+ "learning_rate": 3.3568583441145765e-06,
+ "loss": 0.471,
+ "step": 805
+ },
+ {
+ "epoch": 2.519832985386221,
+ "grad_norm": 0.7777130603790283,
+ "learning_rate": 3.352789663804904e-06,
+ "loss": 0.4667,
+ "step": 806
+ },
+ {
+ "epoch": 2.522964509394572,
+ "grad_norm": 0.7545619606971741,
+ "learning_rate": 3.3487184256927785e-06,
+ "loss": 0.4915,
+ "step": 807
+ },
+ {
+ "epoch": 2.526096033402923,
+ "grad_norm": 0.8374579548835754,
+ "learning_rate": 3.3446446419892127e-06,
+ "loss": 0.485,
+ "step": 808
+ },
+ {
+ "epoch": 2.5292275574112733,
+ "grad_norm": 0.7354666590690613,
+ "learning_rate": 3.340568324912849e-06,
+ "loss": 0.5254,
+ "step": 809
+ },
+ {
+ "epoch": 2.5323590814196244,
+ "grad_norm": 0.7581545114517212,
+ "learning_rate": 3.3364894866899324e-06,
+ "loss": 0.4483,
+ "step": 810
+ },
+ {
+ "epoch": 2.535490605427975,
+ "grad_norm": 0.8077559471130371,
+ "learning_rate": 3.3324081395542662e-06,
+ "loss": 0.5022,
+ "step": 811
+ },
+ {
+ "epoch": 2.538622129436326,
+ "grad_norm": 0.8827865719795227,
+ "learning_rate": 3.3283242957471806e-06,
+ "loss": 0.4909,
+ "step": 812
+ },
+ {
+ "epoch": 2.5417536534446765,
+ "grad_norm": 0.9139482378959656,
+ "learning_rate": 3.3242379675174953e-06,
+ "loss": 0.5205,
+ "step": 813
+ },
+ {
+ "epoch": 2.5448851774530272,
+ "grad_norm": 0.7616812586784363,
+ "learning_rate": 3.3201491671214797e-06,
+ "loss": 0.4744,
+ "step": 814
+ },
+ {
+ "epoch": 2.548016701461378,
+ "grad_norm": 0.987173318862915,
+ "learning_rate": 3.3160579068228183e-06,
+ "loss": 0.4876,
+ "step": 815
+ },
+ {
+ "epoch": 2.5511482254697286,
+ "grad_norm": 1.259137749671936,
+ "learning_rate": 3.311964198892574e-06,
+ "loss": 0.454,
+ "step": 816
+ },
+ {
+ "epoch": 2.5542797494780793,
+ "grad_norm": 0.7866336703300476,
+ "learning_rate": 3.3078680556091513e-06,
+ "loss": 0.5107,
+ "step": 817
+ },
+ {
+ "epoch": 2.55741127348643,
+ "grad_norm": 0.9311352372169495,
+ "learning_rate": 3.303769489258258e-06,
+ "loss": 0.4843,
+ "step": 818
+ },
+ {
+ "epoch": 2.5605427974947808,
+ "grad_norm": 0.8556346893310547,
+ "learning_rate": 3.299668512132872e-06,
+ "loss": 0.5017,
+ "step": 819
+ },
+ {
+ "epoch": 2.5636743215031315,
+ "grad_norm": 2.810598373413086,
+ "learning_rate": 3.2955651365331988e-06,
+ "loss": 0.5223,
+ "step": 820
+ },
+ {
+ "epoch": 2.566805845511482,
+ "grad_norm": 1.0120766162872314,
+ "learning_rate": 3.29145937476664e-06,
+ "loss": 0.4959,
+ "step": 821
+ },
+ {
+ "epoch": 2.569937369519833,
+ "grad_norm": 0.751412034034729,
+ "learning_rate": 3.287351239147752e-06,
+ "loss": 0.4941,
+ "step": 822
+ },
+ {
+ "epoch": 2.5730688935281836,
+ "grad_norm": 1.9308148622512817,
+ "learning_rate": 3.2832407419982136e-06,
+ "loss": 0.4965,
+ "step": 823
+ },
+ {
+ "epoch": 2.5762004175365343,
+ "grad_norm": 0.9215649962425232,
+ "learning_rate": 3.279127895646786e-06,
+ "loss": 0.5071,
+ "step": 824
+ },
+ {
+ "epoch": 2.5793319415448854,
+ "grad_norm": 0.7599574327468872,
+ "learning_rate": 3.2750127124292754e-06,
+ "loss": 0.5191,
+ "step": 825
+ },
+ {
+ "epoch": 2.5824634655532357,
+ "grad_norm": 0.8234940767288208,
+ "learning_rate": 3.270895204688496e-06,
+ "loss": 0.4947,
+ "step": 826
+ },
+ {
+ "epoch": 2.585594989561587,
+ "grad_norm": 0.8401572704315186,
+ "learning_rate": 3.266775384774238e-06,
+ "loss": 0.4547,
+ "step": 827
+ },
+ {
+ "epoch": 2.588726513569937,
+ "grad_norm": 0.8927991986274719,
+ "learning_rate": 3.262653265043223e-06,
+ "loss": 0.4296,
+ "step": 828
+ },
+ {
+ "epoch": 2.5918580375782883,
+ "grad_norm": 0.8009241223335266,
+ "learning_rate": 3.2585288578590716e-06,
+ "loss": 0.4578,
+ "step": 829
+ },
+ {
+ "epoch": 2.594989561586639,
+ "grad_norm": 0.7982021570205688,
+ "learning_rate": 3.2544021755922663e-06,
+ "loss": 0.4961,
+ "step": 830
+ },
+ {
+ "epoch": 2.5981210855949897,
+ "grad_norm": 0.7096095681190491,
+ "learning_rate": 3.2502732306201112e-06,
+ "loss": 0.4975,
+ "step": 831
+ },
+ {
+ "epoch": 2.6012526096033404,
+ "grad_norm": 1.1092045307159424,
+ "learning_rate": 3.246142035326699e-06,
+ "loss": 0.4705,
+ "step": 832
+ },
+ {
+ "epoch": 2.604384133611691,
+ "grad_norm": 0.785799503326416,
+ "learning_rate": 3.24200860210287e-06,
+ "loss": 0.479,
+ "step": 833
+ },
+ {
+ "epoch": 2.607515657620042,
+ "grad_norm": 0.7315773367881775,
+ "learning_rate": 3.2378729433461804e-06,
+ "loss": 0.5036,
+ "step": 834
+ },
+ {
+ "epoch": 2.6106471816283925,
+ "grad_norm": 0.7840189337730408,
+ "learning_rate": 3.233735071460856e-06,
+ "loss": 0.4967,
+ "step": 835
+ },
+ {
+ "epoch": 2.613778705636743,
+ "grad_norm": 0.7186565399169922,
+ "learning_rate": 3.2295949988577655e-06,
+ "loss": 0.4889,
+ "step": 836
+ },
+ {
+ "epoch": 2.616910229645094,
+ "grad_norm": 0.766054093837738,
+ "learning_rate": 3.2254527379543747e-06,
+ "loss": 0.539,
+ "step": 837
+ },
+ {
+ "epoch": 2.6200417536534446,
+ "grad_norm": 0.7705381512641907,
+ "learning_rate": 3.2213083011747165e-06,
+ "loss": 0.4968,
+ "step": 838
+ },
+ {
+ "epoch": 2.6231732776617953,
+ "grad_norm": 1.3530604839324951,
+ "learning_rate": 3.217161700949346e-06,
+ "loss": 0.52,
+ "step": 839
+ },
+ {
+ "epoch": 2.626304801670146,
+ "grad_norm": 0.737389862537384,
+ "learning_rate": 3.2130129497153107e-06,
+ "loss": 0.4823,
+ "step": 840
+ },
+ {
+ "epoch": 2.6294363256784967,
+ "grad_norm": 0.9121193885803223,
+ "learning_rate": 3.2088620599161064e-06,
+ "loss": 0.4592,
+ "step": 841
+ },
+ {
+ "epoch": 2.632567849686848,
+ "grad_norm": 0.8869616389274597,
+ "learning_rate": 3.2047090440016464e-06,
+ "loss": 0.5001,
+ "step": 842
+ },
+ {
+ "epoch": 2.635699373695198,
+ "grad_norm": 0.8447219133377075,
+ "learning_rate": 3.200553914428219e-06,
+ "loss": 0.4969,
+ "step": 843
+ },
+ {
+ "epoch": 2.6388308977035493,
+ "grad_norm": 0.8877657055854797,
+ "learning_rate": 3.1963966836584524e-06,
+ "loss": 0.4718,
+ "step": 844
+ },
+ {
+ "epoch": 2.6419624217118995,
+ "grad_norm": 1.045272946357727,
+ "learning_rate": 3.192237364161277e-06,
+ "loss": 0.4864,
+ "step": 845
+ },
+ {
+ "epoch": 2.6450939457202507,
+ "grad_norm": 0.8485913276672363,
+ "learning_rate": 3.1880759684118876e-06,
+ "loss": 0.4688,
+ "step": 846
+ },
+ {
+ "epoch": 2.6482254697286014,
+ "grad_norm": 0.7328930497169495,
+ "learning_rate": 3.183912508891709e-06,
+ "loss": 0.4728,
+ "step": 847
+ },
+ {
+ "epoch": 2.651356993736952,
+ "grad_norm": 0.7377315759658813,
+ "learning_rate": 3.179746998088351e-06,
+ "loss": 0.4672,
+ "step": 848
+ },
+ {
+ "epoch": 2.654488517745303,
+ "grad_norm": 0.8017002940177917,
+ "learning_rate": 3.1755794484955817e-06,
+ "loss": 0.4884,
+ "step": 849
+ },
+ {
+ "epoch": 2.6576200417536535,
+ "grad_norm": 1.045470952987671,
+ "learning_rate": 3.171409872613278e-06,
+ "loss": 0.4789,
+ "step": 850
+ },
+ {
+ "epoch": 2.6607515657620042,
+ "grad_norm": 0.8823987245559692,
+ "learning_rate": 3.1672382829473997e-06,
+ "loss": 0.5117,
+ "step": 851
+ },
+ {
+ "epoch": 2.663883089770355,
+ "grad_norm": 0.7395204901695251,
+ "learning_rate": 3.163064692009944e-06,
+ "loss": 0.5476,
+ "step": 852
+ },
+ {
+ "epoch": 2.6670146137787056,
+ "grad_norm": 0.7778941988945007,
+ "learning_rate": 3.1588891123189103e-06,
+ "loss": 0.5092,
+ "step": 853
+ },
+ {
+ "epoch": 2.6701461377870563,
+ "grad_norm": 0.8072531819343567,
+ "learning_rate": 3.1547115563982643e-06,
+ "loss": 0.4961,
+ "step": 854
+ },
+ {
+ "epoch": 2.673277661795407,
+ "grad_norm": 0.9018139243125916,
+ "learning_rate": 3.1505320367778993e-06,
+ "loss": 0.4624,
+ "step": 855
+ },
+ {
+ "epoch": 2.6764091858037578,
+ "grad_norm": 0.8554450869560242,
+ "learning_rate": 3.1463505659935957e-06,
+ "loss": 0.4971,
+ "step": 856
+ },
+ {
+ "epoch": 2.6795407098121085,
+ "grad_norm": 0.7727259397506714,
+ "learning_rate": 3.14216715658699e-06,
+ "loss": 0.4544,
+ "step": 857
+ },
+ {
+ "epoch": 2.682672233820459,
+ "grad_norm": 0.9253409504890442,
+ "learning_rate": 3.137981821105529e-06,
+ "loss": 0.4893,
+ "step": 858
+ },
+ {
+ "epoch": 2.68580375782881,
+ "grad_norm": 0.8809456825256348,
+ "learning_rate": 3.1337945721024403e-06,
+ "loss": 0.5242,
+ "step": 859
+ },
+ {
+ "epoch": 2.6889352818371606,
+ "grad_norm": 0.981755256652832,
+ "learning_rate": 3.129605422136689e-06,
+ "loss": 0.4686,
+ "step": 860
+ },
+ {
+ "epoch": 2.6920668058455117,
+ "grad_norm": 1.1278467178344727,
+ "learning_rate": 3.1254143837729412e-06,
+ "loss": 0.4813,
+ "step": 861
+ },
+ {
+ "epoch": 2.695198329853862,
+ "grad_norm": 0.8529123663902283,
+ "learning_rate": 3.1212214695815285e-06,
+ "loss": 0.4723,
+ "step": 862
+ },
+ {
+ "epoch": 2.698329853862213,
+ "grad_norm": 0.7764189839363098,
+ "learning_rate": 3.1170266921384075e-06,
+ "loss": 0.4777,
+ "step": 863
+ },
+ {
+ "epoch": 2.701461377870564,
+ "grad_norm": 0.7364740967750549,
+ "learning_rate": 3.112830064025124e-06,
+ "loss": 0.4975,
+ "step": 864
+ },
+ {
+ "epoch": 2.7045929018789145,
+ "grad_norm": 0.7594549059867859,
+ "learning_rate": 3.108631597828774e-06,
+ "loss": 0.5083,
+ "step": 865
+ },
+ {
+ "epoch": 2.7077244258872653,
+ "grad_norm": 0.7337073683738708,
+ "learning_rate": 3.104431306141968e-06,
+ "loss": 0.4778,
+ "step": 866
+ },
+ {
+ "epoch": 2.710855949895616,
+ "grad_norm": 0.7709932327270508,
+ "learning_rate": 3.1002292015627894e-06,
+ "loss": 0.4754,
+ "step": 867
+ },
+ {
+ "epoch": 2.7139874739039667,
+ "grad_norm": 0.8001313209533691,
+ "learning_rate": 3.0960252966947605e-06,
+ "loss": 0.4489,
+ "step": 868
+ },
+ {
+ "epoch": 2.7171189979123174,
+ "grad_norm": 0.8280592560768127,
+ "learning_rate": 3.091819604146804e-06,
+ "loss": 0.4606,
+ "step": 869
+ },
+ {
+ "epoch": 2.720250521920668,
+ "grad_norm": 0.7463534474372864,
+ "learning_rate": 3.0876121365332024e-06,
+ "loss": 0.5168,
+ "step": 870
+ },
+ {
+ "epoch": 2.723382045929019,
+ "grad_norm": 0.9011222124099731,
+ "learning_rate": 3.0834029064735636e-06,
+ "loss": 0.5163,
+ "step": 871
+ },
+ {
+ "epoch": 2.7265135699373695,
+ "grad_norm": 0.7811456322669983,
+ "learning_rate": 3.0791919265927827e-06,
+ "loss": 0.5004,
+ "step": 872
+ },
+ {
+ "epoch": 2.72964509394572,
+ "grad_norm": 0.9251837134361267,
+ "learning_rate": 3.0749792095210003e-06,
+ "loss": 0.5081,
+ "step": 873
+ },
+ {
+ "epoch": 2.732776617954071,
+ "grad_norm": 0.8347085118293762,
+ "learning_rate": 3.0707647678935695e-06,
+ "loss": 0.4793,
+ "step": 874
+ },
+ {
+ "epoch": 2.7359081419624216,
+ "grad_norm": 0.9766442179679871,
+ "learning_rate": 3.0665486143510153e-06,
+ "loss": 0.493,
+ "step": 875
+ },
+ {
+ "epoch": 2.7390396659707723,
+ "grad_norm": 0.7692548036575317,
+ "learning_rate": 3.0623307615389975e-06,
+ "loss": 0.4874,
+ "step": 876
+ },
+ {
+ "epoch": 2.742171189979123,
+ "grad_norm": 0.7714599370956421,
+ "learning_rate": 3.0581112221082727e-06,
+ "loss": 0.4929,
+ "step": 877
+ },
+ {
+ "epoch": 2.745302713987474,
+ "grad_norm": 0.7797786593437195,
+ "learning_rate": 3.053890008714655e-06,
+ "loss": 0.4359,
+ "step": 878
+ },
+ {
+ "epoch": 2.7484342379958244,
+ "grad_norm": 5.118397235870361,
+ "learning_rate": 3.049667134018981e-06,
+ "loss": 0.4634,
+ "step": 879
+ },
+ {
+ "epoch": 2.7515657620041756,
+ "grad_norm": 0.7684539556503296,
+ "learning_rate": 3.04544261068707e-06,
+ "loss": 0.4688,
+ "step": 880
+ },
+ {
+ "epoch": 2.754697286012526,
+ "grad_norm": 0.8678610920906067,
+ "learning_rate": 3.0412164513896846e-06,
+ "loss": 0.5213,
+ "step": 881
+ },
+ {
+ "epoch": 2.757828810020877,
+ "grad_norm": 0.80293869972229,
+ "learning_rate": 3.0369886688024954e-06,
+ "loss": 0.4392,
+ "step": 882
+ },
+ {
+ "epoch": 2.7609603340292277,
+ "grad_norm": 0.7438644766807556,
+ "learning_rate": 3.0327592756060412e-06,
+ "loss": 0.528,
+ "step": 883
+ },
+ {
+ "epoch": 2.7640918580375784,
+ "grad_norm": 0.7701645493507385,
+ "learning_rate": 3.0285282844856917e-06,
+ "loss": 0.504,
+ "step": 884
+ },
+ {
+ "epoch": 2.767223382045929,
+ "grad_norm": 0.7113856673240662,
+ "learning_rate": 3.024295708131611e-06,
+ "loss": 0.4819,
+ "step": 885
+ },
+ {
+ "epoch": 2.77035490605428,
+ "grad_norm": 1.2697532176971436,
+ "learning_rate": 3.020061559238714e-06,
+ "loss": 0.5009,
+ "step": 886
+ },
+ {
+ "epoch": 2.7734864300626305,
+ "grad_norm": 1.0299439430236816,
+ "learning_rate": 3.015825850506636e-06,
+ "loss": 0.4707,
+ "step": 887
+ },
+ {
+ "epoch": 2.776617954070981,
+ "grad_norm": 0.9703660607337952,
+ "learning_rate": 3.011588594639688e-06,
+ "loss": 0.4102,
+ "step": 888
+ },
+ {
+ "epoch": 2.779749478079332,
+ "grad_norm": 0.7357314825057983,
+ "learning_rate": 3.0073498043468247e-06,
+ "loss": 0.4649,
+ "step": 889
+ },
+ {
+ "epoch": 2.7828810020876826,
+ "grad_norm": 0.7815471291542053,
+ "learning_rate": 3.0031094923415993e-06,
+ "loss": 0.469,
+ "step": 890
+ },
+ {
+ "epoch": 2.7860125260960333,
+ "grad_norm": 0.7856019139289856,
+ "learning_rate": 2.9988676713421318e-06,
+ "loss": 0.4241,
+ "step": 891
+ },
+ {
+ "epoch": 2.789144050104384,
+ "grad_norm": 0.7668167352676392,
+ "learning_rate": 2.994624354071066e-06,
+ "loss": 0.5309,
+ "step": 892
+ },
+ {
+ "epoch": 2.7922755741127347,
+ "grad_norm": 0.7485945820808411,
+ "learning_rate": 2.990379553255535e-06,
+ "loss": 0.5173,
+ "step": 893
+ },
+ {
+ "epoch": 2.7954070981210855,
+ "grad_norm": 0.8065824508666992,
+ "learning_rate": 2.986133281627123e-06,
+ "loss": 0.4995,
+ "step": 894
+ },
+ {
+ "epoch": 2.798538622129436,
+ "grad_norm": 0.7156995534896851,
+ "learning_rate": 2.9818855519218217e-06,
+ "loss": 0.4642,
+ "step": 895
+ },
+ {
+ "epoch": 2.801670146137787,
+ "grad_norm": 0.9115403890609741,
+ "learning_rate": 2.97763637688e-06,
+ "loss": 0.4799,
+ "step": 896
+ },
+ {
+ "epoch": 2.804801670146138,
+ "grad_norm": 0.7466689944267273,
+ "learning_rate": 2.9733857692463584e-06,
+ "loss": 0.4942,
+ "step": 897
+ },
+ {
+ "epoch": 2.8079331941544883,
+ "grad_norm": 0.7484914064407349,
+ "learning_rate": 2.9691337417698974e-06,
+ "loss": 0.4618,
+ "step": 898
+ },
+ {
+ "epoch": 2.8110647181628394,
+ "grad_norm": 0.816704511642456,
+ "learning_rate": 2.9648803072038736e-06,
+ "loss": 0.4748,
+ "step": 899
+ },
+ {
+ "epoch": 2.81419624217119,
+ "grad_norm": 0.7627584934234619,
+ "learning_rate": 2.9606254783057666e-06,
+ "loss": 0.4667,
+ "step": 900
+ },
+ {
+ "epoch": 2.817327766179541,
+ "grad_norm": 0.7341011166572571,
+ "learning_rate": 2.9563692678372342e-06,
+ "loss": 0.4802,
+ "step": 901
+ },
+ {
+ "epoch": 2.8204592901878915,
+ "grad_norm": 1.2541382312774658,
+ "learning_rate": 2.952111688564082e-06,
+ "loss": 0.5231,
+ "step": 902
+ },
+ {
+ "epoch": 2.8235908141962422,
+ "grad_norm": 0.7172819375991821,
+ "learning_rate": 2.9478527532562184e-06,
+ "loss": 0.4488,
+ "step": 903
+ },
+ {
+ "epoch": 2.826722338204593,
+ "grad_norm": 0.774529218673706,
+ "learning_rate": 2.943592474687621e-06,
+ "loss": 0.4964,
+ "step": 904
+ },
+ {
+ "epoch": 2.8298538622129437,
+ "grad_norm": 0.7315672636032104,
+ "learning_rate": 2.939330865636294e-06,
+ "loss": 0.4817,
+ "step": 905
+ },
+ {
+ "epoch": 2.8329853862212944,
+ "grad_norm": 0.7698234915733337,
+ "learning_rate": 2.9350679388842347e-06,
+ "loss": 0.5075,
+ "step": 906
+ },
+ {
+ "epoch": 2.836116910229645,
+ "grad_norm": 0.7717766761779785,
+ "learning_rate": 2.93080370721739e-06,
+ "loss": 0.4789,
+ "step": 907
+ },
+ {
+ "epoch": 2.8392484342379958,
+ "grad_norm": 0.7383570075035095,
+ "learning_rate": 2.926538183425622e-06,
+ "loss": 0.4992,
+ "step": 908
+ },
+ {
+ "epoch": 2.8423799582463465,
+ "grad_norm": 0.7858864068984985,
+ "learning_rate": 2.92227138030267e-06,
+ "loss": 0.4993,
+ "step": 909
+ },
+ {
+ "epoch": 2.845511482254697,
+ "grad_norm": 0.8220369219779968,
+ "learning_rate": 2.9180033106461076e-06,
+ "loss": 0.4929,
+ "step": 910
+ },
+ {
+ "epoch": 2.848643006263048,
+ "grad_norm": 0.7507152557373047,
+ "learning_rate": 2.9137339872573086e-06,
+ "loss": 0.4394,
+ "step": 911
+ },
+ {
+ "epoch": 2.8517745302713986,
+ "grad_norm": 0.7935269474983215,
+ "learning_rate": 2.9094634229414063e-06,
+ "loss": 0.4656,
+ "step": 912
+ },
+ {
+ "epoch": 2.8549060542797493,
+ "grad_norm": 0.9187721610069275,
+ "learning_rate": 2.9051916305072576e-06,
+ "loss": 0.4918,
+ "step": 913
+ },
+ {
+ "epoch": 2.8580375782881005,
+ "grad_norm": 0.8699706792831421,
+ "learning_rate": 2.9009186227674e-06,
+ "loss": 0.5106,
+ "step": 914
+ },
+ {
+ "epoch": 2.8611691022964507,
+ "grad_norm": 0.7175673246383667,
+ "learning_rate": 2.896644412538021e-06,
+ "loss": 0.5105,
+ "step": 915
+ },
+ {
+ "epoch": 2.864300626304802,
+ "grad_norm": 0.8563990592956543,
+ "learning_rate": 2.892369012638909e-06,
+ "loss": 0.4993,
+ "step": 916
+ },
+ {
+ "epoch": 2.867432150313152,
+ "grad_norm": 0.7891882658004761,
+ "learning_rate": 2.8880924358934246e-06,
+ "loss": 0.4983,
+ "step": 917
+ },
+ {
+ "epoch": 2.8705636743215033,
+ "grad_norm": 0.9247110486030579,
+ "learning_rate": 2.8838146951284575e-06,
+ "loss": 0.4789,
+ "step": 918
+ },
+ {
+ "epoch": 2.873695198329854,
+ "grad_norm": 0.7523055672645569,
+ "learning_rate": 2.879535803174387e-06,
+ "loss": 0.4982,
+ "step": 919
+ },
+ {
+ "epoch": 2.8768267223382047,
+ "grad_norm": 0.8096909523010254,
+ "learning_rate": 2.8752557728650467e-06,
+ "loss": 0.4958,
+ "step": 920
+ },
+ {
+ "epoch": 2.8799582463465554,
+ "grad_norm": 2.3476874828338623,
+ "learning_rate": 2.870974617037684e-06,
+ "loss": 0.491,
+ "step": 921
+ },
+ {
+ "epoch": 2.883089770354906,
+ "grad_norm": 0.8388578295707703,
+ "learning_rate": 2.8666923485329224e-06,
+ "loss": 0.5275,
+ "step": 922
+ },
+ {
+ "epoch": 2.886221294363257,
+ "grad_norm": 0.8162729144096375,
+ "learning_rate": 2.8624089801947234e-06,
+ "loss": 0.4776,
+ "step": 923
+ },
+ {
+ "epoch": 2.8893528183716075,
+ "grad_norm": 0.7306103110313416,
+ "learning_rate": 2.858124524870345e-06,
+ "loss": 0.4814,
+ "step": 924
+ },
+ {
+ "epoch": 2.892484342379958,
+ "grad_norm": 0.8736817836761475,
+ "learning_rate": 2.853838995410307e-06,
+ "loss": 0.5097,
+ "step": 925
+ },
+ {
+ "epoch": 2.895615866388309,
+ "grad_norm": 0.7771823406219482,
+ "learning_rate": 2.8495524046683525e-06,
+ "loss": 0.4806,
+ "step": 926
+ },
+ {
+ "epoch": 2.8987473903966596,
+ "grad_norm": 0.9421334862709045,
+ "learning_rate": 2.845264765501404e-06,
+ "loss": 0.5055,
+ "step": 927
+ },
+ {
+ "epoch": 2.9018789144050103,
+ "grad_norm": 0.8403921127319336,
+ "learning_rate": 2.8409760907695314e-06,
+ "loss": 0.4775,
+ "step": 928
+ },
+ {
+ "epoch": 2.905010438413361,
+ "grad_norm": 0.8095362186431885,
+ "learning_rate": 2.836686393335909e-06,
+ "loss": 0.4532,
+ "step": 929
+ },
+ {
+ "epoch": 2.9081419624217117,
+ "grad_norm": 0.7340645790100098,
+ "learning_rate": 2.8323956860667813e-06,
+ "loss": 0.4835,
+ "step": 930
+ },
+ {
+ "epoch": 2.911273486430063,
+ "grad_norm": 0.6970911026000977,
+ "learning_rate": 2.828103981831417e-06,
+ "loss": 0.4999,
+ "step": 931
+ },
+ {
+ "epoch": 2.914405010438413,
+ "grad_norm": 0.8136418461799622,
+ "learning_rate": 2.8238112935020794e-06,
+ "loss": 0.5038,
+ "step": 932
+ },
+ {
+ "epoch": 2.9175365344467643,
+ "grad_norm": 0.9045608043670654,
+ "learning_rate": 2.8195176339539816e-06,
+ "loss": 0.486,
+ "step": 933
+ },
+ {
+ "epoch": 2.9206680584551146,
+ "grad_norm": 1.14940345287323,
+ "learning_rate": 2.815223016065249e-06,
+ "loss": 0.5079,
+ "step": 934
+ },
+ {
+ "epoch": 2.9237995824634657,
+ "grad_norm": 0.7411190867424011,
+ "learning_rate": 2.8109274527168826e-06,
+ "loss": 0.4564,
+ "step": 935
+ },
+ {
+ "epoch": 2.9269311064718164,
+ "grad_norm": 0.8903455138206482,
+ "learning_rate": 2.806630956792719e-06,
+ "loss": 0.451,
+ "step": 936
+ },
+ {
+ "epoch": 2.930062630480167,
+ "grad_norm": 0.7865445017814636,
+ "learning_rate": 2.8023335411793904e-06,
+ "loss": 0.4658,
+ "step": 937
+ },
+ {
+ "epoch": 2.933194154488518,
+ "grad_norm": 0.8185790777206421,
+ "learning_rate": 2.798035218766292e-06,
+ "loss": 0.4776,
+ "step": 938
+ },
+ {
+ "epoch": 2.9363256784968685,
+ "grad_norm": 0.7516276836395264,
+ "learning_rate": 2.793736002445531e-06,
+ "loss": 0.4447,
+ "step": 939
+ },
+ {
+ "epoch": 2.9394572025052192,
+ "grad_norm": 0.738080620765686,
+ "learning_rate": 2.789435905111903e-06,
+ "loss": 0.4832,
+ "step": 940
+ },
+ {
+ "epoch": 2.94258872651357,
+ "grad_norm": 0.7971507906913757,
+ "learning_rate": 2.785134939662843e-06,
+ "loss": 0.4835,
+ "step": 941
+ },
+ {
+ "epoch": 2.9457202505219207,
+ "grad_norm": 0.7529093623161316,
+ "learning_rate": 2.78083311899839e-06,
+ "loss": 0.4759,
+ "step": 942
+ },
+ {
+ "epoch": 2.9488517745302714,
+ "grad_norm": 0.8222358226776123,
+ "learning_rate": 2.7765304560211482e-06,
+ "loss": 0.4365,
+ "step": 943
+ },
+ {
+ "epoch": 2.951983298538622,
+ "grad_norm": 0.729945182800293,
+ "learning_rate": 2.7722269636362462e-06,
+ "loss": 0.5026,
+ "step": 944
+ },
+ {
+ "epoch": 2.9551148225469728,
+ "grad_norm": 0.7287900447845459,
+ "learning_rate": 2.767922654751306e-06,
+ "loss": 0.4916,
+ "step": 945
+ },
+ {
+ "epoch": 2.9582463465553235,
+ "grad_norm": 0.869637131690979,
+ "learning_rate": 2.763617542276391e-06,
+ "loss": 0.5018,
+ "step": 946
+ },
+ {
+ "epoch": 2.961377870563674,
+ "grad_norm": 1.004909634590149,
+ "learning_rate": 2.7593116391239806e-06,
+ "loss": 0.5152,
+ "step": 947
+ },
+ {
+ "epoch": 2.964509394572025,
+ "grad_norm": 0.8263046145439148,
+ "learning_rate": 2.7550049582089235e-06,
+ "loss": 0.5249,
+ "step": 948
+ },
+ {
+ "epoch": 2.9676409185803756,
+ "grad_norm": 0.7963895797729492,
+ "learning_rate": 2.750697512448401e-06,
+ "loss": 0.5084,
+ "step": 949
+ },
+ {
+ "epoch": 2.9707724425887267,
+ "grad_norm": 0.7211249470710754,
+ "learning_rate": 2.7463893147618893e-06,
+ "loss": 0.4691,
+ "step": 950
+ },
+ {
+ "epoch": 2.973903966597077,
+ "grad_norm": 0.8010216951370239,
+ "learning_rate": 2.742080378071118e-06,
+ "loss": 0.5026,
+ "step": 951
+ },
+ {
+ "epoch": 2.977035490605428,
+ "grad_norm": 0.780078649520874,
+ "learning_rate": 2.7377707153000356e-06,
+ "loss": 0.4758,
+ "step": 952
+ },
+ {
+ "epoch": 2.980167014613779,
+ "grad_norm": 0.7728193998336792,
+ "learning_rate": 2.7334603393747684e-06,
+ "loss": 0.488,
+ "step": 953
+ },
+ {
+ "epoch": 2.9832985386221296,
+ "grad_norm": 0.836329996585846,
+ "learning_rate": 2.7291492632235777e-06,
+ "loss": 0.456,
+ "step": 954
+ },
+ {
+ "epoch": 2.9864300626304803,
+ "grad_norm": 0.7241990566253662,
+ "learning_rate": 2.724837499776831e-06,
+ "loss": 0.4953,
+ "step": 955
+ },
+ {
+ "epoch": 2.989561586638831,
+ "grad_norm": 0.7595076560974121,
+ "learning_rate": 2.7205250619669527e-06,
+ "loss": 0.446,
+ "step": 956
+ },
+ {
+ "epoch": 2.9926931106471817,
+ "grad_norm": 0.8177686333656311,
+ "learning_rate": 2.716211962728392e-06,
+ "loss": 0.5057,
+ "step": 957
+ },
+ {
+ "epoch": 2.9958246346555324,
+ "grad_norm": 0.7506977915763855,
+ "learning_rate": 2.71189821499758e-06,
+ "loss": 0.4821,
+ "step": 958
+ },
+ {
+ "epoch": 2.998956158663883,
+ "grad_norm": 0.8085163235664368,
+ "learning_rate": 2.7075838317128943e-06,
+ "loss": 0.5002,
+ "step": 959
+ },
+ {
+ "epoch": 3.0,
+ "grad_norm": 3.3674418926239014,
+ "learning_rate": 2.7032688258146207e-06,
+ "loss": 0.183,
+ "step": 960
+ },
+ {
+ "epoch": 3.0031315240083507,
+ "grad_norm": 1.066100835800171,
+ "learning_rate": 2.698953210244908e-06,
+ "loss": 0.4427,
+ "step": 961
+ },
+ {
+ "epoch": 3.0062630480167014,
+ "grad_norm": 0.7690210938453674,
+ "learning_rate": 2.6946369979477365e-06,
+ "loss": 0.4638,
+ "step": 962
+ },
+ {
+ "epoch": 3.009394572025052,
+ "grad_norm": 0.8114679455757141,
+ "learning_rate": 2.690320201868876e-06,
+ "loss": 0.4373,
+ "step": 963
+ },
+ {
+ "epoch": 3.012526096033403,
+ "grad_norm": 0.7680971622467041,
+ "learning_rate": 2.686002834955847e-06,
+ "loss": 0.4477,
+ "step": 964
+ },
+ {
+ "epoch": 3.0156576200417535,
+ "grad_norm": 0.7194678783416748,
+ "learning_rate": 2.6816849101578808e-06,
+ "loss": 0.455,
+ "step": 965
+ },
+ {
+ "epoch": 3.0187891440501042,
+ "grad_norm": 0.6890467405319214,
+ "learning_rate": 2.6773664404258854e-06,
+ "loss": 0.4246,
+ "step": 966
+ },
+ {
+ "epoch": 3.021920668058455,
+ "grad_norm": 0.8064301013946533,
+ "learning_rate": 2.6730474387123987e-06,
+ "loss": 0.497,
+ "step": 967
+ },
+ {
+ "epoch": 3.0250521920668056,
+ "grad_norm": 0.8164849281311035,
+ "learning_rate": 2.668727917971559e-06,
+ "loss": 0.457,
+ "step": 968
+ },
+ {
+ "epoch": 3.028183716075157,
+ "grad_norm": 0.7793440818786621,
+ "learning_rate": 2.6644078911590565e-06,
+ "loss": 0.4632,
+ "step": 969
+ },
+ {
+ "epoch": 3.0313152400835075,
+ "grad_norm": 0.7208535671234131,
+ "learning_rate": 2.6600873712321033e-06,
+ "loss": 0.4786,
+ "step": 970
+ },
+ {
+ "epoch": 3.034446764091858,
+ "grad_norm": 0.8905500769615173,
+ "learning_rate": 2.655766371149389e-06,
+ "loss": 0.4317,
+ "step": 971
+ },
+ {
+ "epoch": 3.037578288100209,
+ "grad_norm": 0.7537338733673096,
+ "learning_rate": 2.6514449038710418e-06,
+ "loss": 0.4783,
+ "step": 972
+ },
+ {
+ "epoch": 3.0407098121085596,
+ "grad_norm": 0.7901656031608582,
+ "learning_rate": 2.6471229823585937e-06,
+ "loss": 0.4762,
+ "step": 973
+ },
+ {
+ "epoch": 3.0438413361169103,
+ "grad_norm": 0.7427340745925903,
+ "learning_rate": 2.6428006195749373e-06,
+ "loss": 0.4782,
+ "step": 974
+ },
+ {
+ "epoch": 3.046972860125261,
+ "grad_norm": 0.8364680409431458,
+ "learning_rate": 2.6384778284842905e-06,
+ "loss": 0.4551,
+ "step": 975
+ },
+ {
+ "epoch": 3.0501043841336117,
+ "grad_norm": 1.1049500703811646,
+ "learning_rate": 2.634154622052155e-06,
+ "loss": 0.4451,
+ "step": 976
+ },
+ {
+ "epoch": 3.0532359081419624,
+ "grad_norm": 1.0797414779663086,
+ "learning_rate": 2.6298310132452757e-06,
+ "loss": 0.4512,
+ "step": 977
+ },
+ {
+ "epoch": 3.056367432150313,
+ "grad_norm": 0.8832088708877563,
+ "learning_rate": 2.62550701503161e-06,
+ "loss": 0.4569,
+ "step": 978
+ },
+ {
+ "epoch": 3.059498956158664,
+ "grad_norm": 0.748951256275177,
+ "learning_rate": 2.621182640380277e-06,
+ "loss": 0.4535,
+ "step": 979
+ },
+ {
+ "epoch": 3.0626304801670146,
+ "grad_norm": 0.776386022567749,
+ "learning_rate": 2.616857902261529e-06,
+ "loss": 0.4582,
+ "step": 980
+ },
+ {
+ "epoch": 3.0657620041753653,
+ "grad_norm": 0.730057418346405,
+ "learning_rate": 2.6125328136467074e-06,
+ "loss": 0.4581,
+ "step": 981
+ },
+ {
+ "epoch": 3.068893528183716,
+ "grad_norm": 0.7918877005577087,
+ "learning_rate": 2.6082073875082046e-06,
+ "loss": 0.4355,
+ "step": 982
+ },
+ {
+ "epoch": 3.0720250521920667,
+ "grad_norm": 0.803987443447113,
+ "learning_rate": 2.6038816368194265e-06,
+ "loss": 0.455,
+ "step": 983
+ },
+ {
+ "epoch": 3.0751565762004174,
+ "grad_norm": 1.2970365285873413,
+ "learning_rate": 2.599555574554749e-06,
+ "loss": 0.448,
+ "step": 984
+ },
+ {
+ "epoch": 3.078288100208768,
+ "grad_norm": 0.7814194560050964,
+ "learning_rate": 2.595229213689487e-06,
+ "loss": 0.4197,
+ "step": 985
+ },
+ {
+ "epoch": 3.081419624217119,
+ "grad_norm": 0.9865803122520447,
+ "learning_rate": 2.5909025671998483e-06,
+ "loss": 0.4649,
+ "step": 986
+ },
+ {
+ "epoch": 3.08455114822547,
+ "grad_norm": 0.7769168615341187,
+ "learning_rate": 2.586575648062898e-06,
+ "loss": 0.4287,
+ "step": 987
+ },
+ {
+ "epoch": 3.0876826722338206,
+ "grad_norm": 1.454988956451416,
+ "learning_rate": 2.582248469256519e-06,
+ "loss": 0.4548,
+ "step": 988
+ },
+ {
+ "epoch": 3.0908141962421714,
+ "grad_norm": 0.7656151056289673,
+ "learning_rate": 2.577921043759372e-06,
+ "loss": 0.4323,
+ "step": 989
+ },
+ {
+ "epoch": 3.093945720250522,
+ "grad_norm": 0.7989768385887146,
+ "learning_rate": 2.5735933845508598e-06,
+ "loss": 0.4616,
+ "step": 990
+ },
+ {
+ "epoch": 3.0970772442588728,
+ "grad_norm": 0.8711255192756653,
+ "learning_rate": 2.5692655046110855e-06,
+ "loss": 0.4923,
+ "step": 991
+ },
+ {
+ "epoch": 3.1002087682672235,
+ "grad_norm": 0.7333446145057678,
+ "learning_rate": 2.564937416920813e-06,
+ "loss": 0.4297,
+ "step": 992
+ },
+ {
+ "epoch": 3.103340292275574,
+ "grad_norm": 0.7611051201820374,
+ "learning_rate": 2.5606091344614297e-06,
+ "loss": 0.4276,
+ "step": 993
+ },
+ {
+ "epoch": 3.106471816283925,
+ "grad_norm": 0.9742305278778076,
+ "learning_rate": 2.5562806702149083e-06,
+ "loss": 0.4597,
+ "step": 994
+ },
+ {
+ "epoch": 3.1096033402922756,
+ "grad_norm": 0.8043314218521118,
+ "learning_rate": 2.551952037163765e-06,
+ "loss": 0.459,
+ "step": 995
+ },
+ {
+ "epoch": 3.1127348643006263,
+ "grad_norm": 0.7217118740081787,
+ "learning_rate": 2.5476232482910253e-06,
+ "loss": 0.4514,
+ "step": 996
+ },
+ {
+ "epoch": 3.115866388308977,
+ "grad_norm": 1.1410573720932007,
+ "learning_rate": 2.5432943165801765e-06,
+ "loss": 0.4942,
+ "step": 997
+ },
+ {
+ "epoch": 3.1189979123173277,
+ "grad_norm": 0.7188895344734192,
+ "learning_rate": 2.5389652550151416e-06,
+ "loss": 0.4641,
+ "step": 998
+ },
+ {
+ "epoch": 3.1221294363256784,
+ "grad_norm": 0.7223486304283142,
+ "learning_rate": 2.5346360765802276e-06,
+ "loss": 0.4382,
+ "step": 999
+ },
+ {
+ "epoch": 3.125260960334029,
+ "grad_norm": 0.8269757032394409,
+ "learning_rate": 2.5303067942600933e-06,
+ "loss": 0.4502,
+ "step": 1000
+ },
+ {
+ "epoch": 3.12839248434238,
+ "grad_norm": 0.7081235647201538,
+ "learning_rate": 2.5259774210397104e-06,
+ "loss": 0.4389,
+ "step": 1001
+ },
+ {
+ "epoch": 3.1315240083507305,
+ "grad_norm": 0.7493525147438049,
+ "learning_rate": 2.5216479699043224e-06,
+ "loss": 0.4697,
+ "step": 1002
+ },
+ {
+ "epoch": 3.1346555323590812,
+ "grad_norm": 0.7633835077285767,
+ "learning_rate": 2.5173184538394054e-06,
+ "loss": 0.4535,
+ "step": 1003
+ },
+ {
+ "epoch": 3.137787056367432,
+ "grad_norm": 0.7765631079673767,
+ "learning_rate": 2.5129888858306333e-06,
+ "loss": 0.4616,
+ "step": 1004
+ },
+ {
+ "epoch": 3.140918580375783,
+ "grad_norm": 0.7026706337928772,
+ "learning_rate": 2.508659278863832e-06,
+ "loss": 0.4535,
+ "step": 1005
+ },
+ {
+ "epoch": 3.144050104384134,
+ "grad_norm": 0.8816738128662109,
+ "learning_rate": 2.5043296459249466e-06,
+ "loss": 0.4274,
+ "step": 1006
+ },
+ {
+ "epoch": 3.1471816283924845,
+ "grad_norm": 0.8072274327278137,
+ "learning_rate": 2.5e-06,
+ "loss": 0.4566,
+ "step": 1007
+ },
+ {
+ "epoch": 3.150313152400835,
+ "grad_norm": 0.7775781750679016,
+ "learning_rate": 2.4956703540750542e-06,
+ "loss": 0.4248,
+ "step": 1008
+ },
+ {
+ "epoch": 3.153444676409186,
+ "grad_norm": 0.9271306991577148,
+ "learning_rate": 2.4913407211361686e-06,
+ "loss": 0.4251,
+ "step": 1009
+ },
+ {
+ "epoch": 3.1565762004175366,
+ "grad_norm": 0.763867974281311,
+ "learning_rate": 2.487011114169368e-06,
+ "loss": 0.4525,
+ "step": 1010
+ },
+ {
+ "epoch": 3.1597077244258873,
+ "grad_norm": 0.8754820227622986,
+ "learning_rate": 2.4826815461605955e-06,
+ "loss": 0.4524,
+ "step": 1011
+ },
+ {
+ "epoch": 3.162839248434238,
+ "grad_norm": 0.8261513113975525,
+ "learning_rate": 2.4783520300956784e-06,
+ "loss": 0.4303,
+ "step": 1012
+ },
+ {
+ "epoch": 3.1659707724425887,
+ "grad_norm": 0.8629854917526245,
+ "learning_rate": 2.4740225789602905e-06,
+ "loss": 0.4628,
+ "step": 1013
+ },
+ {
+ "epoch": 3.1691022964509394,
+ "grad_norm": 0.7281518578529358,
+ "learning_rate": 2.469693205739907e-06,
+ "loss": 0.461,
+ "step": 1014
+ },
+ {
+ "epoch": 3.17223382045929,
+ "grad_norm": 0.757644772529602,
+ "learning_rate": 2.465363923419774e-06,
+ "loss": 0.4455,
+ "step": 1015
+ },
+ {
+ "epoch": 3.175365344467641,
+ "grad_norm": 0.8021314740180969,
+ "learning_rate": 2.4610347449848592e-06,
+ "loss": 0.4755,
+ "step": 1016
+ },
+ {
+ "epoch": 3.1784968684759916,
+ "grad_norm": 0.7467564940452576,
+ "learning_rate": 2.456705683419824e-06,
+ "loss": 0.4419,
+ "step": 1017
+ },
+ {
+ "epoch": 3.1816283924843423,
+ "grad_norm": 0.8575125336647034,
+ "learning_rate": 2.452376751708976e-06,
+ "loss": 0.4577,
+ "step": 1018
+ },
+ {
+ "epoch": 3.184759916492693,
+ "grad_norm": 0.8101590871810913,
+ "learning_rate": 2.448047962836235e-06,
+ "loss": 0.4546,
+ "step": 1019
+ },
+ {
+ "epoch": 3.1878914405010437,
+ "grad_norm": 0.8146190643310547,
+ "learning_rate": 2.443719329785093e-06,
+ "loss": 0.4219,
+ "step": 1020
+ },
+ {
+ "epoch": 3.1910229645093944,
+ "grad_norm": 1.2800556421279907,
+ "learning_rate": 2.4393908655385708e-06,
+ "loss": 0.4502,
+ "step": 1021
+ },
+ {
+ "epoch": 3.1941544885177455,
+ "grad_norm": 0.7362221479415894,
+ "learning_rate": 2.4350625830791875e-06,
+ "loss": 0.4802,
+ "step": 1022
+ },
+ {
+ "epoch": 3.1972860125260962,
+ "grad_norm": 0.8307201862335205,
+ "learning_rate": 2.430734495388915e-06,
+ "loss": 0.4808,
+ "step": 1023
+ },
+ {
+ "epoch": 3.200417536534447,
+ "grad_norm": 0.8467394709587097,
+ "learning_rate": 2.42640661544914e-06,
+ "loss": 0.4598,
+ "step": 1024
+ },
+ {
+ "epoch": 3.2035490605427976,
+ "grad_norm": 0.9570673108100891,
+ "learning_rate": 2.422078956240629e-06,
+ "loss": 0.4341,
+ "step": 1025
+ },
+ {
+ "epoch": 3.2066805845511483,
+ "grad_norm": 0.7976422309875488,
+ "learning_rate": 2.4177515307434824e-06,
+ "loss": 0.4285,
+ "step": 1026
+ },
+ {
+ "epoch": 3.209812108559499,
+ "grad_norm": 0.8241607546806335,
+ "learning_rate": 2.413424351937103e-06,
+ "loss": 0.4563,
+ "step": 1027
+ },
+ {
+ "epoch": 3.2129436325678498,
+ "grad_norm": 0.7191072702407837,
+ "learning_rate": 2.4090974328001526e-06,
+ "loss": 0.456,
+ "step": 1028
+ },
+ {
+ "epoch": 3.2160751565762005,
+ "grad_norm": 0.8112174868583679,
+ "learning_rate": 2.4047707863105133e-06,
+ "loss": 0.4592,
+ "step": 1029
+ },
+ {
+ "epoch": 3.219206680584551,
+ "grad_norm": 0.7170486450195312,
+ "learning_rate": 2.4004444254452522e-06,
+ "loss": 0.4559,
+ "step": 1030
+ },
+ {
+ "epoch": 3.222338204592902,
+ "grad_norm": 0.7092299461364746,
+ "learning_rate": 2.3961183631805748e-06,
+ "loss": 0.4504,
+ "step": 1031
+ },
+ {
+ "epoch": 3.2254697286012526,
+ "grad_norm": 0.8104662299156189,
+ "learning_rate": 2.391792612491796e-06,
+ "loss": 0.4327,
+ "step": 1032
+ },
+ {
+ "epoch": 3.2286012526096033,
+ "grad_norm": 0.8512858152389526,
+ "learning_rate": 2.387467186353293e-06,
+ "loss": 0.4506,
+ "step": 1033
+ },
+ {
+ "epoch": 3.231732776617954,
+ "grad_norm": 0.7987692952156067,
+ "learning_rate": 2.3831420977384715e-06,
+ "loss": 0.4257,
+ "step": 1034
+ },
+ {
+ "epoch": 3.2348643006263047,
+ "grad_norm": 0.7805537581443787,
+ "learning_rate": 2.3788173596197244e-06,
+ "loss": 0.4692,
+ "step": 1035
+ },
+ {
+ "epoch": 3.2379958246346554,
+ "grad_norm": 0.737304151058197,
+ "learning_rate": 2.374492984968392e-06,
+ "loss": 0.4308,
+ "step": 1036
+ },
+ {
+ "epoch": 3.241127348643006,
+ "grad_norm": 0.8113856315612793,
+ "learning_rate": 2.3701689867547247e-06,
+ "loss": 0.4668,
+ "step": 1037
+ },
+ {
+ "epoch": 3.244258872651357,
+ "grad_norm": 1.507103443145752,
+ "learning_rate": 2.3658453779478464e-06,
+ "loss": 0.4527,
+ "step": 1038
+ },
+ {
+ "epoch": 3.2473903966597075,
+ "grad_norm": 0.7973915338516235,
+ "learning_rate": 2.3615221715157095e-06,
+ "loss": 0.4741,
+ "step": 1039
+ },
+ {
+ "epoch": 3.2505219206680582,
+ "grad_norm": 0.8206940293312073,
+ "learning_rate": 2.3571993804250635e-06,
+ "loss": 0.4582,
+ "step": 1040
+ },
+ {
+ "epoch": 3.2536534446764094,
+ "grad_norm": 0.8519637584686279,
+ "learning_rate": 2.3528770176414076e-06,
+ "loss": 0.4412,
+ "step": 1041
+ },
+ {
+ "epoch": 3.25678496868476,
+ "grad_norm": 0.7542241811752319,
+ "learning_rate": 2.348555096128959e-06,
+ "loss": 0.4861,
+ "step": 1042
+ },
+ {
+ "epoch": 3.259916492693111,
+ "grad_norm": 0.8074842691421509,
+ "learning_rate": 2.3442336288506125e-06,
+ "loss": 0.4707,
+ "step": 1043
+ },
+ {
+ "epoch": 3.2630480167014615,
+ "grad_norm": 0.8164265751838684,
+ "learning_rate": 2.3399126287678975e-06,
+ "loss": 0.4417,
+ "step": 1044
+ },
+ {
+ "epoch": 3.266179540709812,
+ "grad_norm": 0.7689628005027771,
+ "learning_rate": 2.3355921088409435e-06,
+ "loss": 0.4815,
+ "step": 1045
+ },
+ {
+ "epoch": 3.269311064718163,
+ "grad_norm": 0.7709240913391113,
+ "learning_rate": 2.3312720820284423e-06,
+ "loss": 0.4444,
+ "step": 1046
+ },
+ {
+ "epoch": 3.2724425887265136,
+ "grad_norm": 0.7340330481529236,
+ "learning_rate": 2.326952561287602e-06,
+ "loss": 0.4694,
+ "step": 1047
+ },
+ {
+ "epoch": 3.2755741127348643,
+ "grad_norm": 0.7785805463790894,
+ "learning_rate": 2.3226335595741154e-06,
+ "loss": 0.4273,
+ "step": 1048
+ },
+ {
+ "epoch": 3.278705636743215,
+ "grad_norm": 0.9968108534812927,
+ "learning_rate": 2.3183150898421196e-06,
+ "loss": 0.4782,
+ "step": 1049
+ },
+ {
+ "epoch": 3.2818371607515657,
+ "grad_norm": 0.7823553681373596,
+ "learning_rate": 2.3139971650441533e-06,
+ "loss": 0.444,
+ "step": 1050
+ },
+ {
+ "epoch": 3.2849686847599164,
+ "grad_norm": 0.7317377924919128,
+ "learning_rate": 2.3096797981311252e-06,
+ "loss": 0.4692,
+ "step": 1051
+ },
+ {
+ "epoch": 3.288100208768267,
+ "grad_norm": 0.8546518683433533,
+ "learning_rate": 2.3053630020522643e-06,
+ "loss": 0.4632,
+ "step": 1052
+ },
+ {
+ "epoch": 3.291231732776618,
+ "grad_norm": 1.2284396886825562,
+ "learning_rate": 2.301046789755093e-06,
+ "loss": 0.4409,
+ "step": 1053
+ },
+ {
+ "epoch": 3.2943632567849686,
+ "grad_norm": 0.8000460863113403,
+ "learning_rate": 2.2967311741853797e-06,
+ "loss": 0.4456,
+ "step": 1054
+ },
+ {
+ "epoch": 3.2974947807933193,
+ "grad_norm": 0.7689793109893799,
+ "learning_rate": 2.2924161682871053e-06,
+ "loss": 0.45,
+ "step": 1055
+ },
+ {
+ "epoch": 3.30062630480167,
+ "grad_norm": 0.8032956719398499,
+ "learning_rate": 2.288101785002421e-06,
+ "loss": 0.4817,
+ "step": 1056
+ },
+ {
+ "epoch": 3.3037578288100207,
+ "grad_norm": 0.6831309795379639,
+ "learning_rate": 2.283788037271609e-06,
+ "loss": 0.4502,
+ "step": 1057
+ },
+ {
+ "epoch": 3.306889352818372,
+ "grad_norm": 0.8581221103668213,
+ "learning_rate": 2.279474938033048e-06,
+ "loss": 0.4569,
+ "step": 1058
+ },
+ {
+ "epoch": 3.3100208768267225,
+ "grad_norm": 0.7937221527099609,
+ "learning_rate": 2.2751625002231696e-06,
+ "loss": 0.4451,
+ "step": 1059
+ },
+ {
+ "epoch": 3.3131524008350732,
+ "grad_norm": 0.8095264434814453,
+ "learning_rate": 2.270850736776422e-06,
+ "loss": 0.4462,
+ "step": 1060
+ },
+ {
+ "epoch": 3.316283924843424,
+ "grad_norm": 0.9141370058059692,
+ "learning_rate": 2.2665396606252332e-06,
+ "loss": 0.419,
+ "step": 1061
+ },
+ {
+ "epoch": 3.3194154488517746,
+ "grad_norm": 0.8648553490638733,
+ "learning_rate": 2.262229284699965e-06,
+ "loss": 0.4562,
+ "step": 1062
+ },
+ {
+ "epoch": 3.3225469728601253,
+ "grad_norm": 0.7716917395591736,
+ "learning_rate": 2.2579196219288825e-06,
+ "loss": 0.4734,
+ "step": 1063
+ },
+ {
+ "epoch": 3.325678496868476,
+ "grad_norm": 0.8074535727500916,
+ "learning_rate": 2.2536106852381116e-06,
+ "loss": 0.4272,
+ "step": 1064
+ },
+ {
+ "epoch": 3.3288100208768268,
+ "grad_norm": 0.8989127278327942,
+ "learning_rate": 2.249302487551599e-06,
+ "loss": 0.4728,
+ "step": 1065
+ },
+ {
+ "epoch": 3.3319415448851775,
+ "grad_norm": 0.7662765383720398,
+ "learning_rate": 2.2449950417910777e-06,
+ "loss": 0.4457,
+ "step": 1066
+ },
+ {
+ "epoch": 3.335073068893528,
+ "grad_norm": 0.7789275050163269,
+ "learning_rate": 2.24068836087602e-06,
+ "loss": 0.3919,
+ "step": 1067
+ },
+ {
+ "epoch": 3.338204592901879,
+ "grad_norm": 0.7359098196029663,
+ "learning_rate": 2.2363824577236097e-06,
+ "loss": 0.4876,
+ "step": 1068
+ },
+ {
+ "epoch": 3.3413361169102296,
+ "grad_norm": 0.9960948824882507,
+ "learning_rate": 2.232077345248695e-06,
+ "loss": 0.4894,
+ "step": 1069
+ },
+ {
+ "epoch": 3.3444676409185803,
+ "grad_norm": 0.7446064352989197,
+ "learning_rate": 2.2277730363637537e-06,
+ "loss": 0.4765,
+ "step": 1070
+ },
+ {
+ "epoch": 3.347599164926931,
+ "grad_norm": 0.7674328088760376,
+ "learning_rate": 2.2234695439788534e-06,
+ "loss": 0.4468,
+ "step": 1071
+ },
+ {
+ "epoch": 3.3507306889352817,
+ "grad_norm": 0.8957347869873047,
+ "learning_rate": 2.2191668810016105e-06,
+ "loss": 0.4733,
+ "step": 1072
+ },
+ {
+ "epoch": 3.3538622129436324,
+ "grad_norm": 0.9110277891159058,
+ "learning_rate": 2.2148650603371573e-06,
+ "loss": 0.4399,
+ "step": 1073
+ },
+ {
+ "epoch": 3.356993736951983,
+ "grad_norm": 0.9158220291137695,
+ "learning_rate": 2.2105640948880976e-06,
+ "loss": 0.4609,
+ "step": 1074
+ },
+ {
+ "epoch": 3.3601252609603343,
+ "grad_norm": 0.7630184888839722,
+ "learning_rate": 2.206263997554469e-06,
+ "loss": 0.4674,
+ "step": 1075
+ },
+ {
+ "epoch": 3.3632567849686845,
+ "grad_norm": 0.7975273728370667,
+ "learning_rate": 2.20196478123371e-06,
+ "loss": 0.4478,
+ "step": 1076
+ },
+ {
+ "epoch": 3.3663883089770357,
+ "grad_norm": 0.8825351595878601,
+ "learning_rate": 2.19766645882061e-06,
+ "loss": 0.4687,
+ "step": 1077
+ },
+ {
+ "epoch": 3.3695198329853864,
+ "grad_norm": 0.8907671570777893,
+ "learning_rate": 2.1933690432072817e-06,
+ "loss": 0.4223,
+ "step": 1078
+ },
+ {
+ "epoch": 3.372651356993737,
+ "grad_norm": 0.7449545860290527,
+ "learning_rate": 2.189072547283118e-06,
+ "loss": 0.4595,
+ "step": 1079
+ },
+ {
+ "epoch": 3.375782881002088,
+ "grad_norm": 0.8460972309112549,
+ "learning_rate": 2.184776983934751e-06,
+ "loss": 0.4443,
+ "step": 1080
+ },
+ {
+ "epoch": 3.3789144050104385,
+ "grad_norm": 0.7524845600128174,
+ "learning_rate": 2.1804823660460196e-06,
+ "loss": 0.4235,
+ "step": 1081
+ },
+ {
+ "epoch": 3.382045929018789,
+ "grad_norm": 0.8448389768600464,
+ "learning_rate": 2.176188706497921e-06,
+ "loss": 0.4387,
+ "step": 1082
+ },
+ {
+ "epoch": 3.38517745302714,
+ "grad_norm": 0.7701981663703918,
+ "learning_rate": 2.1718960181685838e-06,
+ "loss": 0.4257,
+ "step": 1083
+ },
+ {
+ "epoch": 3.3883089770354906,
+ "grad_norm": 0.8178983330726624,
+ "learning_rate": 2.167604313933219e-06,
+ "loss": 0.4983,
+ "step": 1084
+ },
+ {
+ "epoch": 3.3914405010438413,
+ "grad_norm": 0.7477235198020935,
+ "learning_rate": 2.163313606664091e-06,
+ "loss": 0.4559,
+ "step": 1085
+ },
+ {
+ "epoch": 3.394572025052192,
+ "grad_norm": 0.8127962350845337,
+ "learning_rate": 2.1590239092304694e-06,
+ "loss": 0.453,
+ "step": 1086
+ },
+ {
+ "epoch": 3.3977035490605427,
+ "grad_norm": 0.7462339997291565,
+ "learning_rate": 2.1547352344985966e-06,
+ "loss": 0.4697,
+ "step": 1087
+ },
+ {
+ "epoch": 3.4008350730688934,
+ "grad_norm": 0.9641384482383728,
+ "learning_rate": 2.1504475953316483e-06,
+ "loss": 0.4495,
+ "step": 1088
+ },
+ {
+ "epoch": 3.403966597077244,
+ "grad_norm": 0.7612512707710266,
+ "learning_rate": 2.146161004589693e-06,
+ "loss": 0.4579,
+ "step": 1089
+ },
+ {
+ "epoch": 3.407098121085595,
+ "grad_norm": 0.7547829747200012,
+ "learning_rate": 2.141875475129655e-06,
+ "loss": 0.4334,
+ "step": 1090
+ },
+ {
+ "epoch": 3.4102296450939455,
+ "grad_norm": 0.8036953806877136,
+ "learning_rate": 2.137591019805278e-06,
+ "loss": 0.4466,
+ "step": 1091
+ },
+ {
+ "epoch": 3.4133611691022967,
+ "grad_norm": 0.7319284081459045,
+ "learning_rate": 2.1333076514670784e-06,
+ "loss": 0.4942,
+ "step": 1092
+ },
+ {
+ "epoch": 3.416492693110647,
+ "grad_norm": 0.8278589248657227,
+ "learning_rate": 2.1290253829623165e-06,
+ "loss": 0.4554,
+ "step": 1093
+ },
+ {
+ "epoch": 3.419624217118998,
+ "grad_norm": 0.733059287071228,
+ "learning_rate": 2.124744227134954e-06,
+ "loss": 0.4187,
+ "step": 1094
+ },
+ {
+ "epoch": 3.422755741127349,
+ "grad_norm": 0.8222727179527283,
+ "learning_rate": 2.1204641968256136e-06,
+ "loss": 0.4587,
+ "step": 1095
+ },
+ {
+ "epoch": 3.4258872651356995,
+ "grad_norm": 0.8296732902526855,
+ "learning_rate": 2.1161853048715438e-06,
+ "loss": 0.4868,
+ "step": 1096
+ },
+ {
+ "epoch": 3.4290187891440502,
+ "grad_norm": 0.7309690713882446,
+ "learning_rate": 2.1119075641065758e-06,
+ "loss": 0.4594,
+ "step": 1097
+ },
+ {
+ "epoch": 3.432150313152401,
+ "grad_norm": 1.4901788234710693,
+ "learning_rate": 2.1076309873610916e-06,
+ "loss": 0.4216,
+ "step": 1098
+ },
+ {
+ "epoch": 3.4352818371607516,
+ "grad_norm": 0.7993581891059875,
+ "learning_rate": 2.1033555874619794e-06,
+ "loss": 0.4842,
+ "step": 1099
+ },
+ {
+ "epoch": 3.4384133611691023,
+ "grad_norm": 0.8846752643585205,
+ "learning_rate": 2.0990813772325995e-06,
+ "loss": 0.4395,
+ "step": 1100
+ },
+ {
+ "epoch": 3.441544885177453,
+ "grad_norm": 1.0796778202056885,
+ "learning_rate": 2.0948083694927436e-06,
+ "loss": 0.4573,
+ "step": 1101
+ },
+ {
+ "epoch": 3.4446764091858038,
+ "grad_norm": 0.74623042345047,
+ "learning_rate": 2.090536577058595e-06,
+ "loss": 0.4563,
+ "step": 1102
+ },
+ {
+ "epoch": 3.4478079331941545,
+ "grad_norm": 0.8245521783828735,
+ "learning_rate": 2.086266012742692e-06,
+ "loss": 0.4433,
+ "step": 1103
+ },
+ {
+ "epoch": 3.450939457202505,
+ "grad_norm": 0.8003777265548706,
+ "learning_rate": 2.081996689353893e-06,
+ "loss": 0.4599,
+ "step": 1104
+ },
+ {
+ "epoch": 3.454070981210856,
+ "grad_norm": 0.8309001922607422,
+ "learning_rate": 2.0777286196973302e-06,
+ "loss": 0.485,
+ "step": 1105
+ },
+ {
+ "epoch": 3.4572025052192066,
+ "grad_norm": 0.8299122452735901,
+ "learning_rate": 2.0734618165743782e-06,
+ "loss": 0.4685,
+ "step": 1106
+ },
+ {
+ "epoch": 3.4603340292275573,
+ "grad_norm": 0.9347029328346252,
+ "learning_rate": 2.069196292782611e-06,
+ "loss": 0.4615,
+ "step": 1107
+ },
+ {
+ "epoch": 3.463465553235908,
+ "grad_norm": 0.7146593332290649,
+ "learning_rate": 2.064932061115766e-06,
+ "loss": 0.4433,
+ "step": 1108
+ },
+ {
+ "epoch": 3.4665970772442587,
+ "grad_norm": 0.7674420475959778,
+ "learning_rate": 2.0606691343637063e-06,
+ "loss": 0.4444,
+ "step": 1109
+ },
+ {
+ "epoch": 3.4697286012526094,
+ "grad_norm": 0.7925504446029663,
+ "learning_rate": 2.05640752531238e-06,
+ "loss": 0.4631,
+ "step": 1110
+ },
+ {
+ "epoch": 3.4728601252609606,
+ "grad_norm": 0.7755677700042725,
+ "learning_rate": 2.0521472467437825e-06,
+ "loss": 0.4709,
+ "step": 1111
+ },
+ {
+ "epoch": 3.4759916492693113,
+ "grad_norm": 0.8535795211791992,
+ "learning_rate": 2.0478883114359187e-06,
+ "loss": 0.442,
+ "step": 1112
+ },
+ {
+ "epoch": 3.479123173277662,
+ "grad_norm": 0.723953127861023,
+ "learning_rate": 2.043630732162767e-06,
+ "loss": 0.4782,
+ "step": 1113
+ },
+ {
+ "epoch": 3.4822546972860127,
+ "grad_norm": 0.7817316651344299,
+ "learning_rate": 2.0393745216942343e-06,
+ "loss": 0.4841,
+ "step": 1114
+ },
+ {
+ "epoch": 3.4853862212943634,
+ "grad_norm": 0.8878781795501709,
+ "learning_rate": 2.0351196927961268e-06,
+ "loss": 0.4673,
+ "step": 1115
+ },
+ {
+ "epoch": 3.488517745302714,
+ "grad_norm": 0.8645241856575012,
+ "learning_rate": 2.030866258230104e-06,
+ "loss": 0.432,
+ "step": 1116
+ },
+ {
+ "epoch": 3.491649269311065,
+ "grad_norm": 0.7294583320617676,
+ "learning_rate": 2.026614230753643e-06,
+ "loss": 0.4683,
+ "step": 1117
+ },
+ {
+ "epoch": 3.4947807933194155,
+ "grad_norm": 0.7412407994270325,
+ "learning_rate": 2.022363623120001e-06,
+ "loss": 0.4523,
+ "step": 1118
+ },
+ {
+ "epoch": 3.497912317327766,
+ "grad_norm": 0.8559291362762451,
+ "learning_rate": 2.0181144480781787e-06,
+ "loss": 0.4309,
+ "step": 1119
+ },
+ {
+ "epoch": 3.501043841336117,
+ "grad_norm": 0.7442825436592102,
+ "learning_rate": 2.0138667183728775e-06,
+ "loss": 0.4096,
+ "step": 1120
+ },
+ {
+ "epoch": 3.5041753653444676,
+ "grad_norm": 0.7605662941932678,
+ "learning_rate": 2.0096204467444645e-06,
+ "loss": 0.4404,
+ "step": 1121
+ },
+ {
+ "epoch": 3.5073068893528183,
+ "grad_norm": 0.7984277009963989,
+ "learning_rate": 2.005375645928935e-06,
+ "loss": 0.4661,
+ "step": 1122
+ },
+ {
+ "epoch": 3.510438413361169,
+ "grad_norm": 1.1044552326202393,
+ "learning_rate": 2.001132328657869e-06,
+ "loss": 0.4185,
+ "step": 1123
+ },
+ {
+ "epoch": 3.5135699373695197,
+ "grad_norm": 0.8210328817367554,
+ "learning_rate": 1.996890507658401e-06,
+ "loss": 0.4746,
+ "step": 1124
+ },
+ {
+ "epoch": 3.5167014613778704,
+ "grad_norm": 0.7302148342132568,
+ "learning_rate": 1.9926501956531758e-06,
+ "loss": 0.4333,
+ "step": 1125
+ },
+ {
+ "epoch": 3.519832985386221,
+ "grad_norm": 0.7713826894760132,
+ "learning_rate": 1.9884114053603114e-06,
+ "loss": 0.4485,
+ "step": 1126
+ },
+ {
+ "epoch": 3.522964509394572,
+ "grad_norm": 0.7386549711227417,
+ "learning_rate": 1.984174149493365e-06,
+ "loss": 0.4678,
+ "step": 1127
+ },
+ {
+ "epoch": 3.526096033402923,
+ "grad_norm": 0.8006004095077515,
+ "learning_rate": 1.979938440761287e-06,
+ "loss": 0.4755,
+ "step": 1128
+ },
+ {
+ "epoch": 3.5292275574112733,
+ "grad_norm": 1.0635333061218262,
+ "learning_rate": 1.97570429186839e-06,
+ "loss": 0.4985,
+ "step": 1129
+ },
+ {
+ "epoch": 3.5323590814196244,
+ "grad_norm": 0.7470075488090515,
+ "learning_rate": 1.9714717155143083e-06,
+ "loss": 0.4307,
+ "step": 1130
+ },
+ {
+ "epoch": 3.535490605427975,
+ "grad_norm": 0.8314558863639832,
+ "learning_rate": 1.967240724393959e-06,
+ "loss": 0.4894,
+ "step": 1131
+ },
+ {
+ "epoch": 3.538622129436326,
+ "grad_norm": 0.761443018913269,
+ "learning_rate": 1.963011331197506e-06,
+ "loss": 0.4653,
+ "step": 1132
+ },
+ {
+ "epoch": 3.5417536534446765,
+ "grad_norm": 0.7483212351799011,
+ "learning_rate": 1.9587835486103163e-06,
+ "loss": 0.4456,
+ "step": 1133
+ },
+ {
+ "epoch": 3.5448851774530272,
+ "grad_norm": 0.7794159054756165,
+ "learning_rate": 1.9545573893129306e-06,
+ "loss": 0.4707,
+ "step": 1134
+ },
+ {
+ "epoch": 3.548016701461378,
+ "grad_norm": 0.8016185760498047,
+ "learning_rate": 1.950332865981019e-06,
+ "loss": 0.4547,
+ "step": 1135
+ },
+ {
+ "epoch": 3.5511482254697286,
+ "grad_norm": 0.8089869618415833,
+ "learning_rate": 1.9461099912853453e-06,
+ "loss": 0.4499,
+ "step": 1136
+ },
+ {
+ "epoch": 3.5542797494780793,
+ "grad_norm": 0.7774782180786133,
+ "learning_rate": 1.9418887778917286e-06,
+ "loss": 0.4531,
+ "step": 1137
+ },
+ {
+ "epoch": 3.55741127348643,
+ "grad_norm": 0.7793645262718201,
+ "learning_rate": 1.937669238461003e-06,
+ "loss": 0.4553,
+ "step": 1138
+ },
+ {
+ "epoch": 3.5605427974947808,
+ "grad_norm": 0.8139959573745728,
+ "learning_rate": 1.933451385648985e-06,
+ "loss": 0.458,
+ "step": 1139
+ },
+ {
+ "epoch": 3.5636743215031315,
+ "grad_norm": 0.7517053484916687,
+ "learning_rate": 1.929235232106431e-06,
+ "loss": 0.4779,
+ "step": 1140
+ },
+ {
+ "epoch": 3.566805845511482,
+ "grad_norm": 0.8851562142372131,
+ "learning_rate": 1.925020790479e-06,
+ "loss": 0.425,
+ "step": 1141
+ },
+ {
+ "epoch": 3.569937369519833,
+ "grad_norm": 0.8129401803016663,
+ "learning_rate": 1.920808073407218e-06,
+ "loss": 0.4616,
+ "step": 1142
+ },
+ {
+ "epoch": 3.5730688935281836,
+ "grad_norm": 0.7110117077827454,
+ "learning_rate": 1.916597093526437e-06,
+ "loss": 0.4748,
+ "step": 1143
+ },
+ {
+ "epoch": 3.5762004175365343,
+ "grad_norm": 0.8268555402755737,
+ "learning_rate": 1.912387863466798e-06,
+ "loss": 0.4752,
+ "step": 1144
+ },
+ {
+ "epoch": 3.5793319415448854,
+ "grad_norm": 1.1036733388900757,
+ "learning_rate": 1.9081803958531967e-06,
+ "loss": 0.4879,
+ "step": 1145
+ },
+ {
+ "epoch": 3.5824634655532357,
+ "grad_norm": 0.8561109304428101,
+ "learning_rate": 1.9039747033052395e-06,
+ "loss": 0.4409,
+ "step": 1146
+ },
+ {
+ "epoch": 3.585594989561587,
+ "grad_norm": 0.7597541809082031,
+ "learning_rate": 1.8997707984372119e-06,
+ "loss": 0.4518,
+ "step": 1147
+ },
+ {
+ "epoch": 3.588726513569937,
+ "grad_norm": 0.7225353121757507,
+ "learning_rate": 1.8955686938580329e-06,
+ "loss": 0.4735,
+ "step": 1148
+ },
+ {
+ "epoch": 3.5918580375782883,
+ "grad_norm": 0.9409791231155396,
+ "learning_rate": 1.8913684021712264e-06,
+ "loss": 0.4249,
+ "step": 1149
+ },
+ {
+ "epoch": 3.594989561586639,
+ "grad_norm": 0.8757275342941284,
+ "learning_rate": 1.8871699359748763e-06,
+ "loss": 0.4479,
+ "step": 1150
+ },
+ {
+ "epoch": 3.5981210855949897,
+ "grad_norm": 0.8090003728866577,
+ "learning_rate": 1.882973307861593e-06,
+ "loss": 0.4846,
+ "step": 1151
+ },
+ {
+ "epoch": 3.6012526096033404,
+ "grad_norm": 0.7568825483322144,
+ "learning_rate": 1.8787785304184726e-06,
+ "loss": 0.4301,
+ "step": 1152
+ },
+ {
+ "epoch": 3.604384133611691,
+ "grad_norm": 0.8233815431594849,
+ "learning_rate": 1.8745856162270592e-06,
+ "loss": 0.4838,
+ "step": 1153
+ },
+ {
+ "epoch": 3.607515657620042,
+ "grad_norm": 0.7817628979682922,
+ "learning_rate": 1.8703945778633121e-06,
+ "loss": 0.4669,
+ "step": 1154
+ },
+ {
+ "epoch": 3.6106471816283925,
+ "grad_norm": 0.821304202079773,
+ "learning_rate": 1.8662054278975605e-06,
+ "loss": 0.4536,
+ "step": 1155
+ },
+ {
+ "epoch": 3.613778705636743,
+ "grad_norm": 0.9304089546203613,
+ "learning_rate": 1.8620181788944712e-06,
+ "loss": 0.4489,
+ "step": 1156
+ },
+ {
+ "epoch": 3.616910229645094,
+ "grad_norm": 0.8202670216560364,
+ "learning_rate": 1.8578328434130114e-06,
+ "loss": 0.4309,
+ "step": 1157
+ },
+ {
+ "epoch": 3.6200417536534446,
+ "grad_norm": 0.8890257477760315,
+ "learning_rate": 1.8536494340064051e-06,
+ "loss": 0.4736,
+ "step": 1158
+ },
+ {
+ "epoch": 3.6231732776617953,
+ "grad_norm": 0.7940590381622314,
+ "learning_rate": 1.8494679632221013e-06,
+ "loss": 0.4468,
+ "step": 1159
+ },
+ {
+ "epoch": 3.626304801670146,
+ "grad_norm": 0.8388273119926453,
+ "learning_rate": 1.845288443601736e-06,
+ "loss": 0.4753,
+ "step": 1160
+ },
+ {
+ "epoch": 3.6294363256784967,
+ "grad_norm": 0.9392285346984863,
+ "learning_rate": 1.84111088768109e-06,
+ "loss": 0.4736,
+ "step": 1161
+ },
+ {
+ "epoch": 3.632567849686848,
+ "grad_norm": 0.7411681413650513,
+ "learning_rate": 1.8369353079900576e-06,
+ "loss": 0.4303,
+ "step": 1162
+ },
+ {
+ "epoch": 3.635699373695198,
+ "grad_norm": 0.8722569942474365,
+ "learning_rate": 1.8327617170526014e-06,
+ "loss": 0.4604,
+ "step": 1163
+ },
+ {
+ "epoch": 3.6388308977035493,
+ "grad_norm": 0.786891758441925,
+ "learning_rate": 1.8285901273867229e-06,
+ "loss": 0.4756,
+ "step": 1164
+ },
+ {
+ "epoch": 3.6419624217118995,
+ "grad_norm": 0.8159083724021912,
+ "learning_rate": 1.824420551504419e-06,
+ "loss": 0.4675,
+ "step": 1165
+ },
+ {
+ "epoch": 3.6450939457202507,
+ "grad_norm": 0.8271334767341614,
+ "learning_rate": 1.8202530019116487e-06,
+ "loss": 0.4311,
+ "step": 1166
+ },
+ {
+ "epoch": 3.6482254697286014,
+ "grad_norm": 0.7617189288139343,
+ "learning_rate": 1.816087491108292e-06,
+ "loss": 0.4522,
+ "step": 1167
+ },
+ {
+ "epoch": 3.651356993736952,
+ "grad_norm": 0.7248172760009766,
+ "learning_rate": 1.8119240315881126e-06,
+ "loss": 0.465,
+ "step": 1168
+ },
+ {
+ "epoch": 3.654488517745303,
+ "grad_norm": 0.8606911897659302,
+ "learning_rate": 1.8077626358387235e-06,
+ "loss": 0.4524,
+ "step": 1169
+ },
+ {
+ "epoch": 3.6576200417536535,
+ "grad_norm": 0.8571308851242065,
+ "learning_rate": 1.8036033163415484e-06,
+ "loss": 0.4625,
+ "step": 1170
+ },
+ {
+ "epoch": 3.6607515657620042,
+ "grad_norm": 0.7239511609077454,
+ "learning_rate": 1.7994460855717812e-06,
+ "loss": 0.5025,
+ "step": 1171
+ },
+ {
+ "epoch": 3.663883089770355,
+ "grad_norm": 0.7958929538726807,
+ "learning_rate": 1.7952909559983544e-06,
+ "loss": 0.4382,
+ "step": 1172
+ },
+ {
+ "epoch": 3.6670146137787056,
+ "grad_norm": 0.7920124530792236,
+ "learning_rate": 1.7911379400838947e-06,
+ "loss": 0.4393,
+ "step": 1173
+ },
+ {
+ "epoch": 3.6701461377870563,
+ "grad_norm": 0.8072578310966492,
+ "learning_rate": 1.7869870502846903e-06,
+ "loss": 0.4627,
+ "step": 1174
+ },
+ {
+ "epoch": 3.673277661795407,
+ "grad_norm": 0.8586218357086182,
+ "learning_rate": 1.7828382990506543e-06,
+ "loss": 0.4456,
+ "step": 1175
+ },
+ {
+ "epoch": 3.6764091858037578,
+ "grad_norm": 0.8741613030433655,
+ "learning_rate": 1.7786916988252845e-06,
+ "loss": 0.4613,
+ "step": 1176
+ },
+ {
+ "epoch": 3.6795407098121085,
+ "grad_norm": 0.7691352367401123,
+ "learning_rate": 1.774547262045626e-06,
+ "loss": 0.4641,
+ "step": 1177
+ },
+ {
+ "epoch": 3.682672233820459,
+ "grad_norm": 0.7866089940071106,
+ "learning_rate": 1.7704050011422357e-06,
+ "loss": 0.4308,
+ "step": 1178
+ },
+ {
+ "epoch": 3.68580375782881,
+ "grad_norm": 0.9934884309768677,
+ "learning_rate": 1.7662649285391447e-06,
+ "loss": 0.4434,
+ "step": 1179
+ },
+ {
+ "epoch": 3.6889352818371606,
+ "grad_norm": 0.794385552406311,
+ "learning_rate": 1.7621270566538204e-06,
+ "loss": 0.4481,
+ "step": 1180
+ },
+ {
+ "epoch": 3.6920668058455117,
+ "grad_norm": 0.7573548555374146,
+ "learning_rate": 1.7579913978971296e-06,
+ "loss": 0.4525,
+ "step": 1181
+ },
+ {
+ "epoch": 3.695198329853862,
+ "grad_norm": 0.7073976993560791,
+ "learning_rate": 1.7538579646733023e-06,
+ "loss": 0.4214,
+ "step": 1182
+ },
+ {
+ "epoch": 3.698329853862213,
+ "grad_norm": 0.8009579181671143,
+ "learning_rate": 1.7497267693798902e-06,
+ "loss": 0.4542,
+ "step": 1183
+ },
+ {
+ "epoch": 3.701461377870564,
+ "grad_norm": 1.4488778114318848,
+ "learning_rate": 1.7455978244077348e-06,
+ "loss": 0.443,
+ "step": 1184
+ },
+ {
+ "epoch": 3.7045929018789145,
+ "grad_norm": 1.0529266595840454,
+ "learning_rate": 1.7414711421409292e-06,
+ "loss": 0.4908,
+ "step": 1185
+ },
+ {
+ "epoch": 3.7077244258872653,
+ "grad_norm": 0.757431149482727,
+ "learning_rate": 1.7373467349567775e-06,
+ "loss": 0.4454,
+ "step": 1186
+ },
+ {
+ "epoch": 3.710855949895616,
+ "grad_norm": 0.7086379528045654,
+ "learning_rate": 1.733224615225763e-06,
+ "loss": 0.4292,
+ "step": 1187
+ },
+ {
+ "epoch": 3.7139874739039667,
+ "grad_norm": 0.7454110383987427,
+ "learning_rate": 1.7291047953115049e-06,
+ "loss": 0.4421,
+ "step": 1188
+ },
+ {
+ "epoch": 3.7171189979123174,
+ "grad_norm": 0.804027795791626,
+ "learning_rate": 1.7249872875707257e-06,
+ "loss": 0.4819,
+ "step": 1189
+ },
+ {
+ "epoch": 3.720250521920668,
+ "grad_norm": 0.8159645199775696,
+ "learning_rate": 1.7208721043532146e-06,
+ "loss": 0.4628,
+ "step": 1190
+ },
+ {
+ "epoch": 3.723382045929019,
+ "grad_norm": 0.8451672196388245,
+ "learning_rate": 1.7167592580017866e-06,
+ "loss": 0.4773,
+ "step": 1191
+ },
+ {
+ "epoch": 3.7265135699373695,
+ "grad_norm": 0.920553982257843,
+ "learning_rate": 1.7126487608522492e-06,
+ "loss": 0.4787,
+ "step": 1192
+ },
+ {
+ "epoch": 3.72964509394572,
+ "grad_norm": 0.9169708490371704,
+ "learning_rate": 1.7085406252333613e-06,
+ "loss": 0.4543,
+ "step": 1193
+ },
+ {
+ "epoch": 3.732776617954071,
+ "grad_norm": 0.7245096564292908,
+ "learning_rate": 1.7044348634668023e-06,
+ "loss": 0.4599,
+ "step": 1194
+ },
+ {
+ "epoch": 3.7359081419624216,
+ "grad_norm": 0.835832417011261,
+ "learning_rate": 1.7003314878671284e-06,
+ "loss": 0.4794,
+ "step": 1195
+ },
+ {
+ "epoch": 3.7390396659707723,
+ "grad_norm": 0.8455896973609924,
+ "learning_rate": 1.696230510741742e-06,
+ "loss": 0.4329,
+ "step": 1196
+ },
+ {
+ "epoch": 3.742171189979123,
+ "grad_norm": 0.743611752986908,
+ "learning_rate": 1.692131944390849e-06,
+ "loss": 0.4455,
+ "step": 1197
+ },
+ {
+ "epoch": 3.745302713987474,
+ "grad_norm": 1.1334915161132812,
+ "learning_rate": 1.6880358011074272e-06,
+ "loss": 0.4673,
+ "step": 1198
+ },
+ {
+ "epoch": 3.7484342379958244,
+ "grad_norm": 0.6935724020004272,
+ "learning_rate": 1.6839420931771828e-06,
+ "loss": 0.4686,
+ "step": 1199
+ },
+ {
+ "epoch": 3.7515657620041756,
+ "grad_norm": 0.962006151676178,
+ "learning_rate": 1.6798508328785213e-06,
+ "loss": 0.4474,
+ "step": 1200
+ },
+ {
+ "epoch": 3.754697286012526,
+ "grad_norm": 1.1499714851379395,
+ "learning_rate": 1.6757620324825047e-06,
+ "loss": 0.4659,
+ "step": 1201
+ },
+ {
+ "epoch": 3.757828810020877,
+ "grad_norm": 0.7689645886421204,
+ "learning_rate": 1.6716757042528192e-06,
+ "loss": 0.4552,
+ "step": 1202
+ },
+ {
+ "epoch": 3.7609603340292277,
+ "grad_norm": 0.7482030391693115,
+ "learning_rate": 1.6675918604457352e-06,
+ "loss": 0.4747,
+ "step": 1203
+ },
+ {
+ "epoch": 3.7640918580375784,
+ "grad_norm": 0.7727032899856567,
+ "learning_rate": 1.6635105133100686e-06,
+ "loss": 0.4508,
+ "step": 1204
+ },
+ {
+ "epoch": 3.767223382045929,
+ "grad_norm": 0.8722149133682251,
+ "learning_rate": 1.6594316750871514e-06,
+ "loss": 0.4685,
+ "step": 1205
+ },
+ {
+ "epoch": 3.77035490605428,
+ "grad_norm": 0.771304726600647,
+ "learning_rate": 1.6553553580107884e-06,
+ "loss": 0.4418,
+ "step": 1206
+ },
+ {
+ "epoch": 3.7734864300626305,
+ "grad_norm": 0.767315149307251,
+ "learning_rate": 1.6512815743072214e-06,
+ "loss": 0.4532,
+ "step": 1207
+ },
+ {
+ "epoch": 3.776617954070981,
+ "grad_norm": 0.8825518488883972,
+ "learning_rate": 1.6472103361950976e-06,
+ "loss": 0.468,
+ "step": 1208
+ },
+ {
+ "epoch": 3.779749478079332,
+ "grad_norm": 0.8887981176376343,
+ "learning_rate": 1.6431416558854243e-06,
+ "loss": 0.4264,
+ "step": 1209
+ },
+ {
+ "epoch": 3.7828810020876826,
+ "grad_norm": 0.8399733304977417,
+ "learning_rate": 1.63907554558154e-06,
+ "loss": 0.4405,
+ "step": 1210
+ },
+ {
+ "epoch": 3.7860125260960333,
+ "grad_norm": 0.8112586140632629,
+ "learning_rate": 1.6350120174790751e-06,
+ "loss": 0.445,
+ "step": 1211
+ },
+ {
+ "epoch": 3.789144050104384,
+ "grad_norm": 0.860775351524353,
+ "learning_rate": 1.6309510837659137e-06,
+ "loss": 0.4557,
+ "step": 1212
+ },
+ {
+ "epoch": 3.7922755741127347,
+ "grad_norm": 0.8522343039512634,
+ "learning_rate": 1.626892756622161e-06,
+ "loss": 0.481,
+ "step": 1213
+ },
+ {
+ "epoch": 3.7954070981210855,
+ "grad_norm": 0.7927511930465698,
+ "learning_rate": 1.6228370482200988e-06,
+ "loss": 0.4328,
+ "step": 1214
+ },
+ {
+ "epoch": 3.798538622129436,
+ "grad_norm": 0.7350064516067505,
+ "learning_rate": 1.6187839707241604e-06,
+ "loss": 0.4604,
+ "step": 1215
+ },
+ {
+ "epoch": 3.801670146137787,
+ "grad_norm": 0.8363698124885559,
+ "learning_rate": 1.6147335362908847e-06,
+ "loss": 0.4271,
+ "step": 1216
+ },
+ {
+ "epoch": 3.804801670146138,
+ "grad_norm": 1.080613613128662,
+ "learning_rate": 1.610685757068885e-06,
+ "loss": 0.447,
+ "step": 1217
+ },
+ {
+ "epoch": 3.8079331941544883,
+ "grad_norm": 1.1507478952407837,
+ "learning_rate": 1.6066406451988104e-06,
+ "loss": 0.4664,
+ "step": 1218
+ },
+ {
+ "epoch": 3.8110647181628394,
+ "grad_norm": 0.7778187990188599,
+ "learning_rate": 1.6025982128133073e-06,
+ "loss": 0.456,
+ "step": 1219
+ },
+ {
+ "epoch": 3.81419624217119,
+ "grad_norm": 0.8383583426475525,
+ "learning_rate": 1.5985584720369876e-06,
+ "loss": 0.4684,
+ "step": 1220
+ },
+ {
+ "epoch": 3.817327766179541,
+ "grad_norm": 0.7743321061134338,
+ "learning_rate": 1.5945214349863914e-06,
+ "loss": 0.4567,
+ "step": 1221
+ },
+ {
+ "epoch": 3.8204592901878915,
+ "grad_norm": 0.8020774126052856,
+ "learning_rate": 1.5904871137699462e-06,
+ "loss": 0.4175,
+ "step": 1222
+ },
+ {
+ "epoch": 3.8235908141962422,
+ "grad_norm": 0.790318489074707,
+ "learning_rate": 1.5864555204879375e-06,
+ "loss": 0.469,
+ "step": 1223
+ },
+ {
+ "epoch": 3.826722338204593,
+ "grad_norm": 0.8583689332008362,
+ "learning_rate": 1.5824266672324652e-06,
+ "loss": 0.4931,
+ "step": 1224
+ },
+ {
+ "epoch": 3.8298538622129437,
+ "grad_norm": 0.7788206934928894,
+ "learning_rate": 1.5784005660874125e-06,
+ "loss": 0.4643,
+ "step": 1225
+ },
+ {
+ "epoch": 3.8329853862212944,
+ "grad_norm": 0.8385717868804932,
+ "learning_rate": 1.574377229128409e-06,
+ "loss": 0.4567,
+ "step": 1226
+ },
+ {
+ "epoch": 3.836116910229645,
+ "grad_norm": 0.8447727560997009,
+ "learning_rate": 1.5703566684227922e-06,
+ "loss": 0.42,
+ "step": 1227
+ },
+ {
+ "epoch": 3.8392484342379958,
+ "grad_norm": 0.7286496758460999,
+ "learning_rate": 1.5663388960295742e-06,
+ "loss": 0.4603,
+ "step": 1228
+ },
+ {
+ "epoch": 3.8423799582463465,
+ "grad_norm": 0.8493947982788086,
+ "learning_rate": 1.562323923999401e-06,
+ "loss": 0.4731,
+ "step": 1229
+ },
+ {
+ "epoch": 3.845511482254697,
+ "grad_norm": 0.8641151785850525,
+ "learning_rate": 1.5583117643745233e-06,
+ "loss": 0.4491,
+ "step": 1230
+ },
+ {
+ "epoch": 3.848643006263048,
+ "grad_norm": 0.9493702054023743,
+ "learning_rate": 1.5543024291887532e-06,
+ "loss": 0.411,
+ "step": 1231
+ },
+ {
+ "epoch": 3.8517745302713986,
+ "grad_norm": 0.7246205806732178,
+ "learning_rate": 1.5502959304674337e-06,
+ "loss": 0.4569,
+ "step": 1232
+ },
+ {
+ "epoch": 3.8549060542797493,
+ "grad_norm": 0.7576872110366821,
+ "learning_rate": 1.5462922802273994e-06,
+ "loss": 0.4548,
+ "step": 1233
+ },
+ {
+ "epoch": 3.8580375782881005,
+ "grad_norm": 0.7710747718811035,
+ "learning_rate": 1.5422914904769404e-06,
+ "loss": 0.447,
+ "step": 1234
+ },
+ {
+ "epoch": 3.8611691022964507,
+ "grad_norm": 0.7661204934120178,
+ "learning_rate": 1.5382935732157677e-06,
+ "loss": 0.4601,
+ "step": 1235
+ },
+ {
+ "epoch": 3.864300626304802,
+ "grad_norm": 0.7133070826530457,
+ "learning_rate": 1.5342985404349788e-06,
+ "loss": 0.4245,
+ "step": 1236
+ },
+ {
+ "epoch": 3.867432150313152,
+ "grad_norm": 0.7716459631919861,
+ "learning_rate": 1.5303064041170163e-06,
+ "loss": 0.4543,
+ "step": 1237
+ },
+ {
+ "epoch": 3.8705636743215033,
+ "grad_norm": 0.737501859664917,
+ "learning_rate": 1.5263171762356388e-06,
+ "loss": 0.4405,
+ "step": 1238
+ },
+ {
+ "epoch": 3.873695198329854,
+ "grad_norm": 0.7885998487472534,
+ "learning_rate": 1.5223308687558786e-06,
+ "loss": 0.4412,
+ "step": 1239
+ },
+ {
+ "epoch": 3.8768267223382047,
+ "grad_norm": 0.7516661882400513,
+ "learning_rate": 1.5183474936340092e-06,
+ "loss": 0.4185,
+ "step": 1240
+ },
+ {
+ "epoch": 3.8799582463465554,
+ "grad_norm": 0.8790446519851685,
+ "learning_rate": 1.5143670628175111e-06,
+ "loss": 0.444,
+ "step": 1241
+ },
+ {
+ "epoch": 3.883089770354906,
+ "grad_norm": 0.7695789337158203,
+ "learning_rate": 1.5103895882450315e-06,
+ "loss": 0.4615,
+ "step": 1242
+ },
+ {
+ "epoch": 3.886221294363257,
+ "grad_norm": 0.7859196662902832,
+ "learning_rate": 1.506415081846353e-06,
+ "loss": 0.4379,
+ "step": 1243
+ },
+ {
+ "epoch": 3.8893528183716075,
+ "grad_norm": 0.7771942615509033,
+ "learning_rate": 1.5024435555423522e-06,
+ "loss": 0.5013,
+ "step": 1244
+ },
+ {
+ "epoch": 3.892484342379958,
+ "grad_norm": 0.7415695190429688,
+ "learning_rate": 1.498475021244971e-06,
+ "loss": 0.4861,
+ "step": 1245
+ },
+ {
+ "epoch": 3.895615866388309,
+ "grad_norm": 0.7909391522407532,
+ "learning_rate": 1.4945094908571755e-06,
+ "loss": 0.4599,
+ "step": 1246
+ },
+ {
+ "epoch": 3.8987473903966596,
+ "grad_norm": 0.7749060988426208,
+ "learning_rate": 1.490546976272923e-06,
+ "loss": 0.3986,
+ "step": 1247
+ },
+ {
+ "epoch": 3.9018789144050103,
+ "grad_norm": 0.810681164264679,
+ "learning_rate": 1.4865874893771248e-06,
+ "loss": 0.4495,
+ "step": 1248
+ },
+ {
+ "epoch": 3.905010438413361,
+ "grad_norm": 0.8018531799316406,
+ "learning_rate": 1.4826310420456103e-06,
+ "loss": 0.4426,
+ "step": 1249
+ },
+ {
+ "epoch": 3.9081419624217117,
+ "grad_norm": 0.756064236164093,
+ "learning_rate": 1.4786776461450924e-06,
+ "loss": 0.4474,
+ "step": 1250
+ },
+ {
+ "epoch": 3.911273486430063,
+ "grad_norm": 0.7581740021705627,
+ "learning_rate": 1.4747273135331347e-06,
+ "loss": 0.4494,
+ "step": 1251
+ },
+ {
+ "epoch": 3.914405010438413,
+ "grad_norm": 0.7666076421737671,
+ "learning_rate": 1.4707800560581086e-06,
+ "loss": 0.4593,
+ "step": 1252
+ },
+ {
+ "epoch": 3.9175365344467643,
+ "grad_norm": 0.7339973449707031,
+ "learning_rate": 1.4668358855591664e-06,
+ "loss": 0.4682,
+ "step": 1253
+ },
+ {
+ "epoch": 3.9206680584551146,
+ "grad_norm": 0.8504599928855896,
+ "learning_rate": 1.4628948138661974e-06,
+ "loss": 0.4504,
+ "step": 1254
+ },
+ {
+ "epoch": 3.9237995824634657,
+ "grad_norm": 0.8332642912864685,
+ "learning_rate": 1.4589568527997985e-06,
+ "loss": 0.5024,
+ "step": 1255
+ },
+ {
+ "epoch": 3.9269311064718164,
+ "grad_norm": 0.7813694477081299,
+ "learning_rate": 1.4550220141712384e-06,
+ "loss": 0.4547,
+ "step": 1256
+ },
+ {
+ "epoch": 3.930062630480167,
+ "grad_norm": 0.842258632183075,
+ "learning_rate": 1.451090309782417e-06,
+ "loss": 0.4584,
+ "step": 1257
+ },
+ {
+ "epoch": 3.933194154488518,
+ "grad_norm": 0.8159133791923523,
+ "learning_rate": 1.4471617514258373e-06,
+ "loss": 0.4538,
+ "step": 1258
+ },
+ {
+ "epoch": 3.9363256784968685,
+ "grad_norm": 0.8117021322250366,
+ "learning_rate": 1.4432363508845626e-06,
+ "loss": 0.4315,
+ "step": 1259
+ },
+ {
+ "epoch": 3.9394572025052192,
+ "grad_norm": 0.8087465167045593,
+ "learning_rate": 1.4393141199321881e-06,
+ "loss": 0.4367,
+ "step": 1260
+ },
+ {
+ "epoch": 3.94258872651357,
+ "grad_norm": 0.7954697012901306,
+ "learning_rate": 1.435395070332801e-06,
+ "loss": 0.4515,
+ "step": 1261
+ },
+ {
+ "epoch": 3.9457202505219207,
+ "grad_norm": 0.7305286526679993,
+ "learning_rate": 1.4314792138409454e-06,
+ "loss": 0.4879,
+ "step": 1262
+ },
+ {
+ "epoch": 3.9488517745302714,
+ "grad_norm": 0.8883433938026428,
+ "learning_rate": 1.4275665622015908e-06,
+ "loss": 0.4489,
+ "step": 1263
+ },
+ {
+ "epoch": 3.951983298538622,
+ "grad_norm": 0.8176298141479492,
+ "learning_rate": 1.4236571271500909e-06,
+ "loss": 0.4583,
+ "step": 1264
+ },
+ {
+ "epoch": 3.9551148225469728,
+ "grad_norm": 0.8042430281639099,
+ "learning_rate": 1.4197509204121563e-06,
+ "loss": 0.4277,
+ "step": 1265
+ },
+ {
+ "epoch": 3.9582463465553235,
+ "grad_norm": 0.8153829574584961,
+ "learning_rate": 1.4158479537038095e-06,
+ "loss": 0.4389,
+ "step": 1266
+ },
+ {
+ "epoch": 3.961377870563674,
+ "grad_norm": 0.7908188104629517,
+ "learning_rate": 1.4119482387313588e-06,
+ "loss": 0.4421,
+ "step": 1267
+ },
+ {
+ "epoch": 3.964509394572025,
+ "grad_norm": 0.831758975982666,
+ "learning_rate": 1.4080517871913596e-06,
+ "loss": 0.4308,
+ "step": 1268
+ },
+ {
+ "epoch": 3.9676409185803756,
+ "grad_norm": 0.8191989064216614,
+ "learning_rate": 1.4041586107705758e-06,
+ "loss": 0.4654,
+ "step": 1269
+ },
+ {
+ "epoch": 3.9707724425887267,
+ "grad_norm": 0.9455055594444275,
+ "learning_rate": 1.4002687211459524e-06,
+ "loss": 0.4668,
+ "step": 1270
+ },
+ {
+ "epoch": 3.973903966597077,
+ "grad_norm": 0.9271034002304077,
+ "learning_rate": 1.396382129984572e-06,
+ "loss": 0.4414,
+ "step": 1271
+ },
+ {
+ "epoch": 3.977035490605428,
+ "grad_norm": 0.7652955651283264,
+ "learning_rate": 1.392498848943627e-06,
+ "loss": 0.4575,
+ "step": 1272
+ },
+ {
+ "epoch": 3.980167014613779,
+ "grad_norm": 0.7850046157836914,
+ "learning_rate": 1.3886188896703816e-06,
+ "loss": 0.4554,
+ "step": 1273
+ },
+ {
+ "epoch": 3.9832985386221296,
+ "grad_norm": 0.7194349765777588,
+ "learning_rate": 1.3847422638021357e-06,
+ "loss": 0.437,
+ "step": 1274
+ },
+ {
+ "epoch": 3.9864300626304803,
+ "grad_norm": 0.8726270198822021,
+ "learning_rate": 1.3808689829661899e-06,
+ "loss": 0.4657,
+ "step": 1275
+ },
+ {
+ "epoch": 3.989561586638831,
+ "grad_norm": 0.7741451263427734,
+ "learning_rate": 1.3769990587798146e-06,
+ "loss": 0.3931,
+ "step": 1276
+ }
+ ],
+ "logging_steps": 1,
+ "max_steps": 1914,
+ "num_input_tokens_seen": 0,
+ "num_train_epochs": 6,
+ "save_steps": 319,
+ "stateful_callbacks": {
+ "TrainerControl": {
+ "args": {
+ "should_epoch_stop": false,
+ "should_evaluate": false,
+ "should_log": false,
+ "should_save": true,
+ "should_training_stop": false
+ },
+ "attributes": {}
+ }
+ },
+ "total_flos": 3.866690661704217e+19,
+ "train_batch_size": 4,
+ "trial_name": null,
+ "trial_params": null
+}
diff --git a/checkpoint-1276/training_args.bin b/checkpoint-1276/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..8067ee9c1c0bc752bdfd00cfcaf1a6e717d2356b
--- /dev/null
+++ b/checkpoint-1276/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c650156a192cae0a2070c4323ee8a93e9b52fb76041d59ae0633b98389585727
+size 7928
diff --git a/checkpoint-1276/zero_to_fp32.py b/checkpoint-1276/zero_to_fp32.py
new file mode 100644
index 0000000000000000000000000000000000000000..24cc342e78d1a006c782b3a4cd68d9ce786d8fd8
--- /dev/null
+++ b/checkpoint-1276/zero_to_fp32.py
@@ -0,0 +1,604 @@
+#!/usr/bin/env python
+
+# Copyright (c) Microsoft Corporation.
+# SPDX-License-Identifier: Apache-2.0
+
+# DeepSpeed Team
+
+# This script extracts fp32 consolidated weights from a zero 1, 2 and 3 DeepSpeed checkpoints. It gets
+# copied into the top level checkpoint dir, so the user can easily do the conversion at any point in
+# the future. Once extracted, the weights don't require DeepSpeed and can be used in any
+# application.
+#
+# example: python zero_to_fp32.py . pytorch_model.bin
+
+import argparse
+import torch
+import glob
+import math
+import os
+import re
+from collections import OrderedDict
+from dataclasses import dataclass
+
+# while this script doesn't use deepspeed to recover data, since the checkpoints are pickled with
+# DeepSpeed data structures it has to be available in the current python environment.
+from deepspeed.utils import logger
+from deepspeed.checkpoint.constants import (DS_VERSION, OPTIMIZER_STATE_DICT, SINGLE_PARTITION_OF_FP32_GROUPS,
+ FP32_FLAT_GROUPS, ZERO_STAGE, PARTITION_COUNT, PARAM_SHAPES, BUFFER_NAMES,
+ FROZEN_PARAM_SHAPES, FROZEN_PARAM_FRAGMENTS)
+
+
+@dataclass
+class zero_model_state:
+ buffers: dict()
+ param_shapes: dict()
+ shared_params: list
+ ds_version: int
+ frozen_param_shapes: dict()
+ frozen_param_fragments: dict()
+
+
+debug = 0
+
+# load to cpu
+device = torch.device('cpu')
+
+
+def atoi(text):
+ return int(text) if text.isdigit() else text
+
+
+def natural_keys(text):
+ '''
+ alist.sort(key=natural_keys) sorts in human order
+ http://nedbatchelder.com/blog/200712/human_sorting.html
+ (See Toothy's implementation in the comments)
+ '''
+ return [atoi(c) for c in re.split(r'(\d+)', text)]
+
+
+def get_model_state_file(checkpoint_dir, zero_stage):
+ if not os.path.isdir(checkpoint_dir):
+ raise FileNotFoundError(f"Directory '{checkpoint_dir}' doesn't exist")
+
+ # there should be only one file
+ if zero_stage <= 2:
+ file = os.path.join(checkpoint_dir, "mp_rank_00_model_states.pt")
+ elif zero_stage == 3:
+ file = os.path.join(checkpoint_dir, "zero_pp_rank_0_mp_rank_00_model_states.pt")
+
+ if not os.path.exists(file):
+ raise FileNotFoundError(f"can't find model states file at '{file}'")
+
+ return file
+
+
+def get_checkpoint_files(checkpoint_dir, glob_pattern):
+ # XXX: need to test that this simple glob rule works for multi-node setup too
+ ckpt_files = sorted(glob.glob(os.path.join(checkpoint_dir, glob_pattern)), key=natural_keys)
+
+ if len(ckpt_files) == 0:
+ raise FileNotFoundError(f"can't find {glob_pattern} files in directory '{checkpoint_dir}'")
+
+ return ckpt_files
+
+
+def get_optim_files(checkpoint_dir):
+ return get_checkpoint_files(checkpoint_dir, "*_optim_states.pt")
+
+
+def get_model_state_files(checkpoint_dir):
+ return get_checkpoint_files(checkpoint_dir, "*_model_states.pt")
+
+
+def parse_model_states(files):
+ zero_model_states = []
+ for file in files:
+ state_dict = torch.load(file, map_location=device)
+
+ if BUFFER_NAMES not in state_dict:
+ raise ValueError(f"{file} is not a model state checkpoint")
+ buffer_names = state_dict[BUFFER_NAMES]
+ if debug:
+ print("Found buffers:", buffer_names)
+
+ # recover just the buffers while restoring them to fp32 if they were saved in fp16
+ buffers = {k: v.float() for k, v in state_dict["module"].items() if k in buffer_names}
+ param_shapes = state_dict[PARAM_SHAPES]
+
+ # collect parameters that are included in param_shapes
+ param_names = []
+ for s in param_shapes:
+ for name in s.keys():
+ param_names.append(name)
+
+ # update with frozen parameters
+ frozen_param_shapes = state_dict.get(FROZEN_PARAM_SHAPES, None)
+ if frozen_param_shapes is not None:
+ if debug:
+ print(f"Found frozen_param_shapes: {frozen_param_shapes}")
+ param_names += list(frozen_param_shapes.keys())
+
+ # handle shared params
+ shared_params = [[k, v] for k, v in state_dict["shared_params"].items()]
+
+ ds_version = state_dict.get(DS_VERSION, None)
+
+ frozen_param_fragments = state_dict.get(FROZEN_PARAM_FRAGMENTS, None)
+
+ z_model_state = zero_model_state(buffers=buffers,
+ param_shapes=param_shapes,
+ shared_params=shared_params,
+ ds_version=ds_version,
+ frozen_param_shapes=frozen_param_shapes,
+ frozen_param_fragments=frozen_param_fragments)
+ zero_model_states.append(z_model_state)
+
+ return zero_model_states
+
+
+def parse_optim_states(files, ds_checkpoint_dir):
+
+ total_files = len(files)
+ state_dicts = []
+ for f in files:
+ state_dict = torch.load(f, map_location=device)
+ # immediately discard the potentially huge 2 optimizer states as we only care for fp32 master weights
+ # and also handle the case where it was already removed by another helper script
+ state_dict["optimizer_state_dict"].pop("optimizer_state_dict", None)
+ state_dicts.append(state_dict)
+
+ if not ZERO_STAGE in state_dicts[0][OPTIMIZER_STATE_DICT]:
+ raise ValueError(f"{files[0]} is not a zero checkpoint")
+ zero_stage = state_dicts[0][OPTIMIZER_STATE_DICT][ZERO_STAGE]
+ world_size = state_dicts[0][OPTIMIZER_STATE_DICT][PARTITION_COUNT]
+
+ # For ZeRO-2 each param group can have different partition_count as data parallelism for expert
+ # parameters can be different from data parallelism for non-expert parameters. So we can just
+ # use the max of the partition_count to get the dp world_size.
+
+ if type(world_size) is list:
+ world_size = max(world_size)
+
+ if world_size != total_files:
+ raise ValueError(
+ f"Expected {world_size} of '*_optim_states.pt' under '{ds_checkpoint_dir}' but found {total_files} files. "
+ "Possibly due to an overwrite of an old checkpoint, or a checkpoint didn't get saved by one or more processes."
+ )
+
+ # the groups are named differently in each stage
+ if zero_stage <= 2:
+ fp32_groups_key = SINGLE_PARTITION_OF_FP32_GROUPS
+ elif zero_stage == 3:
+ fp32_groups_key = FP32_FLAT_GROUPS
+ else:
+ raise ValueError(f"unknown zero stage {zero_stage}")
+
+ if zero_stage <= 2:
+ fp32_flat_groups = [state_dicts[i][OPTIMIZER_STATE_DICT][fp32_groups_key] for i in range(len(state_dicts))]
+ elif zero_stage == 3:
+ # if there is more than one param group, there will be multiple flattened tensors - one
+ # flattened tensor per group - for simplicity merge them into a single tensor
+ #
+ # XXX: could make the script more memory efficient for when there are multiple groups - it
+ # will require matching the sub-lists of param_shapes for each param group flattened tensor
+
+ fp32_flat_groups = [
+ torch.cat(state_dicts[i][OPTIMIZER_STATE_DICT][fp32_groups_key], 0) for i in range(len(state_dicts))
+ ]
+
+ return zero_stage, world_size, fp32_flat_groups
+
+
+def _get_fp32_state_dict_from_zero_checkpoint(ds_checkpoint_dir, exclude_frozen_parameters):
+ """
+ Returns fp32 state_dict reconstructed from ds checkpoint
+
+ Args:
+ - ``ds_checkpoint_dir``: path to the deepspeed checkpoint folder (where the optimizer files are)
+
+ """
+ print(f"Processing zero checkpoint '{ds_checkpoint_dir}'")
+
+ optim_files = get_optim_files(ds_checkpoint_dir)
+ zero_stage, world_size, fp32_flat_groups = parse_optim_states(optim_files, ds_checkpoint_dir)
+ print(f"Detected checkpoint of type zero stage {zero_stage}, world_size: {world_size}")
+
+ model_files = get_model_state_files(ds_checkpoint_dir)
+
+ zero_model_states = parse_model_states(model_files)
+ print(f'Parsing checkpoint created by deepspeed=={zero_model_states[0].ds_version}')
+
+ if zero_stage <= 2:
+ return _get_fp32_state_dict_from_zero2_checkpoint(world_size, fp32_flat_groups, zero_model_states,
+ exclude_frozen_parameters)
+ elif zero_stage == 3:
+ return _get_fp32_state_dict_from_zero3_checkpoint(world_size, fp32_flat_groups, zero_model_states,
+ exclude_frozen_parameters)
+
+
+def _zero2_merge_frozen_params(state_dict, zero_model_states):
+ if zero_model_states[0].frozen_param_shapes is None or len(zero_model_states[0].frozen_param_shapes) == 0:
+ return
+
+ frozen_param_shapes = zero_model_states[0].frozen_param_shapes
+ frozen_param_fragments = zero_model_states[0].frozen_param_fragments
+
+ if debug:
+ num_elem = sum(s.numel() for s in frozen_param_shapes.values())
+ print(f'rank 0: {FROZEN_PARAM_SHAPES}.numel = {num_elem}')
+
+ wanted_params = len(frozen_param_shapes)
+ wanted_numel = sum(s.numel() for s in frozen_param_shapes.values())
+ avail_numel = sum([p.numel() for p in frozen_param_fragments.values()])
+ print(f'Frozen params: Have {avail_numel} numels to process.')
+ print(f'Frozen params: Need {wanted_numel} numels in {wanted_params} params')
+
+ total_params = 0
+ total_numel = 0
+ for name, shape in frozen_param_shapes.items():
+ total_params += 1
+ unpartitioned_numel = shape.numel()
+ total_numel += unpartitioned_numel
+
+ state_dict[name] = frozen_param_fragments[name]
+
+ if debug:
+ print(f"{name} full shape: {shape} unpartitioned numel {unpartitioned_numel} ")
+
+ print(f"Reconstructed Frozen fp32 state dict with {total_params} params {total_numel} elements")
+
+
+def _has_callable(obj, fn):
+ attr = getattr(obj, fn, None)
+ return callable(attr)
+
+
+def _zero2_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states):
+ param_shapes = zero_model_states[0].param_shapes
+
+ # Reconstruction protocol:
+ #
+ # XXX: document this
+
+ if debug:
+ for i in range(world_size):
+ for j in range(len(fp32_flat_groups[0])):
+ print(f"{FP32_FLAT_GROUPS}[{i}][{j}].shape={fp32_flat_groups[i][j].shape}")
+
+ # XXX: memory usage doubles here (zero2)
+ num_param_groups = len(fp32_flat_groups[0])
+ merged_single_partition_of_fp32_groups = []
+ for i in range(num_param_groups):
+ merged_partitions = [sd[i] for sd in fp32_flat_groups]
+ full_single_fp32_vector = torch.cat(merged_partitions, 0)
+ merged_single_partition_of_fp32_groups.append(full_single_fp32_vector)
+ avail_numel = sum(
+ [full_single_fp32_vector.numel() for full_single_fp32_vector in merged_single_partition_of_fp32_groups])
+
+ if debug:
+ wanted_params = sum([len(shapes) for shapes in param_shapes])
+ wanted_numel = sum([sum(shape.numel() for shape in shapes.values()) for shapes in param_shapes])
+ # not asserting if there is a mismatch due to possible padding
+ print(f"Have {avail_numel} numels to process.")
+ print(f"Need {wanted_numel} numels in {wanted_params} params.")
+
+ # params
+ # XXX: for huge models that can't fit into the host's RAM we will have to recode this to support
+ # out-of-core computing solution
+ total_numel = 0
+ total_params = 0
+ for shapes, full_single_fp32_vector in zip(param_shapes, merged_single_partition_of_fp32_groups):
+ offset = 0
+ avail_numel = full_single_fp32_vector.numel()
+ for name, shape in shapes.items():
+
+ unpartitioned_numel = shape.numel() if _has_callable(shape, 'numel') else math.prod(shape)
+ total_numel += unpartitioned_numel
+ total_params += 1
+
+ if debug:
+ print(f"{name} full shape: {shape} unpartitioned numel {unpartitioned_numel} ")
+ state_dict[name] = full_single_fp32_vector.narrow(0, offset, unpartitioned_numel).view(shape)
+ offset += unpartitioned_numel
+
+ # Z2 started to align to 2*world_size to improve nccl performance. Therefore both offset and
+ # avail_numel can differ by anywhere between 0..2*world_size. Due to two unrelated complex
+ # paddings performed in the code it's almost impossible to predict the exact numbers w/o the
+ # live optimizer object, so we are checking that the numbers are within the right range
+ align_to = 2 * world_size
+
+ def zero2_align(x):
+ return align_to * math.ceil(x / align_to)
+
+ if debug:
+ print(f"original offset={offset}, avail_numel={avail_numel}")
+
+ offset = zero2_align(offset)
+ avail_numel = zero2_align(avail_numel)
+
+ if debug:
+ print(f"aligned offset={offset}, avail_numel={avail_numel}")
+
+ # Sanity check
+ if offset != avail_numel:
+ raise ValueError(f"consumed {offset} numels out of {avail_numel} - something is wrong")
+
+ print(f"Reconstructed fp32 state dict with {total_params} params {total_numel} elements")
+
+
+def _get_fp32_state_dict_from_zero2_checkpoint(world_size, fp32_flat_groups, zero_model_states,
+ exclude_frozen_parameters):
+ state_dict = OrderedDict()
+
+ # buffers
+ buffers = zero_model_states[0].buffers
+ state_dict.update(buffers)
+ if debug:
+ print(f"added {len(buffers)} buffers")
+
+ if not exclude_frozen_parameters:
+ _zero2_merge_frozen_params(state_dict, zero_model_states)
+
+ _zero2_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states)
+
+ # recover shared parameters
+ for pair in zero_model_states[0].shared_params:
+ if pair[1] in state_dict:
+ state_dict[pair[0]] = state_dict[pair[1]]
+
+ return state_dict
+
+
+def zero3_partitioned_param_info(unpartitioned_numel, world_size):
+ remainder = unpartitioned_numel % world_size
+ padding_numel = (world_size - remainder) if remainder else 0
+ partitioned_numel = math.ceil(unpartitioned_numel / world_size)
+ return partitioned_numel, padding_numel
+
+
+def _zero3_merge_frozen_params(state_dict, world_size, zero_model_states):
+ if zero_model_states[0].frozen_param_shapes is None or len(zero_model_states[0].frozen_param_shapes) == 0:
+ return
+
+ if debug:
+ for i in range(world_size):
+ num_elem = sum(s.numel() for s in zero_model_states[i].frozen_param_fragments.values())
+ print(f'rank {i}: {FROZEN_PARAM_SHAPES}.numel = {num_elem}')
+
+ frozen_param_shapes = zero_model_states[0].frozen_param_shapes
+ wanted_params = len(frozen_param_shapes)
+ wanted_numel = sum(s.numel() for s in frozen_param_shapes.values())
+ avail_numel = sum([p.numel() for p in zero_model_states[0].frozen_param_fragments.values()]) * world_size
+ print(f'Frozen params: Have {avail_numel} numels to process.')
+ print(f'Frozen params: Need {wanted_numel} numels in {wanted_params} params')
+
+ total_params = 0
+ total_numel = 0
+ for name, shape in zero_model_states[0].frozen_param_shapes.items():
+ total_params += 1
+ unpartitioned_numel = shape.numel()
+ total_numel += unpartitioned_numel
+
+ param_frags = tuple(model_state.frozen_param_fragments[name] for model_state in zero_model_states)
+ state_dict[name] = torch.cat(param_frags, 0).narrow(0, 0, unpartitioned_numel).view(shape)
+
+ partitioned_numel, partitioned_padding_numel = zero3_partitioned_param_info(unpartitioned_numel, world_size)
+
+ if debug:
+ print(
+ f"Frozen params: {total_params} {name} full shape: {shape} partition0 numel={partitioned_numel} partitioned_padding_numel={partitioned_padding_numel}"
+ )
+
+ print(f"Reconstructed Frozen fp32 state dict with {total_params} params {total_numel} elements")
+
+
+def _zero3_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states):
+ param_shapes = zero_model_states[0].param_shapes
+ avail_numel = fp32_flat_groups[0].numel() * world_size
+ # Reconstruction protocol: For zero3 we need to zip the partitions together at boundary of each
+ # param, re-consolidating each param, while dealing with padding if any
+
+ # merge list of dicts, preserving order
+ param_shapes = {k: v for d in param_shapes for k, v in d.items()}
+
+ if debug:
+ for i in range(world_size):
+ print(f"{FP32_FLAT_GROUPS}[{i}].shape={fp32_flat_groups[i].shape}")
+
+ wanted_params = len(param_shapes)
+ wanted_numel = sum(shape.numel() for shape in param_shapes.values())
+ # not asserting if there is a mismatch due to possible padding
+ avail_numel = fp32_flat_groups[0].numel() * world_size
+ print(f"Trainable params: Have {avail_numel} numels to process.")
+ print(f"Trainable params: Need {wanted_numel} numels in {wanted_params} params.")
+
+ # params
+ # XXX: for huge models that can't fit into the host's RAM we will have to recode this to support
+ # out-of-core computing solution
+ offset = 0
+ total_numel = 0
+ total_params = 0
+ for name, shape in param_shapes.items():
+
+ unpartitioned_numel = shape.numel()
+ total_numel += unpartitioned_numel
+ total_params += 1
+
+ partitioned_numel, partitioned_padding_numel = zero3_partitioned_param_info(unpartitioned_numel, world_size)
+
+ if debug:
+ print(
+ f"Trainable params: {total_params} {name} full shape: {shape} partition0 numel={partitioned_numel} partitioned_padding_numel={partitioned_padding_numel}"
+ )
+
+ # XXX: memory usage doubles here
+ state_dict[name] = torch.cat(
+ tuple(fp32_flat_groups[i].narrow(0, offset, partitioned_numel) for i in range(world_size)),
+ 0).narrow(0, 0, unpartitioned_numel).view(shape)
+ offset += partitioned_numel
+
+ offset *= world_size
+
+ # Sanity check
+ if offset != avail_numel:
+ raise ValueError(f"consumed {offset} numels out of {avail_numel} - something is wrong")
+
+ print(f"Reconstructed Trainable fp32 state dict with {total_params} params {total_numel} elements")
+
+
+def _get_fp32_state_dict_from_zero3_checkpoint(world_size, fp32_flat_groups, zero_model_states,
+ exclude_frozen_parameters):
+ state_dict = OrderedDict()
+
+ # buffers
+ buffers = zero_model_states[0].buffers
+ state_dict.update(buffers)
+ if debug:
+ print(f"added {len(buffers)} buffers")
+
+ if not exclude_frozen_parameters:
+ _zero3_merge_frozen_params(state_dict, world_size, zero_model_states)
+
+ _zero3_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states)
+
+ # recover shared parameters
+ for pair in zero_model_states[0].shared_params:
+ if pair[1] in state_dict:
+ state_dict[pair[0]] = state_dict[pair[1]]
+
+ return state_dict
+
+
+def get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag=None, exclude_frozen_parameters=False):
+ """
+ Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated state_dict that can be loaded with
+ ``load_state_dict()`` and used for training without DeepSpeed or shared with others, for example
+ via a model hub.
+
+ Args:
+ - ``checkpoint_dir``: path to the desired checkpoint folder
+ - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in 'latest' file. e.g., ``global_step14``
+ - ``exclude_frozen_parameters``: exclude frozen parameters
+
+ Returns:
+ - pytorch ``state_dict``
+
+ Note: this approach may not work if your application doesn't have sufficient free CPU memory and
+ you may need to use the offline approach using the ``zero_to_fp32.py`` script that is saved with
+ the checkpoint.
+
+ A typical usage might be ::
+
+ from deepspeed.utils.zero_to_fp32 import get_fp32_state_dict_from_zero_checkpoint
+ # do the training and checkpoint saving
+ state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir) # already on cpu
+ model = model.cpu() # move to cpu
+ model.load_state_dict(state_dict)
+ # submit to model hub or save the model to share with others
+
+ In this example the ``model`` will no longer be usable in the deepspeed context of the same
+ application. i.e. you will need to re-initialize the deepspeed engine, since
+ ``model.load_state_dict(state_dict)`` will remove all the deepspeed magic from it.
+
+ If you want it all done for you, use ``load_state_dict_from_zero_checkpoint`` instead.
+
+ """
+ if tag is None:
+ latest_path = os.path.join(checkpoint_dir, 'latest')
+ if os.path.isfile(latest_path):
+ with open(latest_path, 'r') as fd:
+ tag = fd.read().strip()
+ else:
+ raise ValueError(f"Unable to find 'latest' file at {latest_path}")
+
+ ds_checkpoint_dir = os.path.join(checkpoint_dir, tag)
+
+ if not os.path.isdir(ds_checkpoint_dir):
+ raise FileNotFoundError(f"Directory '{ds_checkpoint_dir}' doesn't exist")
+
+ return _get_fp32_state_dict_from_zero_checkpoint(ds_checkpoint_dir, exclude_frozen_parameters)
+
+
+def convert_zero_checkpoint_to_fp32_state_dict(checkpoint_dir, output_file, tag=None, exclude_frozen_parameters=False):
+ """
+ Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated ``state_dict`` file that can be
+ loaded with ``torch.load(file)`` + ``load_state_dict()`` and used for training without DeepSpeed.
+
+ Args:
+ - ``checkpoint_dir``: path to the desired checkpoint folder. (one that contains the tag-folder, like ``global_step14``)
+ - ``output_file``: path to the pytorch fp32 state_dict output file (e.g. path/pytorch_model.bin)
+ - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in the file named ``latest`` in the checkpoint folder, e.g., ``global_step14``
+ - ``exclude_frozen_parameters``: exclude frozen parameters
+ """
+
+ state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag, exclude_frozen_parameters)
+ print(f"Saving fp32 state dict to {output_file}")
+ torch.save(state_dict, output_file)
+
+
+def load_state_dict_from_zero_checkpoint(model, checkpoint_dir, tag=None):
+ """
+ 1. Put the provided model to cpu
+ 2. Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated ``state_dict``
+ 3. Load it into the provided model
+
+ Args:
+ - ``model``: the model object to update
+ - ``checkpoint_dir``: path to the desired checkpoint folder. (one that contains the tag-folder, like ``global_step14``)
+ - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in the file named ``latest`` in the checkpoint folder, e.g., ``global_step14``
+
+ Returns:
+ - ``model`: modified model
+
+ Make sure you have plenty of CPU memory available before you call this function. If you don't
+ have enough use the ``zero_to_fp32.py`` utility to do the conversion. You will find it
+ conveniently placed for you in the checkpoint folder.
+
+ A typical usage might be ::
+
+ from deepspeed.utils.zero_to_fp32 import load_state_dict_from_zero_checkpoint
+ model = load_state_dict_from_zero_checkpoint(trainer.model, checkpoint_dir)
+ # submit to model hub or save the model to share with others
+
+ Note, that once this was run, the ``model`` will no longer be usable in the deepspeed context
+ of the same application. i.e. you will need to re-initialize the deepspeed engine, since
+ ``model.load_state_dict(state_dict)`` will remove all the deepspeed magic from it.
+
+ """
+ logger.info(f"Extracting fp32 weights")
+ state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag)
+
+ logger.info(f"Overwriting model with fp32 weights")
+ model = model.cpu()
+ model.load_state_dict(state_dict, strict=False)
+
+ return model
+
+
+if __name__ == "__main__":
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument("checkpoint_dir",
+ type=str,
+ help="path to the desired checkpoint folder, e.g., path/checkpoint-12")
+ parser.add_argument(
+ "output_file",
+ type=str,
+ help="path to the pytorch fp32 state_dict output file (e.g. path/checkpoint-12/pytorch_model.bin)")
+ parser.add_argument("-t",
+ "--tag",
+ type=str,
+ default=None,
+ help="checkpoint tag used as a unique identifier for checkpoint. e.g., global_step1")
+ parser.add_argument("--exclude_frozen_parameters", action='store_true', help="exclude frozen parameters")
+ parser.add_argument("-d", "--debug", action='store_true', help="enable debug")
+ args = parser.parse_args()
+
+ debug = args.debug
+
+ convert_zero_checkpoint_to_fp32_state_dict(args.checkpoint_dir,
+ args.output_file,
+ tag=args.tag,
+ exclude_frozen_parameters=args.exclude_frozen_parameters)
diff --git a/checkpoint-1595/README.md b/checkpoint-1595/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..f4a3934800eeb082a0cb833d7b6af4f68eed3615
--- /dev/null
+++ b/checkpoint-1595/README.md
@@ -0,0 +1,202 @@
+---
+base_model: nvidia/Llama-3_3-Nemotron-Super-49B-v1
+library_name: peft
+---
+
+# Model Card for Model ID
+
+
+
+
+
+## Model Details
+
+### Model Description
+
+
+
+
+
+- **Developed by:** [More Information Needed]
+- **Funded by [optional]:** [More Information Needed]
+- **Shared by [optional]:** [More Information Needed]
+- **Model type:** [More Information Needed]
+- **Language(s) (NLP):** [More Information Needed]
+- **License:** [More Information Needed]
+- **Finetuned from model [optional]:** [More Information Needed]
+
+### Model Sources [optional]
+
+
+
+- **Repository:** [More Information Needed]
+- **Paper [optional]:** [More Information Needed]
+- **Demo [optional]:** [More Information Needed]
+
+## Uses
+
+
+
+### Direct Use
+
+
+
+[More Information Needed]
+
+### Downstream Use [optional]
+
+
+
+[More Information Needed]
+
+### Out-of-Scope Use
+
+
+
+[More Information Needed]
+
+## Bias, Risks, and Limitations
+
+
+
+[More Information Needed]
+
+### Recommendations
+
+
+
+Users (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations.
+
+## How to Get Started with the Model
+
+Use the code below to get started with the model.
+
+[More Information Needed]
+
+## Training Details
+
+### Training Data
+
+
+
+[More Information Needed]
+
+### Training Procedure
+
+
+
+#### Preprocessing [optional]
+
+[More Information Needed]
+
+
+#### Training Hyperparameters
+
+- **Training regime:** [More Information Needed]
+
+#### Speeds, Sizes, Times [optional]
+
+
+
+[More Information Needed]
+
+## Evaluation
+
+
+
+### Testing Data, Factors & Metrics
+
+#### Testing Data
+
+
+
+[More Information Needed]
+
+#### Factors
+
+
+
+[More Information Needed]
+
+#### Metrics
+
+
+
+[More Information Needed]
+
+### Results
+
+[More Information Needed]
+
+#### Summary
+
+
+
+## Model Examination [optional]
+
+
+
+[More Information Needed]
+
+## Environmental Impact
+
+
+
+Carbon emissions can be estimated using the [Machine Learning Impact calculator](https://mlco2.github.io/impact#compute) presented in [Lacoste et al. (2019)](https://arxiv.org/abs/1910.09700).
+
+- **Hardware Type:** [More Information Needed]
+- **Hours used:** [More Information Needed]
+- **Cloud Provider:** [More Information Needed]
+- **Compute Region:** [More Information Needed]
+- **Carbon Emitted:** [More Information Needed]
+
+## Technical Specifications [optional]
+
+### Model Architecture and Objective
+
+[More Information Needed]
+
+### Compute Infrastructure
+
+[More Information Needed]
+
+#### Hardware
+
+[More Information Needed]
+
+#### Software
+
+[More Information Needed]
+
+## Citation [optional]
+
+
+
+**BibTeX:**
+
+[More Information Needed]
+
+**APA:**
+
+[More Information Needed]
+
+## Glossary [optional]
+
+
+
+[More Information Needed]
+
+## More Information [optional]
+
+[More Information Needed]
+
+## Model Card Authors [optional]
+
+[More Information Needed]
+
+## Model Card Contact
+
+[More Information Needed]
+### Framework versions
+
+- PEFT 0.15.0
\ No newline at end of file
diff --git a/checkpoint-1595/adapter_config.json b/checkpoint-1595/adapter_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..1edb33780e2306c6b19fd727be8e9b8b35f237c4
--- /dev/null
+++ b/checkpoint-1595/adapter_config.json
@@ -0,0 +1,42 @@
+{
+ "alpha_pattern": {},
+ "auto_mapping": null,
+ "base_model_name_or_path": "nvidia/Llama-3_3-Nemotron-Super-49B-v1",
+ "bias": "none",
+ "corda_config": null,
+ "eva_config": null,
+ "exclude_modules": null,
+ "fan_in_fan_out": null,
+ "inference_mode": true,
+ "init_lora_weights": true,
+ "layer_replication": null,
+ "layers_pattern": null,
+ "layers_to_transform": null,
+ "loftq_config": {},
+ "lora_alpha": 512,
+ "lora_bias": false,
+ "lora_dropout": 0.05,
+ "megatron_config": null,
+ "megatron_core": "megatron.core",
+ "modules_to_save": [
+ "embed_tokens",
+ "lm_head"
+ ],
+ "peft_type": "LORA",
+ "r": 256,
+ "rank_pattern": {},
+ "revision": null,
+ "target_modules": [
+ "gate_proj",
+ "k_proj",
+ "down_proj",
+ "o_proj",
+ "v_proj",
+ "up_proj",
+ "q_proj"
+ ],
+ "task_type": "CAUSAL_LM",
+ "trainable_token_indices": null,
+ "use_dora": false,
+ "use_rslora": false
+}
\ No newline at end of file
diff --git a/checkpoint-1595/adapter_model.safetensors b/checkpoint-1595/adapter_model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..060917ffa3d5b31b004e46e40378c582bde06737
--- /dev/null
+++ b/checkpoint-1595/adapter_model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d35d441689ced9e852803471814dd913950ba023247e90cf0f39c559f8737edd
+size 9016826528
diff --git a/checkpoint-1595/global_step1592/bf16_zero_pp_rank_0_mp_rank_00_optim_states.pt b/checkpoint-1595/global_step1592/bf16_zero_pp_rank_0_mp_rank_00_optim_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..e7782778faa1f579e9feebecfad2740a3b47cef8
--- /dev/null
+++ b/checkpoint-1595/global_step1592/bf16_zero_pp_rank_0_mp_rank_00_optim_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:eef7ae5af7ab5ad410fdb8eb5b22f8645e4ffeb24143f04db6f6f04e7a5fde3e
+size 27050164444
diff --git a/checkpoint-1595/global_step1592/bf16_zero_pp_rank_1_mp_rank_00_optim_states.pt b/checkpoint-1595/global_step1592/bf16_zero_pp_rank_1_mp_rank_00_optim_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..61eef1ac5e0373f31da6117a6e419c60dd409a21
--- /dev/null
+++ b/checkpoint-1595/global_step1592/bf16_zero_pp_rank_1_mp_rank_00_optim_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:97198b3e57b90b1b2b2e567d1cc90e29b870c31b282dd01bf137d73411429cf2
+size 27050169884
diff --git a/checkpoint-1595/global_step1592/mp_rank_00_model_states.pt b/checkpoint-1595/global_step1592/mp_rank_00_model_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..81c8370e3a784f0ecc677f2a9cf82fa72be43217
--- /dev/null
+++ b/checkpoint-1595/global_step1592/mp_rank_00_model_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:cd85f629390b3f992baab88f003d5b350091934f627a5849cf1530fad54eec7d
+size 9776788601
diff --git a/checkpoint-1595/latest b/checkpoint-1595/latest
new file mode 100644
index 0000000000000000000000000000000000000000..e3a83295a735ddc825ec0b9a9572d40b9610afc8
--- /dev/null
+++ b/checkpoint-1595/latest
@@ -0,0 +1 @@
+global_step1592
\ No newline at end of file
diff --git a/checkpoint-1595/rng_state_0.pth b/checkpoint-1595/rng_state_0.pth
new file mode 100644
index 0000000000000000000000000000000000000000..f7a7da06359fa62040befa6b2c55fdda15e3f700
--- /dev/null
+++ b/checkpoint-1595/rng_state_0.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a8385fc4f5dbe1eb501284b9c0e740cdc8c640953df92a6173e9527f6939ac71
+size 14512
diff --git a/checkpoint-1595/rng_state_1.pth b/checkpoint-1595/rng_state_1.pth
new file mode 100644
index 0000000000000000000000000000000000000000..5bf28c27329b2bc1b484e1eea5340aec233d9718
--- /dev/null
+++ b/checkpoint-1595/rng_state_1.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5a1edd2f0a0a5dac1c9d5d46cf82665e001446d5153cc015216e691b160d490b
+size 14512
diff --git a/checkpoint-1595/scheduler.pt b/checkpoint-1595/scheduler.pt
new file mode 100644
index 0000000000000000000000000000000000000000..a28eaf1a08372c2f3bb865836d82087559999807
--- /dev/null
+++ b/checkpoint-1595/scheduler.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c004aebd915715db3fac26f19f3aa0aa57fed482d012d2785763c8c5661bd279
+size 1064
diff --git a/checkpoint-1595/special_tokens_map.json b/checkpoint-1595/special_tokens_map.json
new file mode 100644
index 0000000000000000000000000000000000000000..278b7f0f84be865c4687700ee7b3c63d89a51e18
--- /dev/null
+++ b/checkpoint-1595/special_tokens_map.json
@@ -0,0 +1,23 @@
+{
+ "bos_token": {
+ "content": "<|begin_of_text|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "<|eot_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": {
+ "content": "<|end_of_text|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+}
diff --git a/checkpoint-1595/tokenizer.json b/checkpoint-1595/tokenizer.json
new file mode 100644
index 0000000000000000000000000000000000000000..1c1d8d5c9024994f1d3b00f9662b8dd89ca13cf2
--- /dev/null
+++ b/checkpoint-1595/tokenizer.json
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6b9e4e7fb171f92fd137b777cc2714bf87d11576700a1dcd7a399e7bbe39537b
+size 17209920
diff --git a/checkpoint-1595/tokenizer_config.json b/checkpoint-1595/tokenizer_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..edd01b980c1db496ea102a51c972ee8f5d1a2c74
--- /dev/null
+++ b/checkpoint-1595/tokenizer_config.json
@@ -0,0 +1,2064 @@
+{
+ "added_tokens_decoder": {
+ "128000": {
+ "content": "<|begin_of_text|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128001": {
+ "content": "<|end_of_text|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128002": {
+ "content": "<|reserved_special_token_0|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128003": {
+ "content": "<|reserved_special_token_1|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128004": {
+ "content": "<|finetune_right_pad_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128005": {
+ "content": "<|reserved_special_token_2|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128006": {
+ "content": "<|start_header_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128007": {
+ "content": "<|end_header_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128008": {
+ "content": "<|eom_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128009": {
+ "content": "<|eot_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128010": {
+ "content": "<|python_tag|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128011": {
+ "content": "<|reserved_special_token_3|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128012": {
+ "content": "<|reserved_special_token_4|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128013": {
+ "content": "<|reserved_special_token_5|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128014": {
+ "content": "<|reserved_special_token_6|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128015": {
+ "content": "<|reserved_special_token_7|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128016": {
+ "content": "<|reserved_special_token_8|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128017": {
+ "content": "<|reserved_special_token_9|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128018": {
+ "content": "<|reserved_special_token_10|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128019": {
+ "content": "<|reserved_special_token_11|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128020": {
+ "content": "<|reserved_special_token_12|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128021": {
+ "content": "<|reserved_special_token_13|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128022": {
+ "content": "<|reserved_special_token_14|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128023": {
+ "content": "<|reserved_special_token_15|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128024": {
+ "content": "<|reserved_special_token_16|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128025": {
+ "content": "<|reserved_special_token_17|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128026": {
+ "content": "<|reserved_special_token_18|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128027": {
+ "content": "<|reserved_special_token_19|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128028": {
+ "content": "<|reserved_special_token_20|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128029": {
+ "content": "<|reserved_special_token_21|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128030": {
+ "content": "<|reserved_special_token_22|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128031": {
+ "content": "<|reserved_special_token_23|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128032": {
+ "content": "<|reserved_special_token_24|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128033": {
+ "content": "<|reserved_special_token_25|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128034": {
+ "content": "<|reserved_special_token_26|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128035": {
+ "content": "<|reserved_special_token_27|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128036": {
+ "content": "<|reserved_special_token_28|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128037": {
+ "content": "<|reserved_special_token_29|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128038": {
+ "content": "<|reserved_special_token_30|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128039": {
+ "content": "<|reserved_special_token_31|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128040": {
+ "content": "<|reserved_special_token_32|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128041": {
+ "content": "<|reserved_special_token_33|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128042": {
+ "content": "<|reserved_special_token_34|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128043": {
+ "content": "<|reserved_special_token_35|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128044": {
+ "content": "<|reserved_special_token_36|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128045": {
+ "content": "<|reserved_special_token_37|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128046": {
+ "content": "<|reserved_special_token_38|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128047": {
+ "content": "<|reserved_special_token_39|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128048": {
+ "content": "<|reserved_special_token_40|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128049": {
+ "content": "<|reserved_special_token_41|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128050": {
+ "content": "<|reserved_special_token_42|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128051": {
+ "content": "<|reserved_special_token_43|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128052": {
+ "content": "<|reserved_special_token_44|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128053": {
+ "content": "<|reserved_special_token_45|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128054": {
+ "content": "<|reserved_special_token_46|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128055": {
+ "content": "<|reserved_special_token_47|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128056": {
+ "content": "<|reserved_special_token_48|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128057": {
+ "content": "<|reserved_special_token_49|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128058": {
+ "content": "<|reserved_special_token_50|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128059": {
+ "content": "<|reserved_special_token_51|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128060": {
+ "content": "<|reserved_special_token_52|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128061": {
+ "content": "<|reserved_special_token_53|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128062": {
+ "content": "<|reserved_special_token_54|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128063": {
+ "content": "<|reserved_special_token_55|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128064": {
+ "content": "<|reserved_special_token_56|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128065": {
+ "content": "<|reserved_special_token_57|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128066": {
+ "content": "<|reserved_special_token_58|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128067": {
+ "content": "<|reserved_special_token_59|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128068": {
+ "content": "<|reserved_special_token_60|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128069": {
+ "content": "<|reserved_special_token_61|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128070": {
+ "content": "<|reserved_special_token_62|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128071": {
+ "content": "<|reserved_special_token_63|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128072": {
+ "content": "<|reserved_special_token_64|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128073": {
+ "content": "<|reserved_special_token_65|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128074": {
+ "content": "<|reserved_special_token_66|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128075": {
+ "content": "<|reserved_special_token_67|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128076": {
+ "content": "<|reserved_special_token_68|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128077": {
+ "content": "<|reserved_special_token_69|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128078": {
+ "content": "<|reserved_special_token_70|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128079": {
+ "content": "<|reserved_special_token_71|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128080": {
+ "content": "<|reserved_special_token_72|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128081": {
+ "content": "<|reserved_special_token_73|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128082": {
+ "content": "<|reserved_special_token_74|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128083": {
+ "content": "<|reserved_special_token_75|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128084": {
+ "content": "<|reserved_special_token_76|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128085": {
+ "content": "<|reserved_special_token_77|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128086": {
+ "content": "<|reserved_special_token_78|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128087": {
+ "content": "<|reserved_special_token_79|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128088": {
+ "content": "<|reserved_special_token_80|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128089": {
+ "content": "<|reserved_special_token_81|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128090": {
+ "content": "<|reserved_special_token_82|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128091": {
+ "content": "<|reserved_special_token_83|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128092": {
+ "content": "<|reserved_special_token_84|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128093": {
+ "content": "<|reserved_special_token_85|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128094": {
+ "content": "<|reserved_special_token_86|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128095": {
+ "content": "<|reserved_special_token_87|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128096": {
+ "content": "<|reserved_special_token_88|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128097": {
+ "content": "<|reserved_special_token_89|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128098": {
+ "content": "<|reserved_special_token_90|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128099": {
+ "content": "<|reserved_special_token_91|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128100": {
+ "content": "<|reserved_special_token_92|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128101": {
+ "content": "<|reserved_special_token_93|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128102": {
+ "content": "<|reserved_special_token_94|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128103": {
+ "content": "<|reserved_special_token_95|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128104": {
+ "content": "<|reserved_special_token_96|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128105": {
+ "content": "<|reserved_special_token_97|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128106": {
+ "content": "<|reserved_special_token_98|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128107": {
+ "content": "<|reserved_special_token_99|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128108": {
+ "content": "<|reserved_special_token_100|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128109": {
+ "content": "<|reserved_special_token_101|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128110": {
+ "content": "<|reserved_special_token_102|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128111": {
+ "content": "<|reserved_special_token_103|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128112": {
+ "content": "<|reserved_special_token_104|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128113": {
+ "content": "<|reserved_special_token_105|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128114": {
+ "content": "<|reserved_special_token_106|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128115": {
+ "content": "<|reserved_special_token_107|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128116": {
+ "content": "<|reserved_special_token_108|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128117": {
+ "content": "<|reserved_special_token_109|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128118": {
+ "content": "<|reserved_special_token_110|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128119": {
+ "content": "<|reserved_special_token_111|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128120": {
+ "content": "<|reserved_special_token_112|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128121": {
+ "content": "<|reserved_special_token_113|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128122": {
+ "content": "<|reserved_special_token_114|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128123": {
+ "content": "<|reserved_special_token_115|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128124": {
+ "content": "<|reserved_special_token_116|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128125": {
+ "content": "<|reserved_special_token_117|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128126": {
+ "content": "<|reserved_special_token_118|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128127": {
+ "content": "<|reserved_special_token_119|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128128": {
+ "content": "<|reserved_special_token_120|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128129": {
+ "content": "<|reserved_special_token_121|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128130": {
+ "content": "<|reserved_special_token_122|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128131": {
+ "content": "<|reserved_special_token_123|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128132": {
+ "content": "<|reserved_special_token_124|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128133": {
+ "content": "<|reserved_special_token_125|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128134": {
+ "content": "<|reserved_special_token_126|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128135": {
+ "content": "<|reserved_special_token_127|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128136": {
+ "content": "<|reserved_special_token_128|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128137": {
+ "content": "<|reserved_special_token_129|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128138": {
+ "content": "<|reserved_special_token_130|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128139": {
+ "content": "<|reserved_special_token_131|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128140": {
+ "content": "<|reserved_special_token_132|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128141": {
+ "content": "<|reserved_special_token_133|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128142": {
+ "content": "<|reserved_special_token_134|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128143": {
+ "content": "<|reserved_special_token_135|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128144": {
+ "content": "<|reserved_special_token_136|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128145": {
+ "content": "<|reserved_special_token_137|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128146": {
+ "content": "<|reserved_special_token_138|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128147": {
+ "content": "<|reserved_special_token_139|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128148": {
+ "content": "<|reserved_special_token_140|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128149": {
+ "content": "<|reserved_special_token_141|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128150": {
+ "content": "<|reserved_special_token_142|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128151": {
+ "content": "<|reserved_special_token_143|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128152": {
+ "content": "<|reserved_special_token_144|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128153": {
+ "content": "<|reserved_special_token_145|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128154": {
+ "content": "<|reserved_special_token_146|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128155": {
+ "content": "<|reserved_special_token_147|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128156": {
+ "content": "<|reserved_special_token_148|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128157": {
+ "content": "<|reserved_special_token_149|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128158": {
+ "content": "<|reserved_special_token_150|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128159": {
+ "content": "<|reserved_special_token_151|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128160": {
+ "content": "<|reserved_special_token_152|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128161": {
+ "content": "<|reserved_special_token_153|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128162": {
+ "content": "<|reserved_special_token_154|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128163": {
+ "content": "<|reserved_special_token_155|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128164": {
+ "content": "<|reserved_special_token_156|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128165": {
+ "content": "<|reserved_special_token_157|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128166": {
+ "content": "<|reserved_special_token_158|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128167": {
+ "content": "<|reserved_special_token_159|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128168": {
+ "content": "<|reserved_special_token_160|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128169": {
+ "content": "<|reserved_special_token_161|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128170": {
+ "content": "<|reserved_special_token_162|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128171": {
+ "content": "<|reserved_special_token_163|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128172": {
+ "content": "<|reserved_special_token_164|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128173": {
+ "content": "<|reserved_special_token_165|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128174": {
+ "content": "<|reserved_special_token_166|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128175": {
+ "content": "<|reserved_special_token_167|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128176": {
+ "content": "<|reserved_special_token_168|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128177": {
+ "content": "<|reserved_special_token_169|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128178": {
+ "content": "<|reserved_special_token_170|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128179": {
+ "content": "<|reserved_special_token_171|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128180": {
+ "content": "<|reserved_special_token_172|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128181": {
+ "content": "<|reserved_special_token_173|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128182": {
+ "content": "<|reserved_special_token_174|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128183": {
+ "content": "<|reserved_special_token_175|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128184": {
+ "content": "<|reserved_special_token_176|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128185": {
+ "content": "<|reserved_special_token_177|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128186": {
+ "content": "<|reserved_special_token_178|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128187": {
+ "content": "<|reserved_special_token_179|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128188": {
+ "content": "<|reserved_special_token_180|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128189": {
+ "content": "<|reserved_special_token_181|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128190": {
+ "content": "<|reserved_special_token_182|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128191": {
+ "content": "<|reserved_special_token_183|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128192": {
+ "content": "<|reserved_special_token_184|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128193": {
+ "content": "<|reserved_special_token_185|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128194": {
+ "content": "<|reserved_special_token_186|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128195": {
+ "content": "<|reserved_special_token_187|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128196": {
+ "content": "<|reserved_special_token_188|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128197": {
+ "content": "<|reserved_special_token_189|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128198": {
+ "content": "<|reserved_special_token_190|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128199": {
+ "content": "<|reserved_special_token_191|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128200": {
+ "content": "<|reserved_special_token_192|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128201": {
+ "content": "<|reserved_special_token_193|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128202": {
+ "content": "<|reserved_special_token_194|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128203": {
+ "content": "<|reserved_special_token_195|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128204": {
+ "content": "<|reserved_special_token_196|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128205": {
+ "content": "<|reserved_special_token_197|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128206": {
+ "content": "<|reserved_special_token_198|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128207": {
+ "content": "<|reserved_special_token_199|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128208": {
+ "content": "<|reserved_special_token_200|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128209": {
+ "content": "<|reserved_special_token_201|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128210": {
+ "content": "<|reserved_special_token_202|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128211": {
+ "content": "<|reserved_special_token_203|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128212": {
+ "content": "<|reserved_special_token_204|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128213": {
+ "content": "<|reserved_special_token_205|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128214": {
+ "content": "<|reserved_special_token_206|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128215": {
+ "content": "<|reserved_special_token_207|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128216": {
+ "content": "<|reserved_special_token_208|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128217": {
+ "content": "<|reserved_special_token_209|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128218": {
+ "content": "<|reserved_special_token_210|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128219": {
+ "content": "<|reserved_special_token_211|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128220": {
+ "content": "<|reserved_special_token_212|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128221": {
+ "content": "<|reserved_special_token_213|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128222": {
+ "content": "<|reserved_special_token_214|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128223": {
+ "content": "<|reserved_special_token_215|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128224": {
+ "content": "<|reserved_special_token_216|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128225": {
+ "content": "<|reserved_special_token_217|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128226": {
+ "content": "<|reserved_special_token_218|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128227": {
+ "content": "<|reserved_special_token_219|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128228": {
+ "content": "<|reserved_special_token_220|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128229": {
+ "content": "<|reserved_special_token_221|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128230": {
+ "content": "<|reserved_special_token_222|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128231": {
+ "content": "<|reserved_special_token_223|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128232": {
+ "content": "<|reserved_special_token_224|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128233": {
+ "content": "<|reserved_special_token_225|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128234": {
+ "content": "<|reserved_special_token_226|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128235": {
+ "content": "<|reserved_special_token_227|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128236": {
+ "content": "<|reserved_special_token_228|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128237": {
+ "content": "<|reserved_special_token_229|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128238": {
+ "content": "<|reserved_special_token_230|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128239": {
+ "content": "<|reserved_special_token_231|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128240": {
+ "content": "<|reserved_special_token_232|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128241": {
+ "content": "<|reserved_special_token_233|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128242": {
+ "content": "<|reserved_special_token_234|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128243": {
+ "content": "<|reserved_special_token_235|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128244": {
+ "content": "<|reserved_special_token_236|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128245": {
+ "content": "<|reserved_special_token_237|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128246": {
+ "content": "<|reserved_special_token_238|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128247": {
+ "content": "<|reserved_special_token_239|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128248": {
+ "content": "<|reserved_special_token_240|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128249": {
+ "content": "<|reserved_special_token_241|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128250": {
+ "content": "<|reserved_special_token_242|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128251": {
+ "content": "<|reserved_special_token_243|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128252": {
+ "content": "<|reserved_special_token_244|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128253": {
+ "content": "<|reserved_special_token_245|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128254": {
+ "content": "<|reserved_special_token_246|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128255": {
+ "content": "<|reserved_special_token_247|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ }
+ },
+ "bos_token": "<|begin_of_text|>",
+  "chat_template": "{{- bos_token }}{%- if messages[0]['role'] == 'system' %}{%- set system_message = messages[0]['content']|trim %}{%- set messages = messages[1:] %}{%- else %}{%- set system_message = \"\" %}{%- endif %}{{- \"<|start_header_id|>system<|end_header_id|>\\n\\n\" }}{{- system_message }}{{- \"<|eot_id|>\" }}{%- for message in messages %}{%- if message['role'] == 'assistant' and '</think>' in message['content'] %}{%- set content = message['content'].split('</think>')[-1].lstrip() %}{%- else %}{%- set content = message['content'] %}{%- endif %}{{- '<|start_header_id|>' + message['role'] + '<|end_header_id|>\\n\\n' + content | trim + '<|eot_id|>' }}{%- endfor %}{%- if add_generation_prompt %}{{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' }}{%- endif %}",
+ "clean_up_tokenization_spaces": true,
+ "eos_token": "<|eot_id|>",
+ "extra_special_tokens": {},
+ "model_input_names": [
+ "input_ids",
+ "attention_mask"
+ ],
+ "model_max_length": 131072,
+ "pad_token": "<|end_of_text|>",
+ "tokenizer_class": "PreTrainedTokenizer"
+}
diff --git a/checkpoint-1595/trainer_state.json b/checkpoint-1595/trainer_state.json
new file mode 100644
index 0000000000000000000000000000000000000000..3c48c1f5ff248851b77fbaa112a7cbceb308354e
--- /dev/null
+++ b/checkpoint-1595/trainer_state.json
@@ -0,0 +1,11198 @@
+{
+ "best_metric": null,
+ "best_model_checkpoint": null,
+ "epoch": 4.98643006263048,
+ "eval_steps": 500,
+ "global_step": 1595,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ {
+ "epoch": 0.003131524008350731,
+ "grad_norm": 13.917898178100586,
+ "learning_rate": 5.0000000000000004e-08,
+ "loss": 4.1051,
+ "step": 1
+ },
+ {
+ "epoch": 0.006263048016701462,
+ "grad_norm": 17.327869415283203,
+ "learning_rate": 1.0000000000000001e-07,
+ "loss": 4.1048,
+ "step": 2
+ },
+ {
+ "epoch": 0.009394572025052192,
+ "grad_norm": 14.063946723937988,
+ "learning_rate": 1.5000000000000002e-07,
+ "loss": 4.0741,
+ "step": 3
+ },
+ {
+ "epoch": 0.012526096033402923,
+ "grad_norm": 16.817699432373047,
+ "learning_rate": 2.0000000000000002e-07,
+ "loss": 4.2002,
+ "step": 4
+ },
+ {
+ "epoch": 0.015657620041753653,
+ "grad_norm": 14.47036361694336,
+ "learning_rate": 2.5000000000000004e-07,
+ "loss": 4.2652,
+ "step": 5
+ },
+ {
+ "epoch": 0.018789144050104383,
+ "grad_norm": 14.474193572998047,
+ "learning_rate": 3.0000000000000004e-07,
+ "loss": 4.0888,
+ "step": 6
+ },
+ {
+ "epoch": 0.021920668058455117,
+ "grad_norm": 14.865458488464355,
+ "learning_rate": 3.5000000000000004e-07,
+ "loss": 4.0014,
+ "step": 7
+ },
+ {
+ "epoch": 0.025052192066805846,
+ "grad_norm": 15.338888168334961,
+ "learning_rate": 4.0000000000000003e-07,
+ "loss": 4.13,
+ "step": 8
+ },
+ {
+ "epoch": 0.028183716075156576,
+ "grad_norm": 15.154336929321289,
+ "learning_rate": 4.5000000000000003e-07,
+ "loss": 4.2493,
+ "step": 9
+ },
+ {
+ "epoch": 0.031315240083507306,
+ "grad_norm": 15.919597625732422,
+ "learning_rate": 5.000000000000001e-07,
+ "loss": 4.0535,
+ "step": 10
+ },
+ {
+ "epoch": 0.03444676409185804,
+ "grad_norm": 14.981926918029785,
+ "learning_rate": 5.5e-07,
+ "loss": 3.9064,
+ "step": 11
+ },
+ {
+ "epoch": 0.037578288100208766,
+ "grad_norm": 13.36101245880127,
+ "learning_rate": 6.000000000000001e-07,
+ "loss": 4.1939,
+ "step": 12
+ },
+ {
+ "epoch": 0.0407098121085595,
+ "grad_norm": 15.58773422241211,
+ "learning_rate": 6.5e-07,
+ "loss": 4.18,
+ "step": 13
+ },
+ {
+ "epoch": 0.04384133611691023,
+ "grad_norm": 13.560139656066895,
+ "learning_rate": 7.000000000000001e-07,
+ "loss": 3.9414,
+ "step": 14
+ },
+ {
+ "epoch": 0.04697286012526096,
+ "grad_norm": 12.307971954345703,
+ "learning_rate": 7.5e-07,
+ "loss": 3.8836,
+ "step": 15
+ },
+ {
+ "epoch": 0.05010438413361169,
+ "grad_norm": 14.533182144165039,
+ "learning_rate": 8.000000000000001e-07,
+ "loss": 4.1551,
+ "step": 16
+ },
+ {
+ "epoch": 0.05323590814196242,
+ "grad_norm": 13.453729629516602,
+ "learning_rate": 8.500000000000001e-07,
+ "loss": 4.0048,
+ "step": 17
+ },
+ {
+ "epoch": 0.05636743215031315,
+ "grad_norm": 13.45992374420166,
+ "learning_rate": 9.000000000000001e-07,
+ "loss": 4.0745,
+ "step": 18
+ },
+ {
+ "epoch": 0.059498956158663886,
+ "grad_norm": 11.857145309448242,
+ "learning_rate": 9.500000000000001e-07,
+ "loss": 3.9871,
+ "step": 19
+ },
+ {
+ "epoch": 0.06263048016701461,
+ "grad_norm": 11.872294425964355,
+ "learning_rate": 1.0000000000000002e-06,
+ "loss": 3.8959,
+ "step": 20
+ },
+ {
+ "epoch": 0.06576200417536535,
+ "grad_norm": 12.969825744628906,
+ "learning_rate": 1.0500000000000001e-06,
+ "loss": 4.0308,
+ "step": 21
+ },
+ {
+ "epoch": 0.06889352818371608,
+ "grad_norm": 12.33769416809082,
+ "learning_rate": 1.1e-06,
+ "loss": 3.9341,
+ "step": 22
+ },
+ {
+ "epoch": 0.0720250521920668,
+ "grad_norm": 12.669405937194824,
+ "learning_rate": 1.1500000000000002e-06,
+ "loss": 3.8511,
+ "step": 23
+ },
+ {
+ "epoch": 0.07515657620041753,
+ "grad_norm": 10.677213668823242,
+ "learning_rate": 1.2000000000000002e-06,
+ "loss": 3.7764,
+ "step": 24
+ },
+ {
+ "epoch": 0.07828810020876827,
+ "grad_norm": 10.366402626037598,
+ "learning_rate": 1.25e-06,
+ "loss": 3.5291,
+ "step": 25
+ },
+ {
+ "epoch": 0.081419624217119,
+ "grad_norm": 11.211421012878418,
+ "learning_rate": 1.3e-06,
+ "loss": 3.5765,
+ "step": 26
+ },
+ {
+ "epoch": 0.08455114822546973,
+ "grad_norm": 11.313716888427734,
+ "learning_rate": 1.3500000000000002e-06,
+ "loss": 3.4849,
+ "step": 27
+ },
+ {
+ "epoch": 0.08768267223382047,
+ "grad_norm": 10.41294002532959,
+ "learning_rate": 1.4000000000000001e-06,
+ "loss": 3.2653,
+ "step": 28
+ },
+ {
+ "epoch": 0.09081419624217119,
+ "grad_norm": 10.40064525604248,
+ "learning_rate": 1.45e-06,
+ "loss": 3.3384,
+ "step": 29
+ },
+ {
+ "epoch": 0.09394572025052192,
+ "grad_norm": 10.05427074432373,
+ "learning_rate": 1.5e-06,
+ "loss": 3.2257,
+ "step": 30
+ },
+ {
+ "epoch": 0.09707724425887265,
+ "grad_norm": 9.583163261413574,
+ "learning_rate": 1.5500000000000002e-06,
+ "loss": 3.1371,
+ "step": 31
+ },
+ {
+ "epoch": 0.10020876826722339,
+ "grad_norm": 10.09977912902832,
+ "learning_rate": 1.6000000000000001e-06,
+ "loss": 3.0658,
+ "step": 32
+ },
+ {
+ "epoch": 0.10334029227557412,
+ "grad_norm": 9.271486282348633,
+ "learning_rate": 1.6500000000000003e-06,
+ "loss": 2.9693,
+ "step": 33
+ },
+ {
+ "epoch": 0.10647181628392484,
+ "grad_norm": 10.687992095947266,
+ "learning_rate": 1.7000000000000002e-06,
+ "loss": 2.95,
+ "step": 34
+ },
+ {
+ "epoch": 0.10960334029227557,
+ "grad_norm": 8.762290000915527,
+ "learning_rate": 1.75e-06,
+ "loss": 2.8286,
+ "step": 35
+ },
+ {
+ "epoch": 0.1127348643006263,
+ "grad_norm": 10.13785171508789,
+ "learning_rate": 1.8000000000000001e-06,
+ "loss": 2.3664,
+ "step": 36
+ },
+ {
+ "epoch": 0.11586638830897704,
+ "grad_norm": 18.301353454589844,
+ "learning_rate": 1.85e-06,
+ "loss": 2.5533,
+ "step": 37
+ },
+ {
+ "epoch": 0.11899791231732777,
+ "grad_norm": 11.490377426147461,
+ "learning_rate": 1.9000000000000002e-06,
+ "loss": 2.6133,
+ "step": 38
+ },
+ {
+ "epoch": 0.12212943632567849,
+ "grad_norm": 15.614163398742676,
+ "learning_rate": 1.9500000000000004e-06,
+ "loss": 2.3596,
+ "step": 39
+ },
+ {
+ "epoch": 0.12526096033402923,
+ "grad_norm": 17.757442474365234,
+ "learning_rate": 2.0000000000000003e-06,
+ "loss": 2.3491,
+ "step": 40
+ },
+ {
+ "epoch": 0.12839248434237996,
+ "grad_norm": 17.18431854248047,
+ "learning_rate": 2.05e-06,
+ "loss": 2.2361,
+ "step": 41
+ },
+ {
+ "epoch": 0.1315240083507307,
+ "grad_norm": 16.149789810180664,
+ "learning_rate": 2.1000000000000002e-06,
+ "loss": 2.1457,
+ "step": 42
+ },
+ {
+ "epoch": 0.13465553235908143,
+ "grad_norm": 15.256914138793945,
+ "learning_rate": 2.15e-06,
+ "loss": 2.12,
+ "step": 43
+ },
+ {
+ "epoch": 0.13778705636743216,
+ "grad_norm": 15.537406921386719,
+ "learning_rate": 2.2e-06,
+ "loss": 2.1877,
+ "step": 44
+ },
+ {
+ "epoch": 0.1409185803757829,
+ "grad_norm": 7.947713851928711,
+ "learning_rate": 2.25e-06,
+ "loss": 2.1648,
+ "step": 45
+ },
+ {
+ "epoch": 0.1440501043841336,
+ "grad_norm": 8.818676948547363,
+ "learning_rate": 2.3000000000000004e-06,
+ "loss": 2.134,
+ "step": 46
+ },
+ {
+ "epoch": 0.14718162839248433,
+ "grad_norm": 5.175768852233887,
+ "learning_rate": 2.35e-06,
+ "loss": 2.0796,
+ "step": 47
+ },
+ {
+ "epoch": 0.15031315240083507,
+ "grad_norm": 6.750611305236816,
+ "learning_rate": 2.4000000000000003e-06,
+ "loss": 1.9174,
+ "step": 48
+ },
+ {
+ "epoch": 0.1534446764091858,
+ "grad_norm": 6.2147979736328125,
+ "learning_rate": 2.4500000000000003e-06,
+ "loss": 1.8065,
+ "step": 49
+ },
+ {
+ "epoch": 0.15657620041753653,
+ "grad_norm": 13.291611671447754,
+ "learning_rate": 2.5e-06,
+ "loss": 1.7061,
+ "step": 50
+ },
+ {
+ "epoch": 0.15970772442588727,
+ "grad_norm": 7.251201629638672,
+ "learning_rate": 2.55e-06,
+ "loss": 1.7924,
+ "step": 51
+ },
+ {
+ "epoch": 0.162839248434238,
+ "grad_norm": 5.2126054763793945,
+ "learning_rate": 2.6e-06,
+ "loss": 1.6735,
+ "step": 52
+ },
+ {
+ "epoch": 0.16597077244258873,
+ "grad_norm": 5.435528755187988,
+ "learning_rate": 2.6500000000000005e-06,
+ "loss": 1.6265,
+ "step": 53
+ },
+ {
+ "epoch": 0.16910229645093947,
+ "grad_norm": 4.505807399749756,
+ "learning_rate": 2.7000000000000004e-06,
+ "loss": 1.4851,
+ "step": 54
+ },
+ {
+ "epoch": 0.1722338204592902,
+ "grad_norm": 5.128388404846191,
+ "learning_rate": 2.7500000000000004e-06,
+ "loss": 1.5832,
+ "step": 55
+ },
+ {
+ "epoch": 0.17536534446764093,
+ "grad_norm": 16.935827255249023,
+ "learning_rate": 2.8000000000000003e-06,
+ "loss": 1.6553,
+ "step": 56
+ },
+ {
+ "epoch": 0.17849686847599164,
+ "grad_norm": 3.664458990097046,
+ "learning_rate": 2.85e-06,
+ "loss": 1.5,
+ "step": 57
+ },
+ {
+ "epoch": 0.18162839248434237,
+ "grad_norm": 7.763802528381348,
+ "learning_rate": 2.9e-06,
+ "loss": 1.367,
+ "step": 58
+ },
+ {
+ "epoch": 0.1847599164926931,
+ "grad_norm": 3.2216155529022217,
+ "learning_rate": 2.95e-06,
+ "loss": 1.3863,
+ "step": 59
+ },
+ {
+ "epoch": 0.18789144050104384,
+ "grad_norm": 4.384445667266846,
+ "learning_rate": 3e-06,
+ "loss": 1.4247,
+ "step": 60
+ },
+ {
+ "epoch": 0.19102296450939457,
+ "grad_norm": 4.8080878257751465,
+ "learning_rate": 3.05e-06,
+ "loss": 1.3257,
+ "step": 61
+ },
+ {
+ "epoch": 0.1941544885177453,
+ "grad_norm": 4.154761791229248,
+ "learning_rate": 3.1000000000000004e-06,
+ "loss": 1.321,
+ "step": 62
+ },
+ {
+ "epoch": 0.19728601252609604,
+ "grad_norm": 6.4742112159729,
+ "learning_rate": 3.1500000000000003e-06,
+ "loss": 1.2823,
+ "step": 63
+ },
+ {
+ "epoch": 0.20041753653444677,
+ "grad_norm": 2.583422899246216,
+ "learning_rate": 3.2000000000000003e-06,
+ "loss": 1.2136,
+ "step": 64
+ },
+ {
+ "epoch": 0.2035490605427975,
+ "grad_norm": 4.1933488845825195,
+ "learning_rate": 3.2500000000000002e-06,
+ "loss": 1.1855,
+ "step": 65
+ },
+ {
+ "epoch": 0.20668058455114824,
+ "grad_norm": 4.11049747467041,
+ "learning_rate": 3.3000000000000006e-06,
+ "loss": 1.2389,
+ "step": 66
+ },
+ {
+ "epoch": 0.20981210855949894,
+ "grad_norm": 2.264458417892456,
+ "learning_rate": 3.3500000000000005e-06,
+ "loss": 1.0651,
+ "step": 67
+ },
+ {
+ "epoch": 0.21294363256784968,
+ "grad_norm": 2.5408174991607666,
+ "learning_rate": 3.4000000000000005e-06,
+ "loss": 1.1389,
+ "step": 68
+ },
+ {
+ "epoch": 0.2160751565762004,
+ "grad_norm": 7.82421350479126,
+ "learning_rate": 3.45e-06,
+ "loss": 1.0956,
+ "step": 69
+ },
+ {
+ "epoch": 0.21920668058455114,
+ "grad_norm": 3.070939064025879,
+ "learning_rate": 3.5e-06,
+ "loss": 1.0451,
+ "step": 70
+ },
+ {
+ "epoch": 0.22233820459290188,
+ "grad_norm": 2.6310527324676514,
+ "learning_rate": 3.5500000000000003e-06,
+ "loss": 1.0538,
+ "step": 71
+ },
+ {
+ "epoch": 0.2254697286012526,
+ "grad_norm": 7.630155563354492,
+ "learning_rate": 3.6000000000000003e-06,
+ "loss": 1.0052,
+ "step": 72
+ },
+ {
+ "epoch": 0.22860125260960334,
+ "grad_norm": 6.950636863708496,
+ "learning_rate": 3.65e-06,
+ "loss": 1.0473,
+ "step": 73
+ },
+ {
+ "epoch": 0.23173277661795408,
+ "grad_norm": 2.2703945636749268,
+ "learning_rate": 3.7e-06,
+ "loss": 1.0576,
+ "step": 74
+ },
+ {
+ "epoch": 0.2348643006263048,
+ "grad_norm": 3.3817710876464844,
+ "learning_rate": 3.7500000000000005e-06,
+ "loss": 1.0177,
+ "step": 75
+ },
+ {
+ "epoch": 0.23799582463465555,
+ "grad_norm": 7.266414642333984,
+ "learning_rate": 3.8000000000000005e-06,
+ "loss": 1.0645,
+ "step": 76
+ },
+ {
+ "epoch": 0.24112734864300625,
+ "grad_norm": 5.782608509063721,
+ "learning_rate": 3.85e-06,
+ "loss": 1.0162,
+ "step": 77
+ },
+ {
+ "epoch": 0.24425887265135698,
+ "grad_norm": 2.7938575744628906,
+ "learning_rate": 3.900000000000001e-06,
+ "loss": 0.9664,
+ "step": 78
+ },
+ {
+ "epoch": 0.24739039665970772,
+ "grad_norm": 6.681935787200928,
+ "learning_rate": 3.95e-06,
+ "loss": 0.953,
+ "step": 79
+ },
+ {
+ "epoch": 0.25052192066805845,
+ "grad_norm": 2.253279209136963,
+ "learning_rate": 4.000000000000001e-06,
+ "loss": 0.9568,
+ "step": 80
+ },
+ {
+ "epoch": 0.2536534446764092,
+ "grad_norm": 1.4875826835632324,
+ "learning_rate": 4.05e-06,
+ "loss": 0.9448,
+ "step": 81
+ },
+ {
+ "epoch": 0.2567849686847599,
+ "grad_norm": 2.4987940788269043,
+ "learning_rate": 4.1e-06,
+ "loss": 0.9393,
+ "step": 82
+ },
+ {
+ "epoch": 0.2599164926931106,
+ "grad_norm": 4.712948322296143,
+ "learning_rate": 4.15e-06,
+ "loss": 0.9532,
+ "step": 83
+ },
+ {
+ "epoch": 0.2630480167014614,
+ "grad_norm": 6.9030632972717285,
+ "learning_rate": 4.2000000000000004e-06,
+ "loss": 0.96,
+ "step": 84
+ },
+ {
+ "epoch": 0.2661795407098121,
+ "grad_norm": 3.4780967235565186,
+ "learning_rate": 4.25e-06,
+ "loss": 0.8993,
+ "step": 85
+ },
+ {
+ "epoch": 0.26931106471816285,
+ "grad_norm": 1.526064395904541,
+ "learning_rate": 4.3e-06,
+ "loss": 0.9021,
+ "step": 86
+ },
+ {
+ "epoch": 0.27244258872651356,
+ "grad_norm": 10.727686882019043,
+ "learning_rate": 4.350000000000001e-06,
+ "loss": 0.856,
+ "step": 87
+ },
+ {
+ "epoch": 0.2755741127348643,
+ "grad_norm": 12.483160972595215,
+ "learning_rate": 4.4e-06,
+ "loss": 0.9357,
+ "step": 88
+ },
+ {
+ "epoch": 0.278705636743215,
+ "grad_norm": 6.544492244720459,
+ "learning_rate": 4.450000000000001e-06,
+ "loss": 0.9168,
+ "step": 89
+ },
+ {
+ "epoch": 0.2818371607515658,
+ "grad_norm": 1.178139567375183,
+ "learning_rate": 4.5e-06,
+ "loss": 0.8748,
+ "step": 90
+ },
+ {
+ "epoch": 0.2849686847599165,
+ "grad_norm": 1.711506962776184,
+ "learning_rate": 4.5500000000000005e-06,
+ "loss": 0.8425,
+ "step": 91
+ },
+ {
+ "epoch": 0.2881002087682672,
+ "grad_norm": 3.281747341156006,
+ "learning_rate": 4.600000000000001e-06,
+ "loss": 0.8491,
+ "step": 92
+ },
+ {
+ "epoch": 0.29123173277661796,
+ "grad_norm": 2.2964377403259277,
+ "learning_rate": 4.65e-06,
+ "loss": 0.8038,
+ "step": 93
+ },
+ {
+ "epoch": 0.29436325678496866,
+ "grad_norm": 1.959700345993042,
+ "learning_rate": 4.7e-06,
+ "loss": 0.8439,
+ "step": 94
+ },
+ {
+ "epoch": 0.2974947807933194,
+ "grad_norm": 3.979384183883667,
+ "learning_rate": 4.75e-06,
+ "loss": 0.8839,
+ "step": 95
+ },
+ {
+ "epoch": 0.30062630480167013,
+ "grad_norm": 1.4721262454986572,
+ "learning_rate": 4.800000000000001e-06,
+ "loss": 0.845,
+ "step": 96
+ },
+ {
+ "epoch": 0.3037578288100209,
+ "grad_norm": 2.862248659133911,
+ "learning_rate": 4.85e-06,
+ "loss": 0.7748,
+ "step": 97
+ },
+ {
+ "epoch": 0.3068893528183716,
+ "grad_norm": 3.7439088821411133,
+ "learning_rate": 4.9000000000000005e-06,
+ "loss": 0.8145,
+ "step": 98
+ },
+ {
+ "epoch": 0.31002087682672236,
+ "grad_norm": 1.6654618978500366,
+ "learning_rate": 4.95e-06,
+ "loss": 0.8326,
+ "step": 99
+ },
+ {
+ "epoch": 0.31315240083507306,
+ "grad_norm": 7.8437581062316895,
+ "learning_rate": 5e-06,
+ "loss": 0.8666,
+ "step": 100
+ },
+ {
+ "epoch": 0.3162839248434238,
+ "grad_norm": 6.429738521575928,
+ "learning_rate": 4.999996250830422e-06,
+ "loss": 0.836,
+ "step": 101
+ },
+ {
+ "epoch": 0.31941544885177453,
+ "grad_norm": 2.6017794609069824,
+ "learning_rate": 4.9999850033329326e-06,
+ "loss": 0.7785,
+ "step": 102
+ },
+ {
+ "epoch": 0.32254697286012524,
+ "grad_norm": 1.0575449466705322,
+ "learning_rate": 4.999966257541265e-06,
+ "loss": 0.7639,
+ "step": 103
+ },
+ {
+ "epoch": 0.325678496868476,
+ "grad_norm": 2.6932010650634766,
+ "learning_rate": 4.999940013511647e-06,
+ "loss": 0.8214,
+ "step": 104
+ },
+ {
+ "epoch": 0.3288100208768267,
+ "grad_norm": 2.925288438796997,
+ "learning_rate": 4.999906271322792e-06,
+ "loss": 0.8797,
+ "step": 105
+ },
+ {
+ "epoch": 0.33194154488517746,
+ "grad_norm": 1.3570607900619507,
+ "learning_rate": 4.9998650310759035e-06,
+ "loss": 0.792,
+ "step": 106
+ },
+ {
+ "epoch": 0.33507306889352817,
+ "grad_norm": 5.126713752746582,
+ "learning_rate": 4.999816292894676e-06,
+ "loss": 0.8352,
+ "step": 107
+ },
+ {
+ "epoch": 0.33820459290187893,
+ "grad_norm": 1.8966432809829712,
+ "learning_rate": 4.99976005692529e-06,
+ "loss": 0.7663,
+ "step": 108
+ },
+ {
+ "epoch": 0.34133611691022964,
+ "grad_norm": 1.3100829124450684,
+ "learning_rate": 4.999696323336418e-06,
+ "loss": 0.771,
+ "step": 109
+ },
+ {
+ "epoch": 0.3444676409185804,
+ "grad_norm": 2.4025354385375977,
+ "learning_rate": 4.999625092319218e-06,
+ "loss": 0.7618,
+ "step": 110
+ },
+ {
+ "epoch": 0.3475991649269311,
+ "grad_norm": 1.130232810974121,
+ "learning_rate": 4.999546364087334e-06,
+ "loss": 0.7705,
+ "step": 111
+ },
+ {
+ "epoch": 0.35073068893528186,
+ "grad_norm": 3.430262327194214,
+ "learning_rate": 4.999460138876901e-06,
+ "loss": 0.77,
+ "step": 112
+ },
+ {
+ "epoch": 0.35386221294363257,
+ "grad_norm": 1.1272103786468506,
+ "learning_rate": 4.999366416946536e-06,
+ "loss": 0.7133,
+ "step": 113
+ },
+ {
+ "epoch": 0.3569937369519833,
+ "grad_norm": 1.1740471124649048,
+ "learning_rate": 4.999265198577342e-06,
+ "loss": 0.7684,
+ "step": 114
+ },
+ {
+ "epoch": 0.36012526096033404,
+ "grad_norm": 1.3138248920440674,
+ "learning_rate": 4.999156484072907e-06,
+ "loss": 0.7888,
+ "step": 115
+ },
+ {
+ "epoch": 0.36325678496868474,
+ "grad_norm": 1.061711311340332,
+ "learning_rate": 4.999040273759304e-06,
+ "loss": 0.7484,
+ "step": 116
+ },
+ {
+ "epoch": 0.3663883089770355,
+ "grad_norm": 1.4682390689849854,
+ "learning_rate": 4.998916567985083e-06,
+ "loss": 0.7296,
+ "step": 117
+ },
+ {
+ "epoch": 0.3695198329853862,
+ "grad_norm": 2.884068250656128,
+ "learning_rate": 4.998785367121284e-06,
+ "loss": 0.7662,
+ "step": 118
+ },
+ {
+ "epoch": 0.37265135699373697,
+ "grad_norm": 0.9812761545181274,
+ "learning_rate": 4.9986466715614205e-06,
+ "loss": 0.7307,
+ "step": 119
+ },
+ {
+ "epoch": 0.3757828810020877,
+ "grad_norm": 2.2237496376037598,
+ "learning_rate": 4.998500481721484e-06,
+ "loss": 0.6761,
+ "step": 120
+ },
+ {
+ "epoch": 0.37891440501043844,
+ "grad_norm": 1.4004178047180176,
+ "learning_rate": 4.998346798039952e-06,
+ "loss": 0.7505,
+ "step": 121
+ },
+ {
+ "epoch": 0.38204592901878914,
+ "grad_norm": 5.54975700378418,
+ "learning_rate": 4.99818562097777e-06,
+ "loss": 0.7615,
+ "step": 122
+ },
+ {
+ "epoch": 0.38517745302713985,
+ "grad_norm": 6.17140531539917,
+ "learning_rate": 4.9980169510183624e-06,
+ "loss": 0.7002,
+ "step": 123
+ },
+ {
+ "epoch": 0.3883089770354906,
+ "grad_norm": 4.974380016326904,
+ "learning_rate": 4.997840788667628e-06,
+ "loss": 0.7449,
+ "step": 124
+ },
+ {
+ "epoch": 0.3914405010438413,
+ "grad_norm": 1.4133399724960327,
+ "learning_rate": 4.997657134453937e-06,
+ "loss": 0.7442,
+ "step": 125
+ },
+ {
+ "epoch": 0.3945720250521921,
+ "grad_norm": 1.868915319442749,
+ "learning_rate": 4.9974659889281295e-06,
+ "loss": 0.7104,
+ "step": 126
+ },
+ {
+ "epoch": 0.3977035490605428,
+ "grad_norm": 1.2599350214004517,
+ "learning_rate": 4.997267352663514e-06,
+ "loss": 0.7385,
+ "step": 127
+ },
+ {
+ "epoch": 0.40083507306889354,
+ "grad_norm": 1.4353641271591187,
+ "learning_rate": 4.997061226255869e-06,
+ "loss": 0.7081,
+ "step": 128
+ },
+ {
+ "epoch": 0.40396659707724425,
+ "grad_norm": 3.2492141723632812,
+ "learning_rate": 4.996847610323437e-06,
+ "loss": 0.7859,
+ "step": 129
+ },
+ {
+ "epoch": 0.407098121085595,
+ "grad_norm": 9.599719047546387,
+ "learning_rate": 4.996626505506923e-06,
+ "loss": 0.7241,
+ "step": 130
+ },
+ {
+ "epoch": 0.4102296450939457,
+ "grad_norm": 10.053650856018066,
+ "learning_rate": 4.996397912469494e-06,
+ "loss": 0.6841,
+ "step": 131
+ },
+ {
+ "epoch": 0.4133611691022965,
+ "grad_norm": 1.323876976966858,
+ "learning_rate": 4.996161831896777e-06,
+ "loss": 0.7317,
+ "step": 132
+ },
+ {
+ "epoch": 0.4164926931106472,
+ "grad_norm": 1.4180598258972168,
+ "learning_rate": 4.9959182644968594e-06,
+ "loss": 0.692,
+ "step": 133
+ },
+ {
+ "epoch": 0.4196242171189979,
+ "grad_norm": 1.2194396257400513,
+ "learning_rate": 4.99566721100028e-06,
+ "loss": 0.7068,
+ "step": 134
+ },
+ {
+ "epoch": 0.42275574112734865,
+ "grad_norm": 1.0984960794448853,
+ "learning_rate": 4.995408672160031e-06,
+ "loss": 0.6946,
+ "step": 135
+ },
+ {
+ "epoch": 0.42588726513569936,
+ "grad_norm": 1.9341071844100952,
+ "learning_rate": 4.995142648751561e-06,
+ "loss": 0.7467,
+ "step": 136
+ },
+ {
+ "epoch": 0.4290187891440501,
+ "grad_norm": 1.9960932731628418,
+ "learning_rate": 4.9948691415727594e-06,
+ "loss": 0.7379,
+ "step": 137
+ },
+ {
+ "epoch": 0.4321503131524008,
+ "grad_norm": 0.8743917942047119,
+ "learning_rate": 4.994588151443968e-06,
+ "loss": 0.66,
+ "step": 138
+ },
+ {
+ "epoch": 0.4352818371607516,
+ "grad_norm": 0.8655261993408203,
+ "learning_rate": 4.99429967920797e-06,
+ "loss": 0.6646,
+ "step": 139
+ },
+ {
+ "epoch": 0.4384133611691023,
+ "grad_norm": 5.462070941925049,
+ "learning_rate": 4.994003725729992e-06,
+ "loss": 0.643,
+ "step": 140
+ },
+ {
+ "epoch": 0.44154488517745305,
+ "grad_norm": 2.1401469707489014,
+ "learning_rate": 4.993700291897695e-06,
+ "loss": 0.6639,
+ "step": 141
+ },
+ {
+ "epoch": 0.44467640918580376,
+ "grad_norm": 1.8219833374023438,
+ "learning_rate": 4.9933893786211815e-06,
+ "loss": 0.6673,
+ "step": 142
+ },
+ {
+ "epoch": 0.44780793319415446,
+ "grad_norm": 1.641079306602478,
+ "learning_rate": 4.993070986832984e-06,
+ "loss": 0.658,
+ "step": 143
+ },
+ {
+ "epoch": 0.4509394572025052,
+ "grad_norm": 1.1739819049835205,
+ "learning_rate": 4.992745117488066e-06,
+ "loss": 0.6826,
+ "step": 144
+ },
+ {
+ "epoch": 0.45407098121085593,
+ "grad_norm": 2.309185743331909,
+ "learning_rate": 4.9924117715638185e-06,
+ "loss": 0.6536,
+ "step": 145
+ },
+ {
+ "epoch": 0.4572025052192067,
+ "grad_norm": 1.09304940700531,
+ "learning_rate": 4.99207095006006e-06,
+ "loss": 0.721,
+ "step": 146
+ },
+ {
+ "epoch": 0.4603340292275574,
+ "grad_norm": 0.9056984186172485,
+ "learning_rate": 4.991722653999025e-06,
+ "loss": 0.7019,
+ "step": 147
+ },
+ {
+ "epoch": 0.46346555323590816,
+ "grad_norm": 1.8440625667572021,
+ "learning_rate": 4.991366884425374e-06,
+ "loss": 0.707,
+ "step": 148
+ },
+ {
+ "epoch": 0.46659707724425886,
+ "grad_norm": 1.2244676351547241,
+ "learning_rate": 4.991003642406177e-06,
+ "loss": 0.6407,
+ "step": 149
+ },
+ {
+ "epoch": 0.4697286012526096,
+ "grad_norm": 0.9258589744567871,
+ "learning_rate": 4.99063292903092e-06,
+ "loss": 0.6954,
+ "step": 150
+ },
+ {
+ "epoch": 0.47286012526096033,
+ "grad_norm": 4.176390647888184,
+ "learning_rate": 4.990254745411496e-06,
+ "loss": 0.6812,
+ "step": 151
+ },
+ {
+ "epoch": 0.4759916492693111,
+ "grad_norm": 1.4322530031204224,
+ "learning_rate": 4.989869092682205e-06,
+ "loss": 0.6808,
+ "step": 152
+ },
+ {
+ "epoch": 0.4791231732776618,
+ "grad_norm": 0.8017717003822327,
+ "learning_rate": 4.989475971999748e-06,
+ "loss": 0.687,
+ "step": 153
+ },
+ {
+ "epoch": 0.4822546972860125,
+ "grad_norm": 1.5641374588012695,
+ "learning_rate": 4.989075384543228e-06,
+ "loss": 0.6599,
+ "step": 154
+ },
+ {
+ "epoch": 0.48538622129436326,
+ "grad_norm": 1.1522141695022583,
+ "learning_rate": 4.98866733151414e-06,
+ "loss": 0.6546,
+ "step": 155
+ },
+ {
+ "epoch": 0.48851774530271397,
+ "grad_norm": 0.8593171238899231,
+ "learning_rate": 4.988251814136372e-06,
+ "loss": 0.6857,
+ "step": 156
+ },
+ {
+ "epoch": 0.49164926931106473,
+ "grad_norm": 2.668159246444702,
+ "learning_rate": 4.9878288336562e-06,
+ "loss": 0.661,
+ "step": 157
+ },
+ {
+ "epoch": 0.49478079331941544,
+ "grad_norm": 0.9953671097755432,
+ "learning_rate": 4.987398391342285e-06,
+ "loss": 0.6512,
+ "step": 158
+ },
+ {
+ "epoch": 0.4979123173277662,
+ "grad_norm": 1.042872667312622,
+ "learning_rate": 4.986960488485667e-06,
+ "loss": 0.6311,
+ "step": 159
+ },
+ {
+ "epoch": 0.5010438413361169,
+ "grad_norm": 0.9070663452148438,
+ "learning_rate": 4.9865151263997645e-06,
+ "loss": 0.675,
+ "step": 160
+ },
+ {
+ "epoch": 0.5041753653444676,
+ "grad_norm": 0.8460433483123779,
+ "learning_rate": 4.986062306420367e-06,
+ "loss": 0.6635,
+ "step": 161
+ },
+ {
+ "epoch": 0.5073068893528184,
+ "grad_norm": 1.2639834880828857,
+ "learning_rate": 4.985602029905635e-06,
+ "loss": 0.6327,
+ "step": 162
+ },
+ {
+ "epoch": 0.5104384133611691,
+ "grad_norm": 0.8775074481964111,
+ "learning_rate": 4.985134298236091e-06,
+ "loss": 0.644,
+ "step": 163
+ },
+ {
+ "epoch": 0.5135699373695198,
+ "grad_norm": 1.2031961679458618,
+ "learning_rate": 4.98465911281462e-06,
+ "loss": 0.6254,
+ "step": 164
+ },
+ {
+ "epoch": 0.5167014613778705,
+ "grad_norm": 0.892494797706604,
+ "learning_rate": 4.984176475066463e-06,
+ "loss": 0.7122,
+ "step": 165
+ },
+ {
+ "epoch": 0.5198329853862212,
+ "grad_norm": 2.7122485637664795,
+ "learning_rate": 4.983686386439212e-06,
+ "loss": 0.6679,
+ "step": 166
+ },
+ {
+ "epoch": 0.5229645093945721,
+ "grad_norm": 0.9344426989555359,
+ "learning_rate": 4.983188848402806e-06,
+ "loss": 0.6319,
+ "step": 167
+ },
+ {
+ "epoch": 0.5260960334029228,
+ "grad_norm": 1.4093577861785889,
+ "learning_rate": 4.982683862449531e-06,
+ "loss": 0.6425,
+ "step": 168
+ },
+ {
+ "epoch": 0.5292275574112735,
+ "grad_norm": 1.1285009384155273,
+ "learning_rate": 4.982171430094007e-06,
+ "loss": 0.6298,
+ "step": 169
+ },
+ {
+ "epoch": 0.5323590814196242,
+ "grad_norm": 1.952778935432434,
+ "learning_rate": 4.981651552873193e-06,
+ "loss": 0.7066,
+ "step": 170
+ },
+ {
+ "epoch": 0.535490605427975,
+ "grad_norm": 5.133765697479248,
+ "learning_rate": 4.981124232346374e-06,
+ "loss": 0.6634,
+ "step": 171
+ },
+ {
+ "epoch": 0.5386221294363257,
+ "grad_norm": 0.9770542979240417,
+ "learning_rate": 4.980589470095161e-06,
+ "loss": 0.7121,
+ "step": 172
+ },
+ {
+ "epoch": 0.5417536534446764,
+ "grad_norm": 0.8414323925971985,
+ "learning_rate": 4.980047267723487e-06,
+ "loss": 0.6397,
+ "step": 173
+ },
+ {
+ "epoch": 0.5448851774530271,
+ "grad_norm": 1.9173879623413086,
+ "learning_rate": 4.979497626857596e-06,
+ "loss": 0.6228,
+ "step": 174
+ },
+ {
+ "epoch": 0.5480167014613778,
+ "grad_norm": 1.0823363065719604,
+ "learning_rate": 4.978940549146048e-06,
+ "loss": 0.6475,
+ "step": 175
+ },
+ {
+ "epoch": 0.5511482254697286,
+ "grad_norm": 3.715353488922119,
+ "learning_rate": 4.978376036259706e-06,
+ "loss": 0.7127,
+ "step": 176
+ },
+ {
+ "epoch": 0.5542797494780793,
+ "grad_norm": 0.981584370136261,
+ "learning_rate": 4.9778040898917325e-06,
+ "loss": 0.6468,
+ "step": 177
+ },
+ {
+ "epoch": 0.55741127348643,
+ "grad_norm": 1.70566987991333,
+ "learning_rate": 4.977224711757587e-06,
+ "loss": 0.6476,
+ "step": 178
+ },
+ {
+ "epoch": 0.5605427974947808,
+ "grad_norm": 0.9217923283576965,
+ "learning_rate": 4.976637903595019e-06,
+ "loss": 0.6731,
+ "step": 179
+ },
+ {
+ "epoch": 0.5636743215031316,
+ "grad_norm": 0.8994677662849426,
+ "learning_rate": 4.976043667164063e-06,
+ "loss": 0.6562,
+ "step": 180
+ },
+ {
+ "epoch": 0.5668058455114823,
+ "grad_norm": 1.1613017320632935,
+ "learning_rate": 4.975442004247034e-06,
+ "loss": 0.6417,
+ "step": 181
+ },
+ {
+ "epoch": 0.569937369519833,
+ "grad_norm": 1.6041977405548096,
+ "learning_rate": 4.974832916648521e-06,
+ "loss": 0.6029,
+ "step": 182
+ },
+ {
+ "epoch": 0.5730688935281837,
+ "grad_norm": 1.7978405952453613,
+ "learning_rate": 4.974216406195383e-06,
+ "loss": 0.6269,
+ "step": 183
+ },
+ {
+ "epoch": 0.5762004175365344,
+ "grad_norm": 1.6021920442581177,
+ "learning_rate": 4.973592474736739e-06,
+ "loss": 0.6149,
+ "step": 184
+ },
+ {
+ "epoch": 0.5793319415448852,
+ "grad_norm": 0.8973568677902222,
+ "learning_rate": 4.972961124143971e-06,
+ "loss": 0.6648,
+ "step": 185
+ },
+ {
+ "epoch": 0.5824634655532359,
+ "grad_norm": 1.9432591199874878,
+ "learning_rate": 4.972322356310711e-06,
+ "loss": 0.6299,
+ "step": 186
+ },
+ {
+ "epoch": 0.5855949895615866,
+ "grad_norm": 4.457028388977051,
+ "learning_rate": 4.971676173152839e-06,
+ "loss": 0.656,
+ "step": 187
+ },
+ {
+ "epoch": 0.5887265135699373,
+ "grad_norm": 2.0989716053009033,
+ "learning_rate": 4.971022576608473e-06,
+ "loss": 0.6539,
+ "step": 188
+ },
+ {
+ "epoch": 0.5918580375782881,
+ "grad_norm": 1.0646967887878418,
+ "learning_rate": 4.97036156863797e-06,
+ "loss": 0.6727,
+ "step": 189
+ },
+ {
+ "epoch": 0.5949895615866388,
+ "grad_norm": 1.6522265672683716,
+ "learning_rate": 4.969693151223914e-06,
+ "loss": 0.6643,
+ "step": 190
+ },
+ {
+ "epoch": 0.5981210855949896,
+ "grad_norm": 1.7503505945205688,
+ "learning_rate": 4.969017326371115e-06,
+ "loss": 0.6402,
+ "step": 191
+ },
+ {
+ "epoch": 0.6012526096033403,
+ "grad_norm": 1.2341989278793335,
+ "learning_rate": 4.968334096106597e-06,
+ "loss": 0.6413,
+ "step": 192
+ },
+ {
+ "epoch": 0.6043841336116911,
+ "grad_norm": 3.089054584503174,
+ "learning_rate": 4.967643462479597e-06,
+ "loss": 0.6825,
+ "step": 193
+ },
+ {
+ "epoch": 0.6075156576200418,
+ "grad_norm": 2.711623430252075,
+ "learning_rate": 4.966945427561557e-06,
+ "loss": 0.65,
+ "step": 194
+ },
+ {
+ "epoch": 0.6106471816283925,
+ "grad_norm": 4.641184329986572,
+ "learning_rate": 4.966239993446118e-06,
+ "loss": 0.6229,
+ "step": 195
+ },
+ {
+ "epoch": 0.6137787056367432,
+ "grad_norm": 1.7984074354171753,
+ "learning_rate": 4.965527162249114e-06,
+ "loss": 0.6473,
+ "step": 196
+ },
+ {
+ "epoch": 0.6169102296450939,
+ "grad_norm": 1.1643115282058716,
+ "learning_rate": 4.964806936108566e-06,
+ "loss": 0.6404,
+ "step": 197
+ },
+ {
+ "epoch": 0.6200417536534447,
+ "grad_norm": 2.1877920627593994,
+ "learning_rate": 4.9640793171846725e-06,
+ "loss": 0.6185,
+ "step": 198
+ },
+ {
+ "epoch": 0.6231732776617954,
+ "grad_norm": 1.7970566749572754,
+ "learning_rate": 4.963344307659807e-06,
+ "loss": 0.634,
+ "step": 199
+ },
+ {
+ "epoch": 0.6263048016701461,
+ "grad_norm": 1.6014361381530762,
+ "learning_rate": 4.96260190973851e-06,
+ "loss": 0.6562,
+ "step": 200
+ },
+ {
+ "epoch": 0.6294363256784968,
+ "grad_norm": 0.8743320107460022,
+ "learning_rate": 4.961852125647482e-06,
+ "loss": 0.6133,
+ "step": 201
+ },
+ {
+ "epoch": 0.6325678496868476,
+ "grad_norm": 1.9526551961898804,
+ "learning_rate": 4.961094957635578e-06,
+ "loss": 0.6451,
+ "step": 202
+ },
+ {
+ "epoch": 0.6356993736951984,
+ "grad_norm": 3.6597347259521484,
+ "learning_rate": 4.960330407973798e-06,
+ "loss": 0.6386,
+ "step": 203
+ },
+ {
+ "epoch": 0.6388308977035491,
+ "grad_norm": 1.7180207967758179,
+ "learning_rate": 4.959558478955283e-06,
+ "loss": 0.6688,
+ "step": 204
+ },
+ {
+ "epoch": 0.6419624217118998,
+ "grad_norm": 0.9058470129966736,
+ "learning_rate": 4.958779172895308e-06,
+ "loss": 0.6161,
+ "step": 205
+ },
+ {
+ "epoch": 0.6450939457202505,
+ "grad_norm": 1.0031033754348755,
+ "learning_rate": 4.957992492131274e-06,
+ "loss": 0.6437,
+ "step": 206
+ },
+ {
+ "epoch": 0.6482254697286013,
+ "grad_norm": 1.5846725702285767,
+ "learning_rate": 4.9571984390226985e-06,
+ "loss": 0.6332,
+ "step": 207
+ },
+ {
+ "epoch": 0.651356993736952,
+ "grad_norm": 1.9951609373092651,
+ "learning_rate": 4.956397015951215e-06,
+ "loss": 0.636,
+ "step": 208
+ },
+ {
+ "epoch": 0.6544885177453027,
+ "grad_norm": 1.4122583866119385,
+ "learning_rate": 4.95558822532056e-06,
+ "loss": 0.6586,
+ "step": 209
+ },
+ {
+ "epoch": 0.6576200417536534,
+ "grad_norm": 1.2243481874465942,
+ "learning_rate": 4.954772069556568e-06,
+ "loss": 0.6313,
+ "step": 210
+ },
+ {
+ "epoch": 0.6607515657620042,
+ "grad_norm": 0.8756356835365295,
+ "learning_rate": 4.953948551107164e-06,
+ "loss": 0.6406,
+ "step": 211
+ },
+ {
+ "epoch": 0.6638830897703549,
+ "grad_norm": 2.9979734420776367,
+ "learning_rate": 4.953117672442356e-06,
+ "loss": 0.5803,
+ "step": 212
+ },
+ {
+ "epoch": 0.6670146137787056,
+ "grad_norm": 2.1859359741210938,
+ "learning_rate": 4.952279436054229e-06,
+ "loss": 0.6607,
+ "step": 213
+ },
+ {
+ "epoch": 0.6701461377870563,
+ "grad_norm": 0.6929755806922913,
+ "learning_rate": 4.9514338444569346e-06,
+ "loss": 0.5989,
+ "step": 214
+ },
+ {
+ "epoch": 0.673277661795407,
+ "grad_norm": 1.0361783504486084,
+ "learning_rate": 4.950580900186685e-06,
+ "loss": 0.6654,
+ "step": 215
+ },
+ {
+ "epoch": 0.6764091858037579,
+ "grad_norm": 1.210898518562317,
+ "learning_rate": 4.9497206058017475e-06,
+ "loss": 0.6213,
+ "step": 216
+ },
+ {
+ "epoch": 0.6795407098121086,
+ "grad_norm": 1.200990080833435,
+ "learning_rate": 4.948852963882434e-06,
+ "loss": 0.6654,
+ "step": 217
+ },
+ {
+ "epoch": 0.6826722338204593,
+ "grad_norm": 1.481831669807434,
+ "learning_rate": 4.947977977031093e-06,
+ "loss": 0.6474,
+ "step": 218
+ },
+ {
+ "epoch": 0.68580375782881,
+ "grad_norm": 0.9883334636688232,
+ "learning_rate": 4.947095647872103e-06,
+ "loss": 0.6735,
+ "step": 219
+ },
+ {
+ "epoch": 0.6889352818371608,
+ "grad_norm": 0.7436536550521851,
+ "learning_rate": 4.946205979051868e-06,
+ "loss": 0.6456,
+ "step": 220
+ },
+ {
+ "epoch": 0.6920668058455115,
+ "grad_norm": 0.9057570099830627,
+ "learning_rate": 4.945308973238802e-06,
+ "loss": 0.6228,
+ "step": 221
+ },
+ {
+ "epoch": 0.6951983298538622,
+ "grad_norm": 1.341081142425537,
+ "learning_rate": 4.944404633123324e-06,
+ "loss": 0.6417,
+ "step": 222
+ },
+ {
+ "epoch": 0.6983298538622129,
+ "grad_norm": 0.7958157062530518,
+ "learning_rate": 4.943492961417859e-06,
+ "loss": 0.6494,
+ "step": 223
+ },
+ {
+ "epoch": 0.7014613778705637,
+ "grad_norm": 1.216025471687317,
+ "learning_rate": 4.9425739608568106e-06,
+ "loss": 0.6566,
+ "step": 224
+ },
+ {
+ "epoch": 0.7045929018789144,
+ "grad_norm": 0.9774854779243469,
+ "learning_rate": 4.9416476341965735e-06,
+ "loss": 0.6171,
+ "step": 225
+ },
+ {
+ "epoch": 0.7077244258872651,
+ "grad_norm": 2.1562681198120117,
+ "learning_rate": 4.940713984215512e-06,
+ "loss": 0.629,
+ "step": 226
+ },
+ {
+ "epoch": 0.7108559498956158,
+ "grad_norm": 1.9521286487579346,
+ "learning_rate": 4.9397730137139556e-06,
+ "loss": 0.6475,
+ "step": 227
+ },
+ {
+ "epoch": 0.7139874739039666,
+ "grad_norm": 1.5749104022979736,
+ "learning_rate": 4.9388247255141895e-06,
+ "loss": 0.6053,
+ "step": 228
+ },
+ {
+ "epoch": 0.7171189979123174,
+ "grad_norm": 1.2008254528045654,
+ "learning_rate": 4.937869122460449e-06,
+ "loss": 0.6052,
+ "step": 229
+ },
+ {
+ "epoch": 0.7202505219206681,
+ "grad_norm": 1.0774102210998535,
+ "learning_rate": 4.93690620741891e-06,
+ "loss": 0.6099,
+ "step": 230
+ },
+ {
+ "epoch": 0.7233820459290188,
+ "grad_norm": 1.0929996967315674,
+ "learning_rate": 4.935935983277675e-06,
+ "loss": 0.6363,
+ "step": 231
+ },
+ {
+ "epoch": 0.7265135699373695,
+ "grad_norm": 0.8830653429031372,
+ "learning_rate": 4.934958452946774e-06,
+ "loss": 0.6136,
+ "step": 232
+ },
+ {
+ "epoch": 0.7296450939457203,
+ "grad_norm": 3.591218948364258,
+ "learning_rate": 4.933973619358147e-06,
+ "loss": 0.5962,
+ "step": 233
+ },
+ {
+ "epoch": 0.732776617954071,
+ "grad_norm": 2.5797672271728516,
+ "learning_rate": 4.932981485465643e-06,
+ "loss": 0.6405,
+ "step": 234
+ },
+ {
+ "epoch": 0.7359081419624217,
+ "grad_norm": 1.0467664003372192,
+ "learning_rate": 4.9319820542450025e-06,
+ "loss": 0.6155,
+ "step": 235
+ },
+ {
+ "epoch": 0.7390396659707724,
+ "grad_norm": 0.8099795579910278,
+ "learning_rate": 4.930975328693856e-06,
+ "loss": 0.5615,
+ "step": 236
+ },
+ {
+ "epoch": 0.7421711899791231,
+ "grad_norm": 0.8906702995300293,
+ "learning_rate": 4.92996131183171e-06,
+ "loss": 0.6501,
+ "step": 237
+ },
+ {
+ "epoch": 0.7453027139874739,
+ "grad_norm": 1.0871416330337524,
+ "learning_rate": 4.928940006699944e-06,
+ "loss": 0.6282,
+ "step": 238
+ },
+ {
+ "epoch": 0.7484342379958246,
+ "grad_norm": 1.3209614753723145,
+ "learning_rate": 4.927911416361792e-06,
+ "loss": 0.598,
+ "step": 239
+ },
+ {
+ "epoch": 0.7515657620041754,
+ "grad_norm": 1.2252682447433472,
+ "learning_rate": 4.926875543902344e-06,
+ "loss": 0.6433,
+ "step": 240
+ },
+ {
+ "epoch": 0.7546972860125261,
+ "grad_norm": 1.0569007396697998,
+ "learning_rate": 4.9258323924285285e-06,
+ "loss": 0.5927,
+ "step": 241
+ },
+ {
+ "epoch": 0.7578288100208769,
+ "grad_norm": 0.9309014081954956,
+ "learning_rate": 4.924781965069106e-06,
+ "loss": 0.5927,
+ "step": 242
+ },
+ {
+ "epoch": 0.7609603340292276,
+ "grad_norm": 1.0200378894805908,
+ "learning_rate": 4.923724264974662e-06,
+ "loss": 0.6064,
+ "step": 243
+ },
+ {
+ "epoch": 0.7640918580375783,
+ "grad_norm": 1.0533075332641602,
+ "learning_rate": 4.922659295317593e-06,
+ "loss": 0.6373,
+ "step": 244
+ },
+ {
+ "epoch": 0.767223382045929,
+ "grad_norm": 0.7889382839202881,
+ "learning_rate": 4.921587059292102e-06,
+ "loss": 0.5887,
+ "step": 245
+ },
+ {
+ "epoch": 0.7703549060542797,
+ "grad_norm": 0.7943588495254517,
+ "learning_rate": 4.920507560114183e-06,
+ "loss": 0.593,
+ "step": 246
+ },
+ {
+ "epoch": 0.7734864300626305,
+ "grad_norm": 0.8247205018997192,
+ "learning_rate": 4.919420801021617e-06,
+ "loss": 0.6151,
+ "step": 247
+ },
+ {
+ "epoch": 0.7766179540709812,
+ "grad_norm": 0.9979158043861389,
+ "learning_rate": 4.91832678527396e-06,
+ "loss": 0.6019,
+ "step": 248
+ },
+ {
+ "epoch": 0.7797494780793319,
+ "grad_norm": 0.9346868991851807,
+ "learning_rate": 4.917225516152532e-06,
+ "loss": 0.6098,
+ "step": 249
+ },
+ {
+ "epoch": 0.7828810020876826,
+ "grad_norm": 0.7487881183624268,
+ "learning_rate": 4.916116996960408e-06,
+ "loss": 0.5965,
+ "step": 250
+ },
+ {
+ "epoch": 0.7860125260960334,
+ "grad_norm": 0.821576714515686,
+ "learning_rate": 4.915001231022411e-06,
+ "loss": 0.6483,
+ "step": 251
+ },
+ {
+ "epoch": 0.7891440501043842,
+ "grad_norm": 1.0413196086883545,
+ "learning_rate": 4.913878221685096e-06,
+ "loss": 0.6108,
+ "step": 252
+ },
+ {
+ "epoch": 0.7922755741127349,
+ "grad_norm": 0.9560331702232361,
+ "learning_rate": 4.912747972316745e-06,
+ "loss": 0.5758,
+ "step": 253
+ },
+ {
+ "epoch": 0.7954070981210856,
+ "grad_norm": 0.8964638113975525,
+ "learning_rate": 4.911610486307356e-06,
+ "loss": 0.6432,
+ "step": 254
+ },
+ {
+ "epoch": 0.7985386221294363,
+ "grad_norm": 0.8418346047401428,
+ "learning_rate": 4.910465767068631e-06,
+ "loss": 0.6027,
+ "step": 255
+ },
+ {
+ "epoch": 0.8016701461377871,
+ "grad_norm": 1.792371153831482,
+ "learning_rate": 4.909313818033966e-06,
+ "loss": 0.6198,
+ "step": 256
+ },
+ {
+ "epoch": 0.8048016701461378,
+ "grad_norm": 1.036665439605713,
+ "learning_rate": 4.908154642658446e-06,
+ "loss": 0.6255,
+ "step": 257
+ },
+ {
+ "epoch": 0.8079331941544885,
+ "grad_norm": 0.7592151165008545,
+ "learning_rate": 4.906988244418823e-06,
+ "loss": 0.6035,
+ "step": 258
+ },
+ {
+ "epoch": 0.8110647181628392,
+ "grad_norm": 0.8843073844909668,
+ "learning_rate": 4.90581462681352e-06,
+ "loss": 0.6299,
+ "step": 259
+ },
+ {
+ "epoch": 0.81419624217119,
+ "grad_norm": 0.9489964246749878,
+ "learning_rate": 4.9046337933626086e-06,
+ "loss": 0.5869,
+ "step": 260
+ },
+ {
+ "epoch": 0.8173277661795407,
+ "grad_norm": 0.851691722869873,
+ "learning_rate": 4.903445747607806e-06,
+ "loss": 0.603,
+ "step": 261
+ },
+ {
+ "epoch": 0.8204592901878914,
+ "grad_norm": 1.3722106218338013,
+ "learning_rate": 4.902250493112458e-06,
+ "loss": 0.5939,
+ "step": 262
+ },
+ {
+ "epoch": 0.8235908141962421,
+ "grad_norm": 1.1002827882766724,
+ "learning_rate": 4.901048033461537e-06,
+ "loss": 0.6452,
+ "step": 263
+ },
+ {
+ "epoch": 0.826722338204593,
+ "grad_norm": 0.8428632020950317,
+ "learning_rate": 4.89983837226162e-06,
+ "loss": 0.5956,
+ "step": 264
+ },
+ {
+ "epoch": 0.8298538622129437,
+ "grad_norm": 0.7666584849357605,
+ "learning_rate": 4.898621513140889e-06,
+ "loss": 0.6067,
+ "step": 265
+ },
+ {
+ "epoch": 0.8329853862212944,
+ "grad_norm": 0.8413611054420471,
+ "learning_rate": 4.897397459749113e-06,
+ "loss": 0.5985,
+ "step": 266
+ },
+ {
+ "epoch": 0.8361169102296451,
+ "grad_norm": 2.3374335765838623,
+ "learning_rate": 4.896166215757638e-06,
+ "loss": 0.5885,
+ "step": 267
+ },
+ {
+ "epoch": 0.8392484342379958,
+ "grad_norm": 2.236640214920044,
+ "learning_rate": 4.894927784859377e-06,
+ "loss": 0.6408,
+ "step": 268
+ },
+ {
+ "epoch": 0.8423799582463466,
+ "grad_norm": 0.9715856313705444,
+ "learning_rate": 4.893682170768802e-06,
+ "loss": 0.5954,
+ "step": 269
+ },
+ {
+ "epoch": 0.8455114822546973,
+ "grad_norm": 1.0249912738800049,
+ "learning_rate": 4.892429377221928e-06,
+ "loss": 0.6186,
+ "step": 270
+ },
+ {
+ "epoch": 0.848643006263048,
+ "grad_norm": 1.255426049232483,
+ "learning_rate": 4.891169407976302e-06,
+ "loss": 0.6351,
+ "step": 271
+ },
+ {
+ "epoch": 0.8517745302713987,
+ "grad_norm": 0.9339559674263,
+ "learning_rate": 4.889902266810995e-06,
+ "loss": 0.5944,
+ "step": 272
+ },
+ {
+ "epoch": 0.8549060542797495,
+ "grad_norm": 1.2473429441452026,
+ "learning_rate": 4.888627957526589e-06,
+ "loss": 0.544,
+ "step": 273
+ },
+ {
+ "epoch": 0.8580375782881002,
+ "grad_norm": 1.0589442253112793,
+ "learning_rate": 4.887346483945166e-06,
+ "loss": 0.5543,
+ "step": 274
+ },
+ {
+ "epoch": 0.8611691022964509,
+ "grad_norm": 0.9844024777412415,
+ "learning_rate": 4.886057849910294e-06,
+ "loss": 0.5941,
+ "step": 275
+ },
+ {
+ "epoch": 0.8643006263048016,
+ "grad_norm": 2.88578200340271,
+ "learning_rate": 4.8847620592870196e-06,
+ "loss": 0.6124,
+ "step": 276
+ },
+ {
+ "epoch": 0.8674321503131524,
+ "grad_norm": 0.7496054172515869,
+ "learning_rate": 4.8834591159618524e-06,
+ "loss": 0.6006,
+ "step": 277
+ },
+ {
+ "epoch": 0.8705636743215032,
+ "grad_norm": 0.7403052449226379,
+ "learning_rate": 4.88214902384276e-06,
+ "loss": 0.5911,
+ "step": 278
+ },
+ {
+ "epoch": 0.8736951983298539,
+ "grad_norm": 0.9003771543502808,
+ "learning_rate": 4.880831786859146e-06,
+ "loss": 0.6347,
+ "step": 279
+ },
+ {
+ "epoch": 0.8768267223382046,
+ "grad_norm": 1.0345501899719238,
+ "learning_rate": 4.879507408961847e-06,
+ "loss": 0.6111,
+ "step": 280
+ },
+ {
+ "epoch": 0.8799582463465553,
+ "grad_norm": 1.4385879039764404,
+ "learning_rate": 4.878175894123116e-06,
+ "loss": 0.6454,
+ "step": 281
+ },
+ {
+ "epoch": 0.8830897703549061,
+ "grad_norm": 0.8469482064247131,
+ "learning_rate": 4.8768372463366145e-06,
+ "loss": 0.6163,
+ "step": 282
+ },
+ {
+ "epoch": 0.8862212943632568,
+ "grad_norm": 0.8859589695930481,
+ "learning_rate": 4.875491469617395e-06,
+ "loss": 0.6144,
+ "step": 283
+ },
+ {
+ "epoch": 0.8893528183716075,
+ "grad_norm": 1.8436834812164307,
+ "learning_rate": 4.874138568001895e-06,
+ "loss": 0.6275,
+ "step": 284
+ },
+ {
+ "epoch": 0.8924843423799582,
+ "grad_norm": 0.6646101474761963,
+ "learning_rate": 4.87277854554792e-06,
+ "loss": 0.615,
+ "step": 285
+ },
+ {
+ "epoch": 0.8956158663883089,
+ "grad_norm": 1.0070925951004028,
+ "learning_rate": 4.871411406334633e-06,
+ "loss": 0.5898,
+ "step": 286
+ },
+ {
+ "epoch": 0.8987473903966597,
+ "grad_norm": 0.9785194993019104,
+ "learning_rate": 4.870037154462545e-06,
+ "loss": 0.5992,
+ "step": 287
+ },
+ {
+ "epoch": 0.9018789144050104,
+ "grad_norm": 0.7244889736175537,
+ "learning_rate": 4.868655794053497e-06,
+ "loss": 0.6078,
+ "step": 288
+ },
+ {
+ "epoch": 0.9050104384133612,
+ "grad_norm": 1.4496444463729858,
+ "learning_rate": 4.8672673292506535e-06,
+ "loss": 0.5855,
+ "step": 289
+ },
+ {
+ "epoch": 0.9081419624217119,
+ "grad_norm": 1.8514957427978516,
+ "learning_rate": 4.865871764218486e-06,
+ "loss": 0.5707,
+ "step": 290
+ },
+ {
+ "epoch": 0.9112734864300627,
+ "grad_norm": 0.8439773321151733,
+ "learning_rate": 4.864469103142763e-06,
+ "loss": 0.5562,
+ "step": 291
+ },
+ {
+ "epoch": 0.9144050104384134,
+ "grad_norm": 0.8146086931228638,
+ "learning_rate": 4.8630593502305355e-06,
+ "loss": 0.6161,
+ "step": 292
+ },
+ {
+ "epoch": 0.9175365344467641,
+ "grad_norm": 0.8920315504074097,
+ "learning_rate": 4.861642509710126e-06,
+ "loss": 0.6139,
+ "step": 293
+ },
+ {
+ "epoch": 0.9206680584551148,
+ "grad_norm": 1.4980088472366333,
+ "learning_rate": 4.860218585831116e-06,
+ "loss": 0.6187,
+ "step": 294
+ },
+ {
+ "epoch": 0.9237995824634656,
+ "grad_norm": 0.9910127520561218,
+ "learning_rate": 4.8587875828643285e-06,
+ "loss": 0.5852,
+ "step": 295
+ },
+ {
+ "epoch": 0.9269311064718163,
+ "grad_norm": 0.819600522518158,
+ "learning_rate": 4.857349505101823e-06,
+ "loss": 0.6172,
+ "step": 296
+ },
+ {
+ "epoch": 0.930062630480167,
+ "grad_norm": 1.1059772968292236,
+ "learning_rate": 4.855904356856878e-06,
+ "loss": 0.5868,
+ "step": 297
+ },
+ {
+ "epoch": 0.9331941544885177,
+ "grad_norm": 1.2362196445465088,
+ "learning_rate": 4.854452142463977e-06,
+ "loss": 0.625,
+ "step": 298
+ },
+ {
+ "epoch": 0.9363256784968684,
+ "grad_norm": 0.9956470727920532,
+ "learning_rate": 4.852992866278799e-06,
+ "loss": 0.5923,
+ "step": 299
+ },
+ {
+ "epoch": 0.9394572025052192,
+ "grad_norm": 0.864109218120575,
+ "learning_rate": 4.851526532678203e-06,
+ "loss": 0.6315,
+ "step": 300
+ },
+ {
+ "epoch": 0.94258872651357,
+ "grad_norm": 0.8900614380836487,
+ "learning_rate": 4.850053146060217e-06,
+ "loss": 0.6128,
+ "step": 301
+ },
+ {
+ "epoch": 0.9457202505219207,
+ "grad_norm": 0.927254855632782,
+ "learning_rate": 4.84857271084402e-06,
+ "loss": 0.5955,
+ "step": 302
+ },
+ {
+ "epoch": 0.9488517745302714,
+ "grad_norm": 1.0046517848968506,
+ "learning_rate": 4.847085231469935e-06,
+ "loss": 0.6134,
+ "step": 303
+ },
+ {
+ "epoch": 0.9519832985386222,
+ "grad_norm": 0.734597384929657,
+ "learning_rate": 4.8455907123994125e-06,
+ "loss": 0.5927,
+ "step": 304
+ },
+ {
+ "epoch": 0.9551148225469729,
+ "grad_norm": 0.7338348031044006,
+ "learning_rate": 4.844089158115016e-06,
+ "loss": 0.5897,
+ "step": 305
+ },
+ {
+ "epoch": 0.9582463465553236,
+ "grad_norm": 0.9163988828659058,
+ "learning_rate": 4.8425805731204106e-06,
+ "loss": 0.6051,
+ "step": 306
+ },
+ {
+ "epoch": 0.9613778705636743,
+ "grad_norm": 1.050246238708496,
+ "learning_rate": 4.84106496194035e-06,
+ "loss": 0.5751,
+ "step": 307
+ },
+ {
+ "epoch": 0.964509394572025,
+ "grad_norm": 0.7637603878974915,
+ "learning_rate": 4.83954232912066e-06,
+ "loss": 0.5677,
+ "step": 308
+ },
+ {
+ "epoch": 0.9676409185803758,
+ "grad_norm": 0.7110525965690613,
+ "learning_rate": 4.838012679228229e-06,
+ "loss": 0.6051,
+ "step": 309
+ },
+ {
+ "epoch": 0.9707724425887265,
+ "grad_norm": 0.7662068605422974,
+ "learning_rate": 4.836476016850988e-06,
+ "loss": 0.59,
+ "step": 310
+ },
+ {
+ "epoch": 0.9739039665970772,
+ "grad_norm": 0.8907375335693359,
+ "learning_rate": 4.834932346597906e-06,
+ "loss": 0.5792,
+ "step": 311
+ },
+ {
+ "epoch": 0.9770354906054279,
+ "grad_norm": 0.8939849138259888,
+ "learning_rate": 4.833381673098966e-06,
+ "loss": 0.6062,
+ "step": 312
+ },
+ {
+ "epoch": 0.9801670146137788,
+ "grad_norm": 0.8878788948059082,
+ "learning_rate": 4.8318240010051595e-06,
+ "loss": 0.5694,
+ "step": 313
+ },
+ {
+ "epoch": 0.9832985386221295,
+ "grad_norm": 1.2523870468139648,
+ "learning_rate": 4.830259334988468e-06,
+ "loss": 0.5809,
+ "step": 314
+ },
+ {
+ "epoch": 0.9864300626304802,
+ "grad_norm": 1.0836797952651978,
+ "learning_rate": 4.82868767974185e-06,
+ "loss": 0.5949,
+ "step": 315
+ },
+ {
+ "epoch": 0.9895615866388309,
+ "grad_norm": 0.7985473871231079,
+ "learning_rate": 4.827109039979226e-06,
+ "loss": 0.6057,
+ "step": 316
+ },
+ {
+ "epoch": 0.9926931106471816,
+ "grad_norm": 1.042951226234436,
+ "learning_rate": 4.825523420435469e-06,
+ "loss": 0.6004,
+ "step": 317
+ },
+ {
+ "epoch": 0.9958246346555324,
+ "grad_norm": 0.7845115661621094,
+ "learning_rate": 4.823930825866381e-06,
+ "loss": 0.6161,
+ "step": 318
+ },
+ {
+ "epoch": 0.9989561586638831,
+ "grad_norm": 0.931854784488678,
+ "learning_rate": 4.82233126104869e-06,
+ "loss": 0.5912,
+ "step": 319
+ },
+ {
+ "epoch": 1.0,
+ "grad_norm": 0.931854784488678,
+ "learning_rate": 4.8207247307800275e-06,
+ "loss": 0.1914,
+ "step": 320
+ },
+ {
+ "epoch": 1.0031315240083507,
+ "grad_norm": 0.751028835773468,
+ "learning_rate": 4.819111239878916e-06,
+ "loss": 0.5802,
+ "step": 321
+ },
+ {
+ "epoch": 1.0062630480167014,
+ "grad_norm": 1.4943569898605347,
+ "learning_rate": 4.817490793184758e-06,
+ "loss": 0.613,
+ "step": 322
+ },
+ {
+ "epoch": 1.0093945720250521,
+ "grad_norm": 2.296318531036377,
+ "learning_rate": 4.815863395557816e-06,
+ "loss": 0.5453,
+ "step": 323
+ },
+ {
+ "epoch": 1.0125260960334028,
+ "grad_norm": 0.760101318359375,
+ "learning_rate": 4.814229051879202e-06,
+ "loss": 0.5302,
+ "step": 324
+ },
+ {
+ "epoch": 1.0156576200417538,
+ "grad_norm": 0.8145846128463745,
+ "learning_rate": 4.812587767050861e-06,
+ "loss": 0.5831,
+ "step": 325
+ },
+ {
+ "epoch": 1.0187891440501045,
+ "grad_norm": 0.9169796109199524,
+ "learning_rate": 4.8109395459955565e-06,
+ "loss": 0.5756,
+ "step": 326
+ },
+ {
+ "epoch": 1.0219206680584552,
+ "grad_norm": 0.8791524171829224,
+ "learning_rate": 4.809284393656858e-06,
+ "loss": 0.5988,
+ "step": 327
+ },
+ {
+ "epoch": 1.0250521920668059,
+ "grad_norm": 1.0184170007705688,
+ "learning_rate": 4.807622314999122e-06,
+ "loss": 0.5476,
+ "step": 328
+ },
+ {
+ "epoch": 1.0281837160751566,
+ "grad_norm": 0.8095184564590454,
+ "learning_rate": 4.8059533150074805e-06,
+ "loss": 0.5723,
+ "step": 329
+ },
+ {
+ "epoch": 1.0313152400835073,
+ "grad_norm": 0.7621930241584778,
+ "learning_rate": 4.804277398687826e-06,
+ "loss": 0.5841,
+ "step": 330
+ },
+ {
+ "epoch": 1.034446764091858,
+ "grad_norm": 3.729628324508667,
+ "learning_rate": 4.802594571066791e-06,
+ "loss": 0.5639,
+ "step": 331
+ },
+ {
+ "epoch": 1.0375782881002087,
+ "grad_norm": 1.6502974033355713,
+ "learning_rate": 4.800904837191743e-06,
+ "loss": 0.6024,
+ "step": 332
+ },
+ {
+ "epoch": 1.0407098121085594,
+ "grad_norm": 0.8031198978424072,
+ "learning_rate": 4.799208202130762e-06,
+ "loss": 0.5305,
+ "step": 333
+ },
+ {
+ "epoch": 1.0438413361169103,
+ "grad_norm": 0.939644992351532,
+ "learning_rate": 4.797504670972623e-06,
+ "loss": 0.5446,
+ "step": 334
+ },
+ {
+ "epoch": 1.046972860125261,
+ "grad_norm": 1.0589954853057861,
+ "learning_rate": 4.795794248826789e-06,
+ "loss": 0.5366,
+ "step": 335
+ },
+ {
+ "epoch": 1.0501043841336117,
+ "grad_norm": 0.9089614748954773,
+ "learning_rate": 4.794076940823391e-06,
+ "loss": 0.5795,
+ "step": 336
+ },
+ {
+ "epoch": 1.0532359081419624,
+ "grad_norm": 0.7732561230659485,
+ "learning_rate": 4.792352752113212e-06,
+ "loss": 0.5765,
+ "step": 337
+ },
+ {
+ "epoch": 1.0563674321503131,
+ "grad_norm": 1.811553955078125,
+ "learning_rate": 4.790621687867672e-06,
+ "loss": 0.561,
+ "step": 338
+ },
+ {
+ "epoch": 1.0594989561586639,
+ "grad_norm": 1.1930758953094482,
+ "learning_rate": 4.788883753278813e-06,
+ "loss": 0.5,
+ "step": 339
+ },
+ {
+ "epoch": 1.0626304801670146,
+ "grad_norm": 0.9551813006401062,
+ "learning_rate": 4.787138953559285e-06,
+ "loss": 0.5228,
+ "step": 340
+ },
+ {
+ "epoch": 1.0657620041753653,
+ "grad_norm": 0.9609586596488953,
+ "learning_rate": 4.785387293942329e-06,
+ "loss": 0.5827,
+ "step": 341
+ },
+ {
+ "epoch": 1.068893528183716,
+ "grad_norm": 0.8403449654579163,
+ "learning_rate": 4.783628779681759e-06,
+ "loss": 0.5585,
+ "step": 342
+ },
+ {
+ "epoch": 1.072025052192067,
+ "grad_norm": 0.9108251929283142,
+ "learning_rate": 4.7818634160519496e-06,
+ "loss": 0.6077,
+ "step": 343
+ },
+ {
+ "epoch": 1.0751565762004176,
+ "grad_norm": 0.9476898908615112,
+ "learning_rate": 4.780091208347819e-06,
+ "loss": 0.5493,
+ "step": 344
+ },
+ {
+ "epoch": 1.0782881002087683,
+ "grad_norm": 1.1943707466125488,
+ "learning_rate": 4.778312161884813e-06,
+ "loss": 0.5736,
+ "step": 345
+ },
+ {
+ "epoch": 1.081419624217119,
+ "grad_norm": 3.1342639923095703,
+ "learning_rate": 4.77652628199889e-06,
+ "loss": 0.5765,
+ "step": 346
+ },
+ {
+ "epoch": 1.0845511482254697,
+ "grad_norm": 2.7982125282287598,
+ "learning_rate": 4.7747335740465015e-06,
+ "loss": 0.6003,
+ "step": 347
+ },
+ {
+ "epoch": 1.0876826722338204,
+ "grad_norm": 1.5068914890289307,
+ "learning_rate": 4.7729340434045815e-06,
+ "loss": 0.5033,
+ "step": 348
+ },
+ {
+ "epoch": 1.0908141962421711,
+ "grad_norm": 0.8273429274559021,
+ "learning_rate": 4.771127695470527e-06,
+ "loss": 0.5309,
+ "step": 349
+ },
+ {
+ "epoch": 1.0939457202505218,
+ "grad_norm": 1.104974389076233,
+ "learning_rate": 4.76931453566218e-06,
+ "loss": 0.5244,
+ "step": 350
+ },
+ {
+ "epoch": 1.0970772442588728,
+ "grad_norm": 1.096509337425232,
+ "learning_rate": 4.7674945694178166e-06,
+ "loss": 0.5585,
+ "step": 351
+ },
+ {
+ "epoch": 1.1002087682672235,
+ "grad_norm": 1.0238200426101685,
+ "learning_rate": 4.765667802196127e-06,
+ "loss": 0.5589,
+ "step": 352
+ },
+ {
+ "epoch": 1.1033402922755742,
+ "grad_norm": 0.7515526413917542,
+ "learning_rate": 4.763834239476197e-06,
+ "loss": 0.5304,
+ "step": 353
+ },
+ {
+ "epoch": 1.1064718162839249,
+ "grad_norm": 1.0282566547393799,
+ "learning_rate": 4.761993886757499e-06,
+ "loss": 0.5476,
+ "step": 354
+ },
+ {
+ "epoch": 1.1096033402922756,
+ "grad_norm": 0.9962708950042725,
+ "learning_rate": 4.760146749559868e-06,
+ "loss": 0.5117,
+ "step": 355
+ },
+ {
+ "epoch": 1.1127348643006263,
+ "grad_norm": 0.7851671576499939,
+ "learning_rate": 4.758292833423488e-06,
+ "loss": 0.5542,
+ "step": 356
+ },
+ {
+ "epoch": 1.115866388308977,
+ "grad_norm": 0.8857759237289429,
+ "learning_rate": 4.756432143908876e-06,
+ "loss": 0.544,
+ "step": 357
+ },
+ {
+ "epoch": 1.1189979123173277,
+ "grad_norm": 0.9402740597724915,
+ "learning_rate": 4.7545646865968645e-06,
+ "loss": 0.5656,
+ "step": 358
+ },
+ {
+ "epoch": 1.1221294363256784,
+ "grad_norm": 0.8210407495498657,
+ "learning_rate": 4.752690467088584e-06,
+ "loss": 0.5733,
+ "step": 359
+ },
+ {
+ "epoch": 1.1252609603340291,
+ "grad_norm": 0.795684278011322,
+ "learning_rate": 4.750809491005449e-06,
+ "loss": 0.5678,
+ "step": 360
+ },
+ {
+ "epoch": 1.12839248434238,
+ "grad_norm": 0.8712463974952698,
+ "learning_rate": 4.748921763989139e-06,
+ "loss": 0.5777,
+ "step": 361
+ },
+ {
+ "epoch": 1.1315240083507307,
+ "grad_norm": 0.9810119867324829,
+ "learning_rate": 4.747027291701578e-06,
+ "loss": 0.5511,
+ "step": 362
+ },
+ {
+ "epoch": 1.1346555323590815,
+ "grad_norm": 0.81117844581604,
+ "learning_rate": 4.745126079824926e-06,
+ "loss": 0.5038,
+ "step": 363
+ },
+ {
+ "epoch": 1.1377870563674322,
+ "grad_norm": 0.7631494402885437,
+ "learning_rate": 4.743218134061556e-06,
+ "loss": 0.6272,
+ "step": 364
+ },
+ {
+ "epoch": 1.1409185803757829,
+ "grad_norm": 0.7601696252822876,
+ "learning_rate": 4.741303460134038e-06,
+ "loss": 0.571,
+ "step": 365
+ },
+ {
+ "epoch": 1.1440501043841336,
+ "grad_norm": 1.7977744340896606,
+ "learning_rate": 4.7393820637851205e-06,
+ "loss": 0.538,
+ "step": 366
+ },
+ {
+ "epoch": 1.1471816283924843,
+ "grad_norm": 2.022578001022339,
+ "learning_rate": 4.737453950777718e-06,
+ "loss": 0.5822,
+ "step": 367
+ },
+ {
+ "epoch": 1.150313152400835,
+ "grad_norm": 0.7586764693260193,
+ "learning_rate": 4.735519126894885e-06,
+ "loss": 0.5986,
+ "step": 368
+ },
+ {
+ "epoch": 1.153444676409186,
+ "grad_norm": 0.8970286846160889,
+ "learning_rate": 4.733577597939812e-06,
+ "loss": 0.542,
+ "step": 369
+ },
+ {
+ "epoch": 1.1565762004175366,
+ "grad_norm": 0.8546352982521057,
+ "learning_rate": 4.731629369735793e-06,
+ "loss": 0.5832,
+ "step": 370
+ },
+ {
+ "epoch": 1.1597077244258873,
+ "grad_norm": 0.9266164898872375,
+ "learning_rate": 4.72967444812622e-06,
+ "loss": 0.551,
+ "step": 371
+ },
+ {
+ "epoch": 1.162839248434238,
+ "grad_norm": 1.0413658618927002,
+ "learning_rate": 4.7277128389745595e-06,
+ "loss": 0.5866,
+ "step": 372
+ },
+ {
+ "epoch": 1.1659707724425887,
+ "grad_norm": 0.9312199950218201,
+ "learning_rate": 4.7257445481643334e-06,
+ "loss": 0.5723,
+ "step": 373
+ },
+ {
+ "epoch": 1.1691022964509394,
+ "grad_norm": 0.7389806509017944,
+ "learning_rate": 4.723769581599109e-06,
+ "loss": 0.5209,
+ "step": 374
+ },
+ {
+ "epoch": 1.1722338204592901,
+ "grad_norm": 3.053169012069702,
+ "learning_rate": 4.721787945202472e-06,
+ "loss": 0.6094,
+ "step": 375
+ },
+ {
+ "epoch": 1.1753653444676408,
+ "grad_norm": 1.288589596748352,
+ "learning_rate": 4.719799644918017e-06,
+ "loss": 0.5616,
+ "step": 376
+ },
+ {
+ "epoch": 1.1784968684759916,
+ "grad_norm": 0.7675042152404785,
+ "learning_rate": 4.717804686709323e-06,
+ "loss": 0.4963,
+ "step": 377
+ },
+ {
+ "epoch": 1.1816283924843423,
+ "grad_norm": 0.7246491312980652,
+ "learning_rate": 4.715803076559938e-06,
+ "loss": 0.5273,
+ "step": 378
+ },
+ {
+ "epoch": 1.1847599164926932,
+ "grad_norm": 0.8193361759185791,
+ "learning_rate": 4.713794820473366e-06,
+ "loss": 0.6107,
+ "step": 379
+ },
+ {
+ "epoch": 1.187891440501044,
+ "grad_norm": 0.9498510360717773,
+ "learning_rate": 4.711779924473037e-06,
+ "loss": 0.5421,
+ "step": 380
+ },
+ {
+ "epoch": 1.1910229645093946,
+ "grad_norm": 1.0479756593704224,
+ "learning_rate": 4.709758394602305e-06,
+ "loss": 0.5257,
+ "step": 381
+ },
+ {
+ "epoch": 1.1941544885177453,
+ "grad_norm": 0.907866895198822,
+ "learning_rate": 4.707730236924413e-06,
+ "loss": 0.5289,
+ "step": 382
+ },
+ {
+ "epoch": 1.197286012526096,
+ "grad_norm": 0.8861165642738342,
+ "learning_rate": 4.705695457522488e-06,
+ "loss": 0.5727,
+ "step": 383
+ },
+ {
+ "epoch": 1.2004175365344467,
+ "grad_norm": 0.7467761039733887,
+ "learning_rate": 4.703654062499516e-06,
+ "loss": 0.5602,
+ "step": 384
+ },
+ {
+ "epoch": 1.2035490605427974,
+ "grad_norm": 0.7456198334693909,
+ "learning_rate": 4.701606057978325e-06,
+ "loss": 0.5345,
+ "step": 385
+ },
+ {
+ "epoch": 1.2066805845511483,
+ "grad_norm": 1.9976060390472412,
+ "learning_rate": 4.699551450101571e-06,
+ "loss": 0.5504,
+ "step": 386
+ },
+ {
+ "epoch": 1.209812108559499,
+ "grad_norm": 1.5253807306289673,
+ "learning_rate": 4.697490245031709e-06,
+ "loss": 0.5568,
+ "step": 387
+ },
+ {
+ "epoch": 1.2129436325678498,
+ "grad_norm": 1.0786075592041016,
+ "learning_rate": 4.6954224489509885e-06,
+ "loss": 0.5564,
+ "step": 388
+ },
+ {
+ "epoch": 1.2160751565762005,
+ "grad_norm": 0.8385995030403137,
+ "learning_rate": 4.693348068061422e-06,
+ "loss": 0.5341,
+ "step": 389
+ },
+ {
+ "epoch": 1.2192066805845512,
+ "grad_norm": 0.8184949159622192,
+ "learning_rate": 4.691267108584774e-06,
+ "loss": 0.5614,
+ "step": 390
+ },
+ {
+ "epoch": 1.2223382045929019,
+ "grad_norm": 0.9964898824691772,
+ "learning_rate": 4.68917957676254e-06,
+ "loss": 0.5589,
+ "step": 391
+ },
+ {
+ "epoch": 1.2254697286012526,
+ "grad_norm": 1.0168914794921875,
+ "learning_rate": 4.687085478855931e-06,
+ "loss": 0.5892,
+ "step": 392
+ },
+ {
+ "epoch": 1.2286012526096033,
+ "grad_norm": 0.8841140866279602,
+ "learning_rate": 4.684984821145846e-06,
+ "loss": 0.5327,
+ "step": 393
+ },
+ {
+ "epoch": 1.231732776617954,
+ "grad_norm": 0.834431529045105,
+ "learning_rate": 4.682877609932866e-06,
+ "loss": 0.5594,
+ "step": 394
+ },
+ {
+ "epoch": 1.2348643006263047,
+ "grad_norm": 0.7256641983985901,
+ "learning_rate": 4.6807638515372234e-06,
+ "loss": 0.5443,
+ "step": 395
+ },
+ {
+ "epoch": 1.2379958246346556,
+ "grad_norm": 0.765096127986908,
+ "learning_rate": 4.678643552298788e-06,
+ "loss": 0.5439,
+ "step": 396
+ },
+ {
+ "epoch": 1.2411273486430063,
+ "grad_norm": 0.8760455846786499,
+ "learning_rate": 4.676516718577051e-06,
+ "loss": 0.5485,
+ "step": 397
+ },
+ {
+ "epoch": 1.244258872651357,
+ "grad_norm": 2.7111501693725586,
+ "learning_rate": 4.674383356751099e-06,
+ "loss": 0.5696,
+ "step": 398
+ },
+ {
+ "epoch": 1.2473903966597077,
+ "grad_norm": 1.0521738529205322,
+ "learning_rate": 4.672243473219601e-06,
+ "loss": 0.5503,
+ "step": 399
+ },
+ {
+ "epoch": 1.2505219206680585,
+ "grad_norm": 0.8909669518470764,
+ "learning_rate": 4.670097074400785e-06,
+ "loss": 0.5183,
+ "step": 400
+ },
+ {
+ "epoch": 1.2536534446764092,
+ "grad_norm": 0.7483847737312317,
+ "learning_rate": 4.667944166732424e-06,
+ "loss": 0.5669,
+ "step": 401
+ },
+ {
+ "epoch": 1.2567849686847599,
+ "grad_norm": 1.146997094154358,
+ "learning_rate": 4.665784756671808e-06,
+ "loss": 0.5464,
+ "step": 402
+ },
+ {
+ "epoch": 1.2599164926931106,
+ "grad_norm": 0.8998096585273743,
+ "learning_rate": 4.663618850695733e-06,
+ "loss": 0.5502,
+ "step": 403
+ },
+ {
+ "epoch": 1.2630480167014615,
+ "grad_norm": 0.8882688283920288,
+ "learning_rate": 4.6614464553004795e-06,
+ "loss": 0.5507,
+ "step": 404
+ },
+ {
+ "epoch": 1.2661795407098122,
+ "grad_norm": 0.8310684561729431,
+ "learning_rate": 4.659267577001789e-06,
+ "loss": 0.5164,
+ "step": 405
+ },
+ {
+ "epoch": 1.269311064718163,
+ "grad_norm": 0.9286114573478699,
+ "learning_rate": 4.657082222334851e-06,
+ "loss": 0.4813,
+ "step": 406
+ },
+ {
+ "epoch": 1.2724425887265136,
+ "grad_norm": 1.2394906282424927,
+ "learning_rate": 4.654890397854275e-06,
+ "loss": 0.5837,
+ "step": 407
+ },
+ {
+ "epoch": 1.2755741127348643,
+ "grad_norm": 4.00585412979126,
+ "learning_rate": 4.652692110134079e-06,
+ "loss": 0.5453,
+ "step": 408
+ },
+ {
+ "epoch": 1.278705636743215,
+ "grad_norm": 1.1667803525924683,
+ "learning_rate": 4.650487365767667e-06,
+ "loss": 0.5652,
+ "step": 409
+ },
+ {
+ "epoch": 1.2818371607515657,
+ "grad_norm": 0.9351289868354797,
+ "learning_rate": 4.648276171367807e-06,
+ "loss": 0.5576,
+ "step": 410
+ },
+ {
+ "epoch": 1.2849686847599164,
+ "grad_norm": 0.8107728958129883,
+ "learning_rate": 4.646058533566614e-06,
+ "loss": 0.5821,
+ "step": 411
+ },
+ {
+ "epoch": 1.2881002087682671,
+ "grad_norm": 0.7293011546134949,
+ "learning_rate": 4.643834459015525e-06,
+ "loss": 0.5363,
+ "step": 412
+ },
+ {
+ "epoch": 1.2912317327766178,
+ "grad_norm": 0.7550690770149231,
+ "learning_rate": 4.641603954385289e-06,
+ "loss": 0.53,
+ "step": 413
+ },
+ {
+ "epoch": 1.2943632567849686,
+ "grad_norm": 0.7626177072525024,
+ "learning_rate": 4.639367026365938e-06,
+ "loss": 0.5307,
+ "step": 414
+ },
+ {
+ "epoch": 1.2974947807933195,
+ "grad_norm": 1.0841096639633179,
+ "learning_rate": 4.637123681666769e-06,
+ "loss": 0.5162,
+ "step": 415
+ },
+ {
+ "epoch": 1.3006263048016702,
+ "grad_norm": 0.8814271092414856,
+ "learning_rate": 4.634873927016326e-06,
+ "loss": 0.5369,
+ "step": 416
+ },
+ {
+ "epoch": 1.303757828810021,
+ "grad_norm": 0.7402971982955933,
+ "learning_rate": 4.632617769162378e-06,
+ "loss": 0.5846,
+ "step": 417
+ },
+ {
+ "epoch": 1.3068893528183716,
+ "grad_norm": 0.8106061220169067,
+ "learning_rate": 4.6303552148719e-06,
+ "loss": 0.5289,
+ "step": 418
+ },
+ {
+ "epoch": 1.3100208768267223,
+ "grad_norm": 0.9241361618041992,
+ "learning_rate": 4.628086270931053e-06,
+ "loss": 0.5714,
+ "step": 419
+ },
+ {
+ "epoch": 1.313152400835073,
+ "grad_norm": 0.950332522392273,
+ "learning_rate": 4.625810944145159e-06,
+ "loss": 0.5817,
+ "step": 420
+ },
+ {
+ "epoch": 1.316283924843424,
+ "grad_norm": 0.9037718772888184,
+ "learning_rate": 4.623529241338689e-06,
+ "loss": 0.5902,
+ "step": 421
+ },
+ {
+ "epoch": 1.3194154488517746,
+ "grad_norm": 1.2110658884048462,
+ "learning_rate": 4.621241169355234e-06,
+ "loss": 0.561,
+ "step": 422
+ },
+ {
+ "epoch": 1.3225469728601253,
+ "grad_norm": 0.8582742214202881,
+ "learning_rate": 4.618946735057491e-06,
+ "loss": 0.5003,
+ "step": 423
+ },
+ {
+ "epoch": 1.325678496868476,
+ "grad_norm": 0.9203405976295471,
+ "learning_rate": 4.6166459453272386e-06,
+ "loss": 0.5639,
+ "step": 424
+ },
+ {
+ "epoch": 1.3288100208768268,
+ "grad_norm": 0.933721125125885,
+ "learning_rate": 4.614338807065317e-06,
+ "loss": 0.5766,
+ "step": 425
+ },
+ {
+ "epoch": 1.3319415448851775,
+ "grad_norm": 0.8435131311416626,
+ "learning_rate": 4.612025327191608e-06,
+ "loss": 0.5656,
+ "step": 426
+ },
+ {
+ "epoch": 1.3350730688935282,
+ "grad_norm": 0.795796811580658,
+ "learning_rate": 4.609705512645015e-06,
+ "loss": 0.4996,
+ "step": 427
+ },
+ {
+ "epoch": 1.3382045929018789,
+ "grad_norm": 0.8168228268623352,
+ "learning_rate": 4.6073793703834404e-06,
+ "loss": 0.5465,
+ "step": 428
+ },
+ {
+ "epoch": 1.3413361169102296,
+ "grad_norm": 0.8795569539070129,
+ "learning_rate": 4.605046907383765e-06,
+ "loss": 0.5407,
+ "step": 429
+ },
+ {
+ "epoch": 1.3444676409185803,
+ "grad_norm": 0.8504094481468201,
+ "learning_rate": 4.6027081306418295e-06,
+ "loss": 0.5589,
+ "step": 430
+ },
+ {
+ "epoch": 1.347599164926931,
+ "grad_norm": 1.485202431678772,
+ "learning_rate": 4.600363047172409e-06,
+ "loss": 0.5515,
+ "step": 431
+ },
+ {
+ "epoch": 1.350730688935282,
+ "grad_norm": 1.1156851053237915,
+ "learning_rate": 4.598011664009197e-06,
+ "loss": 0.5681,
+ "step": 432
+ },
+ {
+ "epoch": 1.3538622129436326,
+ "grad_norm": 0.8666876554489136,
+ "learning_rate": 4.595653988204779e-06,
+ "loss": 0.5451,
+ "step": 433
+ },
+ {
+ "epoch": 1.3569937369519833,
+ "grad_norm": 0.8192381858825684,
+ "learning_rate": 4.593290026830619e-06,
+ "loss": 0.5632,
+ "step": 434
+ },
+ {
+ "epoch": 1.360125260960334,
+ "grad_norm": 0.7994804978370667,
+ "learning_rate": 4.590919786977029e-06,
+ "loss": 0.5181,
+ "step": 435
+ },
+ {
+ "epoch": 1.3632567849686847,
+ "grad_norm": 0.8038607835769653,
+ "learning_rate": 4.5885432757531535e-06,
+ "loss": 0.5385,
+ "step": 436
+ },
+ {
+ "epoch": 1.3663883089770354,
+ "grad_norm": 0.7677503824234009,
+ "learning_rate": 4.586160500286948e-06,
+ "loss": 0.5455,
+ "step": 437
+ },
+ {
+ "epoch": 1.3695198329853862,
+ "grad_norm": 0.8293285369873047,
+ "learning_rate": 4.583771467725157e-06,
+ "loss": 0.5401,
+ "step": 438
+ },
+ {
+ "epoch": 1.372651356993737,
+ "grad_norm": 0.8607680797576904,
+ "learning_rate": 4.581376185233289e-06,
+ "loss": 0.5782,
+ "step": 439
+ },
+ {
+ "epoch": 1.3757828810020878,
+ "grad_norm": 0.8847081065177917,
+ "learning_rate": 4.578974659995601e-06,
+ "loss": 0.572,
+ "step": 440
+ },
+ {
+ "epoch": 1.3789144050104385,
+ "grad_norm": 0.7669641971588135,
+ "learning_rate": 4.576566899215075e-06,
+ "loss": 0.5655,
+ "step": 441
+ },
+ {
+ "epoch": 1.3820459290187892,
+ "grad_norm": 0.8514629006385803,
+ "learning_rate": 4.5741529101133904e-06,
+ "loss": 0.5218,
+ "step": 442
+ },
+ {
+ "epoch": 1.38517745302714,
+ "grad_norm": 0.8719842433929443,
+ "learning_rate": 4.5717326999309145e-06,
+ "loss": 0.5579,
+ "step": 443
+ },
+ {
+ "epoch": 1.3883089770354906,
+ "grad_norm": 1.1142809391021729,
+ "learning_rate": 4.569306275926667e-06,
+ "loss": 0.5535,
+ "step": 444
+ },
+ {
+ "epoch": 1.3914405010438413,
+ "grad_norm": 0.7392387986183167,
+ "learning_rate": 4.566873645378309e-06,
+ "loss": 0.5335,
+ "step": 445
+ },
+ {
+ "epoch": 1.394572025052192,
+ "grad_norm": 0.9066658616065979,
+ "learning_rate": 4.564434815582117e-06,
+ "loss": 0.5286,
+ "step": 446
+ },
+ {
+ "epoch": 1.3977035490605427,
+ "grad_norm": 0.8648932576179504,
+ "learning_rate": 4.561989793852959e-06,
+ "loss": 0.5008,
+ "step": 447
+ },
+ {
+ "epoch": 1.4008350730688934,
+ "grad_norm": 0.7768712043762207,
+ "learning_rate": 4.559538587524276e-06,
+ "loss": 0.5727,
+ "step": 448
+ },
+ {
+ "epoch": 1.4039665970772441,
+ "grad_norm": 0.7851182222366333,
+ "learning_rate": 4.557081203948059e-06,
+ "loss": 0.5731,
+ "step": 449
+ },
+ {
+ "epoch": 1.407098121085595,
+ "grad_norm": 0.8959861397743225,
+ "learning_rate": 4.5546176504948255e-06,
+ "loss": 0.5587,
+ "step": 450
+ },
+ {
+ "epoch": 1.4102296450939458,
+ "grad_norm": 1.0538026094436646,
+ "learning_rate": 4.552147934553601e-06,
+ "loss": 0.5808,
+ "step": 451
+ },
+ {
+ "epoch": 1.4133611691022965,
+ "grad_norm": 0.9887629151344299,
+ "learning_rate": 4.54967206353189e-06,
+ "loss": 0.5658,
+ "step": 452
+ },
+ {
+ "epoch": 1.4164926931106472,
+ "grad_norm": 0.9579302072525024,
+ "learning_rate": 4.547190044855663e-06,
+ "loss": 0.5092,
+ "step": 453
+ },
+ {
+ "epoch": 1.4196242171189979,
+ "grad_norm": 0.6993522047996521,
+ "learning_rate": 4.544701885969326e-06,
+ "loss": 0.5233,
+ "step": 454
+ },
+ {
+ "epoch": 1.4227557411273486,
+ "grad_norm": 0.8197568655014038,
+ "learning_rate": 4.542207594335703e-06,
+ "loss": 0.553,
+ "step": 455
+ },
+ {
+ "epoch": 1.4258872651356993,
+ "grad_norm": 2.921947717666626,
+ "learning_rate": 4.53970717743601e-06,
+ "loss": 0.4857,
+ "step": 456
+ },
+ {
+ "epoch": 1.4290187891440502,
+ "grad_norm": 1.3547242879867554,
+ "learning_rate": 4.53720064276984e-06,
+ "loss": 0.5676,
+ "step": 457
+ },
+ {
+ "epoch": 1.432150313152401,
+ "grad_norm": 1.4175567626953125,
+ "learning_rate": 4.534687997855131e-06,
+ "loss": 0.5164,
+ "step": 458
+ },
+ {
+ "epoch": 1.4352818371607516,
+ "grad_norm": 1.378146767616272,
+ "learning_rate": 4.532169250228145e-06,
+ "loss": 0.5429,
+ "step": 459
+ },
+ {
+ "epoch": 1.4384133611691023,
+ "grad_norm": 0.7811698317527771,
+ "learning_rate": 4.529644407443456e-06,
+ "loss": 0.524,
+ "step": 460
+ },
+ {
+ "epoch": 1.441544885177453,
+ "grad_norm": 1.1481678485870361,
+ "learning_rate": 4.527113477073914e-06,
+ "loss": 0.5513,
+ "step": 461
+ },
+ {
+ "epoch": 1.4446764091858038,
+ "grad_norm": 0.8450161218643188,
+ "learning_rate": 4.5245764667106266e-06,
+ "loss": 0.5632,
+ "step": 462
+ },
+ {
+ "epoch": 1.4478079331941545,
+ "grad_norm": 1.1582145690917969,
+ "learning_rate": 4.522033383962941e-06,
+ "loss": 0.5834,
+ "step": 463
+ },
+ {
+ "epoch": 1.4509394572025052,
+ "grad_norm": 1.0403447151184082,
+ "learning_rate": 4.519484236458416e-06,
+ "loss": 0.506,
+ "step": 464
+ },
+ {
+ "epoch": 1.4540709812108559,
+ "grad_norm": 0.7894920706748962,
+ "learning_rate": 4.516929031842799e-06,
+ "loss": 0.5526,
+ "step": 465
+ },
+ {
+ "epoch": 1.4572025052192066,
+ "grad_norm": 0.8092262744903564,
+ "learning_rate": 4.51436777778001e-06,
+ "loss": 0.5619,
+ "step": 466
+ },
+ {
+ "epoch": 1.4603340292275573,
+ "grad_norm": 0.9773806929588318,
+ "learning_rate": 4.511800481952106e-06,
+ "loss": 0.5179,
+ "step": 467
+ },
+ {
+ "epoch": 1.4634655532359082,
+ "grad_norm": 1.018676519393921,
+ "learning_rate": 4.509227152059271e-06,
+ "loss": 0.5415,
+ "step": 468
+ },
+ {
+ "epoch": 1.466597077244259,
+ "grad_norm": 0.7457838654518127,
+ "learning_rate": 4.506647795819784e-06,
+ "loss": 0.5473,
+ "step": 469
+ },
+ {
+ "epoch": 1.4697286012526096,
+ "grad_norm": 0.7826436161994934,
+ "learning_rate": 4.50406242097e-06,
+ "loss": 0.5526,
+ "step": 470
+ },
+ {
+ "epoch": 1.4728601252609603,
+ "grad_norm": 0.9492483139038086,
+ "learning_rate": 4.501471035264328e-06,
+ "loss": 0.5179,
+ "step": 471
+ },
+ {
+ "epoch": 1.475991649269311,
+ "grad_norm": 0.93398517370224,
+ "learning_rate": 4.4988736464752005e-06,
+ "loss": 0.5195,
+ "step": 472
+ },
+ {
+ "epoch": 1.4791231732776617,
+ "grad_norm": 0.8396487832069397,
+ "learning_rate": 4.496270262393061e-06,
+ "loss": 0.5447,
+ "step": 473
+ },
+ {
+ "epoch": 1.4822546972860124,
+ "grad_norm": 0.7450584173202515,
+ "learning_rate": 4.4936608908263315e-06,
+ "loss": 0.5207,
+ "step": 474
+ },
+ {
+ "epoch": 1.4853862212943634,
+ "grad_norm": 0.7887717485427856,
+ "learning_rate": 4.491045539601392e-06,
+ "loss": 0.523,
+ "step": 475
+ },
+ {
+ "epoch": 1.488517745302714,
+ "grad_norm": 1.2051388025283813,
+ "learning_rate": 4.48842421656256e-06,
+ "loss": 0.5402,
+ "step": 476
+ },
+ {
+ "epoch": 1.4916492693110648,
+ "grad_norm": 2.3103389739990234,
+ "learning_rate": 4.485796929572063e-06,
+ "loss": 0.5588,
+ "step": 477
+ },
+ {
+ "epoch": 1.4947807933194155,
+ "grad_norm": 0.7473112344741821,
+ "learning_rate": 4.483163686510016e-06,
+ "loss": 0.5731,
+ "step": 478
+ },
+ {
+ "epoch": 1.4979123173277662,
+ "grad_norm": 0.7545126676559448,
+ "learning_rate": 4.480524495274399e-06,
+ "loss": 0.5536,
+ "step": 479
+ },
+ {
+ "epoch": 1.501043841336117,
+ "grad_norm": 0.7801297903060913,
+ "learning_rate": 4.477879363781033e-06,
+ "loss": 0.5696,
+ "step": 480
+ },
+ {
+ "epoch": 1.5041753653444676,
+ "grad_norm": 0.7740563750267029,
+ "learning_rate": 4.475228299963554e-06,
+ "loss": 0.5526,
+ "step": 481
+ },
+ {
+ "epoch": 1.5073068893528183,
+ "grad_norm": 0.8600060343742371,
+ "learning_rate": 4.4725713117733936e-06,
+ "loss": 0.5051,
+ "step": 482
+ },
+ {
+ "epoch": 1.510438413361169,
+ "grad_norm": 0.6934283971786499,
+ "learning_rate": 4.46990840717975e-06,
+ "loss": 0.5564,
+ "step": 483
+ },
+ {
+ "epoch": 1.5135699373695197,
+ "grad_norm": 0.8927920460700989,
+ "learning_rate": 4.46723959416957e-06,
+ "loss": 0.5529,
+ "step": 484
+ },
+ {
+ "epoch": 1.5167014613778704,
+ "grad_norm": 0.9570988416671753,
+ "learning_rate": 4.464564880747517e-06,
+ "loss": 0.5661,
+ "step": 485
+ },
+ {
+ "epoch": 1.5198329853862211,
+ "grad_norm": 0.7229202389717102,
+ "learning_rate": 4.461884274935956e-06,
+ "loss": 0.5964,
+ "step": 486
+ },
+ {
+ "epoch": 1.522964509394572,
+ "grad_norm": 0.7367239594459534,
+ "learning_rate": 4.4591977847749225e-06,
+ "loss": 0.5455,
+ "step": 487
+ },
+ {
+ "epoch": 1.5260960334029228,
+ "grad_norm": 0.8062120676040649,
+ "learning_rate": 4.456505418322103e-06,
+ "loss": 0.5735,
+ "step": 488
+ },
+ {
+ "epoch": 1.5292275574112735,
+ "grad_norm": 0.8854482769966125,
+ "learning_rate": 4.453807183652808e-06,
+ "loss": 0.5421,
+ "step": 489
+ },
+ {
+ "epoch": 1.5323590814196242,
+ "grad_norm": 0.7518959045410156,
+ "learning_rate": 4.451103088859951e-06,
+ "loss": 0.5083,
+ "step": 490
+ },
+ {
+ "epoch": 1.535490605427975,
+ "grad_norm": 0.8621206879615784,
+ "learning_rate": 4.448393142054016e-06,
+ "loss": 0.4712,
+ "step": 491
+ },
+ {
+ "epoch": 1.5386221294363258,
+ "grad_norm": 1.0618741512298584,
+ "learning_rate": 4.445677351363046e-06,
+ "loss": 0.5808,
+ "step": 492
+ },
+ {
+ "epoch": 1.5417536534446765,
+ "grad_norm": 0.8261345028877258,
+ "learning_rate": 4.442955724932607e-06,
+ "loss": 0.5625,
+ "step": 493
+ },
+ {
+ "epoch": 1.5448851774530272,
+ "grad_norm": 0.7067139744758606,
+ "learning_rate": 4.440228270925772e-06,
+ "loss": 0.5661,
+ "step": 494
+ },
+ {
+ "epoch": 1.548016701461378,
+ "grad_norm": 0.9234416484832764,
+ "learning_rate": 4.437494997523091e-06,
+ "loss": 0.5428,
+ "step": 495
+ },
+ {
+ "epoch": 1.5511482254697286,
+ "grad_norm": 0.9273470044136047,
+ "learning_rate": 4.434755912922567e-06,
+ "loss": 0.5388,
+ "step": 496
+ },
+ {
+ "epoch": 1.5542797494780793,
+ "grad_norm": 1.0163263082504272,
+ "learning_rate": 4.4320110253396345e-06,
+ "loss": 0.5409,
+ "step": 497
+ },
+ {
+ "epoch": 1.55741127348643,
+ "grad_norm": 0.9542096853256226,
+ "learning_rate": 4.429260343007133e-06,
+ "loss": 0.5329,
+ "step": 498
+ },
+ {
+ "epoch": 1.5605427974947808,
+ "grad_norm": 0.8076801896095276,
+ "learning_rate": 4.426503874175283e-06,
+ "loss": 0.5616,
+ "step": 499
+ },
+ {
+ "epoch": 1.5636743215031315,
+ "grad_norm": 1.0063767433166504,
+ "learning_rate": 4.423741627111658e-06,
+ "loss": 0.5369,
+ "step": 500
+ },
+ {
+ "epoch": 1.5668058455114822,
+ "grad_norm": 1.040286898612976,
+ "learning_rate": 4.420973610101166e-06,
+ "loss": 0.5474,
+ "step": 501
+ },
+ {
+ "epoch": 1.5699373695198329,
+ "grad_norm": 0.7832860946655273,
+ "learning_rate": 4.4181998314460164e-06,
+ "loss": 0.5486,
+ "step": 502
+ },
+ {
+ "epoch": 1.5730688935281836,
+ "grad_norm": 0.8162257075309753,
+ "learning_rate": 4.415420299465706e-06,
+ "loss": 0.5054,
+ "step": 503
+ },
+ {
+ "epoch": 1.5762004175365343,
+ "grad_norm": 0.9108433127403259,
+ "learning_rate": 4.4126350224969814e-06,
+ "loss": 0.5399,
+ "step": 504
+ },
+ {
+ "epoch": 1.5793319415448852,
+ "grad_norm": 0.8002520799636841,
+ "learning_rate": 4.409844008893824e-06,
+ "loss": 0.5485,
+ "step": 505
+ },
+ {
+ "epoch": 1.582463465553236,
+ "grad_norm": 0.8543248772621155,
+ "learning_rate": 4.407047267027423e-06,
+ "loss": 0.4984,
+ "step": 506
+ },
+ {
+ "epoch": 1.5855949895615866,
+ "grad_norm": 0.7154155373573303,
+ "learning_rate": 4.404244805286141e-06,
+ "loss": 0.5392,
+ "step": 507
+ },
+ {
+ "epoch": 1.5887265135699373,
+ "grad_norm": 0.818553626537323,
+ "learning_rate": 4.401436632075504e-06,
+ "loss": 0.5178,
+ "step": 508
+ },
+ {
+ "epoch": 1.5918580375782883,
+ "grad_norm": 0.7535017728805542,
+ "learning_rate": 4.398622755818167e-06,
+ "loss": 0.5446,
+ "step": 509
+ },
+ {
+ "epoch": 1.594989561586639,
+ "grad_norm": 0.9328975677490234,
+ "learning_rate": 4.395803184953889e-06,
+ "loss": 0.5546,
+ "step": 510
+ },
+ {
+ "epoch": 1.5981210855949897,
+ "grad_norm": 0.7960026860237122,
+ "learning_rate": 4.392977927939508e-06,
+ "loss": 0.5451,
+ "step": 511
+ },
+ {
+ "epoch": 1.6012526096033404,
+ "grad_norm": 0.9686267971992493,
+ "learning_rate": 4.3901469932489195e-06,
+ "loss": 0.5198,
+ "step": 512
+ },
+ {
+ "epoch": 1.604384133611691,
+ "grad_norm": 0.903137743473053,
+ "learning_rate": 4.387310389373047e-06,
+ "loss": 0.5395,
+ "step": 513
+ },
+ {
+ "epoch": 1.6075156576200418,
+ "grad_norm": 1.0728516578674316,
+ "learning_rate": 4.384468124819816e-06,
+ "loss": 0.5843,
+ "step": 514
+ },
+ {
+ "epoch": 1.6106471816283925,
+ "grad_norm": 1.0245436429977417,
+ "learning_rate": 4.3816202081141345e-06,
+ "loss": 0.5672,
+ "step": 515
+ },
+ {
+ "epoch": 1.6137787056367432,
+ "grad_norm": 0.9672732353210449,
+ "learning_rate": 4.378766647797858e-06,
+ "loss": 0.5369,
+ "step": 516
+ },
+ {
+ "epoch": 1.616910229645094,
+ "grad_norm": 0.9149513840675354,
+ "learning_rate": 4.375907452429774e-06,
+ "loss": 0.4628,
+ "step": 517
+ },
+ {
+ "epoch": 1.6200417536534446,
+ "grad_norm": 0.7543843984603882,
+ "learning_rate": 4.373042630585567e-06,
+ "loss": 0.5344,
+ "step": 518
+ },
+ {
+ "epoch": 1.6231732776617953,
+ "grad_norm": 0.7589017152786255,
+ "learning_rate": 4.370172190857801e-06,
+ "loss": 0.5672,
+ "step": 519
+ },
+ {
+ "epoch": 1.626304801670146,
+ "grad_norm": 0.803040623664856,
+ "learning_rate": 4.367296141855887e-06,
+ "loss": 0.5313,
+ "step": 520
+ },
+ {
+ "epoch": 1.6294363256784967,
+ "grad_norm": 0.8305794596672058,
+ "learning_rate": 4.3644144922060625e-06,
+ "loss": 0.5754,
+ "step": 521
+ },
+ {
+ "epoch": 1.6325678496868476,
+ "grad_norm": 1.0086486339569092,
+ "learning_rate": 4.361527250551361e-06,
+ "loss": 0.5433,
+ "step": 522
+ },
+ {
+ "epoch": 1.6356993736951984,
+ "grad_norm": 0.7217550277709961,
+ "learning_rate": 4.35863442555159e-06,
+ "loss": 0.524,
+ "step": 523
+ },
+ {
+ "epoch": 1.638830897703549,
+ "grad_norm": 0.7788524627685547,
+ "learning_rate": 4.355736025883303e-06,
+ "loss": 0.536,
+ "step": 524
+ },
+ {
+ "epoch": 1.6419624217118998,
+ "grad_norm": 0.8460550904273987,
+ "learning_rate": 4.352832060239774e-06,
+ "loss": 0.5381,
+ "step": 525
+ },
+ {
+ "epoch": 1.6450939457202505,
+ "grad_norm": 0.7571215033531189,
+ "learning_rate": 4.3499225373309675e-06,
+ "loss": 0.541,
+ "step": 526
+ },
+ {
+ "epoch": 1.6482254697286014,
+ "grad_norm": 0.7343226671218872,
+ "learning_rate": 4.347007465883523e-06,
+ "loss": 0.5147,
+ "step": 527
+ },
+ {
+ "epoch": 1.651356993736952,
+ "grad_norm": 0.7271892428398132,
+ "learning_rate": 4.3440868546407165e-06,
+ "loss": 0.5311,
+ "step": 528
+ },
+ {
+ "epoch": 1.6544885177453028,
+ "grad_norm": 0.8166136741638184,
+ "learning_rate": 4.341160712362442e-06,
+ "loss": 0.5379,
+ "step": 529
+ },
+ {
+ "epoch": 1.6576200417536535,
+ "grad_norm": 1.5985233783721924,
+ "learning_rate": 4.338229047825182e-06,
+ "loss": 0.5782,
+ "step": 530
+ },
+ {
+ "epoch": 1.6607515657620042,
+ "grad_norm": 0.7835702896118164,
+ "learning_rate": 4.3352918698219835e-06,
+ "loss": 0.525,
+ "step": 531
+ },
+ {
+ "epoch": 1.663883089770355,
+ "grad_norm": 0.7278687953948975,
+ "learning_rate": 4.332349187162428e-06,
+ "loss": 0.5266,
+ "step": 532
+ },
+ {
+ "epoch": 1.6670146137787056,
+ "grad_norm": 0.8240190148353577,
+ "learning_rate": 4.329401008672608e-06,
+ "loss": 0.5515,
+ "step": 533
+ },
+ {
+ "epoch": 1.6701461377870563,
+ "grad_norm": 0.9447080492973328,
+ "learning_rate": 4.326447343195102e-06,
+ "loss": 0.5596,
+ "step": 534
+ },
+ {
+ "epoch": 1.673277661795407,
+ "grad_norm": 0.7827372550964355,
+ "learning_rate": 4.323488199588944e-06,
+ "loss": 0.5466,
+ "step": 535
+ },
+ {
+ "epoch": 1.6764091858037578,
+ "grad_norm": 0.9252517223358154,
+ "learning_rate": 4.320523586729599e-06,
+ "loss": 0.5433,
+ "step": 536
+ },
+ {
+ "epoch": 1.6795407098121085,
+ "grad_norm": 0.9437504410743713,
+ "learning_rate": 4.317553513508934e-06,
+ "loss": 0.5552,
+ "step": 537
+ },
+ {
+ "epoch": 1.6826722338204592,
+ "grad_norm": 0.8972746133804321,
+ "learning_rate": 4.3145779888351986e-06,
+ "loss": 0.5259,
+ "step": 538
+ },
+ {
+ "epoch": 1.6858037578288099,
+ "grad_norm": 0.8017446994781494,
+ "learning_rate": 4.311597021632988e-06,
+ "loss": 0.5263,
+ "step": 539
+ },
+ {
+ "epoch": 1.6889352818371608,
+ "grad_norm": 0.7875497341156006,
+ "learning_rate": 4.3086106208432235e-06,
+ "loss": 0.5316,
+ "step": 540
+ },
+ {
+ "epoch": 1.6920668058455115,
+ "grad_norm": 0.8204905986785889,
+ "learning_rate": 4.305618795423125e-06,
+ "loss": 0.5506,
+ "step": 541
+ },
+ {
+ "epoch": 1.6951983298538622,
+ "grad_norm": 0.888359785079956,
+ "learning_rate": 4.30262155434618e-06,
+ "loss": 0.4825,
+ "step": 542
+ },
+ {
+ "epoch": 1.698329853862213,
+ "grad_norm": 1.1026058197021484,
+ "learning_rate": 4.29961890660212e-06,
+ "loss": 0.5321,
+ "step": 543
+ },
+ {
+ "epoch": 1.7014613778705638,
+ "grad_norm": 0.7662535905838013,
+ "learning_rate": 4.2966108611968945e-06,
+ "loss": 0.5432,
+ "step": 544
+ },
+ {
+ "epoch": 1.7045929018789145,
+ "grad_norm": 1.1951749324798584,
+ "learning_rate": 4.293597427152641e-06,
+ "loss": 0.5123,
+ "step": 545
+ },
+ {
+ "epoch": 1.7077244258872653,
+ "grad_norm": 1.303183913230896,
+ "learning_rate": 4.290578613507661e-06,
+ "loss": 0.5346,
+ "step": 546
+ },
+ {
+ "epoch": 1.710855949895616,
+ "grad_norm": 0.7653357982635498,
+ "learning_rate": 4.287554429316387e-06,
+ "loss": 0.5397,
+ "step": 547
+ },
+ {
+ "epoch": 1.7139874739039667,
+ "grad_norm": 0.796215295791626,
+ "learning_rate": 4.284524883649366e-06,
+ "loss": 0.5421,
+ "step": 548
+ },
+ {
+ "epoch": 1.7171189979123174,
+ "grad_norm": 0.7599332332611084,
+ "learning_rate": 4.281489985593219e-06,
+ "loss": 0.5289,
+ "step": 549
+ },
+ {
+ "epoch": 1.720250521920668,
+ "grad_norm": 0.8029115796089172,
+ "learning_rate": 4.2784497442506265e-06,
+ "loss": 0.5409,
+ "step": 550
+ },
+ {
+ "epoch": 1.7233820459290188,
+ "grad_norm": 0.7194099426269531,
+ "learning_rate": 4.275404168740291e-06,
+ "loss": 0.5327,
+ "step": 551
+ },
+ {
+ "epoch": 1.7265135699373695,
+ "grad_norm": 0.7960740923881531,
+ "learning_rate": 4.272353268196917e-06,
+ "loss": 0.4896,
+ "step": 552
+ },
+ {
+ "epoch": 1.7296450939457202,
+ "grad_norm": 0.9572116732597351,
+ "learning_rate": 4.269297051771178e-06,
+ "loss": 0.5402,
+ "step": 553
+ },
+ {
+ "epoch": 1.732776617954071,
+ "grad_norm": 1.3604938983917236,
+ "learning_rate": 4.266235528629695e-06,
+ "loss": 0.5792,
+ "step": 554
+ },
+ {
+ "epoch": 1.7359081419624216,
+ "grad_norm": 2.067286729812622,
+ "learning_rate": 4.263168707955002e-06,
+ "loss": 0.5033,
+ "step": 555
+ },
+ {
+ "epoch": 1.7390396659707723,
+ "grad_norm": 0.8031097054481506,
+ "learning_rate": 4.260096598945523e-06,
+ "loss": 0.5117,
+ "step": 556
+ },
+ {
+ "epoch": 1.742171189979123,
+ "grad_norm": 1.0241729021072388,
+ "learning_rate": 4.257019210815546e-06,
+ "loss": 0.5359,
+ "step": 557
+ },
+ {
+ "epoch": 1.745302713987474,
+ "grad_norm": 0.7625218629837036,
+ "learning_rate": 4.25393655279519e-06,
+ "loss": 0.5625,
+ "step": 558
+ },
+ {
+ "epoch": 1.7484342379958246,
+ "grad_norm": 0.8603503704071045,
+ "learning_rate": 4.250848634130381e-06,
+ "loss": 0.5043,
+ "step": 559
+ },
+ {
+ "epoch": 1.7515657620041754,
+ "grad_norm": 0.9543750286102295,
+ "learning_rate": 4.247755464082824e-06,
+ "loss": 0.5364,
+ "step": 560
+ },
+ {
+ "epoch": 1.754697286012526,
+ "grad_norm": 0.9707463979721069,
+ "learning_rate": 4.244657051929973e-06,
+ "loss": 0.5184,
+ "step": 561
+ },
+ {
+ "epoch": 1.757828810020877,
+ "grad_norm": 0.7491432428359985,
+ "learning_rate": 4.241553406965008e-06,
+ "loss": 0.559,
+ "step": 562
+ },
+ {
+ "epoch": 1.7609603340292277,
+ "grad_norm": 0.7444972991943359,
+ "learning_rate": 4.238444538496801e-06,
+ "loss": 0.5327,
+ "step": 563
+ },
+ {
+ "epoch": 1.7640918580375784,
+ "grad_norm": 2.7108678817749023,
+ "learning_rate": 4.235330455849892e-06,
+ "loss": 0.55,
+ "step": 564
+ },
+ {
+ "epoch": 1.767223382045929,
+ "grad_norm": 1.6716049909591675,
+ "learning_rate": 4.232211168364459e-06,
+ "loss": 0.5093,
+ "step": 565
+ },
+ {
+ "epoch": 1.7703549060542798,
+ "grad_norm": 0.7023475170135498,
+ "learning_rate": 4.229086685396295e-06,
+ "loss": 0.569,
+ "step": 566
+ },
+ {
+ "epoch": 1.7734864300626305,
+ "grad_norm": 0.8596265316009521,
+ "learning_rate": 4.225957016316771e-06,
+ "loss": 0.5128,
+ "step": 567
+ },
+ {
+ "epoch": 1.7766179540709812,
+ "grad_norm": 0.8110849857330322,
+ "learning_rate": 4.222822170512816e-06,
+ "loss": 0.5142,
+ "step": 568
+ },
+ {
+ "epoch": 1.779749478079332,
+ "grad_norm": 0.7583725452423096,
+ "learning_rate": 4.219682157386884e-06,
+ "loss": 0.5584,
+ "step": 569
+ },
+ {
+ "epoch": 1.7828810020876826,
+ "grad_norm": 0.787811279296875,
+ "learning_rate": 4.21653698635693e-06,
+ "loss": 0.5068,
+ "step": 570
+ },
+ {
+ "epoch": 1.7860125260960333,
+ "grad_norm": 0.8298993110656738,
+ "learning_rate": 4.213386666856375e-06,
+ "loss": 0.5496,
+ "step": 571
+ },
+ {
+ "epoch": 1.789144050104384,
+ "grad_norm": 0.8999841213226318,
+ "learning_rate": 4.210231208334087e-06,
+ "loss": 0.5454,
+ "step": 572
+ },
+ {
+ "epoch": 1.7922755741127347,
+ "grad_norm": 4.264521598815918,
+ "learning_rate": 4.207070620254345e-06,
+ "loss": 0.5486,
+ "step": 573
+ },
+ {
+ "epoch": 1.7954070981210855,
+ "grad_norm": 0.8517448306083679,
+ "learning_rate": 4.203904912096812e-06,
+ "loss": 0.5566,
+ "step": 574
+ },
+ {
+ "epoch": 1.7985386221294362,
+ "grad_norm": 0.9230182766914368,
+ "learning_rate": 4.200734093356511e-06,
+ "loss": 0.4964,
+ "step": 575
+ },
+ {
+ "epoch": 1.801670146137787,
+ "grad_norm": 1.224039912223816,
+ "learning_rate": 4.197558173543791e-06,
+ "loss": 0.5356,
+ "step": 576
+ },
+ {
+ "epoch": 1.8048016701461378,
+ "grad_norm": 0.9998573660850525,
+ "learning_rate": 4.194377162184301e-06,
+ "loss": 0.5334,
+ "step": 577
+ },
+ {
+ "epoch": 1.8079331941544885,
+ "grad_norm": 0.865521252155304,
+ "learning_rate": 4.191191068818963e-06,
+ "loss": 0.5036,
+ "step": 578
+ },
+ {
+ "epoch": 1.8110647181628392,
+ "grad_norm": 0.8048138618469238,
+ "learning_rate": 4.18799990300394e-06,
+ "loss": 0.4979,
+ "step": 579
+ },
+ {
+ "epoch": 1.8141962421711901,
+ "grad_norm": 0.717815637588501,
+ "learning_rate": 4.184803674310609e-06,
+ "loss": 0.5623,
+ "step": 580
+ },
+ {
+ "epoch": 1.8173277661795408,
+ "grad_norm": 0.8403327465057373,
+ "learning_rate": 4.1816023923255335e-06,
+ "loss": 0.5055,
+ "step": 581
+ },
+ {
+ "epoch": 1.8204592901878915,
+ "grad_norm": 0.7298995852470398,
+ "learning_rate": 4.178396066650432e-06,
+ "loss": 0.5641,
+ "step": 582
+ },
+ {
+ "epoch": 1.8235908141962422,
+ "grad_norm": 0.9469727873802185,
+ "learning_rate": 4.1751847069021516e-06,
+ "loss": 0.5557,
+ "step": 583
+ },
+ {
+ "epoch": 1.826722338204593,
+ "grad_norm": 0.8641784191131592,
+ "learning_rate": 4.1719683227126386e-06,
+ "loss": 0.5153,
+ "step": 584
+ },
+ {
+ "epoch": 1.8298538622129437,
+ "grad_norm": 0.7316668629646301,
+ "learning_rate": 4.168746923728908e-06,
+ "loss": 0.4988,
+ "step": 585
+ },
+ {
+ "epoch": 1.8329853862212944,
+ "grad_norm": 0.8795468807220459,
+ "learning_rate": 4.165520519613017e-06,
+ "loss": 0.5483,
+ "step": 586
+ },
+ {
+ "epoch": 1.836116910229645,
+ "grad_norm": 0.7323560118675232,
+ "learning_rate": 4.162289120042034e-06,
+ "loss": 0.5194,
+ "step": 587
+ },
+ {
+ "epoch": 1.8392484342379958,
+ "grad_norm": 0.8217021822929382,
+ "learning_rate": 4.159052734708013e-06,
+ "loss": 0.532,
+ "step": 588
+ },
+ {
+ "epoch": 1.8423799582463465,
+ "grad_norm": 0.7669674754142761,
+ "learning_rate": 4.155811373317958e-06,
+ "loss": 0.541,
+ "step": 589
+ },
+ {
+ "epoch": 1.8455114822546972,
+ "grad_norm": 0.8312156200408936,
+ "learning_rate": 4.152565045593801e-06,
+ "loss": 0.5298,
+ "step": 590
+ },
+ {
+ "epoch": 1.848643006263048,
+ "grad_norm": 0.8967565298080444,
+ "learning_rate": 4.1493137612723665e-06,
+ "loss": 0.51,
+ "step": 591
+ },
+ {
+ "epoch": 1.8517745302713986,
+ "grad_norm": 0.8706664443016052,
+ "learning_rate": 4.14605753010535e-06,
+ "loss": 0.4941,
+ "step": 592
+ },
+ {
+ "epoch": 1.8549060542797495,
+ "grad_norm": 0.7585753798484802,
+ "learning_rate": 4.14279636185928e-06,
+ "loss": 0.5161,
+ "step": 593
+ },
+ {
+ "epoch": 1.8580375782881002,
+ "grad_norm": 0.7495241165161133,
+ "learning_rate": 4.1395302663154954e-06,
+ "loss": 0.5388,
+ "step": 594
+ },
+ {
+ "epoch": 1.861169102296451,
+ "grad_norm": 1.0746862888336182,
+ "learning_rate": 4.136259253270114e-06,
+ "loss": 0.4976,
+ "step": 595
+ },
+ {
+ "epoch": 1.8643006263048016,
+ "grad_norm": 0.872309684753418,
+ "learning_rate": 4.132983332534e-06,
+ "loss": 0.559,
+ "step": 596
+ },
+ {
+ "epoch": 1.8674321503131524,
+ "grad_norm": 0.8759891986846924,
+ "learning_rate": 4.1297025139327405e-06,
+ "loss": 0.5436,
+ "step": 597
+ },
+ {
+ "epoch": 1.8705636743215033,
+ "grad_norm": 1.1044493913650513,
+ "learning_rate": 4.126416807306611e-06,
+ "loss": 0.5476,
+ "step": 598
+ },
+ {
+ "epoch": 1.873695198329854,
+ "grad_norm": 0.8340442180633545,
+ "learning_rate": 4.123126222510549e-06,
+ "loss": 0.4592,
+ "step": 599
+ },
+ {
+ "epoch": 1.8768267223382047,
+ "grad_norm": 0.8331449031829834,
+ "learning_rate": 4.119830769414123e-06,
+ "loss": 0.5219,
+ "step": 600
+ },
+ {
+ "epoch": 1.8799582463465554,
+ "grad_norm": 1.0862973928451538,
+ "learning_rate": 4.116530457901503e-06,
+ "loss": 0.5159,
+ "step": 601
+ },
+ {
+ "epoch": 1.883089770354906,
+ "grad_norm": 0.8524414300918579,
+ "learning_rate": 4.113225297871431e-06,
+ "loss": 0.5502,
+ "step": 602
+ },
+ {
+ "epoch": 1.8862212943632568,
+ "grad_norm": 1.4945416450500488,
+ "learning_rate": 4.10991529923719e-06,
+ "loss": 0.5627,
+ "step": 603
+ },
+ {
+ "epoch": 1.8893528183716075,
+ "grad_norm": 1.5518157482147217,
+ "learning_rate": 4.10660047192658e-06,
+ "loss": 0.5517,
+ "step": 604
+ },
+ {
+ "epoch": 1.8924843423799582,
+ "grad_norm": 2.56638765335083,
+ "learning_rate": 4.103280825881878e-06,
+ "loss": 0.5422,
+ "step": 605
+ },
+ {
+ "epoch": 1.895615866388309,
+ "grad_norm": 0.867254912853241,
+ "learning_rate": 4.099956371059817e-06,
+ "loss": 0.4991,
+ "step": 606
+ },
+ {
+ "epoch": 1.8987473903966596,
+ "grad_norm": 0.9555892944335938,
+ "learning_rate": 4.096627117431554e-06,
+ "loss": 0.5339,
+ "step": 607
+ },
+ {
+ "epoch": 1.9018789144050103,
+ "grad_norm": 0.7905483245849609,
+ "learning_rate": 4.093293074982638e-06,
+ "loss": 0.5168,
+ "step": 608
+ },
+ {
+ "epoch": 1.905010438413361,
+ "grad_norm": 0.7500227093696594,
+ "learning_rate": 4.089954253712981e-06,
+ "loss": 0.5096,
+ "step": 609
+ },
+ {
+ "epoch": 1.9081419624217117,
+ "grad_norm": 0.8458324074745178,
+ "learning_rate": 4.086610663636828e-06,
+ "loss": 0.5296,
+ "step": 610
+ },
+ {
+ "epoch": 1.9112734864300627,
+ "grad_norm": 0.7392706871032715,
+ "learning_rate": 4.08326231478273e-06,
+ "loss": 0.5305,
+ "step": 611
+ },
+ {
+ "epoch": 1.9144050104384134,
+ "grad_norm": 0.8113343715667725,
+ "learning_rate": 4.079909217193508e-06,
+ "loss": 0.5044,
+ "step": 612
+ },
+ {
+ "epoch": 1.917536534446764,
+ "grad_norm": 0.7637801766395569,
+ "learning_rate": 4.076551380926226e-06,
+ "loss": 0.5298,
+ "step": 613
+ },
+ {
+ "epoch": 1.9206680584551148,
+ "grad_norm": 1.0523375272750854,
+ "learning_rate": 4.073188816052164e-06,
+ "loss": 0.5111,
+ "step": 614
+ },
+ {
+ "epoch": 1.9237995824634657,
+ "grad_norm": 0.8224868774414062,
+ "learning_rate": 4.069821532656781e-06,
+ "loss": 0.5178,
+ "step": 615
+ },
+ {
+ "epoch": 1.9269311064718164,
+ "grad_norm": 0.7270777821540833,
+ "learning_rate": 4.066449540839693e-06,
+ "loss": 0.5307,
+ "step": 616
+ },
+ {
+ "epoch": 1.9300626304801671,
+ "grad_norm": 0.7214602828025818,
+ "learning_rate": 4.063072850714631e-06,
+ "loss": 0.5171,
+ "step": 617
+ },
+ {
+ "epoch": 1.9331941544885178,
+ "grad_norm": 0.7333671450614929,
+ "learning_rate": 4.059691472409426e-06,
+ "loss": 0.56,
+ "step": 618
+ },
+ {
+ "epoch": 1.9363256784968685,
+ "grad_norm": 0.9166824221611023,
+ "learning_rate": 4.056305416065964e-06,
+ "loss": 0.5388,
+ "step": 619
+ },
+ {
+ "epoch": 1.9394572025052192,
+ "grad_norm": 0.7743303775787354,
+ "learning_rate": 4.052914691840167e-06,
+ "loss": 0.5134,
+ "step": 620
+ },
+ {
+ "epoch": 1.94258872651357,
+ "grad_norm": 0.704097330570221,
+ "learning_rate": 4.0495193099019524e-06,
+ "loss": 0.4926,
+ "step": 621
+ },
+ {
+ "epoch": 1.9457202505219207,
+ "grad_norm": 0.8508503437042236,
+ "learning_rate": 4.046119280435212e-06,
+ "loss": 0.5008,
+ "step": 622
+ },
+ {
+ "epoch": 1.9488517745302714,
+ "grad_norm": 0.725933313369751,
+ "learning_rate": 4.042714613637775e-06,
+ "loss": 0.5549,
+ "step": 623
+ },
+ {
+ "epoch": 1.951983298538622,
+ "grad_norm": 0.8919175863265991,
+ "learning_rate": 4.039305319721381e-06,
+ "loss": 0.5183,
+ "step": 624
+ },
+ {
+ "epoch": 1.9551148225469728,
+ "grad_norm": 0.827919065952301,
+ "learning_rate": 4.035891408911644e-06,
+ "loss": 0.5624,
+ "step": 625
+ },
+ {
+ "epoch": 1.9582463465553235,
+ "grad_norm": 0.7415187358856201,
+ "learning_rate": 4.032472891448032e-06,
+ "loss": 0.5454,
+ "step": 626
+ },
+ {
+ "epoch": 1.9613778705636742,
+ "grad_norm": 0.7675788998603821,
+ "learning_rate": 4.029049777583824e-06,
+ "loss": 0.5361,
+ "step": 627
+ },
+ {
+ "epoch": 1.964509394572025,
+ "grad_norm": 0.8464030623435974,
+ "learning_rate": 4.025622077586088e-06,
+ "loss": 0.5295,
+ "step": 628
+ },
+ {
+ "epoch": 1.9676409185803758,
+ "grad_norm": 0.7641633749008179,
+ "learning_rate": 4.022189801735646e-06,
+ "loss": 0.55,
+ "step": 629
+ },
+ {
+ "epoch": 1.9707724425887265,
+ "grad_norm": 0.7813227772712708,
+ "learning_rate": 4.018752960327048e-06,
+ "loss": 0.5587,
+ "step": 630
+ },
+ {
+ "epoch": 1.9739039665970772,
+ "grad_norm": 0.7576701641082764,
+ "learning_rate": 4.015311563668533e-06,
+ "loss": 0.5413,
+ "step": 631
+ },
+ {
+ "epoch": 1.977035490605428,
+ "grad_norm": 0.6949650049209595,
+ "learning_rate": 4.011865622082004e-06,
+ "loss": 0.5344,
+ "step": 632
+ },
+ {
+ "epoch": 1.9801670146137789,
+ "grad_norm": 0.9009145498275757,
+ "learning_rate": 4.008415145902997e-06,
+ "loss": 0.5233,
+ "step": 633
+ },
+ {
+ "epoch": 1.9832985386221296,
+ "grad_norm": 0.7635822892189026,
+ "learning_rate": 4.004960145480651e-06,
+ "loss": 0.4981,
+ "step": 634
+ },
+ {
+ "epoch": 1.9864300626304803,
+ "grad_norm": 0.8916334509849548,
+ "learning_rate": 4.0015006311776685e-06,
+ "loss": 0.5311,
+ "step": 635
+ },
+ {
+ "epoch": 1.989561586638831,
+ "grad_norm": 0.7197673320770264,
+ "learning_rate": 3.998036613370295e-06,
+ "loss": 0.5361,
+ "step": 636
+ },
+ {
+ "epoch": 1.9926931106471817,
+ "grad_norm": 0.8391228914260864,
+ "learning_rate": 3.994568102448284e-06,
+ "loss": 0.5473,
+ "step": 637
+ },
+ {
+ "epoch": 1.9958246346555324,
+ "grad_norm": 0.9371750950813293,
+ "learning_rate": 3.991095108814862e-06,
+ "loss": 0.5303,
+ "step": 638
+ },
+ {
+ "epoch": 1.998956158663883,
+ "grad_norm": 0.8929619789123535,
+ "learning_rate": 3.9876176428867046e-06,
+ "loss": 0.533,
+ "step": 639
+ },
+ {
+ "epoch": 2.0,
+ "grad_norm": 0.8929619789123535,
+ "learning_rate": 3.9841357150938984e-06,
+ "loss": 0.1831,
+ "step": 640
+ },
+ {
+ "epoch": 2.0031315240083507,
+ "grad_norm": 0.8802503347396851,
+ "learning_rate": 3.9806493358799135e-06,
+ "loss": 0.493,
+ "step": 641
+ },
+ {
+ "epoch": 2.0062630480167014,
+ "grad_norm": 0.802759051322937,
+ "learning_rate": 3.977158515701571e-06,
+ "loss": 0.498,
+ "step": 642
+ },
+ {
+ "epoch": 2.009394572025052,
+ "grad_norm": 1.0235401391983032,
+ "learning_rate": 3.973663265029013e-06,
+ "loss": 0.4887,
+ "step": 643
+ },
+ {
+ "epoch": 2.012526096033403,
+ "grad_norm": 0.7219089865684509,
+ "learning_rate": 3.97016359434567e-06,
+ "loss": 0.4628,
+ "step": 644
+ },
+ {
+ "epoch": 2.0156576200417535,
+ "grad_norm": 0.7887073755264282,
+ "learning_rate": 3.966659514148229e-06,
+ "loss": 0.525,
+ "step": 645
+ },
+ {
+ "epoch": 2.0187891440501042,
+ "grad_norm": 0.7960914969444275,
+ "learning_rate": 3.963151034946602e-06,
+ "loss": 0.4643,
+ "step": 646
+ },
+ {
+ "epoch": 2.021920668058455,
+ "grad_norm": 0.7902271151542664,
+ "learning_rate": 3.959638167263895e-06,
+ "loss": 0.4922,
+ "step": 647
+ },
+ {
+ "epoch": 2.0250521920668056,
+ "grad_norm": 0.9501478672027588,
+ "learning_rate": 3.956120921636379e-06,
+ "loss": 0.5285,
+ "step": 648
+ },
+ {
+ "epoch": 2.028183716075157,
+ "grad_norm": 0.9510527849197388,
+ "learning_rate": 3.952599308613454e-06,
+ "loss": 0.4909,
+ "step": 649
+ },
+ {
+ "epoch": 2.0313152400835075,
+ "grad_norm": 0.9408219456672668,
+ "learning_rate": 3.949073338757619e-06,
+ "loss": 0.4912,
+ "step": 650
+ },
+ {
+ "epoch": 2.034446764091858,
+ "grad_norm": 0.7148041725158691,
+ "learning_rate": 3.945543022644441e-06,
+ "loss": 0.4792,
+ "step": 651
+ },
+ {
+ "epoch": 2.037578288100209,
+ "grad_norm": 0.7737464904785156,
+ "learning_rate": 3.942008370862522e-06,
+ "loss": 0.4694,
+ "step": 652
+ },
+ {
+ "epoch": 2.0407098121085596,
+ "grad_norm": 0.8405889868736267,
+ "learning_rate": 3.938469394013472e-06,
+ "loss": 0.5048,
+ "step": 653
+ },
+ {
+ "epoch": 2.0438413361169103,
+ "grad_norm": 0.7896456718444824,
+ "learning_rate": 3.934926102711869e-06,
+ "loss": 0.4882,
+ "step": 654
+ },
+ {
+ "epoch": 2.046972860125261,
+ "grad_norm": 0.9290387034416199,
+ "learning_rate": 3.931378507585231e-06,
+ "loss": 0.503,
+ "step": 655
+ },
+ {
+ "epoch": 2.0501043841336117,
+ "grad_norm": 0.7386118769645691,
+ "learning_rate": 3.927826619273991e-06,
+ "loss": 0.4918,
+ "step": 656
+ },
+ {
+ "epoch": 2.0532359081419624,
+ "grad_norm": 0.9878676533699036,
+ "learning_rate": 3.92427044843145e-06,
+ "loss": 0.4958,
+ "step": 657
+ },
+ {
+ "epoch": 2.056367432150313,
+ "grad_norm": 1.0111151933670044,
+ "learning_rate": 3.92071000572376e-06,
+ "loss": 0.4886,
+ "step": 658
+ },
+ {
+ "epoch": 2.059498956158664,
+ "grad_norm": 0.8612061738967896,
+ "learning_rate": 3.917145301829884e-06,
+ "loss": 0.5216,
+ "step": 659
+ },
+ {
+ "epoch": 2.0626304801670146,
+ "grad_norm": 0.7458518743515015,
+ "learning_rate": 3.913576347441564e-06,
+ "loss": 0.4807,
+ "step": 660
+ },
+ {
+ "epoch": 2.0657620041753653,
+ "grad_norm": 0.7775886058807373,
+ "learning_rate": 3.910003153263294e-06,
+ "loss": 0.4837,
+ "step": 661
+ },
+ {
+ "epoch": 2.068893528183716,
+ "grad_norm": 0.7144196629524231,
+ "learning_rate": 3.906425730012282e-06,
+ "loss": 0.5081,
+ "step": 662
+ },
+ {
+ "epoch": 2.0720250521920667,
+ "grad_norm": 0.844971776008606,
+ "learning_rate": 3.9028440884184215e-06,
+ "loss": 0.474,
+ "step": 663
+ },
+ {
+ "epoch": 2.0751565762004174,
+ "grad_norm": 0.9709283113479614,
+ "learning_rate": 3.899258239224257e-06,
+ "loss": 0.503,
+ "step": 664
+ },
+ {
+ "epoch": 2.078288100208768,
+ "grad_norm": 1.1325515508651733,
+ "learning_rate": 3.895668193184954e-06,
+ "loss": 0.5058,
+ "step": 665
+ },
+ {
+ "epoch": 2.081419624217119,
+ "grad_norm": 0.7211254239082336,
+ "learning_rate": 3.892073961068266e-06,
+ "loss": 0.4982,
+ "step": 666
+ },
+ {
+ "epoch": 2.08455114822547,
+ "grad_norm": 0.8975517153739929,
+ "learning_rate": 3.888475553654502e-06,
+ "loss": 0.4699,
+ "step": 667
+ },
+ {
+ "epoch": 2.0876826722338206,
+ "grad_norm": 0.8270771503448486,
+ "learning_rate": 3.884872981736493e-06,
+ "loss": 0.4586,
+ "step": 668
+ },
+ {
+ "epoch": 2.0908141962421714,
+ "grad_norm": 0.8606625199317932,
+ "learning_rate": 3.881266256119561e-06,
+ "loss": 0.5299,
+ "step": 669
+ },
+ {
+ "epoch": 2.093945720250522,
+ "grad_norm": 0.9013976454734802,
+ "learning_rate": 3.877655387621488e-06,
+ "loss": 0.4887,
+ "step": 670
+ },
+ {
+ "epoch": 2.0970772442588728,
+ "grad_norm": 0.7603903412818909,
+ "learning_rate": 3.8740403870724795e-06,
+ "loss": 0.4992,
+ "step": 671
+ },
+ {
+ "epoch": 2.1002087682672235,
+ "grad_norm": 1.0432350635528564,
+ "learning_rate": 3.870421265315137e-06,
+ "loss": 0.5035,
+ "step": 672
+ },
+ {
+ "epoch": 2.103340292275574,
+ "grad_norm": 0.7727136611938477,
+ "learning_rate": 3.8667980332044195e-06,
+ "loss": 0.5006,
+ "step": 673
+ },
+ {
+ "epoch": 2.106471816283925,
+ "grad_norm": 0.9764307141304016,
+ "learning_rate": 3.863170701607618e-06,
+ "loss": 0.5061,
+ "step": 674
+ },
+ {
+ "epoch": 2.1096033402922756,
+ "grad_norm": 0.747818648815155,
+ "learning_rate": 3.859539281404317e-06,
+ "loss": 0.4761,
+ "step": 675
+ },
+ {
+ "epoch": 2.1127348643006263,
+ "grad_norm": 0.7254915237426758,
+ "learning_rate": 3.855903783486364e-06,
+ "loss": 0.5166,
+ "step": 676
+ },
+ {
+ "epoch": 2.115866388308977,
+ "grad_norm": 0.7678592801094055,
+ "learning_rate": 3.852264218757839e-06,
+ "loss": 0.5122,
+ "step": 677
+ },
+ {
+ "epoch": 2.1189979123173277,
+ "grad_norm": 0.8140144348144531,
+ "learning_rate": 3.8486205981350165e-06,
+ "loss": 0.4551,
+ "step": 678
+ },
+ {
+ "epoch": 2.1221294363256784,
+ "grad_norm": 0.9417359232902527,
+ "learning_rate": 3.844972932546338e-06,
+ "loss": 0.4748,
+ "step": 679
+ },
+ {
+ "epoch": 2.125260960334029,
+ "grad_norm": 0.8035290241241455,
+ "learning_rate": 3.841321232932378e-06,
+ "loss": 0.5079,
+ "step": 680
+ },
+ {
+ "epoch": 2.12839248434238,
+ "grad_norm": 0.8300641775131226,
+ "learning_rate": 3.837665510245809e-06,
+ "loss": 0.5018,
+ "step": 681
+ },
+ {
+ "epoch": 2.1315240083507305,
+ "grad_norm": 0.8293547034263611,
+ "learning_rate": 3.8340057754513715e-06,
+ "loss": 0.5042,
+ "step": 682
+ },
+ {
+ "epoch": 2.1346555323590812,
+ "grad_norm": 0.7780007719993591,
+ "learning_rate": 3.8303420395258365e-06,
+ "loss": 0.5048,
+ "step": 683
+ },
+ {
+ "epoch": 2.137787056367432,
+ "grad_norm": 0.7519420385360718,
+ "learning_rate": 3.8266743134579785e-06,
+ "loss": 0.5108,
+ "step": 684
+ },
+ {
+ "epoch": 2.140918580375783,
+ "grad_norm": 0.7872384190559387,
+ "learning_rate": 3.8230026082485404e-06,
+ "loss": 0.4924,
+ "step": 685
+ },
+ {
+ "epoch": 2.144050104384134,
+ "grad_norm": 0.7479491829872131,
+ "learning_rate": 3.819326934910197e-06,
+ "loss": 0.5184,
+ "step": 686
+ },
+ {
+ "epoch": 2.1471816283924845,
+ "grad_norm": 0.8438299298286438,
+ "learning_rate": 3.815647304467527e-06,
+ "loss": 0.4791,
+ "step": 687
+ },
+ {
+ "epoch": 2.150313152400835,
+ "grad_norm": 0.7923721671104431,
+ "learning_rate": 3.8119637279569773e-06,
+ "loss": 0.5305,
+ "step": 688
+ },
+ {
+ "epoch": 2.153444676409186,
+ "grad_norm": 0.7856534719467163,
+ "learning_rate": 3.80827621642683e-06,
+ "loss": 0.5063,
+ "step": 689
+ },
+ {
+ "epoch": 2.1565762004175366,
+ "grad_norm": 0.8544500470161438,
+ "learning_rate": 3.8045847809371706e-06,
+ "loss": 0.4989,
+ "step": 690
+ },
+ {
+ "epoch": 2.1597077244258873,
+ "grad_norm": 0.865390956401825,
+ "learning_rate": 3.800889432559852e-06,
+ "loss": 0.4931,
+ "step": 691
+ },
+ {
+ "epoch": 2.162839248434238,
+ "grad_norm": 0.9809399247169495,
+ "learning_rate": 3.797190182378466e-06,
+ "loss": 0.4785,
+ "step": 692
+ },
+ {
+ "epoch": 2.1659707724425887,
+ "grad_norm": 0.7954536080360413,
+ "learning_rate": 3.793487041488304e-06,
+ "loss": 0.4847,
+ "step": 693
+ },
+ {
+ "epoch": 2.1691022964509394,
+ "grad_norm": 0.754704475402832,
+ "learning_rate": 3.7897800209963298e-06,
+ "loss": 0.5125,
+ "step": 694
+ },
+ {
+ "epoch": 2.17223382045929,
+ "grad_norm": 0.7319822311401367,
+ "learning_rate": 3.7860691320211414e-06,
+ "loss": 0.477,
+ "step": 695
+ },
+ {
+ "epoch": 2.175365344467641,
+ "grad_norm": 0.8198635578155518,
+ "learning_rate": 3.7823543856929403e-06,
+ "loss": 0.4764,
+ "step": 696
+ },
+ {
+ "epoch": 2.1784968684759916,
+ "grad_norm": 0.708933413028717,
+ "learning_rate": 3.7786357931534987e-06,
+ "loss": 0.4948,
+ "step": 697
+ },
+ {
+ "epoch": 2.1816283924843423,
+ "grad_norm": 0.8493193984031677,
+ "learning_rate": 3.774913365556123e-06,
+ "loss": 0.5271,
+ "step": 698
+ },
+ {
+ "epoch": 2.184759916492693,
+ "grad_norm": 0.7999475002288818,
+ "learning_rate": 3.771187114065622e-06,
+ "loss": 0.4804,
+ "step": 699
+ },
+ {
+ "epoch": 2.1878914405010437,
+ "grad_norm": 0.8366796970367432,
+ "learning_rate": 3.7674570498582776e-06,
+ "loss": 0.457,
+ "step": 700
+ },
+ {
+ "epoch": 2.1910229645093944,
+ "grad_norm": 0.7935530543327332,
+ "learning_rate": 3.7637231841218015e-06,
+ "loss": 0.5001,
+ "step": 701
+ },
+ {
+ "epoch": 2.1941544885177455,
+ "grad_norm": 0.7700153589248657,
+ "learning_rate": 3.7599855280553125e-06,
+ "loss": 0.5091,
+ "step": 702
+ },
+ {
+ "epoch": 2.1972860125260962,
+ "grad_norm": 0.7991652488708496,
+ "learning_rate": 3.756244092869294e-06,
+ "loss": 0.4955,
+ "step": 703
+ },
+ {
+ "epoch": 2.200417536534447,
+ "grad_norm": 0.720051646232605,
+ "learning_rate": 3.752498889785567e-06,
+ "loss": 0.4902,
+ "step": 704
+ },
+ {
+ "epoch": 2.2035490605427976,
+ "grad_norm": 0.7312369346618652,
+ "learning_rate": 3.748749930037252e-06,
+ "loss": 0.4935,
+ "step": 705
+ },
+ {
+ "epoch": 2.2066805845511483,
+ "grad_norm": 0.8418563008308411,
+ "learning_rate": 3.744997224868739e-06,
+ "loss": 0.5186,
+ "step": 706
+ },
+ {
+ "epoch": 2.209812108559499,
+ "grad_norm": 0.8324081301689148,
+ "learning_rate": 3.741240785535649e-06,
+ "loss": 0.482,
+ "step": 707
+ },
+ {
+ "epoch": 2.2129436325678498,
+ "grad_norm": 0.8051855564117432,
+ "learning_rate": 3.737480623304805e-06,
+ "loss": 0.4663,
+ "step": 708
+ },
+ {
+ "epoch": 2.2160751565762005,
+ "grad_norm": 0.9464184641838074,
+ "learning_rate": 3.7337167494541948e-06,
+ "loss": 0.451,
+ "step": 709
+ },
+ {
+ "epoch": 2.219206680584551,
+ "grad_norm": 1.0227075815200806,
+ "learning_rate": 3.729949175272942e-06,
+ "loss": 0.4428,
+ "step": 710
+ },
+ {
+ "epoch": 2.222338204592902,
+ "grad_norm": 0.7930364012718201,
+ "learning_rate": 3.7261779120612633e-06,
+ "loss": 0.5132,
+ "step": 711
+ },
+ {
+ "epoch": 2.2254697286012526,
+ "grad_norm": 0.9033688306808472,
+ "learning_rate": 3.7224029711304444e-06,
+ "loss": 0.476,
+ "step": 712
+ },
+ {
+ "epoch": 2.2286012526096033,
+ "grad_norm": 0.8026887774467468,
+ "learning_rate": 3.7186243638028007e-06,
+ "loss": 0.4959,
+ "step": 713
+ },
+ {
+ "epoch": 2.231732776617954,
+ "grad_norm": 0.9391745328903198,
+ "learning_rate": 3.714842101411642e-06,
+ "loss": 0.4962,
+ "step": 714
+ },
+ {
+ "epoch": 2.2348643006263047,
+ "grad_norm": 0.7774361371994019,
+ "learning_rate": 3.711056195301245e-06,
+ "loss": 0.4748,
+ "step": 715
+ },
+ {
+ "epoch": 2.2379958246346554,
+ "grad_norm": 0.9278722405433655,
+ "learning_rate": 3.7072666568268115e-06,
+ "loss": 0.5074,
+ "step": 716
+ },
+ {
+ "epoch": 2.241127348643006,
+ "grad_norm": 0.771423876285553,
+ "learning_rate": 3.7034734973544406e-06,
+ "loss": 0.5072,
+ "step": 717
+ },
+ {
+ "epoch": 2.244258872651357,
+ "grad_norm": 0.8707448244094849,
+ "learning_rate": 3.6996767282610892e-06,
+ "loss": 0.4851,
+ "step": 718
+ },
+ {
+ "epoch": 2.2473903966597075,
+ "grad_norm": 0.7641019821166992,
+ "learning_rate": 3.695876360934543e-06,
+ "loss": 0.4941,
+ "step": 719
+ },
+ {
+ "epoch": 2.2505219206680582,
+ "grad_norm": 0.7647167444229126,
+ "learning_rate": 3.69207240677338e-06,
+ "loss": 0.5225,
+ "step": 720
+ },
+ {
+ "epoch": 2.2536534446764094,
+ "grad_norm": 0.9108865261077881,
+ "learning_rate": 3.6882648771869345e-06,
+ "loss": 0.454,
+ "step": 721
+ },
+ {
+ "epoch": 2.25678496868476,
+ "grad_norm": 0.86728835105896,
+ "learning_rate": 3.6844537835952666e-06,
+ "loss": 0.4461,
+ "step": 722
+ },
+ {
+ "epoch": 2.259916492693111,
+ "grad_norm": 1.1055282354354858,
+ "learning_rate": 3.6806391374291238e-06,
+ "loss": 0.4618,
+ "step": 723
+ },
+ {
+ "epoch": 2.2630480167014615,
+ "grad_norm": 0.7591858506202698,
+ "learning_rate": 3.6768209501299116e-06,
+ "loss": 0.4901,
+ "step": 724
+ },
+ {
+ "epoch": 2.266179540709812,
+ "grad_norm": 0.7966359257698059,
+ "learning_rate": 3.6729992331496554e-06,
+ "loss": 0.5171,
+ "step": 725
+ },
+ {
+ "epoch": 2.269311064718163,
+ "grad_norm": 0.983161211013794,
+ "learning_rate": 3.6691739979509672e-06,
+ "loss": 0.4949,
+ "step": 726
+ },
+ {
+ "epoch": 2.2724425887265136,
+ "grad_norm": 0.9200037121772766,
+ "learning_rate": 3.6653452560070106e-06,
+ "loss": 0.5234,
+ "step": 727
+ },
+ {
+ "epoch": 2.2755741127348643,
+ "grad_norm": 1.0288461446762085,
+ "learning_rate": 3.6615130188014685e-06,
+ "loss": 0.4713,
+ "step": 728
+ },
+ {
+ "epoch": 2.278705636743215,
+ "grad_norm": 0.7325463891029358,
+ "learning_rate": 3.6576772978285065e-06,
+ "loss": 0.527,
+ "step": 729
+ },
+ {
+ "epoch": 2.2818371607515657,
+ "grad_norm": 1.0045446157455444,
+ "learning_rate": 3.6538381045927395e-06,
+ "loss": 0.5139,
+ "step": 730
+ },
+ {
+ "epoch": 2.2849686847599164,
+ "grad_norm": 0.7391849756240845,
+ "learning_rate": 3.6499954506091963e-06,
+ "loss": 0.4829,
+ "step": 731
+ },
+ {
+ "epoch": 2.288100208768267,
+ "grad_norm": 0.7808229923248291,
+ "learning_rate": 3.646149347403286e-06,
+ "loss": 0.4831,
+ "step": 732
+ },
+ {
+ "epoch": 2.291231732776618,
+ "grad_norm": 0.7056961059570312,
+ "learning_rate": 3.6422998065107628e-06,
+ "loss": 0.5066,
+ "step": 733
+ },
+ {
+ "epoch": 2.2943632567849686,
+ "grad_norm": 0.7498443126678467,
+ "learning_rate": 3.6384468394776935e-06,
+ "loss": 0.4724,
+ "step": 734
+ },
+ {
+ "epoch": 2.2974947807933193,
+ "grad_norm": 0.8511576056480408,
+ "learning_rate": 3.634590457860418e-06,
+ "loss": 0.5286,
+ "step": 735
+ },
+ {
+ "epoch": 2.30062630480167,
+ "grad_norm": 0.873635470867157,
+ "learning_rate": 3.63073067322552e-06,
+ "loss": 0.4751,
+ "step": 736
+ },
+ {
+ "epoch": 2.3037578288100207,
+ "grad_norm": 0.7427377104759216,
+ "learning_rate": 3.626867497149788e-06,
+ "loss": 0.475,
+ "step": 737
+ },
+ {
+ "epoch": 2.306889352818372,
+ "grad_norm": 1.0591017007827759,
+ "learning_rate": 3.623000941220186e-06,
+ "loss": 0.4591,
+ "step": 738
+ },
+ {
+ "epoch": 2.3100208768267225,
+ "grad_norm": 0.8767879009246826,
+ "learning_rate": 3.6191310170338114e-06,
+ "loss": 0.4673,
+ "step": 739
+ },
+ {
+ "epoch": 2.3131524008350732,
+ "grad_norm": 0.9156234860420227,
+ "learning_rate": 3.615257736197866e-06,
+ "loss": 0.4622,
+ "step": 740
+ },
+ {
+ "epoch": 2.316283924843424,
+ "grad_norm": 0.6743756532669067,
+ "learning_rate": 3.611381110329619e-06,
+ "loss": 0.4723,
+ "step": 741
+ },
+ {
+ "epoch": 2.3194154488517746,
+ "grad_norm": 0.8655558228492737,
+ "learning_rate": 3.6075011510563732e-06,
+ "loss": 0.471,
+ "step": 742
+ },
+ {
+ "epoch": 2.3225469728601253,
+ "grad_norm": 0.7652033567428589,
+ "learning_rate": 3.603617870015429e-06,
+ "loss": 0.5155,
+ "step": 743
+ },
+ {
+ "epoch": 2.325678496868476,
+ "grad_norm": 0.7970699667930603,
+ "learning_rate": 3.599731278854049e-06,
+ "loss": 0.4507,
+ "step": 744
+ },
+ {
+ "epoch": 2.3288100208768268,
+ "grad_norm": 0.7538278698921204,
+ "learning_rate": 3.5958413892294253e-06,
+ "loss": 0.5093,
+ "step": 745
+ },
+ {
+ "epoch": 2.3319415448851775,
+ "grad_norm": 0.735996663570404,
+ "learning_rate": 3.5919482128086414e-06,
+ "loss": 0.5008,
+ "step": 746
+ },
+ {
+ "epoch": 2.335073068893528,
+ "grad_norm": 0.7643904685974121,
+ "learning_rate": 3.588051761268642e-06,
+ "loss": 0.5072,
+ "step": 747
+ },
+ {
+ "epoch": 2.338204592901879,
+ "grad_norm": 0.7646260857582092,
+ "learning_rate": 3.584152046296191e-06,
+ "loss": 0.4578,
+ "step": 748
+ },
+ {
+ "epoch": 2.3413361169102296,
+ "grad_norm": 0.7873825430870056,
+ "learning_rate": 3.5802490795878446e-06,
+ "loss": 0.5249,
+ "step": 749
+ },
+ {
+ "epoch": 2.3444676409185803,
+ "grad_norm": 1.095333218574524,
+ "learning_rate": 3.5763428728499095e-06,
+ "loss": 0.4913,
+ "step": 750
+ },
+ {
+ "epoch": 2.347599164926931,
+ "grad_norm": 1.3425395488739014,
+ "learning_rate": 3.5724334377984107e-06,
+ "loss": 0.5317,
+ "step": 751
+ },
+ {
+ "epoch": 2.3507306889352817,
+ "grad_norm": 0.7151113748550415,
+ "learning_rate": 3.568520786159055e-06,
+ "loss": 0.5135,
+ "step": 752
+ },
+ {
+ "epoch": 2.3538622129436324,
+ "grad_norm": 0.8072878122329712,
+ "learning_rate": 3.5646049296672004e-06,
+ "loss": 0.4863,
+ "step": 753
+ },
+ {
+ "epoch": 2.356993736951983,
+ "grad_norm": 0.8040189743041992,
+ "learning_rate": 3.5606858800678123e-06,
+ "loss": 0.4668,
+ "step": 754
+ },
+ {
+ "epoch": 2.3601252609603343,
+ "grad_norm": 0.7749765515327454,
+ "learning_rate": 3.5567636491154385e-06,
+ "loss": 0.4681,
+ "step": 755
+ },
+ {
+ "epoch": 2.3632567849686845,
+ "grad_norm": 0.773013710975647,
+ "learning_rate": 3.5528382485741638e-06,
+ "loss": 0.5012,
+ "step": 756
+ },
+ {
+ "epoch": 2.3663883089770357,
+ "grad_norm": 0.7017714381217957,
+ "learning_rate": 3.5489096902175835e-06,
+ "loss": 0.5019,
+ "step": 757
+ },
+ {
+ "epoch": 2.3695198329853864,
+ "grad_norm": 1.132458209991455,
+ "learning_rate": 3.5449779858287625e-06,
+ "loss": 0.5131,
+ "step": 758
+ },
+ {
+ "epoch": 2.372651356993737,
+ "grad_norm": 0.7624574899673462,
+ "learning_rate": 3.541043147200202e-06,
+ "loss": 0.4856,
+ "step": 759
+ },
+ {
+ "epoch": 2.375782881002088,
+ "grad_norm": 0.9078478217124939,
+ "learning_rate": 3.5371051861338036e-06,
+ "loss": 0.4337,
+ "step": 760
+ },
+ {
+ "epoch": 2.3789144050104385,
+ "grad_norm": 0.8608354330062866,
+ "learning_rate": 3.5331641144408344e-06,
+ "loss": 0.5053,
+ "step": 761
+ },
+ {
+ "epoch": 2.382045929018789,
+ "grad_norm": 0.775047779083252,
+ "learning_rate": 3.529219943941892e-06,
+ "loss": 0.4779,
+ "step": 762
+ },
+ {
+ "epoch": 2.38517745302714,
+ "grad_norm": 0.7775866389274597,
+ "learning_rate": 3.525272686466866e-06,
+ "loss": 0.4979,
+ "step": 763
+ },
+ {
+ "epoch": 2.3883089770354906,
+ "grad_norm": 0.9386464357376099,
+ "learning_rate": 3.521322353854908e-06,
+ "loss": 0.5222,
+ "step": 764
+ },
+ {
+ "epoch": 2.3914405010438413,
+ "grad_norm": 0.874109148979187,
+ "learning_rate": 3.517368957954391e-06,
+ "loss": 0.4681,
+ "step": 765
+ },
+ {
+ "epoch": 2.394572025052192,
+ "grad_norm": 0.824588418006897,
+ "learning_rate": 3.5134125106228766e-06,
+ "loss": 0.4955,
+ "step": 766
+ },
+ {
+ "epoch": 2.3977035490605427,
+ "grad_norm": 0.8790764808654785,
+ "learning_rate": 3.5094530237270774e-06,
+ "loss": 0.4722,
+ "step": 767
+ },
+ {
+ "epoch": 2.4008350730688934,
+ "grad_norm": 1.1399786472320557,
+ "learning_rate": 3.5054905091428253e-06,
+ "loss": 0.4771,
+ "step": 768
+ },
+ {
+ "epoch": 2.403966597077244,
+ "grad_norm": 1.2586532831192017,
+ "learning_rate": 3.50152497875503e-06,
+ "loss": 0.4849,
+ "step": 769
+ },
+ {
+ "epoch": 2.407098121085595,
+ "grad_norm": 0.7706464529037476,
+ "learning_rate": 3.4975564444576487e-06,
+ "loss": 0.477,
+ "step": 770
+ },
+ {
+ "epoch": 2.4102296450939455,
+ "grad_norm": 0.7695909142494202,
+ "learning_rate": 3.4935849181536484e-06,
+ "loss": 0.4695,
+ "step": 771
+ },
+ {
+ "epoch": 2.4133611691022967,
+ "grad_norm": 0.7744433283805847,
+ "learning_rate": 3.489610411754969e-06,
+ "loss": 0.499,
+ "step": 772
+ },
+ {
+ "epoch": 2.416492693110647,
+ "grad_norm": 0.9265744686126709,
+ "learning_rate": 3.48563293718249e-06,
+ "loss": 0.481,
+ "step": 773
+ },
+ {
+ "epoch": 2.419624217118998,
+ "grad_norm": 1.0680506229400635,
+ "learning_rate": 3.481652506365992e-06,
+ "loss": 0.4898,
+ "step": 774
+ },
+ {
+ "epoch": 2.422755741127349,
+ "grad_norm": 0.721493661403656,
+ "learning_rate": 3.477669131244122e-06,
+ "loss": 0.4813,
+ "step": 775
+ },
+ {
+ "epoch": 2.4258872651356995,
+ "grad_norm": 0.7993559837341309,
+ "learning_rate": 3.4736828237643616e-06,
+ "loss": 0.5179,
+ "step": 776
+ },
+ {
+ "epoch": 2.4290187891440502,
+ "grad_norm": 0.8148090839385986,
+ "learning_rate": 3.4696935958829837e-06,
+ "loss": 0.4753,
+ "step": 777
+ },
+ {
+ "epoch": 2.432150313152401,
+ "grad_norm": 0.8006406426429749,
+ "learning_rate": 3.465701459565022e-06,
+ "loss": 0.501,
+ "step": 778
+ },
+ {
+ "epoch": 2.4352818371607516,
+ "grad_norm": 0.9307970404624939,
+ "learning_rate": 3.4617064267842327e-06,
+ "loss": 0.487,
+ "step": 779
+ },
+ {
+ "epoch": 2.4384133611691023,
+ "grad_norm": 0.7192814946174622,
+ "learning_rate": 3.45770850952306e-06,
+ "loss": 0.4769,
+ "step": 780
+ },
+ {
+ "epoch": 2.441544885177453,
+ "grad_norm": 0.7386271953582764,
+ "learning_rate": 3.4537077197726023e-06,
+ "loss": 0.4726,
+ "step": 781
+ },
+ {
+ "epoch": 2.4446764091858038,
+ "grad_norm": 0.8006314039230347,
+ "learning_rate": 3.449704069532567e-06,
+ "loss": 0.494,
+ "step": 782
+ },
+ {
+ "epoch": 2.4478079331941545,
+ "grad_norm": 0.7466752529144287,
+ "learning_rate": 3.4456975708112477e-06,
+ "loss": 0.4778,
+ "step": 783
+ },
+ {
+ "epoch": 2.450939457202505,
+ "grad_norm": 0.8348856568336487,
+ "learning_rate": 3.4416882356254777e-06,
+ "loss": 0.4766,
+ "step": 784
+ },
+ {
+ "epoch": 2.454070981210856,
+ "grad_norm": 0.754851758480072,
+ "learning_rate": 3.4376760760005994e-06,
+ "loss": 0.4673,
+ "step": 785
+ },
+ {
+ "epoch": 2.4572025052192066,
+ "grad_norm": 0.7854018807411194,
+ "learning_rate": 3.433661103970427e-06,
+ "loss": 0.4954,
+ "step": 786
+ },
+ {
+ "epoch": 2.4603340292275573,
+ "grad_norm": 0.7238256931304932,
+ "learning_rate": 3.4296433315772084e-06,
+ "loss": 0.496,
+ "step": 787
+ },
+ {
+ "epoch": 2.463465553235908,
+ "grad_norm": 0.7007659673690796,
+ "learning_rate": 3.4256227708715915e-06,
+ "loss": 0.4793,
+ "step": 788
+ },
+ {
+ "epoch": 2.4665970772442587,
+ "grad_norm": 0.7234371900558472,
+ "learning_rate": 3.421599433912588e-06,
+ "loss": 0.4935,
+ "step": 789
+ },
+ {
+ "epoch": 2.4697286012526094,
+ "grad_norm": 0.7537544965744019,
+ "learning_rate": 3.4175733327675355e-06,
+ "loss": 0.5194,
+ "step": 790
+ },
+ {
+ "epoch": 2.4728601252609606,
+ "grad_norm": 0.7608047127723694,
+ "learning_rate": 3.4135444795120633e-06,
+ "loss": 0.4793,
+ "step": 791
+ },
+ {
+ "epoch": 2.4759916492693113,
+ "grad_norm": 0.7847898006439209,
+ "learning_rate": 3.4095128862300542e-06,
+ "loss": 0.4877,
+ "step": 792
+ },
+ {
+ "epoch": 2.479123173277662,
+ "grad_norm": 0.8002011179924011,
+ "learning_rate": 3.405478565013609e-06,
+ "loss": 0.4927,
+ "step": 793
+ },
+ {
+ "epoch": 2.4822546972860127,
+ "grad_norm": 0.8200219869613647,
+ "learning_rate": 3.401441527963013e-06,
+ "loss": 0.4997,
+ "step": 794
+ },
+ {
+ "epoch": 2.4853862212943634,
+ "grad_norm": 0.7220162749290466,
+ "learning_rate": 3.3974017871866938e-06,
+ "loss": 0.4668,
+ "step": 795
+ },
+ {
+ "epoch": 2.488517745302714,
+ "grad_norm": 0.8022251129150391,
+ "learning_rate": 3.3933593548011912e-06,
+ "loss": 0.5179,
+ "step": 796
+ },
+ {
+ "epoch": 2.491649269311065,
+ "grad_norm": 0.7914465069770813,
+ "learning_rate": 3.389314242931115e-06,
+ "loss": 0.4943,
+ "step": 797
+ },
+ {
+ "epoch": 2.4947807933194155,
+ "grad_norm": 1.1399403810501099,
+ "learning_rate": 3.385266463709116e-06,
+ "loss": 0.4896,
+ "step": 798
+ },
+ {
+ "epoch": 2.497912317327766,
+ "grad_norm": 0.8098909854888916,
+ "learning_rate": 3.38121602927584e-06,
+ "loss": 0.4904,
+ "step": 799
+ },
+ {
+ "epoch": 2.501043841336117,
+ "grad_norm": 0.7434052228927612,
+ "learning_rate": 3.377162951779902e-06,
+ "loss": 0.4864,
+ "step": 800
+ },
+ {
+ "epoch": 2.5041753653444676,
+ "grad_norm": 0.7397809624671936,
+ "learning_rate": 3.3731072433778407e-06,
+ "loss": 0.486,
+ "step": 801
+ },
+ {
+ "epoch": 2.5073068893528183,
+ "grad_norm": 0.99027019739151,
+ "learning_rate": 3.3690489162340867e-06,
+ "loss": 0.5011,
+ "step": 802
+ },
+ {
+ "epoch": 2.510438413361169,
+ "grad_norm": 0.8443610668182373,
+ "learning_rate": 3.3649879825209246e-06,
+ "loss": 0.455,
+ "step": 803
+ },
+ {
+ "epoch": 2.5135699373695197,
+ "grad_norm": 0.755649983882904,
+ "learning_rate": 3.3609244544184604e-06,
+ "loss": 0.4563,
+ "step": 804
+ },
+ {
+ "epoch": 2.5167014613778704,
+ "grad_norm": 0.728018045425415,
+ "learning_rate": 3.3568583441145765e-06,
+ "loss": 0.471,
+ "step": 805
+ },
+ {
+ "epoch": 2.519832985386221,
+ "grad_norm": 0.7777130603790283,
+ "learning_rate": 3.352789663804904e-06,
+ "loss": 0.4667,
+ "step": 806
+ },
+ {
+ "epoch": 2.522964509394572,
+ "grad_norm": 0.7545619606971741,
+ "learning_rate": 3.3487184256927785e-06,
+ "loss": 0.4915,
+ "step": 807
+ },
+ {
+ "epoch": 2.526096033402923,
+ "grad_norm": 0.8374579548835754,
+ "learning_rate": 3.3446446419892127e-06,
+ "loss": 0.485,
+ "step": 808
+ },
+ {
+ "epoch": 2.5292275574112733,
+ "grad_norm": 0.7354666590690613,
+ "learning_rate": 3.340568324912849e-06,
+ "loss": 0.5254,
+ "step": 809
+ },
+ {
+ "epoch": 2.5323590814196244,
+ "grad_norm": 0.7581545114517212,
+ "learning_rate": 3.3364894866899324e-06,
+ "loss": 0.4483,
+ "step": 810
+ },
+ {
+ "epoch": 2.535490605427975,
+ "grad_norm": 0.8077559471130371,
+ "learning_rate": 3.3324081395542662e-06,
+ "loss": 0.5022,
+ "step": 811
+ },
+ {
+ "epoch": 2.538622129436326,
+ "grad_norm": 0.8827865719795227,
+ "learning_rate": 3.3283242957471806e-06,
+ "loss": 0.4909,
+ "step": 812
+ },
+ {
+ "epoch": 2.5417536534446765,
+ "grad_norm": 0.9139482378959656,
+ "learning_rate": 3.3242379675174953e-06,
+ "loss": 0.5205,
+ "step": 813
+ },
+ {
+ "epoch": 2.5448851774530272,
+ "grad_norm": 0.7616812586784363,
+ "learning_rate": 3.3201491671214797e-06,
+ "loss": 0.4744,
+ "step": 814
+ },
+ {
+ "epoch": 2.548016701461378,
+ "grad_norm": 0.987173318862915,
+ "learning_rate": 3.3160579068228183e-06,
+ "loss": 0.4876,
+ "step": 815
+ },
+ {
+ "epoch": 2.5511482254697286,
+ "grad_norm": 1.259137749671936,
+ "learning_rate": 3.311964198892574e-06,
+ "loss": 0.454,
+ "step": 816
+ },
+ {
+ "epoch": 2.5542797494780793,
+ "grad_norm": 0.7866336703300476,
+ "learning_rate": 3.3078680556091513e-06,
+ "loss": 0.5107,
+ "step": 817
+ },
+ {
+ "epoch": 2.55741127348643,
+ "grad_norm": 0.9311352372169495,
+ "learning_rate": 3.303769489258258e-06,
+ "loss": 0.4843,
+ "step": 818
+ },
+ {
+ "epoch": 2.5605427974947808,
+ "grad_norm": 0.8556346893310547,
+ "learning_rate": 3.299668512132872e-06,
+ "loss": 0.5017,
+ "step": 819
+ },
+ {
+ "epoch": 2.5636743215031315,
+ "grad_norm": 2.810598373413086,
+ "learning_rate": 3.2955651365331988e-06,
+ "loss": 0.5223,
+ "step": 820
+ },
+ {
+ "epoch": 2.566805845511482,
+ "grad_norm": 1.0120766162872314,
+ "learning_rate": 3.29145937476664e-06,
+ "loss": 0.4959,
+ "step": 821
+ },
+ {
+ "epoch": 2.569937369519833,
+ "grad_norm": 0.751412034034729,
+ "learning_rate": 3.287351239147752e-06,
+ "loss": 0.4941,
+ "step": 822
+ },
+ {
+ "epoch": 2.5730688935281836,
+ "grad_norm": 1.9308148622512817,
+ "learning_rate": 3.2832407419982136e-06,
+ "loss": 0.4965,
+ "step": 823
+ },
+ {
+ "epoch": 2.5762004175365343,
+ "grad_norm": 0.9215649962425232,
+ "learning_rate": 3.279127895646786e-06,
+ "loss": 0.5071,
+ "step": 824
+ },
+ {
+ "epoch": 2.5793319415448854,
+ "grad_norm": 0.7599574327468872,
+ "learning_rate": 3.2750127124292754e-06,
+ "loss": 0.5191,
+ "step": 825
+ },
+ {
+ "epoch": 2.5824634655532357,
+ "grad_norm": 0.8234940767288208,
+ "learning_rate": 3.270895204688496e-06,
+ "loss": 0.4947,
+ "step": 826
+ },
+ {
+ "epoch": 2.585594989561587,
+ "grad_norm": 0.8401572704315186,
+ "learning_rate": 3.266775384774238e-06,
+ "loss": 0.4547,
+ "step": 827
+ },
+ {
+ "epoch": 2.588726513569937,
+ "grad_norm": 0.8927991986274719,
+ "learning_rate": 3.262653265043223e-06,
+ "loss": 0.4296,
+ "step": 828
+ },
+ {
+ "epoch": 2.5918580375782883,
+ "grad_norm": 0.8009241223335266,
+ "learning_rate": 3.2585288578590716e-06,
+ "loss": 0.4578,
+ "step": 829
+ },
+ {
+ "epoch": 2.594989561586639,
+ "grad_norm": 0.7982021570205688,
+ "learning_rate": 3.2544021755922663e-06,
+ "loss": 0.4961,
+ "step": 830
+ },
+ {
+ "epoch": 2.5981210855949897,
+ "grad_norm": 0.7096095681190491,
+ "learning_rate": 3.2502732306201112e-06,
+ "loss": 0.4975,
+ "step": 831
+ },
+ {
+ "epoch": 2.6012526096033404,
+ "grad_norm": 1.1092045307159424,
+ "learning_rate": 3.246142035326699e-06,
+ "loss": 0.4705,
+ "step": 832
+ },
+ {
+ "epoch": 2.604384133611691,
+ "grad_norm": 0.785799503326416,
+ "learning_rate": 3.24200860210287e-06,
+ "loss": 0.479,
+ "step": 833
+ },
+ {
+ "epoch": 2.607515657620042,
+ "grad_norm": 0.7315773367881775,
+ "learning_rate": 3.2378729433461804e-06,
+ "loss": 0.5036,
+ "step": 834
+ },
+ {
+ "epoch": 2.6106471816283925,
+ "grad_norm": 0.7840189337730408,
+ "learning_rate": 3.233735071460856e-06,
+ "loss": 0.4967,
+ "step": 835
+ },
+ {
+ "epoch": 2.613778705636743,
+ "grad_norm": 0.7186565399169922,
+ "learning_rate": 3.2295949988577655e-06,
+ "loss": 0.4889,
+ "step": 836
+ },
+ {
+ "epoch": 2.616910229645094,
+ "grad_norm": 0.766054093837738,
+ "learning_rate": 3.2254527379543747e-06,
+ "loss": 0.539,
+ "step": 837
+ },
+ {
+ "epoch": 2.6200417536534446,
+ "grad_norm": 0.7705381512641907,
+ "learning_rate": 3.2213083011747165e-06,
+ "loss": 0.4968,
+ "step": 838
+ },
+ {
+ "epoch": 2.6231732776617953,
+ "grad_norm": 1.3530604839324951,
+ "learning_rate": 3.217161700949346e-06,
+ "loss": 0.52,
+ "step": 839
+ },
+ {
+ "epoch": 2.626304801670146,
+ "grad_norm": 0.737389862537384,
+ "learning_rate": 3.2130129497153107e-06,
+ "loss": 0.4823,
+ "step": 840
+ },
+ {
+ "epoch": 2.6294363256784967,
+ "grad_norm": 0.9121193885803223,
+ "learning_rate": 3.2088620599161064e-06,
+ "loss": 0.4592,
+ "step": 841
+ },
+ {
+ "epoch": 2.632567849686848,
+ "grad_norm": 0.8869616389274597,
+ "learning_rate": 3.2047090440016464e-06,
+ "loss": 0.5001,
+ "step": 842
+ },
+ {
+ "epoch": 2.635699373695198,
+ "grad_norm": 0.8447219133377075,
+ "learning_rate": 3.200553914428219e-06,
+ "loss": 0.4969,
+ "step": 843
+ },
+ {
+ "epoch": 2.6388308977035493,
+ "grad_norm": 0.8877657055854797,
+ "learning_rate": 3.1963966836584524e-06,
+ "loss": 0.4718,
+ "step": 844
+ },
+ {
+ "epoch": 2.6419624217118995,
+ "grad_norm": 1.045272946357727,
+ "learning_rate": 3.192237364161277e-06,
+ "loss": 0.4864,
+ "step": 845
+ },
+ {
+ "epoch": 2.6450939457202507,
+ "grad_norm": 0.8485913276672363,
+ "learning_rate": 3.1880759684118876e-06,
+ "loss": 0.4688,
+ "step": 846
+ },
+ {
+ "epoch": 2.6482254697286014,
+ "grad_norm": 0.7328930497169495,
+ "learning_rate": 3.183912508891709e-06,
+ "loss": 0.4728,
+ "step": 847
+ },
+ {
+ "epoch": 2.651356993736952,
+ "grad_norm": 0.7377315759658813,
+ "learning_rate": 3.179746998088351e-06,
+ "loss": 0.4672,
+ "step": 848
+ },
+ {
+ "epoch": 2.654488517745303,
+ "grad_norm": 0.8017002940177917,
+ "learning_rate": 3.1755794484955817e-06,
+ "loss": 0.4884,
+ "step": 849
+ },
+ {
+ "epoch": 2.6576200417536535,
+ "grad_norm": 1.045470952987671,
+ "learning_rate": 3.171409872613278e-06,
+ "loss": 0.4789,
+ "step": 850
+ },
+ {
+ "epoch": 2.6607515657620042,
+ "grad_norm": 0.8823987245559692,
+ "learning_rate": 3.1672382829473997e-06,
+ "loss": 0.5117,
+ "step": 851
+ },
+ {
+ "epoch": 2.663883089770355,
+ "grad_norm": 0.7395204901695251,
+ "learning_rate": 3.163064692009944e-06,
+ "loss": 0.5476,
+ "step": 852
+ },
+ {
+ "epoch": 2.6670146137787056,
+ "grad_norm": 0.7778941988945007,
+ "learning_rate": 3.1588891123189103e-06,
+ "loss": 0.5092,
+ "step": 853
+ },
+ {
+ "epoch": 2.6701461377870563,
+ "grad_norm": 0.8072531819343567,
+ "learning_rate": 3.1547115563982643e-06,
+ "loss": 0.4961,
+ "step": 854
+ },
+ {
+ "epoch": 2.673277661795407,
+ "grad_norm": 0.9018139243125916,
+ "learning_rate": 3.1505320367778993e-06,
+ "loss": 0.4624,
+ "step": 855
+ },
+ {
+ "epoch": 2.6764091858037578,
+ "grad_norm": 0.8554450869560242,
+ "learning_rate": 3.1463505659935957e-06,
+ "loss": 0.4971,
+ "step": 856
+ },
+ {
+ "epoch": 2.6795407098121085,
+ "grad_norm": 0.7727259397506714,
+ "learning_rate": 3.14216715658699e-06,
+ "loss": 0.4544,
+ "step": 857
+ },
+ {
+ "epoch": 2.682672233820459,
+ "grad_norm": 0.9253409504890442,
+ "learning_rate": 3.137981821105529e-06,
+ "loss": 0.4893,
+ "step": 858
+ },
+ {
+ "epoch": 2.68580375782881,
+ "grad_norm": 0.8809456825256348,
+ "learning_rate": 3.1337945721024403e-06,
+ "loss": 0.5242,
+ "step": 859
+ },
+ {
+ "epoch": 2.6889352818371606,
+ "grad_norm": 0.981755256652832,
+ "learning_rate": 3.129605422136689e-06,
+ "loss": 0.4686,
+ "step": 860
+ },
+ {
+ "epoch": 2.6920668058455117,
+ "grad_norm": 1.1278467178344727,
+ "learning_rate": 3.1254143837729412e-06,
+ "loss": 0.4813,
+ "step": 861
+ },
+ {
+ "epoch": 2.695198329853862,
+ "grad_norm": 0.8529123663902283,
+ "learning_rate": 3.1212214695815285e-06,
+ "loss": 0.4723,
+ "step": 862
+ },
+ {
+ "epoch": 2.698329853862213,
+ "grad_norm": 0.7764189839363098,
+ "learning_rate": 3.1170266921384075e-06,
+ "loss": 0.4777,
+ "step": 863
+ },
+ {
+ "epoch": 2.701461377870564,
+ "grad_norm": 0.7364740967750549,
+ "learning_rate": 3.112830064025124e-06,
+ "loss": 0.4975,
+ "step": 864
+ },
+ {
+ "epoch": 2.7045929018789145,
+ "grad_norm": 0.7594549059867859,
+ "learning_rate": 3.108631597828774e-06,
+ "loss": 0.5083,
+ "step": 865
+ },
+ {
+ "epoch": 2.7077244258872653,
+ "grad_norm": 0.7337073683738708,
+ "learning_rate": 3.104431306141968e-06,
+ "loss": 0.4778,
+ "step": 866
+ },
+ {
+ "epoch": 2.710855949895616,
+ "grad_norm": 0.7709932327270508,
+ "learning_rate": 3.1002292015627894e-06,
+ "loss": 0.4754,
+ "step": 867
+ },
+ {
+ "epoch": 2.7139874739039667,
+ "grad_norm": 0.8001313209533691,
+ "learning_rate": 3.0960252966947605e-06,
+ "loss": 0.4489,
+ "step": 868
+ },
+ {
+ "epoch": 2.7171189979123174,
+ "grad_norm": 0.8280592560768127,
+ "learning_rate": 3.091819604146804e-06,
+ "loss": 0.4606,
+ "step": 869
+ },
+ {
+ "epoch": 2.720250521920668,
+ "grad_norm": 0.7463534474372864,
+ "learning_rate": 3.0876121365332024e-06,
+ "loss": 0.5168,
+ "step": 870
+ },
+ {
+ "epoch": 2.723382045929019,
+ "grad_norm": 0.9011222124099731,
+ "learning_rate": 3.0834029064735636e-06,
+ "loss": 0.5163,
+ "step": 871
+ },
+ {
+ "epoch": 2.7265135699373695,
+ "grad_norm": 0.7811456322669983,
+ "learning_rate": 3.0791919265927827e-06,
+ "loss": 0.5004,
+ "step": 872
+ },
+ {
+ "epoch": 2.72964509394572,
+ "grad_norm": 0.9251837134361267,
+ "learning_rate": 3.0749792095210003e-06,
+ "loss": 0.5081,
+ "step": 873
+ },
+ {
+ "epoch": 2.732776617954071,
+ "grad_norm": 0.8347085118293762,
+ "learning_rate": 3.0707647678935695e-06,
+ "loss": 0.4793,
+ "step": 874
+ },
+ {
+ "epoch": 2.7359081419624216,
+ "grad_norm": 0.9766442179679871,
+ "learning_rate": 3.0665486143510153e-06,
+ "loss": 0.493,
+ "step": 875
+ },
+ {
+ "epoch": 2.7390396659707723,
+ "grad_norm": 0.7692548036575317,
+ "learning_rate": 3.0623307615389975e-06,
+ "loss": 0.4874,
+ "step": 876
+ },
+ {
+ "epoch": 2.742171189979123,
+ "grad_norm": 0.7714599370956421,
+ "learning_rate": 3.0581112221082727e-06,
+ "loss": 0.4929,
+ "step": 877
+ },
+ {
+ "epoch": 2.745302713987474,
+ "grad_norm": 0.7797786593437195,
+ "learning_rate": 3.053890008714655e-06,
+ "loss": 0.4359,
+ "step": 878
+ },
+ {
+ "epoch": 2.7484342379958244,
+ "grad_norm": 5.118397235870361,
+ "learning_rate": 3.049667134018981e-06,
+ "loss": 0.4634,
+ "step": 879
+ },
+ {
+ "epoch": 2.7515657620041756,
+ "grad_norm": 0.7684539556503296,
+ "learning_rate": 3.04544261068707e-06,
+ "loss": 0.4688,
+ "step": 880
+ },
+ {
+ "epoch": 2.754697286012526,
+ "grad_norm": 0.8678610920906067,
+ "learning_rate": 3.0412164513896846e-06,
+ "loss": 0.5213,
+ "step": 881
+ },
+ {
+ "epoch": 2.757828810020877,
+ "grad_norm": 0.80293869972229,
+ "learning_rate": 3.0369886688024954e-06,
+ "loss": 0.4392,
+ "step": 882
+ },
+ {
+ "epoch": 2.7609603340292277,
+ "grad_norm": 0.7438644766807556,
+ "learning_rate": 3.0327592756060412e-06,
+ "loss": 0.528,
+ "step": 883
+ },
+ {
+ "epoch": 2.7640918580375784,
+ "grad_norm": 0.7701645493507385,
+ "learning_rate": 3.0285282844856917e-06,
+ "loss": 0.504,
+ "step": 884
+ },
+ {
+ "epoch": 2.767223382045929,
+ "grad_norm": 0.7113856673240662,
+ "learning_rate": 3.024295708131611e-06,
+ "loss": 0.4819,
+ "step": 885
+ },
+ {
+ "epoch": 2.77035490605428,
+ "grad_norm": 1.2697532176971436,
+ "learning_rate": 3.020061559238714e-06,
+ "loss": 0.5009,
+ "step": 886
+ },
+ {
+ "epoch": 2.7734864300626305,
+ "grad_norm": 1.0299439430236816,
+ "learning_rate": 3.015825850506636e-06,
+ "loss": 0.4707,
+ "step": 887
+ },
+ {
+ "epoch": 2.776617954070981,
+ "grad_norm": 0.9703660607337952,
+ "learning_rate": 3.011588594639688e-06,
+ "loss": 0.4102,
+ "step": 888
+ },
+ {
+ "epoch": 2.779749478079332,
+ "grad_norm": 0.7357314825057983,
+ "learning_rate": 3.0073498043468247e-06,
+ "loss": 0.4649,
+ "step": 889
+ },
+ {
+ "epoch": 2.7828810020876826,
+ "grad_norm": 0.7815471291542053,
+ "learning_rate": 3.0031094923415993e-06,
+ "loss": 0.469,
+ "step": 890
+ },
+ {
+ "epoch": 2.7860125260960333,
+ "grad_norm": 0.7856019139289856,
+ "learning_rate": 2.9988676713421318e-06,
+ "loss": 0.4241,
+ "step": 891
+ },
+ {
+ "epoch": 2.789144050104384,
+ "grad_norm": 0.7668167352676392,
+ "learning_rate": 2.994624354071066e-06,
+ "loss": 0.5309,
+ "step": 892
+ },
+ {
+ "epoch": 2.7922755741127347,
+ "grad_norm": 0.7485945820808411,
+ "learning_rate": 2.990379553255535e-06,
+ "loss": 0.5173,
+ "step": 893
+ },
+ {
+ "epoch": 2.7954070981210855,
+ "grad_norm": 0.8065824508666992,
+ "learning_rate": 2.986133281627123e-06,
+ "loss": 0.4995,
+ "step": 894
+ },
+ {
+ "epoch": 2.798538622129436,
+ "grad_norm": 0.7156995534896851,
+ "learning_rate": 2.9818855519218217e-06,
+ "loss": 0.4642,
+ "step": 895
+ },
+ {
+ "epoch": 2.801670146137787,
+ "grad_norm": 0.9115403890609741,
+ "learning_rate": 2.97763637688e-06,
+ "loss": 0.4799,
+ "step": 896
+ },
+ {
+ "epoch": 2.804801670146138,
+ "grad_norm": 0.7466689944267273,
+ "learning_rate": 2.9733857692463584e-06,
+ "loss": 0.4942,
+ "step": 897
+ },
+ {
+ "epoch": 2.8079331941544883,
+ "grad_norm": 0.7484914064407349,
+ "learning_rate": 2.9691337417698974e-06,
+ "loss": 0.4618,
+ "step": 898
+ },
+ {
+ "epoch": 2.8110647181628394,
+ "grad_norm": 0.816704511642456,
+ "learning_rate": 2.9648803072038736e-06,
+ "loss": 0.4748,
+ "step": 899
+ },
+ {
+ "epoch": 2.81419624217119,
+ "grad_norm": 0.7627584934234619,
+ "learning_rate": 2.9606254783057666e-06,
+ "loss": 0.4667,
+ "step": 900
+ },
+ {
+ "epoch": 2.817327766179541,
+ "grad_norm": 0.7341011166572571,
+ "learning_rate": 2.9563692678372342e-06,
+ "loss": 0.4802,
+ "step": 901
+ },
+ {
+ "epoch": 2.8204592901878915,
+ "grad_norm": 1.2541382312774658,
+ "learning_rate": 2.952111688564082e-06,
+ "loss": 0.5231,
+ "step": 902
+ },
+ {
+ "epoch": 2.8235908141962422,
+ "grad_norm": 0.7172819375991821,
+ "learning_rate": 2.9478527532562184e-06,
+ "loss": 0.4488,
+ "step": 903
+ },
+ {
+ "epoch": 2.826722338204593,
+ "grad_norm": 0.774529218673706,
+ "learning_rate": 2.943592474687621e-06,
+ "loss": 0.4964,
+ "step": 904
+ },
+ {
+ "epoch": 2.8298538622129437,
+ "grad_norm": 0.7315672636032104,
+ "learning_rate": 2.939330865636294e-06,
+ "loss": 0.4817,
+ "step": 905
+ },
+ {
+ "epoch": 2.8329853862212944,
+ "grad_norm": 0.7698234915733337,
+ "learning_rate": 2.9350679388842347e-06,
+ "loss": 0.5075,
+ "step": 906
+ },
+ {
+ "epoch": 2.836116910229645,
+ "grad_norm": 0.7717766761779785,
+ "learning_rate": 2.93080370721739e-06,
+ "loss": 0.4789,
+ "step": 907
+ },
+ {
+ "epoch": 2.8392484342379958,
+ "grad_norm": 0.7383570075035095,
+ "learning_rate": 2.926538183425622e-06,
+ "loss": 0.4992,
+ "step": 908
+ },
+ {
+ "epoch": 2.8423799582463465,
+ "grad_norm": 0.7858864068984985,
+ "learning_rate": 2.92227138030267e-06,
+ "loss": 0.4993,
+ "step": 909
+ },
+ {
+ "epoch": 2.845511482254697,
+ "grad_norm": 0.8220369219779968,
+ "learning_rate": 2.9180033106461076e-06,
+ "loss": 0.4929,
+ "step": 910
+ },
+ {
+ "epoch": 2.848643006263048,
+ "grad_norm": 0.7507152557373047,
+ "learning_rate": 2.9137339872573086e-06,
+ "loss": 0.4394,
+ "step": 911
+ },
+ {
+ "epoch": 2.8517745302713986,
+ "grad_norm": 0.7935269474983215,
+ "learning_rate": 2.9094634229414063e-06,
+ "loss": 0.4656,
+ "step": 912
+ },
+ {
+ "epoch": 2.8549060542797493,
+ "grad_norm": 0.9187721610069275,
+ "learning_rate": 2.9051916305072576e-06,
+ "loss": 0.4918,
+ "step": 913
+ },
+ {
+ "epoch": 2.8580375782881005,
+ "grad_norm": 0.8699706792831421,
+ "learning_rate": 2.9009186227674e-06,
+ "loss": 0.5106,
+ "step": 914
+ },
+ {
+ "epoch": 2.8611691022964507,
+ "grad_norm": 0.7175673246383667,
+ "learning_rate": 2.896644412538021e-06,
+ "loss": 0.5105,
+ "step": 915
+ },
+ {
+ "epoch": 2.864300626304802,
+ "grad_norm": 0.8563990592956543,
+ "learning_rate": 2.892369012638909e-06,
+ "loss": 0.4993,
+ "step": 916
+ },
+ {
+ "epoch": 2.867432150313152,
+ "grad_norm": 0.7891882658004761,
+ "learning_rate": 2.8880924358934246e-06,
+ "loss": 0.4983,
+ "step": 917
+ },
+ {
+ "epoch": 2.8705636743215033,
+ "grad_norm": 0.9247110486030579,
+ "learning_rate": 2.8838146951284575e-06,
+ "loss": 0.4789,
+ "step": 918
+ },
+ {
+ "epoch": 2.873695198329854,
+ "grad_norm": 0.7523055672645569,
+ "learning_rate": 2.879535803174387e-06,
+ "loss": 0.4982,
+ "step": 919
+ },
+ {
+ "epoch": 2.8768267223382047,
+ "grad_norm": 0.8096909523010254,
+ "learning_rate": 2.8752557728650467e-06,
+ "loss": 0.4958,
+ "step": 920
+ },
+ {
+ "epoch": 2.8799582463465554,
+ "grad_norm": 2.3476874828338623,
+ "learning_rate": 2.870974617037684e-06,
+ "loss": 0.491,
+ "step": 921
+ },
+ {
+ "epoch": 2.883089770354906,
+ "grad_norm": 0.8388578295707703,
+ "learning_rate": 2.8666923485329224e-06,
+ "loss": 0.5275,
+ "step": 922
+ },
+ {
+ "epoch": 2.886221294363257,
+ "grad_norm": 0.8162729144096375,
+ "learning_rate": 2.8624089801947234e-06,
+ "loss": 0.4776,
+ "step": 923
+ },
+ {
+ "epoch": 2.8893528183716075,
+ "grad_norm": 0.7306103110313416,
+ "learning_rate": 2.858124524870345e-06,
+ "loss": 0.4814,
+ "step": 924
+ },
+ {
+ "epoch": 2.892484342379958,
+ "grad_norm": 0.8736817836761475,
+ "learning_rate": 2.853838995410307e-06,
+ "loss": 0.5097,
+ "step": 925
+ },
+ {
+ "epoch": 2.895615866388309,
+ "grad_norm": 0.7771823406219482,
+ "learning_rate": 2.8495524046683525e-06,
+ "loss": 0.4806,
+ "step": 926
+ },
+ {
+ "epoch": 2.8987473903966596,
+ "grad_norm": 0.9421334862709045,
+ "learning_rate": 2.845264765501404e-06,
+ "loss": 0.5055,
+ "step": 927
+ },
+ {
+ "epoch": 2.9018789144050103,
+ "grad_norm": 0.8403921127319336,
+ "learning_rate": 2.8409760907695314e-06,
+ "loss": 0.4775,
+ "step": 928
+ },
+ {
+ "epoch": 2.905010438413361,
+ "grad_norm": 0.8095362186431885,
+ "learning_rate": 2.836686393335909e-06,
+ "loss": 0.4532,
+ "step": 929
+ },
+ {
+ "epoch": 2.9081419624217117,
+ "grad_norm": 0.7340645790100098,
+ "learning_rate": 2.8323956860667813e-06,
+ "loss": 0.4835,
+ "step": 930
+ },
+ {
+ "epoch": 2.911273486430063,
+ "grad_norm": 0.6970911026000977,
+ "learning_rate": 2.828103981831417e-06,
+ "loss": 0.4999,
+ "step": 931
+ },
+ {
+ "epoch": 2.914405010438413,
+ "grad_norm": 0.8136418461799622,
+ "learning_rate": 2.8238112935020794e-06,
+ "loss": 0.5038,
+ "step": 932
+ },
+ {
+ "epoch": 2.9175365344467643,
+ "grad_norm": 0.9045608043670654,
+ "learning_rate": 2.8195176339539816e-06,
+ "loss": 0.486,
+ "step": 933
+ },
+ {
+ "epoch": 2.9206680584551146,
+ "grad_norm": 1.14940345287323,
+ "learning_rate": 2.815223016065249e-06,
+ "loss": 0.5079,
+ "step": 934
+ },
+ {
+ "epoch": 2.9237995824634657,
+ "grad_norm": 0.7411190867424011,
+ "learning_rate": 2.8109274527168826e-06,
+ "loss": 0.4564,
+ "step": 935
+ },
+ {
+ "epoch": 2.9269311064718164,
+ "grad_norm": 0.8903455138206482,
+ "learning_rate": 2.806630956792719e-06,
+ "loss": 0.451,
+ "step": 936
+ },
+ {
+ "epoch": 2.930062630480167,
+ "grad_norm": 0.7865445017814636,
+ "learning_rate": 2.8023335411793904e-06,
+ "loss": 0.4658,
+ "step": 937
+ },
+ {
+ "epoch": 2.933194154488518,
+ "grad_norm": 0.8185790777206421,
+ "learning_rate": 2.798035218766292e-06,
+ "loss": 0.4776,
+ "step": 938
+ },
+ {
+ "epoch": 2.9363256784968685,
+ "grad_norm": 0.7516276836395264,
+ "learning_rate": 2.793736002445531e-06,
+ "loss": 0.4447,
+ "step": 939
+ },
+ {
+ "epoch": 2.9394572025052192,
+ "grad_norm": 0.738080620765686,
+ "learning_rate": 2.789435905111903e-06,
+ "loss": 0.4832,
+ "step": 940
+ },
+ {
+ "epoch": 2.94258872651357,
+ "grad_norm": 0.7971507906913757,
+ "learning_rate": 2.785134939662843e-06,
+ "loss": 0.4835,
+ "step": 941
+ },
+ {
+ "epoch": 2.9457202505219207,
+ "grad_norm": 0.7529093623161316,
+ "learning_rate": 2.78083311899839e-06,
+ "loss": 0.4759,
+ "step": 942
+ },
+ {
+ "epoch": 2.9488517745302714,
+ "grad_norm": 0.8222358226776123,
+ "learning_rate": 2.7765304560211482e-06,
+ "loss": 0.4365,
+ "step": 943
+ },
+ {
+ "epoch": 2.951983298538622,
+ "grad_norm": 0.729945182800293,
+ "learning_rate": 2.7722269636362462e-06,
+ "loss": 0.5026,
+ "step": 944
+ },
+ {
+ "epoch": 2.9551148225469728,
+ "grad_norm": 0.7287900447845459,
+ "learning_rate": 2.767922654751306e-06,
+ "loss": 0.4916,
+ "step": 945
+ },
+ {
+ "epoch": 2.9582463465553235,
+ "grad_norm": 0.869637131690979,
+ "learning_rate": 2.763617542276391e-06,
+ "loss": 0.5018,
+ "step": 946
+ },
+ {
+ "epoch": 2.961377870563674,
+ "grad_norm": 1.004909634590149,
+ "learning_rate": 2.7593116391239806e-06,
+ "loss": 0.5152,
+ "step": 947
+ },
+ {
+ "epoch": 2.964509394572025,
+ "grad_norm": 0.8263046145439148,
+ "learning_rate": 2.7550049582089235e-06,
+ "loss": 0.5249,
+ "step": 948
+ },
+ {
+ "epoch": 2.9676409185803756,
+ "grad_norm": 0.7963895797729492,
+ "learning_rate": 2.750697512448401e-06,
+ "loss": 0.5084,
+ "step": 949
+ },
+ {
+ "epoch": 2.9707724425887267,
+ "grad_norm": 0.7211249470710754,
+ "learning_rate": 2.7463893147618893e-06,
+ "loss": 0.4691,
+ "step": 950
+ },
+ {
+ "epoch": 2.973903966597077,
+ "grad_norm": 0.8010216951370239,
+ "learning_rate": 2.742080378071118e-06,
+ "loss": 0.5026,
+ "step": 951
+ },
+ {
+ "epoch": 2.977035490605428,
+ "grad_norm": 0.780078649520874,
+ "learning_rate": 2.7377707153000356e-06,
+ "loss": 0.4758,
+ "step": 952
+ },
+ {
+ "epoch": 2.980167014613779,
+ "grad_norm": 0.7728193998336792,
+ "learning_rate": 2.7334603393747684e-06,
+ "loss": 0.488,
+ "step": 953
+ },
+ {
+ "epoch": 2.9832985386221296,
+ "grad_norm": 0.836329996585846,
+ "learning_rate": 2.7291492632235777e-06,
+ "loss": 0.456,
+ "step": 954
+ },
+ {
+ "epoch": 2.9864300626304803,
+ "grad_norm": 0.7241990566253662,
+ "learning_rate": 2.724837499776831e-06,
+ "loss": 0.4953,
+ "step": 955
+ },
+ {
+ "epoch": 2.989561586638831,
+ "grad_norm": 0.7595076560974121,
+ "learning_rate": 2.7205250619669527e-06,
+ "loss": 0.446,
+ "step": 956
+ },
+ {
+ "epoch": 2.9926931106471817,
+ "grad_norm": 0.8177686333656311,
+ "learning_rate": 2.716211962728392e-06,
+ "loss": 0.5057,
+ "step": 957
+ },
+ {
+ "epoch": 2.9958246346555324,
+ "grad_norm": 0.7506977915763855,
+ "learning_rate": 2.71189821499758e-06,
+ "loss": 0.4821,
+ "step": 958
+ },
+ {
+ "epoch": 2.998956158663883,
+ "grad_norm": 0.8085163235664368,
+ "learning_rate": 2.7075838317128943e-06,
+ "loss": 0.5002,
+ "step": 959
+ },
+ {
+ "epoch": 3.0,
+ "grad_norm": 3.3674418926239014,
+ "learning_rate": 2.7032688258146207e-06,
+ "loss": 0.183,
+ "step": 960
+ },
+ {
+ "epoch": 3.0031315240083507,
+ "grad_norm": 1.066100835800171,
+ "learning_rate": 2.698953210244908e-06,
+ "loss": 0.4427,
+ "step": 961
+ },
+ {
+ "epoch": 3.0062630480167014,
+ "grad_norm": 0.7690210938453674,
+ "learning_rate": 2.6946369979477365e-06,
+ "loss": 0.4638,
+ "step": 962
+ },
+ {
+ "epoch": 3.009394572025052,
+ "grad_norm": 0.8114679455757141,
+ "learning_rate": 2.690320201868876e-06,
+ "loss": 0.4373,
+ "step": 963
+ },
+ {
+ "epoch": 3.012526096033403,
+ "grad_norm": 0.7680971622467041,
+ "learning_rate": 2.686002834955847e-06,
+ "loss": 0.4477,
+ "step": 964
+ },
+ {
+ "epoch": 3.0156576200417535,
+ "grad_norm": 0.7194678783416748,
+ "learning_rate": 2.6816849101578808e-06,
+ "loss": 0.455,
+ "step": 965
+ },
+ {
+ "epoch": 3.0187891440501042,
+ "grad_norm": 0.6890467405319214,
+ "learning_rate": 2.6773664404258854e-06,
+ "loss": 0.4246,
+ "step": 966
+ },
+ {
+ "epoch": 3.021920668058455,
+ "grad_norm": 0.8064301013946533,
+ "learning_rate": 2.6730474387123987e-06,
+ "loss": 0.497,
+ "step": 967
+ },
+ {
+ "epoch": 3.0250521920668056,
+ "grad_norm": 0.8164849281311035,
+ "learning_rate": 2.668727917971559e-06,
+ "loss": 0.457,
+ "step": 968
+ },
+ {
+ "epoch": 3.028183716075157,
+ "grad_norm": 0.7793440818786621,
+ "learning_rate": 2.6644078911590565e-06,
+ "loss": 0.4632,
+ "step": 969
+ },
+ {
+ "epoch": 3.0313152400835075,
+ "grad_norm": 0.7208535671234131,
+ "learning_rate": 2.6600873712321033e-06,
+ "loss": 0.4786,
+ "step": 970
+ },
+ {
+ "epoch": 3.034446764091858,
+ "grad_norm": 0.8905500769615173,
+ "learning_rate": 2.655766371149389e-06,
+ "loss": 0.4317,
+ "step": 971
+ },
+ {
+ "epoch": 3.037578288100209,
+ "grad_norm": 0.7537338733673096,
+ "learning_rate": 2.6514449038710418e-06,
+ "loss": 0.4783,
+ "step": 972
+ },
+ {
+ "epoch": 3.0407098121085596,
+ "grad_norm": 0.7901656031608582,
+ "learning_rate": 2.6471229823585937e-06,
+ "loss": 0.4762,
+ "step": 973
+ },
+ {
+ "epoch": 3.0438413361169103,
+ "grad_norm": 0.7427340745925903,
+ "learning_rate": 2.6428006195749373e-06,
+ "loss": 0.4782,
+ "step": 974
+ },
+ {
+ "epoch": 3.046972860125261,
+ "grad_norm": 0.8364680409431458,
+ "learning_rate": 2.6384778284842905e-06,
+ "loss": 0.4551,
+ "step": 975
+ },
+ {
+ "epoch": 3.0501043841336117,
+ "grad_norm": 1.1049500703811646,
+ "learning_rate": 2.634154622052155e-06,
+ "loss": 0.4451,
+ "step": 976
+ },
+ {
+ "epoch": 3.0532359081419624,
+ "grad_norm": 1.0797414779663086,
+ "learning_rate": 2.6298310132452757e-06,
+ "loss": 0.4512,
+ "step": 977
+ },
+ {
+ "epoch": 3.056367432150313,
+ "grad_norm": 0.8832088708877563,
+ "learning_rate": 2.62550701503161e-06,
+ "loss": 0.4569,
+ "step": 978
+ },
+ {
+ "epoch": 3.059498956158664,
+ "grad_norm": 0.748951256275177,
+ "learning_rate": 2.621182640380277e-06,
+ "loss": 0.4535,
+ "step": 979
+ },
+ {
+ "epoch": 3.0626304801670146,
+ "grad_norm": 0.776386022567749,
+ "learning_rate": 2.616857902261529e-06,
+ "loss": 0.4582,
+ "step": 980
+ },
+ {
+ "epoch": 3.0657620041753653,
+ "grad_norm": 0.730057418346405,
+ "learning_rate": 2.6125328136467074e-06,
+ "loss": 0.4581,
+ "step": 981
+ },
+ {
+ "epoch": 3.068893528183716,
+ "grad_norm": 0.7918877005577087,
+ "learning_rate": 2.6082073875082046e-06,
+ "loss": 0.4355,
+ "step": 982
+ },
+ {
+ "epoch": 3.0720250521920667,
+ "grad_norm": 0.803987443447113,
+ "learning_rate": 2.6038816368194265e-06,
+ "loss": 0.455,
+ "step": 983
+ },
+ {
+ "epoch": 3.0751565762004174,
+ "grad_norm": 1.2970365285873413,
+ "learning_rate": 2.599555574554749e-06,
+ "loss": 0.448,
+ "step": 984
+ },
+ {
+ "epoch": 3.078288100208768,
+ "grad_norm": 0.7814194560050964,
+ "learning_rate": 2.595229213689487e-06,
+ "loss": 0.4197,
+ "step": 985
+ },
+ {
+ "epoch": 3.081419624217119,
+ "grad_norm": 0.9865803122520447,
+ "learning_rate": 2.5909025671998483e-06,
+ "loss": 0.4649,
+ "step": 986
+ },
+ {
+ "epoch": 3.08455114822547,
+ "grad_norm": 0.7769168615341187,
+ "learning_rate": 2.586575648062898e-06,
+ "loss": 0.4287,
+ "step": 987
+ },
+ {
+ "epoch": 3.0876826722338206,
+ "grad_norm": 1.454988956451416,
+ "learning_rate": 2.582248469256519e-06,
+ "loss": 0.4548,
+ "step": 988
+ },
+ {
+ "epoch": 3.0908141962421714,
+ "grad_norm": 0.7656151056289673,
+ "learning_rate": 2.577921043759372e-06,
+ "loss": 0.4323,
+ "step": 989
+ },
+ {
+ "epoch": 3.093945720250522,
+ "grad_norm": 0.7989768385887146,
+ "learning_rate": 2.5735933845508598e-06,
+ "loss": 0.4616,
+ "step": 990
+ },
+ {
+ "epoch": 3.0970772442588728,
+ "grad_norm": 0.8711255192756653,
+ "learning_rate": 2.5692655046110855e-06,
+ "loss": 0.4923,
+ "step": 991
+ },
+ {
+ "epoch": 3.1002087682672235,
+ "grad_norm": 0.7333446145057678,
+ "learning_rate": 2.564937416920813e-06,
+ "loss": 0.4297,
+ "step": 992
+ },
+ {
+ "epoch": 3.103340292275574,
+ "grad_norm": 0.7611051201820374,
+ "learning_rate": 2.5606091344614297e-06,
+ "loss": 0.4276,
+ "step": 993
+ },
+ {
+ "epoch": 3.106471816283925,
+ "grad_norm": 0.9742305278778076,
+ "learning_rate": 2.5562806702149083e-06,
+ "loss": 0.4597,
+ "step": 994
+ },
+ {
+ "epoch": 3.1096033402922756,
+ "grad_norm": 0.8043314218521118,
+ "learning_rate": 2.551952037163765e-06,
+ "loss": 0.459,
+ "step": 995
+ },
+ {
+ "epoch": 3.1127348643006263,
+ "grad_norm": 0.7217118740081787,
+ "learning_rate": 2.5476232482910253e-06,
+ "loss": 0.4514,
+ "step": 996
+ },
+ {
+ "epoch": 3.115866388308977,
+ "grad_norm": 1.1410573720932007,
+ "learning_rate": 2.5432943165801765e-06,
+ "loss": 0.4942,
+ "step": 997
+ },
+ {
+ "epoch": 3.1189979123173277,
+ "grad_norm": 0.7188895344734192,
+ "learning_rate": 2.5389652550151416e-06,
+ "loss": 0.4641,
+ "step": 998
+ },
+ {
+ "epoch": 3.1221294363256784,
+ "grad_norm": 0.7223486304283142,
+ "learning_rate": 2.5346360765802276e-06,
+ "loss": 0.4382,
+ "step": 999
+ },
+ {
+ "epoch": 3.125260960334029,
+ "grad_norm": 0.8269757032394409,
+ "learning_rate": 2.5303067942600933e-06,
+ "loss": 0.4502,
+ "step": 1000
+ },
+ {
+ "epoch": 3.12839248434238,
+ "grad_norm": 0.7081235647201538,
+ "learning_rate": 2.5259774210397104e-06,
+ "loss": 0.4389,
+ "step": 1001
+ },
+ {
+ "epoch": 3.1315240083507305,
+ "grad_norm": 0.7493525147438049,
+ "learning_rate": 2.5216479699043224e-06,
+ "loss": 0.4697,
+ "step": 1002
+ },
+ {
+ "epoch": 3.1346555323590812,
+ "grad_norm": 0.7633835077285767,
+ "learning_rate": 2.5173184538394054e-06,
+ "loss": 0.4535,
+ "step": 1003
+ },
+ {
+ "epoch": 3.137787056367432,
+ "grad_norm": 0.7765631079673767,
+ "learning_rate": 2.5129888858306333e-06,
+ "loss": 0.4616,
+ "step": 1004
+ },
+ {
+ "epoch": 3.140918580375783,
+ "grad_norm": 0.7026706337928772,
+ "learning_rate": 2.508659278863832e-06,
+ "loss": 0.4535,
+ "step": 1005
+ },
+ {
+ "epoch": 3.144050104384134,
+ "grad_norm": 0.8816738128662109,
+ "learning_rate": 2.5043296459249466e-06,
+ "loss": 0.4274,
+ "step": 1006
+ },
+ {
+ "epoch": 3.1471816283924845,
+ "grad_norm": 0.8072274327278137,
+ "learning_rate": 2.5e-06,
+ "loss": 0.4566,
+ "step": 1007
+ },
+ {
+ "epoch": 3.150313152400835,
+ "grad_norm": 0.7775781750679016,
+ "learning_rate": 2.4956703540750542e-06,
+ "loss": 0.4248,
+ "step": 1008
+ },
+ {
+ "epoch": 3.153444676409186,
+ "grad_norm": 0.9271306991577148,
+ "learning_rate": 2.4913407211361686e-06,
+ "loss": 0.4251,
+ "step": 1009
+ },
+ {
+ "epoch": 3.1565762004175366,
+ "grad_norm": 0.763867974281311,
+ "learning_rate": 2.487011114169368e-06,
+ "loss": 0.4525,
+ "step": 1010
+ },
+ {
+ "epoch": 3.1597077244258873,
+ "grad_norm": 0.8754820227622986,
+ "learning_rate": 2.4826815461605955e-06,
+ "loss": 0.4524,
+ "step": 1011
+ },
+ {
+ "epoch": 3.162839248434238,
+ "grad_norm": 0.8261513113975525,
+ "learning_rate": 2.4783520300956784e-06,
+ "loss": 0.4303,
+ "step": 1012
+ },
+ {
+ "epoch": 3.1659707724425887,
+ "grad_norm": 0.8629854917526245,
+ "learning_rate": 2.4740225789602905e-06,
+ "loss": 0.4628,
+ "step": 1013
+ },
+ {
+ "epoch": 3.1691022964509394,
+ "grad_norm": 0.7281518578529358,
+ "learning_rate": 2.469693205739907e-06,
+ "loss": 0.461,
+ "step": 1014
+ },
+ {
+ "epoch": 3.17223382045929,
+ "grad_norm": 0.757644772529602,
+ "learning_rate": 2.465363923419774e-06,
+ "loss": 0.4455,
+ "step": 1015
+ },
+ {
+ "epoch": 3.175365344467641,
+ "grad_norm": 0.8021314740180969,
+ "learning_rate": 2.4610347449848592e-06,
+ "loss": 0.4755,
+ "step": 1016
+ },
+ {
+ "epoch": 3.1784968684759916,
+ "grad_norm": 0.7467564940452576,
+ "learning_rate": 2.456705683419824e-06,
+ "loss": 0.4419,
+ "step": 1017
+ },
+ {
+ "epoch": 3.1816283924843423,
+ "grad_norm": 0.8575125336647034,
+ "learning_rate": 2.452376751708976e-06,
+ "loss": 0.4577,
+ "step": 1018
+ },
+ {
+ "epoch": 3.184759916492693,
+ "grad_norm": 0.8101590871810913,
+ "learning_rate": 2.448047962836235e-06,
+ "loss": 0.4546,
+ "step": 1019
+ },
+ {
+ "epoch": 3.1878914405010437,
+ "grad_norm": 0.8146190643310547,
+ "learning_rate": 2.443719329785093e-06,
+ "loss": 0.4219,
+ "step": 1020
+ },
+ {
+ "epoch": 3.1910229645093944,
+ "grad_norm": 1.2800556421279907,
+ "learning_rate": 2.4393908655385708e-06,
+ "loss": 0.4502,
+ "step": 1021
+ },
+ {
+ "epoch": 3.1941544885177455,
+ "grad_norm": 0.7362221479415894,
+ "learning_rate": 2.4350625830791875e-06,
+ "loss": 0.4802,
+ "step": 1022
+ },
+ {
+ "epoch": 3.1972860125260962,
+ "grad_norm": 0.8307201862335205,
+ "learning_rate": 2.430734495388915e-06,
+ "loss": 0.4808,
+ "step": 1023
+ },
+ {
+ "epoch": 3.200417536534447,
+ "grad_norm": 0.8467394709587097,
+ "learning_rate": 2.42640661544914e-06,
+ "loss": 0.4598,
+ "step": 1024
+ },
+ {
+ "epoch": 3.2035490605427976,
+ "grad_norm": 0.9570673108100891,
+ "learning_rate": 2.422078956240629e-06,
+ "loss": 0.4341,
+ "step": 1025
+ },
+ {
+ "epoch": 3.2066805845511483,
+ "grad_norm": 0.7976422309875488,
+ "learning_rate": 2.4177515307434824e-06,
+ "loss": 0.4285,
+ "step": 1026
+ },
+ {
+ "epoch": 3.209812108559499,
+ "grad_norm": 0.8241607546806335,
+ "learning_rate": 2.413424351937103e-06,
+ "loss": 0.4563,
+ "step": 1027
+ },
+ {
+ "epoch": 3.2129436325678498,
+ "grad_norm": 0.7191072702407837,
+ "learning_rate": 2.4090974328001526e-06,
+ "loss": 0.456,
+ "step": 1028
+ },
+ {
+ "epoch": 3.2160751565762005,
+ "grad_norm": 0.8112174868583679,
+ "learning_rate": 2.4047707863105133e-06,
+ "loss": 0.4592,
+ "step": 1029
+ },
+ {
+ "epoch": 3.219206680584551,
+ "grad_norm": 0.7170486450195312,
+ "learning_rate": 2.4004444254452522e-06,
+ "loss": 0.4559,
+ "step": 1030
+ },
+ {
+ "epoch": 3.222338204592902,
+ "grad_norm": 0.7092299461364746,
+ "learning_rate": 2.3961183631805748e-06,
+ "loss": 0.4504,
+ "step": 1031
+ },
+ {
+ "epoch": 3.2254697286012526,
+ "grad_norm": 0.8104662299156189,
+ "learning_rate": 2.391792612491796e-06,
+ "loss": 0.4327,
+ "step": 1032
+ },
+ {
+ "epoch": 3.2286012526096033,
+ "grad_norm": 0.8512858152389526,
+ "learning_rate": 2.387467186353293e-06,
+ "loss": 0.4506,
+ "step": 1033
+ },
+ {
+ "epoch": 3.231732776617954,
+ "grad_norm": 0.7987692952156067,
+ "learning_rate": 2.3831420977384715e-06,
+ "loss": 0.4257,
+ "step": 1034
+ },
+ {
+ "epoch": 3.2348643006263047,
+ "grad_norm": 0.7805537581443787,
+ "learning_rate": 2.3788173596197244e-06,
+ "loss": 0.4692,
+ "step": 1035
+ },
+ {
+ "epoch": 3.2379958246346554,
+ "grad_norm": 0.737304151058197,
+ "learning_rate": 2.374492984968392e-06,
+ "loss": 0.4308,
+ "step": 1036
+ },
+ {
+ "epoch": 3.241127348643006,
+ "grad_norm": 0.8113856315612793,
+ "learning_rate": 2.3701689867547247e-06,
+ "loss": 0.4668,
+ "step": 1037
+ },
+ {
+ "epoch": 3.244258872651357,
+ "grad_norm": 1.507103443145752,
+ "learning_rate": 2.3658453779478464e-06,
+ "loss": 0.4527,
+ "step": 1038
+ },
+ {
+ "epoch": 3.2473903966597075,
+ "grad_norm": 0.7973915338516235,
+ "learning_rate": 2.3615221715157095e-06,
+ "loss": 0.4741,
+ "step": 1039
+ },
+ {
+ "epoch": 3.2505219206680582,
+ "grad_norm": 0.8206940293312073,
+ "learning_rate": 2.3571993804250635e-06,
+ "loss": 0.4582,
+ "step": 1040
+ },
+ {
+ "epoch": 3.2536534446764094,
+ "grad_norm": 0.8519637584686279,
+ "learning_rate": 2.3528770176414076e-06,
+ "loss": 0.4412,
+ "step": 1041
+ },
+ {
+ "epoch": 3.25678496868476,
+ "grad_norm": 0.7542241811752319,
+ "learning_rate": 2.348555096128959e-06,
+ "loss": 0.4861,
+ "step": 1042
+ },
+ {
+ "epoch": 3.259916492693111,
+ "grad_norm": 0.8074842691421509,
+ "learning_rate": 2.3442336288506125e-06,
+ "loss": 0.4707,
+ "step": 1043
+ },
+ {
+ "epoch": 3.2630480167014615,
+ "grad_norm": 0.8164265751838684,
+ "learning_rate": 2.3399126287678975e-06,
+ "loss": 0.4417,
+ "step": 1044
+ },
+ {
+ "epoch": 3.266179540709812,
+ "grad_norm": 0.7689628005027771,
+ "learning_rate": 2.3355921088409435e-06,
+ "loss": 0.4815,
+ "step": 1045
+ },
+ {
+ "epoch": 3.269311064718163,
+ "grad_norm": 0.7709240913391113,
+ "learning_rate": 2.3312720820284423e-06,
+ "loss": 0.4444,
+ "step": 1046
+ },
+ {
+ "epoch": 3.2724425887265136,
+ "grad_norm": 0.7340330481529236,
+ "learning_rate": 2.326952561287602e-06,
+ "loss": 0.4694,
+ "step": 1047
+ },
+ {
+ "epoch": 3.2755741127348643,
+ "grad_norm": 0.7785805463790894,
+ "learning_rate": 2.3226335595741154e-06,
+ "loss": 0.4273,
+ "step": 1048
+ },
+ {
+ "epoch": 3.278705636743215,
+ "grad_norm": 0.9968108534812927,
+ "learning_rate": 2.3183150898421196e-06,
+ "loss": 0.4782,
+ "step": 1049
+ },
+ {
+ "epoch": 3.2818371607515657,
+ "grad_norm": 0.7823553681373596,
+ "learning_rate": 2.3139971650441533e-06,
+ "loss": 0.444,
+ "step": 1050
+ },
+ {
+ "epoch": 3.2849686847599164,
+ "grad_norm": 0.7317377924919128,
+ "learning_rate": 2.3096797981311252e-06,
+ "loss": 0.4692,
+ "step": 1051
+ },
+ {
+ "epoch": 3.288100208768267,
+ "grad_norm": 0.8546518683433533,
+ "learning_rate": 2.3053630020522643e-06,
+ "loss": 0.4632,
+ "step": 1052
+ },
+ {
+ "epoch": 3.291231732776618,
+ "grad_norm": 1.2284396886825562,
+ "learning_rate": 2.301046789755093e-06,
+ "loss": 0.4409,
+ "step": 1053
+ },
+ {
+ "epoch": 3.2943632567849686,
+ "grad_norm": 0.8000460863113403,
+ "learning_rate": 2.2967311741853797e-06,
+ "loss": 0.4456,
+ "step": 1054
+ },
+ {
+ "epoch": 3.2974947807933193,
+ "grad_norm": 0.7689793109893799,
+ "learning_rate": 2.2924161682871053e-06,
+ "loss": 0.45,
+ "step": 1055
+ },
+ {
+ "epoch": 3.30062630480167,
+ "grad_norm": 0.8032956719398499,
+ "learning_rate": 2.288101785002421e-06,
+ "loss": 0.4817,
+ "step": 1056
+ },
+ {
+ "epoch": 3.3037578288100207,
+ "grad_norm": 0.6831309795379639,
+ "learning_rate": 2.283788037271609e-06,
+ "loss": 0.4502,
+ "step": 1057
+ },
+ {
+ "epoch": 3.306889352818372,
+ "grad_norm": 0.8581221103668213,
+ "learning_rate": 2.279474938033048e-06,
+ "loss": 0.4569,
+ "step": 1058
+ },
+ {
+ "epoch": 3.3100208768267225,
+ "grad_norm": 0.7937221527099609,
+ "learning_rate": 2.2751625002231696e-06,
+ "loss": 0.4451,
+ "step": 1059
+ },
+ {
+ "epoch": 3.3131524008350732,
+ "grad_norm": 0.8095264434814453,
+ "learning_rate": 2.270850736776422e-06,
+ "loss": 0.4462,
+ "step": 1060
+ },
+ {
+ "epoch": 3.316283924843424,
+ "grad_norm": 0.9141370058059692,
+ "learning_rate": 2.2665396606252332e-06,
+ "loss": 0.419,
+ "step": 1061
+ },
+ {
+ "epoch": 3.3194154488517746,
+ "grad_norm": 0.8648553490638733,
+ "learning_rate": 2.262229284699965e-06,
+ "loss": 0.4562,
+ "step": 1062
+ },
+ {
+ "epoch": 3.3225469728601253,
+ "grad_norm": 0.7716917395591736,
+ "learning_rate": 2.2579196219288825e-06,
+ "loss": 0.4734,
+ "step": 1063
+ },
+ {
+ "epoch": 3.325678496868476,
+ "grad_norm": 0.8074535727500916,
+ "learning_rate": 2.2536106852381116e-06,
+ "loss": 0.4272,
+ "step": 1064
+ },
+ {
+ "epoch": 3.3288100208768268,
+ "grad_norm": 0.8989127278327942,
+ "learning_rate": 2.249302487551599e-06,
+ "loss": 0.4728,
+ "step": 1065
+ },
+ {
+ "epoch": 3.3319415448851775,
+ "grad_norm": 0.7662765383720398,
+ "learning_rate": 2.2449950417910777e-06,
+ "loss": 0.4457,
+ "step": 1066
+ },
+ {
+ "epoch": 3.335073068893528,
+ "grad_norm": 0.7789275050163269,
+ "learning_rate": 2.24068836087602e-06,
+ "loss": 0.3919,
+ "step": 1067
+ },
+ {
+ "epoch": 3.338204592901879,
+ "grad_norm": 0.7359098196029663,
+ "learning_rate": 2.2363824577236097e-06,
+ "loss": 0.4876,
+ "step": 1068
+ },
+ {
+ "epoch": 3.3413361169102296,
+ "grad_norm": 0.9960948824882507,
+ "learning_rate": 2.232077345248695e-06,
+ "loss": 0.4894,
+ "step": 1069
+ },
+ {
+ "epoch": 3.3444676409185803,
+ "grad_norm": 0.7446064352989197,
+ "learning_rate": 2.2277730363637537e-06,
+ "loss": 0.4765,
+ "step": 1070
+ },
+ {
+ "epoch": 3.347599164926931,
+ "grad_norm": 0.7674328088760376,
+ "learning_rate": 2.2234695439788534e-06,
+ "loss": 0.4468,
+ "step": 1071
+ },
+ {
+ "epoch": 3.3507306889352817,
+ "grad_norm": 0.8957347869873047,
+ "learning_rate": 2.2191668810016105e-06,
+ "loss": 0.4733,
+ "step": 1072
+ },
+ {
+ "epoch": 3.3538622129436324,
+ "grad_norm": 0.9110277891159058,
+ "learning_rate": 2.2148650603371573e-06,
+ "loss": 0.4399,
+ "step": 1073
+ },
+ {
+ "epoch": 3.356993736951983,
+ "grad_norm": 0.9158220291137695,
+ "learning_rate": 2.2105640948880976e-06,
+ "loss": 0.4609,
+ "step": 1074
+ },
+ {
+ "epoch": 3.3601252609603343,
+ "grad_norm": 0.7630184888839722,
+ "learning_rate": 2.206263997554469e-06,
+ "loss": 0.4674,
+ "step": 1075
+ },
+ {
+ "epoch": 3.3632567849686845,
+ "grad_norm": 0.7975273728370667,
+ "learning_rate": 2.20196478123371e-06,
+ "loss": 0.4478,
+ "step": 1076
+ },
+ {
+ "epoch": 3.3663883089770357,
+ "grad_norm": 0.8825351595878601,
+ "learning_rate": 2.19766645882061e-06,
+ "loss": 0.4687,
+ "step": 1077
+ },
+ {
+ "epoch": 3.3695198329853864,
+ "grad_norm": 0.8907671570777893,
+ "learning_rate": 2.1933690432072817e-06,
+ "loss": 0.4223,
+ "step": 1078
+ },
+ {
+ "epoch": 3.372651356993737,
+ "grad_norm": 0.7449545860290527,
+ "learning_rate": 2.189072547283118e-06,
+ "loss": 0.4595,
+ "step": 1079
+ },
+ {
+ "epoch": 3.375782881002088,
+ "grad_norm": 0.8460972309112549,
+ "learning_rate": 2.184776983934751e-06,
+ "loss": 0.4443,
+ "step": 1080
+ },
+ {
+ "epoch": 3.3789144050104385,
+ "grad_norm": 0.7524845600128174,
+ "learning_rate": 2.1804823660460196e-06,
+ "loss": 0.4235,
+ "step": 1081
+ },
+ {
+ "epoch": 3.382045929018789,
+ "grad_norm": 0.8448389768600464,
+ "learning_rate": 2.176188706497921e-06,
+ "loss": 0.4387,
+ "step": 1082
+ },
+ {
+ "epoch": 3.38517745302714,
+ "grad_norm": 0.7701981663703918,
+ "learning_rate": 2.1718960181685838e-06,
+ "loss": 0.4257,
+ "step": 1083
+ },
+ {
+ "epoch": 3.3883089770354906,
+ "grad_norm": 0.8178983330726624,
+ "learning_rate": 2.167604313933219e-06,
+ "loss": 0.4983,
+ "step": 1084
+ },
+ {
+ "epoch": 3.3914405010438413,
+ "grad_norm": 0.7477235198020935,
+ "learning_rate": 2.163313606664091e-06,
+ "loss": 0.4559,
+ "step": 1085
+ },
+ {
+ "epoch": 3.394572025052192,
+ "grad_norm": 0.8127962350845337,
+ "learning_rate": 2.1590239092304694e-06,
+ "loss": 0.453,
+ "step": 1086
+ },
+ {
+ "epoch": 3.3977035490605427,
+ "grad_norm": 0.7462339997291565,
+ "learning_rate": 2.1547352344985966e-06,
+ "loss": 0.4697,
+ "step": 1087
+ },
+ {
+ "epoch": 3.4008350730688934,
+ "grad_norm": 0.9641384482383728,
+ "learning_rate": 2.1504475953316483e-06,
+ "loss": 0.4495,
+ "step": 1088
+ },
+ {
+ "epoch": 3.403966597077244,
+ "grad_norm": 0.7612512707710266,
+ "learning_rate": 2.146161004589693e-06,
+ "loss": 0.4579,
+ "step": 1089
+ },
+ {
+ "epoch": 3.407098121085595,
+ "grad_norm": 0.7547829747200012,
+ "learning_rate": 2.141875475129655e-06,
+ "loss": 0.4334,
+ "step": 1090
+ },
+ {
+ "epoch": 3.4102296450939455,
+ "grad_norm": 0.8036953806877136,
+ "learning_rate": 2.137591019805278e-06,
+ "loss": 0.4466,
+ "step": 1091
+ },
+ {
+ "epoch": 3.4133611691022967,
+ "grad_norm": 0.7319284081459045,
+ "learning_rate": 2.1333076514670784e-06,
+ "loss": 0.4942,
+ "step": 1092
+ },
+ {
+ "epoch": 3.416492693110647,
+ "grad_norm": 0.8278589248657227,
+ "learning_rate": 2.1290253829623165e-06,
+ "loss": 0.4554,
+ "step": 1093
+ },
+ {
+ "epoch": 3.419624217118998,
+ "grad_norm": 0.733059287071228,
+ "learning_rate": 2.124744227134954e-06,
+ "loss": 0.4187,
+ "step": 1094
+ },
+ {
+ "epoch": 3.422755741127349,
+ "grad_norm": 0.8222727179527283,
+ "learning_rate": 2.1204641968256136e-06,
+ "loss": 0.4587,
+ "step": 1095
+ },
+ {
+ "epoch": 3.4258872651356995,
+ "grad_norm": 0.8296732902526855,
+ "learning_rate": 2.1161853048715438e-06,
+ "loss": 0.4868,
+ "step": 1096
+ },
+ {
+ "epoch": 3.4290187891440502,
+ "grad_norm": 0.7309690713882446,
+ "learning_rate": 2.1119075641065758e-06,
+ "loss": 0.4594,
+ "step": 1097
+ },
+ {
+ "epoch": 3.432150313152401,
+ "grad_norm": 1.4901788234710693,
+ "learning_rate": 2.1076309873610916e-06,
+ "loss": 0.4216,
+ "step": 1098
+ },
+ {
+ "epoch": 3.4352818371607516,
+ "grad_norm": 0.7993581891059875,
+ "learning_rate": 2.1033555874619794e-06,
+ "loss": 0.4842,
+ "step": 1099
+ },
+ {
+ "epoch": 3.4384133611691023,
+ "grad_norm": 0.8846752643585205,
+ "learning_rate": 2.0990813772325995e-06,
+ "loss": 0.4395,
+ "step": 1100
+ },
+ {
+ "epoch": 3.441544885177453,
+ "grad_norm": 1.0796778202056885,
+ "learning_rate": 2.0948083694927436e-06,
+ "loss": 0.4573,
+ "step": 1101
+ },
+ {
+ "epoch": 3.4446764091858038,
+ "grad_norm": 0.74623042345047,
+ "learning_rate": 2.090536577058595e-06,
+ "loss": 0.4563,
+ "step": 1102
+ },
+ {
+ "epoch": 3.4478079331941545,
+ "grad_norm": 0.8245521783828735,
+ "learning_rate": 2.086266012742692e-06,
+ "loss": 0.4433,
+ "step": 1103
+ },
+ {
+ "epoch": 3.450939457202505,
+ "grad_norm": 0.8003777265548706,
+ "learning_rate": 2.081996689353893e-06,
+ "loss": 0.4599,
+ "step": 1104
+ },
+ {
+ "epoch": 3.454070981210856,
+ "grad_norm": 0.8309001922607422,
+ "learning_rate": 2.0777286196973302e-06,
+ "loss": 0.485,
+ "step": 1105
+ },
+ {
+ "epoch": 3.4572025052192066,
+ "grad_norm": 0.8299122452735901,
+ "learning_rate": 2.0734618165743782e-06,
+ "loss": 0.4685,
+ "step": 1106
+ },
+ {
+ "epoch": 3.4603340292275573,
+ "grad_norm": 0.9347029328346252,
+ "learning_rate": 2.069196292782611e-06,
+ "loss": 0.4615,
+ "step": 1107
+ },
+ {
+ "epoch": 3.463465553235908,
+ "grad_norm": 0.7146593332290649,
+ "learning_rate": 2.064932061115766e-06,
+ "loss": 0.4433,
+ "step": 1108
+ },
+ {
+ "epoch": 3.4665970772442587,
+ "grad_norm": 0.7674420475959778,
+ "learning_rate": 2.0606691343637063e-06,
+ "loss": 0.4444,
+ "step": 1109
+ },
+ {
+ "epoch": 3.4697286012526094,
+ "grad_norm": 0.7925504446029663,
+ "learning_rate": 2.05640752531238e-06,
+ "loss": 0.4631,
+ "step": 1110
+ },
+ {
+ "epoch": 3.4728601252609606,
+ "grad_norm": 0.7755677700042725,
+ "learning_rate": 2.0521472467437825e-06,
+ "loss": 0.4709,
+ "step": 1111
+ },
+ {
+ "epoch": 3.4759916492693113,
+ "grad_norm": 0.8535795211791992,
+ "learning_rate": 2.0478883114359187e-06,
+ "loss": 0.442,
+ "step": 1112
+ },
+ {
+ "epoch": 3.479123173277662,
+ "grad_norm": 0.723953127861023,
+ "learning_rate": 2.043630732162767e-06,
+ "loss": 0.4782,
+ "step": 1113
+ },
+ {
+ "epoch": 3.4822546972860127,
+ "grad_norm": 0.7817316651344299,
+ "learning_rate": 2.0393745216942343e-06,
+ "loss": 0.4841,
+ "step": 1114
+ },
+ {
+ "epoch": 3.4853862212943634,
+ "grad_norm": 0.8878781795501709,
+ "learning_rate": 2.0351196927961268e-06,
+ "loss": 0.4673,
+ "step": 1115
+ },
+ {
+ "epoch": 3.488517745302714,
+ "grad_norm": 0.8645241856575012,
+ "learning_rate": 2.030866258230104e-06,
+ "loss": 0.432,
+ "step": 1116
+ },
+ {
+ "epoch": 3.491649269311065,
+ "grad_norm": 0.7294583320617676,
+ "learning_rate": 2.026614230753643e-06,
+ "loss": 0.4683,
+ "step": 1117
+ },
+ {
+ "epoch": 3.4947807933194155,
+ "grad_norm": 0.7412407994270325,
+ "learning_rate": 2.022363623120001e-06,
+ "loss": 0.4523,
+ "step": 1118
+ },
+ {
+ "epoch": 3.497912317327766,
+ "grad_norm": 0.8559291362762451,
+ "learning_rate": 2.0181144480781787e-06,
+ "loss": 0.4309,
+ "step": 1119
+ },
+ {
+ "epoch": 3.501043841336117,
+ "grad_norm": 0.7442825436592102,
+ "learning_rate": 2.0138667183728775e-06,
+ "loss": 0.4096,
+ "step": 1120
+ },
+ {
+ "epoch": 3.5041753653444676,
+ "grad_norm": 0.7605662941932678,
+ "learning_rate": 2.0096204467444645e-06,
+ "loss": 0.4404,
+ "step": 1121
+ },
+ {
+ "epoch": 3.5073068893528183,
+ "grad_norm": 0.7984277009963989,
+ "learning_rate": 2.005375645928935e-06,
+ "loss": 0.4661,
+ "step": 1122
+ },
+ {
+ "epoch": 3.510438413361169,
+ "grad_norm": 1.1044552326202393,
+ "learning_rate": 2.001132328657869e-06,
+ "loss": 0.4185,
+ "step": 1123
+ },
+ {
+ "epoch": 3.5135699373695197,
+ "grad_norm": 0.8210328817367554,
+ "learning_rate": 1.996890507658401e-06,
+ "loss": 0.4746,
+ "step": 1124
+ },
+ {
+ "epoch": 3.5167014613778704,
+ "grad_norm": 0.7302148342132568,
+ "learning_rate": 1.9926501956531758e-06,
+ "loss": 0.4333,
+ "step": 1125
+ },
+ {
+ "epoch": 3.519832985386221,
+ "grad_norm": 0.7713826894760132,
+ "learning_rate": 1.9884114053603114e-06,
+ "loss": 0.4485,
+ "step": 1126
+ },
+ {
+ "epoch": 3.522964509394572,
+ "grad_norm": 0.7386549711227417,
+ "learning_rate": 1.984174149493365e-06,
+ "loss": 0.4678,
+ "step": 1127
+ },
+ {
+ "epoch": 3.526096033402923,
+ "grad_norm": 0.8006004095077515,
+ "learning_rate": 1.979938440761287e-06,
+ "loss": 0.4755,
+ "step": 1128
+ },
+ {
+ "epoch": 3.5292275574112733,
+ "grad_norm": 1.0635333061218262,
+ "learning_rate": 1.97570429186839e-06,
+ "loss": 0.4985,
+ "step": 1129
+ },
+ {
+ "epoch": 3.5323590814196244,
+ "grad_norm": 0.7470075488090515,
+ "learning_rate": 1.9714717155143083e-06,
+ "loss": 0.4307,
+ "step": 1130
+ },
+ {
+ "epoch": 3.535490605427975,
+ "grad_norm": 0.8314558863639832,
+ "learning_rate": 1.967240724393959e-06,
+ "loss": 0.4894,
+ "step": 1131
+ },
+ {
+ "epoch": 3.538622129436326,
+ "grad_norm": 0.761443018913269,
+ "learning_rate": 1.963011331197506e-06,
+ "loss": 0.4653,
+ "step": 1132
+ },
+ {
+ "epoch": 3.5417536534446765,
+ "grad_norm": 0.7483212351799011,
+ "learning_rate": 1.9587835486103163e-06,
+ "loss": 0.4456,
+ "step": 1133
+ },
+ {
+ "epoch": 3.5448851774530272,
+ "grad_norm": 0.7794159054756165,
+ "learning_rate": 1.9545573893129306e-06,
+ "loss": 0.4707,
+ "step": 1134
+ },
+ {
+ "epoch": 3.548016701461378,
+ "grad_norm": 0.8016185760498047,
+ "learning_rate": 1.950332865981019e-06,
+ "loss": 0.4547,
+ "step": 1135
+ },
+ {
+ "epoch": 3.5511482254697286,
+ "grad_norm": 0.8089869618415833,
+ "learning_rate": 1.9461099912853453e-06,
+ "loss": 0.4499,
+ "step": 1136
+ },
+ {
+ "epoch": 3.5542797494780793,
+ "grad_norm": 0.7774782180786133,
+ "learning_rate": 1.9418887778917286e-06,
+ "loss": 0.4531,
+ "step": 1137
+ },
+ {
+ "epoch": 3.55741127348643,
+ "grad_norm": 0.7793645262718201,
+ "learning_rate": 1.937669238461003e-06,
+ "loss": 0.4553,
+ "step": 1138
+ },
+ {
+ "epoch": 3.5605427974947808,
+ "grad_norm": 0.8139959573745728,
+ "learning_rate": 1.933451385648985e-06,
+ "loss": 0.458,
+ "step": 1139
+ },
+ {
+ "epoch": 3.5636743215031315,
+ "grad_norm": 0.7517053484916687,
+ "learning_rate": 1.929235232106431e-06,
+ "loss": 0.4779,
+ "step": 1140
+ },
+ {
+ "epoch": 3.566805845511482,
+ "grad_norm": 0.8851562142372131,
+ "learning_rate": 1.925020790479e-06,
+ "loss": 0.425,
+ "step": 1141
+ },
+ {
+ "epoch": 3.569937369519833,
+ "grad_norm": 0.8129401803016663,
+ "learning_rate": 1.920808073407218e-06,
+ "loss": 0.4616,
+ "step": 1142
+ },
+ {
+ "epoch": 3.5730688935281836,
+ "grad_norm": 0.7110117077827454,
+ "learning_rate": 1.916597093526437e-06,
+ "loss": 0.4748,
+ "step": 1143
+ },
+ {
+ "epoch": 3.5762004175365343,
+ "grad_norm": 0.8268555402755737,
+ "learning_rate": 1.912387863466798e-06,
+ "loss": 0.4752,
+ "step": 1144
+ },
+ {
+ "epoch": 3.5793319415448854,
+ "grad_norm": 1.1036733388900757,
+ "learning_rate": 1.9081803958531967e-06,
+ "loss": 0.4879,
+ "step": 1145
+ },
+ {
+ "epoch": 3.5824634655532357,
+ "grad_norm": 0.8561109304428101,
+ "learning_rate": 1.9039747033052395e-06,
+ "loss": 0.4409,
+ "step": 1146
+ },
+ {
+ "epoch": 3.585594989561587,
+ "grad_norm": 0.7597541809082031,
+ "learning_rate": 1.8997707984372119e-06,
+ "loss": 0.4518,
+ "step": 1147
+ },
+ {
+ "epoch": 3.588726513569937,
+ "grad_norm": 0.7225353121757507,
+ "learning_rate": 1.8955686938580329e-06,
+ "loss": 0.4735,
+ "step": 1148
+ },
+ {
+ "epoch": 3.5918580375782883,
+ "grad_norm": 0.9409791231155396,
+ "learning_rate": 1.8913684021712264e-06,
+ "loss": 0.4249,
+ "step": 1149
+ },
+ {
+ "epoch": 3.594989561586639,
+ "grad_norm": 0.8757275342941284,
+ "learning_rate": 1.8871699359748763e-06,
+ "loss": 0.4479,
+ "step": 1150
+ },
+ {
+ "epoch": 3.5981210855949897,
+ "grad_norm": 0.8090003728866577,
+ "learning_rate": 1.882973307861593e-06,
+ "loss": 0.4846,
+ "step": 1151
+ },
+ {
+ "epoch": 3.6012526096033404,
+ "grad_norm": 0.7568825483322144,
+ "learning_rate": 1.8787785304184726e-06,
+ "loss": 0.4301,
+ "step": 1152
+ },
+ {
+ "epoch": 3.604384133611691,
+ "grad_norm": 0.8233815431594849,
+ "learning_rate": 1.8745856162270592e-06,
+ "loss": 0.4838,
+ "step": 1153
+ },
+ {
+ "epoch": 3.607515657620042,
+ "grad_norm": 0.7817628979682922,
+ "learning_rate": 1.8703945778633121e-06,
+ "loss": 0.4669,
+ "step": 1154
+ },
+ {
+ "epoch": 3.6106471816283925,
+ "grad_norm": 0.821304202079773,
+ "learning_rate": 1.8662054278975605e-06,
+ "loss": 0.4536,
+ "step": 1155
+ },
+ {
+ "epoch": 3.613778705636743,
+ "grad_norm": 0.9304089546203613,
+ "learning_rate": 1.8620181788944712e-06,
+ "loss": 0.4489,
+ "step": 1156
+ },
+ {
+ "epoch": 3.616910229645094,
+ "grad_norm": 0.8202670216560364,
+ "learning_rate": 1.8578328434130114e-06,
+ "loss": 0.4309,
+ "step": 1157
+ },
+ {
+ "epoch": 3.6200417536534446,
+ "grad_norm": 0.8890257477760315,
+ "learning_rate": 1.8536494340064051e-06,
+ "loss": 0.4736,
+ "step": 1158
+ },
+ {
+ "epoch": 3.6231732776617953,
+ "grad_norm": 0.7940590381622314,
+ "learning_rate": 1.8494679632221013e-06,
+ "loss": 0.4468,
+ "step": 1159
+ },
+ {
+ "epoch": 3.626304801670146,
+ "grad_norm": 0.8388273119926453,
+ "learning_rate": 1.845288443601736e-06,
+ "loss": 0.4753,
+ "step": 1160
+ },
+ {
+ "epoch": 3.6294363256784967,
+ "grad_norm": 0.9392285346984863,
+ "learning_rate": 1.84111088768109e-06,
+ "loss": 0.4736,
+ "step": 1161
+ },
+ {
+ "epoch": 3.632567849686848,
+ "grad_norm": 0.7411681413650513,
+ "learning_rate": 1.8369353079900576e-06,
+ "loss": 0.4303,
+ "step": 1162
+ },
+ {
+ "epoch": 3.635699373695198,
+ "grad_norm": 0.8722569942474365,
+ "learning_rate": 1.8327617170526014e-06,
+ "loss": 0.4604,
+ "step": 1163
+ },
+ {
+ "epoch": 3.6388308977035493,
+ "grad_norm": 0.786891758441925,
+ "learning_rate": 1.8285901273867229e-06,
+ "loss": 0.4756,
+ "step": 1164
+ },
+ {
+ "epoch": 3.6419624217118995,
+ "grad_norm": 0.8159083724021912,
+ "learning_rate": 1.824420551504419e-06,
+ "loss": 0.4675,
+ "step": 1165
+ },
+ {
+ "epoch": 3.6450939457202507,
+ "grad_norm": 0.8271334767341614,
+ "learning_rate": 1.8202530019116487e-06,
+ "loss": 0.4311,
+ "step": 1166
+ },
+ {
+ "epoch": 3.6482254697286014,
+ "grad_norm": 0.7617189288139343,
+ "learning_rate": 1.816087491108292e-06,
+ "loss": 0.4522,
+ "step": 1167
+ },
+ {
+ "epoch": 3.651356993736952,
+ "grad_norm": 0.7248172760009766,
+ "learning_rate": 1.8119240315881126e-06,
+ "loss": 0.465,
+ "step": 1168
+ },
+ {
+ "epoch": 3.654488517745303,
+ "grad_norm": 0.8606911897659302,
+ "learning_rate": 1.8077626358387235e-06,
+ "loss": 0.4524,
+ "step": 1169
+ },
+ {
+ "epoch": 3.6576200417536535,
+ "grad_norm": 0.8571308851242065,
+ "learning_rate": 1.8036033163415484e-06,
+ "loss": 0.4625,
+ "step": 1170
+ },
+ {
+ "epoch": 3.6607515657620042,
+ "grad_norm": 0.7239511609077454,
+ "learning_rate": 1.7994460855717812e-06,
+ "loss": 0.5025,
+ "step": 1171
+ },
+ {
+ "epoch": 3.663883089770355,
+ "grad_norm": 0.7958929538726807,
+ "learning_rate": 1.7952909559983544e-06,
+ "loss": 0.4382,
+ "step": 1172
+ },
+ {
+ "epoch": 3.6670146137787056,
+ "grad_norm": 0.7920124530792236,
+ "learning_rate": 1.7911379400838947e-06,
+ "loss": 0.4393,
+ "step": 1173
+ },
+ {
+ "epoch": 3.6701461377870563,
+ "grad_norm": 0.8072578310966492,
+ "learning_rate": 1.7869870502846903e-06,
+ "loss": 0.4627,
+ "step": 1174
+ },
+ {
+ "epoch": 3.673277661795407,
+ "grad_norm": 0.8586218357086182,
+ "learning_rate": 1.7828382990506543e-06,
+ "loss": 0.4456,
+ "step": 1175
+ },
+ {
+ "epoch": 3.6764091858037578,
+ "grad_norm": 0.8741613030433655,
+ "learning_rate": 1.7786916988252845e-06,
+ "loss": 0.4613,
+ "step": 1176
+ },
+ {
+ "epoch": 3.6795407098121085,
+ "grad_norm": 0.7691352367401123,
+ "learning_rate": 1.774547262045626e-06,
+ "loss": 0.4641,
+ "step": 1177
+ },
+ {
+ "epoch": 3.682672233820459,
+ "grad_norm": 0.7866089940071106,
+ "learning_rate": 1.7704050011422357e-06,
+ "loss": 0.4308,
+ "step": 1178
+ },
+ {
+ "epoch": 3.68580375782881,
+ "grad_norm": 0.9934884309768677,
+ "learning_rate": 1.7662649285391447e-06,
+ "loss": 0.4434,
+ "step": 1179
+ },
+ {
+ "epoch": 3.6889352818371606,
+ "grad_norm": 0.794385552406311,
+ "learning_rate": 1.7621270566538204e-06,
+ "loss": 0.4481,
+ "step": 1180
+ },
+ {
+ "epoch": 3.6920668058455117,
+ "grad_norm": 0.7573548555374146,
+ "learning_rate": 1.7579913978971296e-06,
+ "loss": 0.4525,
+ "step": 1181
+ },
+ {
+ "epoch": 3.695198329853862,
+ "grad_norm": 0.7073976993560791,
+ "learning_rate": 1.7538579646733023e-06,
+ "loss": 0.4214,
+ "step": 1182
+ },
+ {
+ "epoch": 3.698329853862213,
+ "grad_norm": 0.8009579181671143,
+ "learning_rate": 1.7497267693798902e-06,
+ "loss": 0.4542,
+ "step": 1183
+ },
+ {
+ "epoch": 3.701461377870564,
+ "grad_norm": 1.4488778114318848,
+ "learning_rate": 1.7455978244077348e-06,
+ "loss": 0.443,
+ "step": 1184
+ },
+ {
+ "epoch": 3.7045929018789145,
+ "grad_norm": 1.0529266595840454,
+ "learning_rate": 1.7414711421409292e-06,
+ "loss": 0.4908,
+ "step": 1185
+ },
+ {
+ "epoch": 3.7077244258872653,
+ "grad_norm": 0.757431149482727,
+ "learning_rate": 1.7373467349567775e-06,
+ "loss": 0.4454,
+ "step": 1186
+ },
+ {
+ "epoch": 3.710855949895616,
+ "grad_norm": 0.7086379528045654,
+ "learning_rate": 1.733224615225763e-06,
+ "loss": 0.4292,
+ "step": 1187
+ },
+ {
+ "epoch": 3.7139874739039667,
+ "grad_norm": 0.7454110383987427,
+ "learning_rate": 1.7291047953115049e-06,
+ "loss": 0.4421,
+ "step": 1188
+ },
+ {
+ "epoch": 3.7171189979123174,
+ "grad_norm": 0.804027795791626,
+ "learning_rate": 1.7249872875707257e-06,
+ "loss": 0.4819,
+ "step": 1189
+ },
+ {
+ "epoch": 3.720250521920668,
+ "grad_norm": 0.8159645199775696,
+ "learning_rate": 1.7208721043532146e-06,
+ "loss": 0.4628,
+ "step": 1190
+ },
+ {
+ "epoch": 3.723382045929019,
+ "grad_norm": 0.8451672196388245,
+ "learning_rate": 1.7167592580017866e-06,
+ "loss": 0.4773,
+ "step": 1191
+ },
+ {
+ "epoch": 3.7265135699373695,
+ "grad_norm": 0.920553982257843,
+ "learning_rate": 1.7126487608522492e-06,
+ "loss": 0.4787,
+ "step": 1192
+ },
+ {
+ "epoch": 3.72964509394572,
+ "grad_norm": 0.9169708490371704,
+ "learning_rate": 1.7085406252333613e-06,
+ "loss": 0.4543,
+ "step": 1193
+ },
+ {
+ "epoch": 3.732776617954071,
+ "grad_norm": 0.7245096564292908,
+ "learning_rate": 1.7044348634668023e-06,
+ "loss": 0.4599,
+ "step": 1194
+ },
+ {
+ "epoch": 3.7359081419624216,
+ "grad_norm": 0.835832417011261,
+ "learning_rate": 1.7003314878671284e-06,
+ "loss": 0.4794,
+ "step": 1195
+ },
+ {
+ "epoch": 3.7390396659707723,
+ "grad_norm": 0.8455896973609924,
+ "learning_rate": 1.696230510741742e-06,
+ "loss": 0.4329,
+ "step": 1196
+ },
+ {
+ "epoch": 3.742171189979123,
+ "grad_norm": 0.743611752986908,
+ "learning_rate": 1.692131944390849e-06,
+ "loss": 0.4455,
+ "step": 1197
+ },
+ {
+ "epoch": 3.745302713987474,
+ "grad_norm": 1.1334915161132812,
+ "learning_rate": 1.6880358011074272e-06,
+ "loss": 0.4673,
+ "step": 1198
+ },
+ {
+ "epoch": 3.7484342379958244,
+ "grad_norm": 0.6935724020004272,
+ "learning_rate": 1.6839420931771828e-06,
+ "loss": 0.4686,
+ "step": 1199
+ },
+ {
+ "epoch": 3.7515657620041756,
+ "grad_norm": 0.962006151676178,
+ "learning_rate": 1.6798508328785213e-06,
+ "loss": 0.4474,
+ "step": 1200
+ },
+ {
+ "epoch": 3.754697286012526,
+ "grad_norm": 1.1499714851379395,
+ "learning_rate": 1.6757620324825047e-06,
+ "loss": 0.4659,
+ "step": 1201
+ },
+ {
+ "epoch": 3.757828810020877,
+ "grad_norm": 0.7689645886421204,
+ "learning_rate": 1.6716757042528192e-06,
+ "loss": 0.4552,
+ "step": 1202
+ },
+ {
+ "epoch": 3.7609603340292277,
+ "grad_norm": 0.7482030391693115,
+ "learning_rate": 1.6675918604457352e-06,
+ "loss": 0.4747,
+ "step": 1203
+ },
+ {
+ "epoch": 3.7640918580375784,
+ "grad_norm": 0.7727032899856567,
+ "learning_rate": 1.6635105133100686e-06,
+ "loss": 0.4508,
+ "step": 1204
+ },
+ {
+ "epoch": 3.767223382045929,
+ "grad_norm": 0.8722149133682251,
+ "learning_rate": 1.6594316750871514e-06,
+ "loss": 0.4685,
+ "step": 1205
+ },
+ {
+ "epoch": 3.77035490605428,
+ "grad_norm": 0.771304726600647,
+ "learning_rate": 1.6553553580107884e-06,
+ "loss": 0.4418,
+ "step": 1206
+ },
+ {
+ "epoch": 3.7734864300626305,
+ "grad_norm": 0.767315149307251,
+ "learning_rate": 1.6512815743072214e-06,
+ "loss": 0.4532,
+ "step": 1207
+ },
+ {
+ "epoch": 3.776617954070981,
+ "grad_norm": 0.8825518488883972,
+ "learning_rate": 1.6472103361950976e-06,
+ "loss": 0.468,
+ "step": 1208
+ },
+ {
+ "epoch": 3.779749478079332,
+ "grad_norm": 0.8887981176376343,
+ "learning_rate": 1.6431416558854243e-06,
+ "loss": 0.4264,
+ "step": 1209
+ },
+ {
+ "epoch": 3.7828810020876826,
+ "grad_norm": 0.8399733304977417,
+ "learning_rate": 1.63907554558154e-06,
+ "loss": 0.4405,
+ "step": 1210
+ },
+ {
+ "epoch": 3.7860125260960333,
+ "grad_norm": 0.8112586140632629,
+ "learning_rate": 1.6350120174790751e-06,
+ "loss": 0.445,
+ "step": 1211
+ },
+ {
+ "epoch": 3.789144050104384,
+ "grad_norm": 0.860775351524353,
+ "learning_rate": 1.6309510837659137e-06,
+ "loss": 0.4557,
+ "step": 1212
+ },
+ {
+ "epoch": 3.7922755741127347,
+ "grad_norm": 0.8522343039512634,
+ "learning_rate": 1.626892756622161e-06,
+ "loss": 0.481,
+ "step": 1213
+ },
+ {
+ "epoch": 3.7954070981210855,
+ "grad_norm": 0.7927511930465698,
+ "learning_rate": 1.6228370482200988e-06,
+ "loss": 0.4328,
+ "step": 1214
+ },
+ {
+ "epoch": 3.798538622129436,
+ "grad_norm": 0.7350064516067505,
+ "learning_rate": 1.6187839707241604e-06,
+ "loss": 0.4604,
+ "step": 1215
+ },
+ {
+ "epoch": 3.801670146137787,
+ "grad_norm": 0.8363698124885559,
+ "learning_rate": 1.6147335362908847e-06,
+ "loss": 0.4271,
+ "step": 1216
+ },
+ {
+ "epoch": 3.804801670146138,
+ "grad_norm": 1.080613613128662,
+ "learning_rate": 1.610685757068885e-06,
+ "loss": 0.447,
+ "step": 1217
+ },
+ {
+ "epoch": 3.8079331941544883,
+ "grad_norm": 1.1507478952407837,
+ "learning_rate": 1.6066406451988104e-06,
+ "loss": 0.4664,
+ "step": 1218
+ },
+ {
+ "epoch": 3.8110647181628394,
+ "grad_norm": 0.7778187990188599,
+ "learning_rate": 1.6025982128133073e-06,
+ "loss": 0.456,
+ "step": 1219
+ },
+ {
+ "epoch": 3.81419624217119,
+ "grad_norm": 0.8383583426475525,
+ "learning_rate": 1.5985584720369876e-06,
+ "loss": 0.4684,
+ "step": 1220
+ },
+ {
+ "epoch": 3.817327766179541,
+ "grad_norm": 0.7743321061134338,
+ "learning_rate": 1.5945214349863914e-06,
+ "loss": 0.4567,
+ "step": 1221
+ },
+ {
+ "epoch": 3.8204592901878915,
+ "grad_norm": 0.8020774126052856,
+ "learning_rate": 1.5904871137699462e-06,
+ "loss": 0.4175,
+ "step": 1222
+ },
+ {
+ "epoch": 3.8235908141962422,
+ "grad_norm": 0.790318489074707,
+ "learning_rate": 1.5864555204879375e-06,
+ "loss": 0.469,
+ "step": 1223
+ },
+ {
+ "epoch": 3.826722338204593,
+ "grad_norm": 0.8583689332008362,
+ "learning_rate": 1.5824266672324652e-06,
+ "loss": 0.4931,
+ "step": 1224
+ },
+ {
+ "epoch": 3.8298538622129437,
+ "grad_norm": 0.7788206934928894,
+ "learning_rate": 1.5784005660874125e-06,
+ "loss": 0.4643,
+ "step": 1225
+ },
+ {
+ "epoch": 3.8329853862212944,
+ "grad_norm": 0.8385717868804932,
+ "learning_rate": 1.574377229128409e-06,
+ "loss": 0.4567,
+ "step": 1226
+ },
+ {
+ "epoch": 3.836116910229645,
+ "grad_norm": 0.8447727560997009,
+ "learning_rate": 1.5703566684227922e-06,
+ "loss": 0.42,
+ "step": 1227
+ },
+ {
+ "epoch": 3.8392484342379958,
+ "grad_norm": 0.7286496758460999,
+ "learning_rate": 1.5663388960295742e-06,
+ "loss": 0.4603,
+ "step": 1228
+ },
+ {
+ "epoch": 3.8423799582463465,
+ "grad_norm": 0.8493947982788086,
+ "learning_rate": 1.562323923999401e-06,
+ "loss": 0.4731,
+ "step": 1229
+ },
+ {
+ "epoch": 3.845511482254697,
+ "grad_norm": 0.8641151785850525,
+ "learning_rate": 1.5583117643745233e-06,
+ "loss": 0.4491,
+ "step": 1230
+ },
+ {
+ "epoch": 3.848643006263048,
+ "grad_norm": 0.9493702054023743,
+ "learning_rate": 1.5543024291887532e-06,
+ "loss": 0.411,
+ "step": 1231
+ },
+ {
+ "epoch": 3.8517745302713986,
+ "grad_norm": 0.7246205806732178,
+ "learning_rate": 1.5502959304674337e-06,
+ "loss": 0.4569,
+ "step": 1232
+ },
+ {
+ "epoch": 3.8549060542797493,
+ "grad_norm": 0.7576872110366821,
+ "learning_rate": 1.5462922802273994e-06,
+ "loss": 0.4548,
+ "step": 1233
+ },
+ {
+ "epoch": 3.8580375782881005,
+ "grad_norm": 0.7710747718811035,
+ "learning_rate": 1.5422914904769404e-06,
+ "loss": 0.447,
+ "step": 1234
+ },
+ {
+ "epoch": 3.8611691022964507,
+ "grad_norm": 0.7661204934120178,
+ "learning_rate": 1.5382935732157677e-06,
+ "loss": 0.4601,
+ "step": 1235
+ },
+ {
+ "epoch": 3.864300626304802,
+ "grad_norm": 0.7133070826530457,
+ "learning_rate": 1.5342985404349788e-06,
+ "loss": 0.4245,
+ "step": 1236
+ },
+ {
+ "epoch": 3.867432150313152,
+ "grad_norm": 0.7716459631919861,
+ "learning_rate": 1.5303064041170163e-06,
+ "loss": 0.4543,
+ "step": 1237
+ },
+ {
+ "epoch": 3.8705636743215033,
+ "grad_norm": 0.737501859664917,
+ "learning_rate": 1.5263171762356388e-06,
+ "loss": 0.4405,
+ "step": 1238
+ },
+ {
+ "epoch": 3.873695198329854,
+ "grad_norm": 0.7885998487472534,
+ "learning_rate": 1.5223308687558786e-06,
+ "loss": 0.4412,
+ "step": 1239
+ },
+ {
+ "epoch": 3.8768267223382047,
+ "grad_norm": 0.7516661882400513,
+ "learning_rate": 1.5183474936340092e-06,
+ "loss": 0.4185,
+ "step": 1240
+ },
+ {
+ "epoch": 3.8799582463465554,
+ "grad_norm": 0.8790446519851685,
+ "learning_rate": 1.5143670628175111e-06,
+ "loss": 0.444,
+ "step": 1241
+ },
+ {
+ "epoch": 3.883089770354906,
+ "grad_norm": 0.7695789337158203,
+ "learning_rate": 1.5103895882450315e-06,
+ "loss": 0.4615,
+ "step": 1242
+ },
+ {
+ "epoch": 3.886221294363257,
+ "grad_norm": 0.7859196662902832,
+ "learning_rate": 1.506415081846353e-06,
+ "loss": 0.4379,
+ "step": 1243
+ },
+ {
+ "epoch": 3.8893528183716075,
+ "grad_norm": 0.7771942615509033,
+ "learning_rate": 1.5024435555423522e-06,
+ "loss": 0.5013,
+ "step": 1244
+ },
+ {
+ "epoch": 3.892484342379958,
+ "grad_norm": 0.7415695190429688,
+ "learning_rate": 1.498475021244971e-06,
+ "loss": 0.4861,
+ "step": 1245
+ },
+ {
+ "epoch": 3.895615866388309,
+ "grad_norm": 0.7909391522407532,
+ "learning_rate": 1.4945094908571755e-06,
+ "loss": 0.4599,
+ "step": 1246
+ },
+ {
+ "epoch": 3.8987473903966596,
+ "grad_norm": 0.7749060988426208,
+ "learning_rate": 1.490546976272923e-06,
+ "loss": 0.3986,
+ "step": 1247
+ },
+ {
+ "epoch": 3.9018789144050103,
+ "grad_norm": 0.810681164264679,
+ "learning_rate": 1.4865874893771248e-06,
+ "loss": 0.4495,
+ "step": 1248
+ },
+ {
+ "epoch": 3.905010438413361,
+ "grad_norm": 0.8018531799316406,
+ "learning_rate": 1.4826310420456103e-06,
+ "loss": 0.4426,
+ "step": 1249
+ },
+ {
+ "epoch": 3.9081419624217117,
+ "grad_norm": 0.756064236164093,
+ "learning_rate": 1.4786776461450924e-06,
+ "loss": 0.4474,
+ "step": 1250
+ },
+ {
+ "epoch": 3.911273486430063,
+ "grad_norm": 0.7581740021705627,
+ "learning_rate": 1.4747273135331347e-06,
+ "loss": 0.4494,
+ "step": 1251
+ },
+ {
+ "epoch": 3.914405010438413,
+ "grad_norm": 0.7666076421737671,
+ "learning_rate": 1.4707800560581086e-06,
+ "loss": 0.4593,
+ "step": 1252
+ },
+ {
+ "epoch": 3.9175365344467643,
+ "grad_norm": 0.7339973449707031,
+ "learning_rate": 1.4668358855591664e-06,
+ "loss": 0.4682,
+ "step": 1253
+ },
+ {
+ "epoch": 3.9206680584551146,
+ "grad_norm": 0.8504599928855896,
+ "learning_rate": 1.4628948138661974e-06,
+ "loss": 0.4504,
+ "step": 1254
+ },
+ {
+ "epoch": 3.9237995824634657,
+ "grad_norm": 0.8332642912864685,
+ "learning_rate": 1.4589568527997985e-06,
+ "loss": 0.5024,
+ "step": 1255
+ },
+ {
+ "epoch": 3.9269311064718164,
+ "grad_norm": 0.7813694477081299,
+ "learning_rate": 1.4550220141712384e-06,
+ "loss": 0.4547,
+ "step": 1256
+ },
+ {
+ "epoch": 3.930062630480167,
+ "grad_norm": 0.842258632183075,
+ "learning_rate": 1.451090309782417e-06,
+ "loss": 0.4584,
+ "step": 1257
+ },
+ {
+ "epoch": 3.933194154488518,
+ "grad_norm": 0.8159133791923523,
+ "learning_rate": 1.4471617514258373e-06,
+ "loss": 0.4538,
+ "step": 1258
+ },
+ {
+ "epoch": 3.9363256784968685,
+ "grad_norm": 0.8117021322250366,
+ "learning_rate": 1.4432363508845626e-06,
+ "loss": 0.4315,
+ "step": 1259
+ },
+ {
+ "epoch": 3.9394572025052192,
+ "grad_norm": 0.8087465167045593,
+ "learning_rate": 1.4393141199321881e-06,
+ "loss": 0.4367,
+ "step": 1260
+ },
+ {
+ "epoch": 3.94258872651357,
+ "grad_norm": 0.7954697012901306,
+ "learning_rate": 1.435395070332801e-06,
+ "loss": 0.4515,
+ "step": 1261
+ },
+ {
+ "epoch": 3.9457202505219207,
+ "grad_norm": 0.7305286526679993,
+ "learning_rate": 1.4314792138409454e-06,
+ "loss": 0.4879,
+ "step": 1262
+ },
+ {
+ "epoch": 3.9488517745302714,
+ "grad_norm": 0.8883433938026428,
+ "learning_rate": 1.4275665622015908e-06,
+ "loss": 0.4489,
+ "step": 1263
+ },
+ {
+ "epoch": 3.951983298538622,
+ "grad_norm": 0.8176298141479492,
+ "learning_rate": 1.4236571271500909e-06,
+ "loss": 0.4583,
+ "step": 1264
+ },
+ {
+ "epoch": 3.9551148225469728,
+ "grad_norm": 0.8042430281639099,
+ "learning_rate": 1.4197509204121563e-06,
+ "loss": 0.4277,
+ "step": 1265
+ },
+ {
+ "epoch": 3.9582463465553235,
+ "grad_norm": 0.8153829574584961,
+ "learning_rate": 1.4158479537038095e-06,
+ "loss": 0.4389,
+ "step": 1266
+ },
+ {
+ "epoch": 3.961377870563674,
+ "grad_norm": 0.7908188104629517,
+ "learning_rate": 1.4119482387313588e-06,
+ "loss": 0.4421,
+ "step": 1267
+ },
+ {
+ "epoch": 3.964509394572025,
+ "grad_norm": 0.831758975982666,
+ "learning_rate": 1.4080517871913596e-06,
+ "loss": 0.4308,
+ "step": 1268
+ },
+ {
+ "epoch": 3.9676409185803756,
+ "grad_norm": 0.8191989064216614,
+ "learning_rate": 1.4041586107705758e-06,
+ "loss": 0.4654,
+ "step": 1269
+ },
+ {
+ "epoch": 3.9707724425887267,
+ "grad_norm": 0.9455055594444275,
+ "learning_rate": 1.4002687211459524e-06,
+ "loss": 0.4668,
+ "step": 1270
+ },
+ {
+ "epoch": 3.973903966597077,
+ "grad_norm": 0.9271034002304077,
+ "learning_rate": 1.396382129984572e-06,
+ "loss": 0.4414,
+ "step": 1271
+ },
+ {
+ "epoch": 3.977035490605428,
+ "grad_norm": 0.7652955651283264,
+ "learning_rate": 1.392498848943627e-06,
+ "loss": 0.4575,
+ "step": 1272
+ },
+ {
+ "epoch": 3.980167014613779,
+ "grad_norm": 0.7850046157836914,
+ "learning_rate": 1.3886188896703816e-06,
+ "loss": 0.4554,
+ "step": 1273
+ },
+ {
+ "epoch": 3.9832985386221296,
+ "grad_norm": 0.7194349765777588,
+ "learning_rate": 1.3847422638021357e-06,
+ "loss": 0.437,
+ "step": 1274
+ },
+ {
+ "epoch": 3.9864300626304803,
+ "grad_norm": 0.8726270198822021,
+ "learning_rate": 1.3808689829661899e-06,
+ "loss": 0.4657,
+ "step": 1275
+ },
+ {
+ "epoch": 3.989561586638831,
+ "grad_norm": 0.7741451263427734,
+ "learning_rate": 1.3769990587798146e-06,
+ "loss": 0.3931,
+ "step": 1276
+ },
+ {
+ "epoch": 3.9926931106471817,
+ "grad_norm": 0.9160324931144714,
+ "learning_rate": 1.3731325028502116e-06,
+ "loss": 0.4358,
+ "step": 1277
+ },
+ {
+ "epoch": 3.9958246346555324,
+ "grad_norm": 0.7819761037826538,
+ "learning_rate": 1.3692693267744806e-06,
+ "loss": 0.4767,
+ "step": 1278
+ },
+ {
+ "epoch": 3.998956158663883,
+ "grad_norm": 0.7595860958099365,
+ "learning_rate": 1.365409542139583e-06,
+ "loss": 0.4217,
+ "step": 1279
+ },
+ {
+ "epoch": 4.0,
+ "grad_norm": 0.7595860958099365,
+ "learning_rate": 1.361553160522307e-06,
+ "loss": 0.1478,
+ "step": 1280
+ },
+ {
+ "epoch": 4.003131524008351,
+ "grad_norm": 1.0793898105621338,
+ "learning_rate": 1.357700193489237e-06,
+ "loss": 0.4453,
+ "step": 1281
+ },
+ {
+ "epoch": 4.006263048016701,
+ "grad_norm": 0.8228668570518494,
+ "learning_rate": 1.3538506525967148e-06,
+ "loss": 0.3981,
+ "step": 1282
+ },
+ {
+ "epoch": 4.009394572025053,
+ "grad_norm": 0.7946518063545227,
+ "learning_rate": 1.3500045493908044e-06,
+ "loss": 0.423,
+ "step": 1283
+ },
+ {
+ "epoch": 4.012526096033403,
+ "grad_norm": 0.7924147248268127,
+ "learning_rate": 1.3461618954072614e-06,
+ "loss": 0.4211,
+ "step": 1284
+ },
+ {
+ "epoch": 4.015657620041754,
+ "grad_norm": 0.891967236995697,
+ "learning_rate": 1.3423227021714937e-06,
+ "loss": 0.4365,
+ "step": 1285
+ },
+ {
+ "epoch": 4.018789144050104,
+ "grad_norm": 0.9970533847808838,
+ "learning_rate": 1.3384869811985323e-06,
+ "loss": 0.4053,
+ "step": 1286
+ },
+ {
+ "epoch": 4.021920668058455,
+ "grad_norm": 1.001489281654358,
+ "learning_rate": 1.33465474399299e-06,
+ "loss": 0.4382,
+ "step": 1287
+ },
+ {
+ "epoch": 4.025052192066806,
+ "grad_norm": 0.7584173083305359,
+ "learning_rate": 1.3308260020490332e-06,
+ "loss": 0.441,
+ "step": 1288
+ },
+ {
+ "epoch": 4.028183716075157,
+ "grad_norm": 0.7444385290145874,
+ "learning_rate": 1.327000766850345e-06,
+ "loss": 0.4172,
+ "step": 1289
+ },
+ {
+ "epoch": 4.031315240083507,
+ "grad_norm": 0.8264429569244385,
+ "learning_rate": 1.3231790498700886e-06,
+ "loss": 0.3956,
+ "step": 1290
+ },
+ {
+ "epoch": 4.034446764091858,
+ "grad_norm": 0.7339959740638733,
+ "learning_rate": 1.319360862570877e-06,
+ "loss": 0.4431,
+ "step": 1291
+ },
+ {
+ "epoch": 4.0375782881002085,
+ "grad_norm": 0.9837627410888672,
+ "learning_rate": 1.3155462164047345e-06,
+ "loss": 0.4439,
+ "step": 1292
+ },
+ {
+ "epoch": 4.04070981210856,
+ "grad_norm": 0.8792996406555176,
+ "learning_rate": 1.3117351228130657e-06,
+ "loss": 0.4664,
+ "step": 1293
+ },
+ {
+ "epoch": 4.04384133611691,
+ "grad_norm": 0.8377962112426758,
+ "learning_rate": 1.3079275932266205e-06,
+ "loss": 0.4267,
+ "step": 1294
+ },
+ {
+ "epoch": 4.046972860125261,
+ "grad_norm": 0.755979597568512,
+ "learning_rate": 1.3041236390654577e-06,
+ "loss": 0.4394,
+ "step": 1295
+ },
+ {
+ "epoch": 4.050104384133611,
+ "grad_norm": 0.7994539737701416,
+ "learning_rate": 1.3003232717389114e-06,
+ "loss": 0.4547,
+ "step": 1296
+ },
+ {
+ "epoch": 4.053235908141962,
+ "grad_norm": 0.8308882117271423,
+ "learning_rate": 1.2965265026455598e-06,
+ "loss": 0.408,
+ "step": 1297
+ },
+ {
+ "epoch": 4.056367432150314,
+ "grad_norm": 0.8120949268341064,
+ "learning_rate": 1.292733343173188e-06,
+ "loss": 0.3524,
+ "step": 1298
+ },
+ {
+ "epoch": 4.059498956158664,
+ "grad_norm": 0.7639186978340149,
+ "learning_rate": 1.288943804698755e-06,
+ "loss": 0.4677,
+ "step": 1299
+ },
+ {
+ "epoch": 4.062630480167015,
+ "grad_norm": 0.8439323306083679,
+ "learning_rate": 1.2851578985883586e-06,
+ "loss": 0.436,
+ "step": 1300
+ },
+ {
+ "epoch": 4.065762004175365,
+ "grad_norm": 0.7270680069923401,
+ "learning_rate": 1.2813756361972001e-06,
+ "loss": 0.4519,
+ "step": 1301
+ },
+ {
+ "epoch": 4.068893528183716,
+ "grad_norm": 0.7746233940124512,
+ "learning_rate": 1.2775970288695554e-06,
+ "loss": 0.4503,
+ "step": 1302
+ },
+ {
+ "epoch": 4.072025052192067,
+ "grad_norm": 0.7725366950035095,
+ "learning_rate": 1.2738220879387376e-06,
+ "loss": 0.4467,
+ "step": 1303
+ },
+ {
+ "epoch": 4.075156576200418,
+ "grad_norm": 0.7371346950531006,
+ "learning_rate": 1.2700508247270597e-06,
+ "loss": 0.4144,
+ "step": 1304
+ },
+ {
+ "epoch": 4.078288100208768,
+ "grad_norm": 0.778403639793396,
+ "learning_rate": 1.2662832505458057e-06,
+ "loss": 0.4345,
+ "step": 1305
+ },
+ {
+ "epoch": 4.081419624217119,
+ "grad_norm": 0.8683139085769653,
+ "learning_rate": 1.2625193766951957e-06,
+ "loss": 0.4345,
+ "step": 1306
+ },
+ {
+ "epoch": 4.0845511482254695,
+ "grad_norm": 0.7849321365356445,
+ "learning_rate": 1.2587592144643513e-06,
+ "loss": 0.4505,
+ "step": 1307
+ },
+ {
+ "epoch": 4.087682672233821,
+ "grad_norm": 0.9351589679718018,
+ "learning_rate": 1.2550027751312618e-06,
+ "loss": 0.4173,
+ "step": 1308
+ },
+ {
+ "epoch": 4.090814196242171,
+ "grad_norm": 0.7938134074211121,
+ "learning_rate": 1.2512500699627489e-06,
+ "loss": 0.4061,
+ "step": 1309
+ },
+ {
+ "epoch": 4.093945720250522,
+ "grad_norm": 0.8015687465667725,
+ "learning_rate": 1.2475011102144337e-06,
+ "loss": 0.423,
+ "step": 1310
+ },
+ {
+ "epoch": 4.097077244258872,
+ "grad_norm": 0.8477723598480225,
+ "learning_rate": 1.2437559071307062e-06,
+ "loss": 0.4475,
+ "step": 1311
+ },
+ {
+ "epoch": 4.1002087682672235,
+ "grad_norm": 0.7643985152244568,
+ "learning_rate": 1.2400144719446885e-06,
+ "loss": 0.4613,
+ "step": 1312
+ },
+ {
+ "epoch": 4.103340292275574,
+ "grad_norm": 0.7612454295158386,
+ "learning_rate": 1.2362768158781985e-06,
+ "loss": 0.4333,
+ "step": 1313
+ },
+ {
+ "epoch": 4.106471816283925,
+ "grad_norm": 0.9039669036865234,
+ "learning_rate": 1.2325429501417232e-06,
+ "loss": 0.4091,
+ "step": 1314
+ },
+ {
+ "epoch": 4.109603340292275,
+ "grad_norm": 0.8423281908035278,
+ "learning_rate": 1.228812885934378e-06,
+ "loss": 0.4482,
+ "step": 1315
+ },
+ {
+ "epoch": 4.112734864300626,
+ "grad_norm": 0.8980326652526855,
+ "learning_rate": 1.2250866344438782e-06,
+ "loss": 0.4354,
+ "step": 1316
+ },
+ {
+ "epoch": 4.115866388308977,
+ "grad_norm": 0.7539350986480713,
+ "learning_rate": 1.221364206846502e-06,
+ "loss": 0.4332,
+ "step": 1317
+ },
+ {
+ "epoch": 4.118997912317328,
+ "grad_norm": 0.855925977230072,
+ "learning_rate": 1.2176456143070597e-06,
+ "loss": 0.4198,
+ "step": 1318
+ },
+ {
+ "epoch": 4.122129436325679,
+ "grad_norm": 0.7506774663925171,
+ "learning_rate": 1.2139308679788594e-06,
+ "loss": 0.4484,
+ "step": 1319
+ },
+ {
+ "epoch": 4.125260960334029,
+ "grad_norm": 0.820524275302887,
+ "learning_rate": 1.2102199790036709e-06,
+ "loss": 0.4208,
+ "step": 1320
+ },
+ {
+ "epoch": 4.12839248434238,
+ "grad_norm": 0.8255232572555542,
+ "learning_rate": 1.2065129585116968e-06,
+ "loss": 0.4431,
+ "step": 1321
+ },
+ {
+ "epoch": 4.1315240083507305,
+ "grad_norm": 0.7454650402069092,
+ "learning_rate": 1.2028098176215347e-06,
+ "loss": 0.3833,
+ "step": 1322
+ },
+ {
+ "epoch": 4.134655532359082,
+ "grad_norm": 0.7500002980232239,
+ "learning_rate": 1.199110567440148e-06,
+ "loss": 0.4486,
+ "step": 1323
+ },
+ {
+ "epoch": 4.137787056367432,
+ "grad_norm": 0.783704400062561,
+ "learning_rate": 1.1954152190628304e-06,
+ "loss": 0.4535,
+ "step": 1324
+ },
+ {
+ "epoch": 4.140918580375783,
+ "grad_norm": 0.7665114402770996,
+ "learning_rate": 1.1917237835731713e-06,
+ "loss": 0.4575,
+ "step": 1325
+ },
+ {
+ "epoch": 4.144050104384133,
+ "grad_norm": 0.7992947697639465,
+ "learning_rate": 1.1880362720430235e-06,
+ "loss": 0.4294,
+ "step": 1326
+ },
+ {
+ "epoch": 4.1471816283924845,
+ "grad_norm": 0.8273386359214783,
+ "learning_rate": 1.1843526955324736e-06,
+ "loss": 0.4578,
+ "step": 1327
+ },
+ {
+ "epoch": 4.150313152400835,
+ "grad_norm": 0.7992604970932007,
+ "learning_rate": 1.1806730650898029e-06,
+ "loss": 0.4125,
+ "step": 1328
+ },
+ {
+ "epoch": 4.153444676409186,
+ "grad_norm": 0.7935183644294739,
+ "learning_rate": 1.1769973917514612e-06,
+ "loss": 0.4537,
+ "step": 1329
+ },
+ {
+ "epoch": 4.156576200417536,
+ "grad_norm": 0.8761199116706848,
+ "learning_rate": 1.173325686542022e-06,
+ "loss": 0.4433,
+ "step": 1330
+ },
+ {
+ "epoch": 4.159707724425887,
+ "grad_norm": 0.7764279246330261,
+ "learning_rate": 1.1696579604741643e-06,
+ "loss": 0.4165,
+ "step": 1331
+ },
+ {
+ "epoch": 4.162839248434238,
+ "grad_norm": 0.9479373693466187,
+ "learning_rate": 1.165994224548629e-06,
+ "loss": 0.42,
+ "step": 1332
+ },
+ {
+ "epoch": 4.165970772442589,
+ "grad_norm": 1.1482913494110107,
+ "learning_rate": 1.162334489754191e-06,
+ "loss": 0.4545,
+ "step": 1333
+ },
+ {
+ "epoch": 4.16910229645094,
+ "grad_norm": 0.825964093208313,
+ "learning_rate": 1.1586787670676228e-06,
+ "loss": 0.4423,
+ "step": 1334
+ },
+ {
+ "epoch": 4.17223382045929,
+ "grad_norm": 0.8352368474006653,
+ "learning_rate": 1.1550270674536626e-06,
+ "loss": 0.4509,
+ "step": 1335
+ },
+ {
+ "epoch": 4.175365344467641,
+ "grad_norm": 0.8211585879325867,
+ "learning_rate": 1.1513794018649846e-06,
+ "loss": 0.4482,
+ "step": 1336
+ },
+ {
+ "epoch": 4.1784968684759916,
+ "grad_norm": 0.7863073945045471,
+ "learning_rate": 1.1477357812421628e-06,
+ "loss": 0.4318,
+ "step": 1337
+ },
+ {
+ "epoch": 4.181628392484343,
+ "grad_norm": 0.8023143410682678,
+ "learning_rate": 1.1440962165136366e-06,
+ "loss": 0.4269,
+ "step": 1338
+ },
+ {
+ "epoch": 4.184759916492693,
+ "grad_norm": 0.8101564049720764,
+ "learning_rate": 1.1404607185956843e-06,
+ "loss": 0.4284,
+ "step": 1339
+ },
+ {
+ "epoch": 4.187891440501044,
+ "grad_norm": 0.755865216255188,
+ "learning_rate": 1.1368292983923826e-06,
+ "loss": 0.4195,
+ "step": 1340
+ },
+ {
+ "epoch": 4.191022964509394,
+ "grad_norm": 0.7991048097610474,
+ "learning_rate": 1.1332019667955805e-06,
+ "loss": 0.3774,
+ "step": 1341
+ },
+ {
+ "epoch": 4.1941544885177455,
+ "grad_norm": 0.7407388091087341,
+ "learning_rate": 1.1295787346848641e-06,
+ "loss": 0.4321,
+ "step": 1342
+ },
+ {
+ "epoch": 4.197286012526096,
+ "grad_norm": 0.8769527673721313,
+ "learning_rate": 1.1259596129275205e-06,
+ "loss": 0.4349,
+ "step": 1343
+ },
+ {
+ "epoch": 4.200417536534447,
+ "grad_norm": 0.7908845543861389,
+ "learning_rate": 1.1223446123785128e-06,
+ "loss": 0.4595,
+ "step": 1344
+ },
+ {
+ "epoch": 4.203549060542797,
+ "grad_norm": 0.810578465461731,
+ "learning_rate": 1.1187337438804394e-06,
+ "loss": 0.4464,
+ "step": 1345
+ },
+ {
+ "epoch": 4.206680584551148,
+ "grad_norm": 0.8122991323471069,
+ "learning_rate": 1.1151270182635082e-06,
+ "loss": 0.4437,
+ "step": 1346
+ },
+ {
+ "epoch": 4.209812108559499,
+ "grad_norm": 0.8065986633300781,
+ "learning_rate": 1.1115244463454988e-06,
+ "loss": 0.4198,
+ "step": 1347
+ },
+ {
+ "epoch": 4.21294363256785,
+ "grad_norm": 0.7652766108512878,
+ "learning_rate": 1.107926038931734e-06,
+ "loss": 0.4141,
+ "step": 1348
+ },
+ {
+ "epoch": 4.2160751565762,
+ "grad_norm": 0.7936646938323975,
+ "learning_rate": 1.1043318068150457e-06,
+ "loss": 0.43,
+ "step": 1349
+ },
+ {
+ "epoch": 4.219206680584551,
+ "grad_norm": 0.8732025027275085,
+ "learning_rate": 1.1007417607757435e-06,
+ "loss": 0.4243,
+ "step": 1350
+ },
+ {
+ "epoch": 4.222338204592901,
+ "grad_norm": 0.8516861200332642,
+ "learning_rate": 1.0971559115815795e-06,
+ "loss": 0.4352,
+ "step": 1351
+ },
+ {
+ "epoch": 4.225469728601253,
+ "grad_norm": 0.7985629439353943,
+ "learning_rate": 1.093574269987718e-06,
+ "loss": 0.4193,
+ "step": 1352
+ },
+ {
+ "epoch": 4.228601252609604,
+ "grad_norm": 0.7350221872329712,
+ "learning_rate": 1.0899968467367056e-06,
+ "loss": 0.4466,
+ "step": 1353
+ },
+ {
+ "epoch": 4.231732776617954,
+ "grad_norm": 0.7626602649688721,
+ "learning_rate": 1.086423652558436e-06,
+ "loss": 0.399,
+ "step": 1354
+ },
+ {
+ "epoch": 4.234864300626305,
+ "grad_norm": 0.7596367597579956,
+ "learning_rate": 1.082854698170117e-06,
+ "loss": 0.4023,
+ "step": 1355
+ },
+ {
+ "epoch": 4.237995824634655,
+ "grad_norm": 0.7802876830101013,
+ "learning_rate": 1.0792899942762406e-06,
+ "loss": 0.4351,
+ "step": 1356
+ },
+ {
+ "epoch": 4.241127348643007,
+ "grad_norm": 1.0809392929077148,
+ "learning_rate": 1.0757295515685504e-06,
+ "loss": 0.4271,
+ "step": 1357
+ },
+ {
+ "epoch": 4.244258872651357,
+ "grad_norm": 0.8581835031509399,
+ "learning_rate": 1.0721733807260094e-06,
+ "loss": 0.4332,
+ "step": 1358
+ },
+ {
+ "epoch": 4.247390396659708,
+ "grad_norm": 0.884496808052063,
+ "learning_rate": 1.0686214924147686e-06,
+ "loss": 0.4317,
+ "step": 1359
+ },
+ {
+ "epoch": 4.250521920668058,
+ "grad_norm": 0.7651242017745972,
+ "learning_rate": 1.0650738972881325e-06,
+ "loss": 0.4614,
+ "step": 1360
+ },
+ {
+ "epoch": 4.253653444676409,
+ "grad_norm": 0.7798812985420227,
+ "learning_rate": 1.0615306059865286e-06,
+ "loss": 0.4088,
+ "step": 1361
+ },
+ {
+ "epoch": 4.25678496868476,
+ "grad_norm": 0.9011983871459961,
+ "learning_rate": 1.0579916291374776e-06,
+ "loss": 0.4242,
+ "step": 1362
+ },
+ {
+ "epoch": 4.259916492693111,
+ "grad_norm": 0.7490668892860413,
+ "learning_rate": 1.0544569773555602e-06,
+ "loss": 0.4369,
+ "step": 1363
+ },
+ {
+ "epoch": 4.263048016701461,
+ "grad_norm": 0.7936168909072876,
+ "learning_rate": 1.0509266612423819e-06,
+ "loss": 0.4321,
+ "step": 1364
+ },
+ {
+ "epoch": 4.266179540709812,
+ "grad_norm": 0.8428751826286316,
+ "learning_rate": 1.047400691386547e-06,
+ "loss": 0.4391,
+ "step": 1365
+ },
+ {
+ "epoch": 4.2693110647181625,
+ "grad_norm": 0.7458098530769348,
+ "learning_rate": 1.0438790783636213e-06,
+ "loss": 0.4789,
+ "step": 1366
+ },
+ {
+ "epoch": 4.272442588726514,
+ "grad_norm": 0.8869938254356384,
+ "learning_rate": 1.0403618327361056e-06,
+ "loss": 0.4181,
+ "step": 1367
+ },
+ {
+ "epoch": 4.275574112734864,
+ "grad_norm": 0.7711350321769714,
+ "learning_rate": 1.0368489650533989e-06,
+ "loss": 0.4334,
+ "step": 1368
+ },
+ {
+ "epoch": 4.278705636743215,
+ "grad_norm": 0.7643832564353943,
+ "learning_rate": 1.0333404858517712e-06,
+ "loss": 0.4444,
+ "step": 1369
+ },
+ {
+ "epoch": 4.281837160751566,
+ "grad_norm": 0.925422191619873,
+ "learning_rate": 1.02983640565433e-06,
+ "loss": 0.4134,
+ "step": 1370
+ },
+ {
+ "epoch": 4.284968684759916,
+ "grad_norm": 1.4223377704620361,
+ "learning_rate": 1.0263367349709866e-06,
+ "loss": 0.4441,
+ "step": 1371
+ },
+ {
+ "epoch": 4.288100208768268,
+ "grad_norm": 0.9713383316993713,
+ "learning_rate": 1.0228414842984297e-06,
+ "loss": 0.418,
+ "step": 1372
+ },
+ {
+ "epoch": 4.291231732776618,
+ "grad_norm": 0.8378648161888123,
+ "learning_rate": 1.0193506641200874e-06,
+ "loss": 0.4017,
+ "step": 1373
+ },
+ {
+ "epoch": 4.294363256784969,
+ "grad_norm": 0.9343763589859009,
+ "learning_rate": 1.0158642849061018e-06,
+ "loss": 0.4468,
+ "step": 1374
+ },
+ {
+ "epoch": 4.297494780793319,
+ "grad_norm": 0.7863892912864685,
+ "learning_rate": 1.012382357113296e-06,
+ "loss": 0.4235,
+ "step": 1375
+ },
+ {
+ "epoch": 4.30062630480167,
+ "grad_norm": 0.8109874725341797,
+ "learning_rate": 1.0089048911851388e-06,
+ "loss": 0.4356,
+ "step": 1376
+ },
+ {
+ "epoch": 4.303757828810021,
+ "grad_norm": 0.7978389859199524,
+ "learning_rate": 1.005431897551717e-06,
+ "loss": 0.4517,
+ "step": 1377
+ },
+ {
+ "epoch": 4.306889352818372,
+ "grad_norm": 0.8213954567909241,
+ "learning_rate": 1.001963386629705e-06,
+ "loss": 0.4263,
+ "step": 1378
+ },
+ {
+ "epoch": 4.310020876826722,
+ "grad_norm": 0.7367708683013916,
+ "learning_rate": 9.984993688223319e-07,
+ "loss": 0.4467,
+ "step": 1379
+ },
+ {
+ "epoch": 4.313152400835073,
+ "grad_norm": 0.8723400831222534,
+ "learning_rate": 9.950398545193508e-07,
+ "loss": 0.4517,
+ "step": 1380
+ },
+ {
+ "epoch": 4.3162839248434235,
+ "grad_norm": 0.7927054166793823,
+ "learning_rate": 9.915848540970033e-07,
+ "loss": 0.4755,
+ "step": 1381
+ },
+ {
+ "epoch": 4.319415448851775,
+ "grad_norm": 0.8598108291625977,
+ "learning_rate": 9.88134377917997e-07,
+ "loss": 0.4539,
+ "step": 1382
+ },
+ {
+ "epoch": 4.322546972860125,
+ "grad_norm": 1.0660723447799683,
+ "learning_rate": 9.84688436331468e-07,
+ "loss": 0.4541,
+ "step": 1383
+ },
+ {
+ "epoch": 4.325678496868476,
+ "grad_norm": 0.840942919254303,
+ "learning_rate": 9.81247039672953e-07,
+ "loss": 0.4239,
+ "step": 1384
+ },
+ {
+ "epoch": 4.328810020876826,
+ "grad_norm": 0.8142675757408142,
+ "learning_rate": 9.778101982643549e-07,
+ "loss": 0.4259,
+ "step": 1385
+ },
+ {
+ "epoch": 4.3319415448851775,
+ "grad_norm": 0.7841728329658508,
+ "learning_rate": 9.743779224139133e-07,
+ "loss": 0.432,
+ "step": 1386
+ },
+ {
+ "epoch": 4.335073068893529,
+ "grad_norm": 0.9642646908760071,
+ "learning_rate": 9.709502224161769e-07,
+ "loss": 0.4423,
+ "step": 1387
+ },
+ {
+ "epoch": 4.338204592901879,
+ "grad_norm": 0.7967872619628906,
+ "learning_rate": 9.675271085519692e-07,
+ "loss": 0.4428,
+ "step": 1388
+ },
+ {
+ "epoch": 4.34133611691023,
+ "grad_norm": 0.7652244567871094,
+ "learning_rate": 9.64108591088356e-07,
+ "loss": 0.4607,
+ "step": 1389
+ },
+ {
+ "epoch": 4.34446764091858,
+ "grad_norm": 0.8806408643722534,
+ "learning_rate": 9.606946802786204e-07,
+ "loss": 0.3859,
+ "step": 1390
+ },
+ {
+ "epoch": 4.347599164926931,
+ "grad_norm": 0.8638584613800049,
+ "learning_rate": 9.572853863622252e-07,
+ "loss": 0.3758,
+ "step": 1391
+ },
+ {
+ "epoch": 4.350730688935282,
+ "grad_norm": 0.7262263894081116,
+ "learning_rate": 9.538807195647882e-07,
+ "loss": 0.4435,
+ "step": 1392
+ },
+ {
+ "epoch": 4.353862212943633,
+ "grad_norm": 0.785175621509552,
+ "learning_rate": 9.504806900980482e-07,
+ "loss": 0.4037,
+ "step": 1393
+ },
+ {
+ "epoch": 4.356993736951983,
+ "grad_norm": 0.8310196995735168,
+ "learning_rate": 9.470853081598338e-07,
+ "loss": 0.4434,
+ "step": 1394
+ },
+ {
+ "epoch": 4.360125260960334,
+ "grad_norm": 0.7547351717948914,
+ "learning_rate": 9.436945839340364e-07,
+ "loss": 0.4318,
+ "step": 1395
+ },
+ {
+ "epoch": 4.3632567849686845,
+ "grad_norm": 0.816460907459259,
+ "learning_rate": 9.403085275905746e-07,
+ "loss": 0.4173,
+ "step": 1396
+ },
+ {
+ "epoch": 4.366388308977036,
+ "grad_norm": 0.8159666657447815,
+ "learning_rate": 9.369271492853696e-07,
+ "loss": 0.4506,
+ "step": 1397
+ },
+ {
+ "epoch": 4.369519832985386,
+ "grad_norm": 0.8234115839004517,
+ "learning_rate": 9.335504591603084e-07,
+ "loss": 0.4186,
+ "step": 1398
+ },
+ {
+ "epoch": 4.372651356993737,
+ "grad_norm": 0.9084867238998413,
+ "learning_rate": 9.301784673432187e-07,
+ "loss": 0.4414,
+ "step": 1399
+ },
+ {
+ "epoch": 4.375782881002087,
+ "grad_norm": 0.7826300263404846,
+ "learning_rate": 9.268111839478369e-07,
+ "loss": 0.4415,
+ "step": 1400
+ },
+ {
+ "epoch": 4.3789144050104385,
+ "grad_norm": 0.7957226634025574,
+ "learning_rate": 9.23448619073774e-07,
+ "loss": 0.4307,
+ "step": 1401
+ },
+ {
+ "epoch": 4.382045929018789,
+ "grad_norm": 0.7768528461456299,
+ "learning_rate": 9.200907828064931e-07,
+ "loss": 0.4107,
+ "step": 1402
+ },
+ {
+ "epoch": 4.38517745302714,
+ "grad_norm": 0.878063440322876,
+ "learning_rate": 9.167376852172702e-07,
+ "loss": 0.4232,
+ "step": 1403
+ },
+ {
+ "epoch": 4.388308977035491,
+ "grad_norm": 0.7961450219154358,
+ "learning_rate": 9.133893363631713e-07,
+ "loss": 0.449,
+ "step": 1404
+ },
+ {
+ "epoch": 4.391440501043841,
+ "grad_norm": 0.8734915256500244,
+ "learning_rate": 9.100457462870197e-07,
+ "loss": 0.4392,
+ "step": 1405
+ },
+ {
+ "epoch": 4.3945720250521925,
+ "grad_norm": 0.7659426331520081,
+ "learning_rate": 9.06706925017363e-07,
+ "loss": 0.4391,
+ "step": 1406
+ },
+ {
+ "epoch": 4.397703549060543,
+ "grad_norm": 0.949674665927887,
+ "learning_rate": 9.033728825684466e-07,
+ "loss": 0.4054,
+ "step": 1407
+ },
+ {
+ "epoch": 4.400835073068894,
+ "grad_norm": 0.8209534287452698,
+ "learning_rate": 9.000436289401832e-07,
+ "loss": 0.3973,
+ "step": 1408
+ },
+ {
+ "epoch": 4.403966597077244,
+ "grad_norm": 0.9697580933570862,
+ "learning_rate": 8.967191741181225e-07,
+ "loss": 0.4228,
+ "step": 1409
+ },
+ {
+ "epoch": 4.407098121085595,
+ "grad_norm": 0.7681871652603149,
+ "learning_rate": 8.933995280734217e-07,
+ "loss": 0.4183,
+ "step": 1410
+ },
+ {
+ "epoch": 4.4102296450939455,
+ "grad_norm": 0.7434380054473877,
+ "learning_rate": 8.900847007628103e-07,
+ "loss": 0.4068,
+ "step": 1411
+ },
+ {
+ "epoch": 4.413361169102297,
+ "grad_norm": 0.9835489988327026,
+ "learning_rate": 8.867747021285697e-07,
+ "loss": 0.4356,
+ "step": 1412
+ },
+ {
+ "epoch": 4.416492693110647,
+ "grad_norm": 0.7727295160293579,
+ "learning_rate": 8.834695420984971e-07,
+ "loss": 0.4201,
+ "step": 1413
+ },
+ {
+ "epoch": 4.419624217118998,
+ "grad_norm": 0.8018465042114258,
+ "learning_rate": 8.801692305858775e-07,
+ "loss": 0.4471,
+ "step": 1414
+ },
+ {
+ "epoch": 4.422755741127348,
+ "grad_norm": 0.8191575407981873,
+ "learning_rate": 8.76873777489452e-07,
+ "loss": 0.4438,
+ "step": 1415
+ },
+ {
+ "epoch": 4.4258872651356995,
+ "grad_norm": 0.7755978107452393,
+ "learning_rate": 8.735831926933896e-07,
+ "loss": 0.4441,
+ "step": 1416
+ },
+ {
+ "epoch": 4.42901878914405,
+ "grad_norm": 0.8469848036766052,
+ "learning_rate": 8.702974860672603e-07,
+ "loss": 0.4134,
+ "step": 1417
+ },
+ {
+ "epoch": 4.432150313152401,
+ "grad_norm": 0.7887343168258667,
+ "learning_rate": 8.67016667466001e-07,
+ "loss": 0.4247,
+ "step": 1418
+ },
+ {
+ "epoch": 4.435281837160751,
+ "grad_norm": 0.7888761758804321,
+ "learning_rate": 8.637407467298867e-07,
+ "loss": 0.4313,
+ "step": 1419
+ },
+ {
+ "epoch": 4.438413361169102,
+ "grad_norm": 0.8109477162361145,
+ "learning_rate": 8.60469733684505e-07,
+ "loss": 0.4472,
+ "step": 1420
+ },
+ {
+ "epoch": 4.4415448851774535,
+ "grad_norm": 0.8876966238021851,
+ "learning_rate": 8.572036381407203e-07,
+ "loss": 0.4395,
+ "step": 1421
+ },
+ {
+ "epoch": 4.444676409185804,
+ "grad_norm": 0.7669360637664795,
+ "learning_rate": 8.539424698946503e-07,
+ "loss": 0.3943,
+ "step": 1422
+ },
+ {
+ "epoch": 4.447807933194154,
+ "grad_norm": 0.7810583710670471,
+ "learning_rate": 8.506862387276338e-07,
+ "loss": 0.4189,
+ "step": 1423
+ },
+ {
+ "epoch": 4.450939457202505,
+ "grad_norm": 0.9363948702812195,
+ "learning_rate": 8.474349544061996e-07,
+ "loss": 0.4245,
+ "step": 1424
+ },
+ {
+ "epoch": 4.454070981210856,
+ "grad_norm": 0.8147290349006653,
+ "learning_rate": 8.441886266820418e-07,
+ "loss": 0.4381,
+ "step": 1425
+ },
+ {
+ "epoch": 4.457202505219207,
+ "grad_norm": 0.9227074384689331,
+ "learning_rate": 8.409472652919873e-07,
+ "loss": 0.4289,
+ "step": 1426
+ },
+ {
+ "epoch": 4.460334029227558,
+ "grad_norm": 0.8162581920623779,
+ "learning_rate": 8.377108799579661e-07,
+ "loss": 0.4565,
+ "step": 1427
+ },
+ {
+ "epoch": 4.463465553235908,
+ "grad_norm": 0.7634908556938171,
+ "learning_rate": 8.344794803869835e-07,
+ "loss": 0.4402,
+ "step": 1428
+ },
+ {
+ "epoch": 4.466597077244259,
+ "grad_norm": 0.7960299253463745,
+ "learning_rate": 8.312530762710924e-07,
+ "loss": 0.4533,
+ "step": 1429
+ },
+ {
+ "epoch": 4.469728601252609,
+ "grad_norm": 0.8322834968566895,
+ "learning_rate": 8.280316772873617e-07,
+ "loss": 0.4282,
+ "step": 1430
+ },
+ {
+ "epoch": 4.4728601252609606,
+ "grad_norm": 0.7285633683204651,
+ "learning_rate": 8.248152930978498e-07,
+ "loss": 0.4287,
+ "step": 1431
+ },
+ {
+ "epoch": 4.475991649269311,
+ "grad_norm": 0.760178804397583,
+ "learning_rate": 8.21603933349569e-07,
+ "loss": 0.4243,
+ "step": 1432
+ },
+ {
+ "epoch": 4.479123173277662,
+ "grad_norm": 0.8069238662719727,
+ "learning_rate": 8.183976076744671e-07,
+ "loss": 0.4352,
+ "step": 1433
+ },
+ {
+ "epoch": 4.482254697286012,
+ "grad_norm": 0.8068020343780518,
+ "learning_rate": 8.151963256893911e-07,
+ "loss": 0.4143,
+ "step": 1434
+ },
+ {
+ "epoch": 4.485386221294363,
+ "grad_norm": 0.7954995036125183,
+ "learning_rate": 8.120000969960606e-07,
+ "loss": 0.4122,
+ "step": 1435
+ },
+ {
+ "epoch": 4.488517745302714,
+ "grad_norm": 0.791763961315155,
+ "learning_rate": 8.088089311810379e-07,
+ "loss": 0.4016,
+ "step": 1436
+ },
+ {
+ "epoch": 4.491649269311065,
+ "grad_norm": 0.8935474157333374,
+ "learning_rate": 8.056228378156994e-07,
+ "loss": 0.4392,
+ "step": 1437
+ },
+ {
+ "epoch": 4.494780793319415,
+ "grad_norm": 0.791764497756958,
+ "learning_rate": 8.024418264562094e-07,
+ "loss": 0.4433,
+ "step": 1438
+ },
+ {
+ "epoch": 4.497912317327766,
+ "grad_norm": 0.8151891827583313,
+ "learning_rate": 7.992659066434899e-07,
+ "loss": 0.4161,
+ "step": 1439
+ },
+ {
+ "epoch": 4.5010438413361165,
+ "grad_norm": 0.9389538168907166,
+ "learning_rate": 7.960950879031884e-07,
+ "loss": 0.4405,
+ "step": 1440
+ },
+ {
+ "epoch": 4.504175365344468,
+ "grad_norm": 0.7620337009429932,
+ "learning_rate": 7.929293797456561e-07,
+ "loss": 0.4443,
+ "step": 1441
+ },
+ {
+ "epoch": 4.507306889352819,
+ "grad_norm": 0.7784422039985657,
+ "learning_rate": 7.897687916659133e-07,
+ "loss": 0.4688,
+ "step": 1442
+ },
+ {
+ "epoch": 4.510438413361169,
+ "grad_norm": 0.759872555732727,
+ "learning_rate": 7.866133331436249e-07,
+ "loss": 0.4349,
+ "step": 1443
+ },
+ {
+ "epoch": 4.51356993736952,
+ "grad_norm": 0.8815323114395142,
+ "learning_rate": 7.834630136430713e-07,
+ "loss": 0.4729,
+ "step": 1444
+ },
+ {
+ "epoch": 4.51670146137787,
+ "grad_norm": 0.7574021816253662,
+ "learning_rate": 7.803178426131161e-07,
+ "loss": 0.4263,
+ "step": 1445
+ },
+ {
+ "epoch": 4.519832985386222,
+ "grad_norm": 0.7910036444664001,
+ "learning_rate": 7.771778294871849e-07,
+ "loss": 0.424,
+ "step": 1446
+ },
+ {
+ "epoch": 4.522964509394572,
+ "grad_norm": 0.8388727903366089,
+ "learning_rate": 7.740429836832295e-07,
+ "loss": 0.4495,
+ "step": 1447
+ },
+ {
+ "epoch": 4.526096033402923,
+ "grad_norm": 0.7924241423606873,
+ "learning_rate": 7.709133146037059e-07,
+ "loss": 0.4157,
+ "step": 1448
+ },
+ {
+ "epoch": 4.529227557411273,
+ "grad_norm": 0.8103229403495789,
+ "learning_rate": 7.677888316355411e-07,
+ "loss": 0.4321,
+ "step": 1449
+ },
+ {
+ "epoch": 4.532359081419624,
+ "grad_norm": 0.8175046443939209,
+ "learning_rate": 7.64669544150109e-07,
+ "loss": 0.4375,
+ "step": 1450
+ },
+ {
+ "epoch": 4.535490605427975,
+ "grad_norm": 0.8193264603614807,
+ "learning_rate": 7.615554615032e-07,
+ "loss": 0.4576,
+ "step": 1451
+ },
+ {
+ "epoch": 4.538622129436326,
+ "grad_norm": 0.7646095156669617,
+ "learning_rate": 7.584465930349924e-07,
+ "loss": 0.4095,
+ "step": 1452
+ },
+ {
+ "epoch": 4.541753653444676,
+ "grad_norm": 0.8025760054588318,
+ "learning_rate": 7.553429480700275e-07,
+ "loss": 0.4387,
+ "step": 1453
+ },
+ {
+ "epoch": 4.544885177453027,
+ "grad_norm": 0.7339401841163635,
+ "learning_rate": 7.522445359171768e-07,
+ "loss": 0.4263,
+ "step": 1454
+ },
+ {
+ "epoch": 4.5480167014613775,
+ "grad_norm": 0.764500617980957,
+ "learning_rate": 7.491513658696189e-07,
+ "loss": 0.4694,
+ "step": 1455
+ },
+ {
+ "epoch": 4.551148225469729,
+ "grad_norm": 0.7974144220352173,
+ "learning_rate": 7.460634472048104e-07,
+ "loss": 0.4335,
+ "step": 1456
+ },
+ {
+ "epoch": 4.554279749478079,
+ "grad_norm": 0.7994454503059387,
+ "learning_rate": 7.429807891844546e-07,
+ "loss": 0.4155,
+ "step": 1457
+ },
+ {
+ "epoch": 4.55741127348643,
+ "grad_norm": 0.8552598357200623,
+ "learning_rate": 7.39903401054477e-07,
+ "loss": 0.428,
+ "step": 1458
+ },
+ {
+ "epoch": 4.560542797494781,
+ "grad_norm": 0.8604694604873657,
+ "learning_rate": 7.368312920449985e-07,
+ "loss": 0.4562,
+ "step": 1459
+ },
+ {
+ "epoch": 4.5636743215031315,
+ "grad_norm": 0.7699970602989197,
+ "learning_rate": 7.337644713703049e-07,
+ "loss": 0.4322,
+ "step": 1460
+ },
+ {
+ "epoch": 4.566805845511483,
+ "grad_norm": 0.9426809549331665,
+ "learning_rate": 7.307029482288227e-07,
+ "loss": 0.387,
+ "step": 1461
+ },
+ {
+ "epoch": 4.569937369519833,
+ "grad_norm": 0.8332602381706238,
+ "learning_rate": 7.276467318030841e-07,
+ "loss": 0.4381,
+ "step": 1462
+ },
+ {
+ "epoch": 4.573068893528184,
+ "grad_norm": 0.7953102588653564,
+ "learning_rate": 7.245958312597095e-07,
+ "loss": 0.3994,
+ "step": 1463
+ },
+ {
+ "epoch": 4.576200417536534,
+ "grad_norm": 0.7888246178627014,
+ "learning_rate": 7.215502557493743e-07,
+ "loss": 0.4191,
+ "step": 1464
+ },
+ {
+ "epoch": 4.579331941544885,
+ "grad_norm": 1.00043523311615,
+ "learning_rate": 7.185100144067816e-07,
+ "loss": 0.4321,
+ "step": 1465
+ },
+ {
+ "epoch": 4.582463465553236,
+ "grad_norm": 0.8209455609321594,
+ "learning_rate": 7.154751163506354e-07,
+ "loss": 0.3841,
+ "step": 1466
+ },
+ {
+ "epoch": 4.585594989561587,
+ "grad_norm": 0.7574297785758972,
+ "learning_rate": 7.124455706836131e-07,
+ "loss": 0.452,
+ "step": 1467
+ },
+ {
+ "epoch": 4.588726513569937,
+ "grad_norm": 0.8509530425071716,
+ "learning_rate": 7.094213864923397e-07,
+ "loss": 0.4225,
+ "step": 1468
+ },
+ {
+ "epoch": 4.591858037578288,
+ "grad_norm": 0.7906093597412109,
+ "learning_rate": 7.064025728473589e-07,
+ "loss": 0.418,
+ "step": 1469
+ },
+ {
+ "epoch": 4.5949895615866385,
+ "grad_norm": 0.7408525943756104,
+ "learning_rate": 7.033891388031056e-07,
+ "loss": 0.4366,
+ "step": 1470
+ },
+ {
+ "epoch": 4.59812108559499,
+ "grad_norm": 0.766342043876648,
+ "learning_rate": 7.003810933978805e-07,
+ "loss": 0.4315,
+ "step": 1471
+ },
+ {
+ "epoch": 4.60125260960334,
+ "grad_norm": 0.7852875590324402,
+ "learning_rate": 6.973784456538207e-07,
+ "loss": 0.4332,
+ "step": 1472
+ },
+ {
+ "epoch": 4.604384133611691,
+ "grad_norm": 0.7975596785545349,
+ "learning_rate": 6.943812045768753e-07,
+ "loss": 0.4563,
+ "step": 1473
+ },
+ {
+ "epoch": 4.607515657620041,
+ "grad_norm": 0.755770742893219,
+ "learning_rate": 6.913893791567767e-07,
+ "loss": 0.417,
+ "step": 1474
+ },
+ {
+ "epoch": 4.6106471816283925,
+ "grad_norm": 0.8083193302154541,
+ "learning_rate": 6.884029783670123e-07,
+ "loss": 0.4285,
+ "step": 1475
+ },
+ {
+ "epoch": 4.613778705636744,
+ "grad_norm": 0.8146732449531555,
+ "learning_rate": 6.854220111648022e-07,
+ "loss": 0.4587,
+ "step": 1476
+ },
+ {
+ "epoch": 4.616910229645094,
+ "grad_norm": 0.7958261370658875,
+ "learning_rate": 6.82446486491066e-07,
+ "loss": 0.4584,
+ "step": 1477
+ },
+ {
+ "epoch": 4.620041753653445,
+ "grad_norm": 0.9063606262207031,
+ "learning_rate": 6.79476413270402e-07,
+ "loss": 0.4056,
+ "step": 1478
+ },
+ {
+ "epoch": 4.623173277661795,
+ "grad_norm": 0.8176311254501343,
+ "learning_rate": 6.76511800411056e-07,
+ "loss": 0.3987,
+ "step": 1479
+ },
+ {
+ "epoch": 4.6263048016701465,
+ "grad_norm": 0.7843388319015503,
+ "learning_rate": 6.735526568048975e-07,
+ "loss": 0.4541,
+ "step": 1480
+ },
+ {
+ "epoch": 4.629436325678497,
+ "grad_norm": 0.7491182684898376,
+ "learning_rate": 6.705989913273922e-07,
+ "loss": 0.4218,
+ "step": 1481
+ },
+ {
+ "epoch": 4.632567849686848,
+ "grad_norm": 0.8836645483970642,
+ "learning_rate": 6.676508128375728e-07,
+ "loss": 0.4153,
+ "step": 1482
+ },
+ {
+ "epoch": 4.635699373695198,
+ "grad_norm": 0.7614302635192871,
+ "learning_rate": 6.647081301780175e-07,
+ "loss": 0.4557,
+ "step": 1483
+ },
+ {
+ "epoch": 4.638830897703549,
+ "grad_norm": 0.8144665360450745,
+ "learning_rate": 6.617709521748181e-07,
+ "loss": 0.4103,
+ "step": 1484
+ },
+ {
+ "epoch": 4.6419624217118995,
+ "grad_norm": 0.757859468460083,
+ "learning_rate": 6.588392876375579e-07,
+ "loss": 0.4498,
+ "step": 1485
+ },
+ {
+ "epoch": 4.645093945720251,
+ "grad_norm": 0.8298365473747253,
+ "learning_rate": 6.559131453592837e-07,
+ "loss": 0.4318,
+ "step": 1486
+ },
+ {
+ "epoch": 4.648225469728601,
+ "grad_norm": 0.9429208636283875,
+ "learning_rate": 6.529925341164781e-07,
+ "loss": 0.4272,
+ "step": 1487
+ },
+ {
+ "epoch": 4.651356993736952,
+ "grad_norm": 0.7761420011520386,
+ "learning_rate": 6.500774626690329e-07,
+ "loss": 0.4224,
+ "step": 1488
+ },
+ {
+ "epoch": 4.654488517745302,
+ "grad_norm": 0.8003484606742859,
+ "learning_rate": 6.471679397602273e-07,
+ "loss": 0.4333,
+ "step": 1489
+ },
+ {
+ "epoch": 4.6576200417536535,
+ "grad_norm": 0.8728703856468201,
+ "learning_rate": 6.44263974116697e-07,
+ "loss": 0.4412,
+ "step": 1490
+ },
+ {
+ "epoch": 4.660751565762004,
+ "grad_norm": 0.7923367023468018,
+ "learning_rate": 6.41365574448411e-07,
+ "loss": 0.4401,
+ "step": 1491
+ },
+ {
+ "epoch": 4.663883089770355,
+ "grad_norm": 0.8110585808753967,
+ "learning_rate": 6.384727494486398e-07,
+ "loss": 0.4152,
+ "step": 1492
+ },
+ {
+ "epoch": 4.667014613778706,
+ "grad_norm": 0.7865241765975952,
+ "learning_rate": 6.355855077939385e-07,
+ "loss": 0.4608,
+ "step": 1493
+ },
+ {
+ "epoch": 4.670146137787056,
+ "grad_norm": 0.8125724196434021,
+ "learning_rate": 6.327038581441136e-07,
+ "loss": 0.419,
+ "step": 1494
+ },
+ {
+ "epoch": 4.673277661795407,
+ "grad_norm": 0.8024583458900452,
+ "learning_rate": 6.298278091422003e-07,
+ "loss": 0.4575,
+ "step": 1495
+ },
+ {
+ "epoch": 4.676409185803758,
+ "grad_norm": 0.7569037675857544,
+ "learning_rate": 6.269573694144343e-07,
+ "loss": 0.4257,
+ "step": 1496
+ },
+ {
+ "epoch": 4.679540709812109,
+ "grad_norm": 0.783733069896698,
+ "learning_rate": 6.240925475702275e-07,
+ "loss": 0.4361,
+ "step": 1497
+ },
+ {
+ "epoch": 4.682672233820459,
+ "grad_norm": 0.7900946736335754,
+ "learning_rate": 6.212333522021424e-07,
+ "loss": 0.4372,
+ "step": 1498
+ },
+ {
+ "epoch": 4.68580375782881,
+ "grad_norm": 0.7943285703659058,
+ "learning_rate": 6.183797918858667e-07,
+ "loss": 0.4337,
+ "step": 1499
+ },
+ {
+ "epoch": 4.688935281837161,
+ "grad_norm": 0.7793418765068054,
+ "learning_rate": 6.155318751801842e-07,
+ "loss": 0.4264,
+ "step": 1500
+ },
+ {
+ "epoch": 4.692066805845512,
+ "grad_norm": 1.0244885683059692,
+ "learning_rate": 6.126896106269533e-07,
+ "loss": 0.446,
+ "step": 1501
+ },
+ {
+ "epoch": 4.695198329853862,
+ "grad_norm": 0.8742548823356628,
+ "learning_rate": 6.098530067510808e-07,
+ "loss": 0.3953,
+ "step": 1502
+ },
+ {
+ "epoch": 4.698329853862213,
+ "grad_norm": 0.7844028472900391,
+ "learning_rate": 6.070220720604919e-07,
+ "loss": 0.402,
+ "step": 1503
+ },
+ {
+ "epoch": 4.701461377870563,
+ "grad_norm": 0.7485696077346802,
+ "learning_rate": 6.041968150461119e-07,
+ "loss": 0.4435,
+ "step": 1504
+ },
+ {
+ "epoch": 4.7045929018789145,
+ "grad_norm": 0.8526521921157837,
+ "learning_rate": 6.01377244181833e-07,
+ "loss": 0.4565,
+ "step": 1505
+ },
+ {
+ "epoch": 4.707724425887265,
+ "grad_norm": 0.7832267880439758,
+ "learning_rate": 5.985633679244957e-07,
+ "loss": 0.4489,
+ "step": 1506
+ },
+ {
+ "epoch": 4.710855949895616,
+ "grad_norm": 0.8092924952507019,
+ "learning_rate": 5.957551947138599e-07,
+ "loss": 0.4011,
+ "step": 1507
+ },
+ {
+ "epoch": 4.713987473903966,
+ "grad_norm": 0.8008314371109009,
+ "learning_rate": 5.92952732972579e-07,
+ "loss": 0.4557,
+ "step": 1508
+ },
+ {
+ "epoch": 4.717118997912317,
+ "grad_norm": 0.8025342226028442,
+ "learning_rate": 5.901559911061758e-07,
+ "loss": 0.38,
+ "step": 1509
+ },
+ {
+ "epoch": 4.7202505219206685,
+ "grad_norm": 0.8290171027183533,
+ "learning_rate": 5.873649775030188e-07,
+ "loss": 0.4019,
+ "step": 1510
+ },
+ {
+ "epoch": 4.723382045929019,
+ "grad_norm": 0.8971850872039795,
+ "learning_rate": 5.845797005342943e-07,
+ "loss": 0.4354,
+ "step": 1511
+ },
+ {
+ "epoch": 4.726513569937369,
+ "grad_norm": 0.8087455630302429,
+ "learning_rate": 5.818001685539843e-07,
+ "loss": 0.4537,
+ "step": 1512
+ },
+ {
+ "epoch": 4.72964509394572,
+ "grad_norm": 0.822287380695343,
+ "learning_rate": 5.790263898988355e-07,
+ "loss": 0.4692,
+ "step": 1513
+ },
+ {
+ "epoch": 4.732776617954071,
+ "grad_norm": 1.007889986038208,
+ "learning_rate": 5.762583728883428e-07,
+ "loss": 0.4317,
+ "step": 1514
+ },
+ {
+ "epoch": 4.735908141962422,
+ "grad_norm": 0.7517583966255188,
+ "learning_rate": 5.734961258247177e-07,
+ "loss": 0.4168,
+ "step": 1515
+ },
+ {
+ "epoch": 4.739039665970773,
+ "grad_norm": 1.1469851732254028,
+ "learning_rate": 5.707396569928675e-07,
+ "loss": 0.4412,
+ "step": 1516
+ },
+ {
+ "epoch": 4.742171189979123,
+ "grad_norm": 0.8889291286468506,
+ "learning_rate": 5.679889746603667e-07,
+ "loss": 0.4133,
+ "step": 1517
+ },
+ {
+ "epoch": 4.745302713987474,
+ "grad_norm": 0.7792209386825562,
+ "learning_rate": 5.652440870774342e-07,
+ "loss": 0.4256,
+ "step": 1518
+ },
+ {
+ "epoch": 4.748434237995824,
+ "grad_norm": 0.7862168550491333,
+ "learning_rate": 5.625050024769099e-07,
+ "loss": 0.395,
+ "step": 1519
+ },
+ {
+ "epoch": 4.751565762004176,
+ "grad_norm": 0.8305843472480774,
+ "learning_rate": 5.597717290742285e-07,
+ "loss": 0.4172,
+ "step": 1520
+ },
+ {
+ "epoch": 4.754697286012526,
+ "grad_norm": 0.8393134474754333,
+ "learning_rate": 5.570442750673932e-07,
+ "loss": 0.4438,
+ "step": 1521
+ },
+ {
+ "epoch": 4.757828810020877,
+ "grad_norm": 0.8481642603874207,
+ "learning_rate": 5.543226486369555e-07,
+ "loss": 0.4251,
+ "step": 1522
+ },
+ {
+ "epoch": 4.760960334029227,
+ "grad_norm": 0.8482305407524109,
+ "learning_rate": 5.516068579459849e-07,
+ "loss": 0.4372,
+ "step": 1523
+ },
+ {
+ "epoch": 4.764091858037578,
+ "grad_norm": 0.7428895235061646,
+ "learning_rate": 5.488969111400502e-07,
+ "loss": 0.4017,
+ "step": 1524
+ },
+ {
+ "epoch": 4.767223382045929,
+ "grad_norm": 0.9066843390464783,
+ "learning_rate": 5.461928163471922e-07,
+ "loss": 0.4263,
+ "step": 1525
+ },
+ {
+ "epoch": 4.77035490605428,
+ "grad_norm": 0.8754342794418335,
+ "learning_rate": 5.434945816778972e-07,
+ "loss": 0.4086,
+ "step": 1526
+ },
+ {
+ "epoch": 4.773486430062631,
+ "grad_norm": 0.7580026388168335,
+ "learning_rate": 5.408022152250783e-07,
+ "loss": 0.4331,
+ "step": 1527
+ },
+ {
+ "epoch": 4.776617954070981,
+ "grad_norm": 0.7960991859436035,
+ "learning_rate": 5.381157250640451e-07,
+ "loss": 0.4482,
+ "step": 1528
+ },
+ {
+ "epoch": 4.7797494780793315,
+ "grad_norm": 0.8199532628059387,
+ "learning_rate": 5.354351192524842e-07,
+ "loss": 0.4558,
+ "step": 1529
+ },
+ {
+ "epoch": 4.782881002087683,
+ "grad_norm": 0.8457065224647522,
+ "learning_rate": 5.327604058304312e-07,
+ "loss": 0.4711,
+ "step": 1530
+ },
+ {
+ "epoch": 4.786012526096034,
+ "grad_norm": 0.9078546762466431,
+ "learning_rate": 5.300915928202499e-07,
+ "loss": 0.4341,
+ "step": 1531
+ },
+ {
+ "epoch": 4.789144050104384,
+ "grad_norm": 0.7782992720603943,
+ "learning_rate": 5.274286882266072e-07,
+ "loss": 0.4115,
+ "step": 1532
+ },
+ {
+ "epoch": 4.792275574112735,
+ "grad_norm": 0.894313395023346,
+ "learning_rate": 5.247717000364463e-07,
+ "loss": 0.4415,
+ "step": 1533
+ },
+ {
+ "epoch": 4.7954070981210855,
+ "grad_norm": 0.8233445882797241,
+ "learning_rate": 5.221206362189682e-07,
+ "loss": 0.4198,
+ "step": 1534
+ },
+ {
+ "epoch": 4.798538622129437,
+ "grad_norm": 0.7654063105583191,
+ "learning_rate": 5.194755047256017e-07,
+ "loss": 0.4364,
+ "step": 1535
+ },
+ {
+ "epoch": 4.801670146137787,
+ "grad_norm": 0.7879475951194763,
+ "learning_rate": 5.168363134899845e-07,
+ "loss": 0.4366,
+ "step": 1536
+ },
+ {
+ "epoch": 4.804801670146138,
+ "grad_norm": 0.7754428386688232,
+ "learning_rate": 5.142030704279377e-07,
+ "loss": 0.4365,
+ "step": 1537
+ },
+ {
+ "epoch": 4.807933194154488,
+ "grad_norm": 0.758696436882019,
+ "learning_rate": 5.11575783437441e-07,
+ "loss": 0.4442,
+ "step": 1538
+ },
+ {
+ "epoch": 4.811064718162839,
+ "grad_norm": 0.7870402336120605,
+ "learning_rate": 5.089544603986085e-07,
+ "loss": 0.4298,
+ "step": 1539
+ },
+ {
+ "epoch": 4.81419624217119,
+ "grad_norm": 0.7607670426368713,
+ "learning_rate": 5.063391091736691e-07,
+ "loss": 0.435,
+ "step": 1540
+ },
+ {
+ "epoch": 4.817327766179541,
+ "grad_norm": 0.8175606727600098,
+ "learning_rate": 5.037297376069388e-07,
+ "loss": 0.4325,
+ "step": 1541
+ },
+ {
+ "epoch": 4.820459290187891,
+ "grad_norm": 0.9083423018455505,
+ "learning_rate": 5.011263535248001e-07,
+ "loss": 0.3847,
+ "step": 1542
+ },
+ {
+ "epoch": 4.823590814196242,
+ "grad_norm": 0.7435231804847717,
+ "learning_rate": 4.985289647356731e-07,
+ "loss": 0.4818,
+ "step": 1543
+ },
+ {
+ "epoch": 4.826722338204593,
+ "grad_norm": 1.5190348625183105,
+ "learning_rate": 4.9593757903e-07,
+ "loss": 0.4068,
+ "step": 1544
+ },
+ {
+ "epoch": 4.829853862212944,
+ "grad_norm": 0.9122394919395447,
+ "learning_rate": 4.933522041802166e-07,
+ "loss": 0.4701,
+ "step": 1545
+ },
+ {
+ "epoch": 4.832985386221294,
+ "grad_norm": 0.9334122538566589,
+ "learning_rate": 4.9077284794073e-07,
+ "loss": 0.4178,
+ "step": 1546
+ },
+ {
+ "epoch": 4.836116910229645,
+ "grad_norm": 0.8561045527458191,
+ "learning_rate": 4.881995180478949e-07,
+ "loss": 0.4373,
+ "step": 1547
+ },
+ {
+ "epoch": 4.839248434237996,
+ "grad_norm": 0.8061811923980713,
+ "learning_rate": 4.85632222219991e-07,
+ "loss": 0.4414,
+ "step": 1548
+ },
+ {
+ "epoch": 4.8423799582463465,
+ "grad_norm": 0.7866719961166382,
+ "learning_rate": 4.830709681572008e-07,
+ "loss": 0.438,
+ "step": 1549
+ },
+ {
+ "epoch": 4.845511482254698,
+ "grad_norm": 0.8633884787559509,
+ "learning_rate": 4.805157635415852e-07,
+ "loss": 0.4639,
+ "step": 1550
+ },
+ {
+ "epoch": 4.848643006263048,
+ "grad_norm": 1.125654935836792,
+ "learning_rate": 4.779666160370596e-07,
+ "loss": 0.4364,
+ "step": 1551
+ },
+ {
+ "epoch": 4.851774530271399,
+ "grad_norm": 0.8371828198432922,
+ "learning_rate": 4.7542353328937473e-07,
+ "loss": 0.4436,
+ "step": 1552
+ },
+ {
+ "epoch": 4.854906054279749,
+ "grad_norm": 0.8166123628616333,
+ "learning_rate": 4.7288652292608736e-07,
+ "loss": 0.4382,
+ "step": 1553
+ },
+ {
+ "epoch": 4.8580375782881005,
+ "grad_norm": 0.8664330244064331,
+ "learning_rate": 4.7035559255654393e-07,
+ "loss": 0.4314,
+ "step": 1554
+ },
+ {
+ "epoch": 4.861169102296451,
+ "grad_norm": 0.8051781058311462,
+ "learning_rate": 4.6783074977185485e-07,
+ "loss": 0.4522,
+ "step": 1555
+ },
+ {
+ "epoch": 4.864300626304802,
+ "grad_norm": 0.7385572791099548,
+ "learning_rate": 4.653120021448701e-07,
+ "loss": 0.4254,
+ "step": 1556
+ },
+ {
+ "epoch": 4.867432150313152,
+ "grad_norm": 0.7310816645622253,
+ "learning_rate": 4.6279935723016033e-07,
+ "loss": 0.4277,
+ "step": 1557
+ },
+ {
+ "epoch": 4.870563674321503,
+ "grad_norm": 0.7682080864906311,
+ "learning_rate": 4.602928225639899e-07,
+ "loss": 0.422,
+ "step": 1558
+ },
+ {
+ "epoch": 4.8736951983298535,
+ "grad_norm": 0.8093141317367554,
+ "learning_rate": 4.577924056642985e-07,
+ "loss": 0.4376,
+ "step": 1559
+ },
+ {
+ "epoch": 4.876826722338205,
+ "grad_norm": 0.8006629943847656,
+ "learning_rate": 4.5529811403067503e-07,
+ "loss": 0.4483,
+ "step": 1560
+ },
+ {
+ "epoch": 4.879958246346555,
+ "grad_norm": 0.7647503614425659,
+ "learning_rate": 4.528099551443377e-07,
+ "loss": 0.418,
+ "step": 1561
+ },
+ {
+ "epoch": 4.883089770354906,
+ "grad_norm": 0.8467709422111511,
+ "learning_rate": 4.503279364681104e-07,
+ "loss": 0.4272,
+ "step": 1562
+ },
+ {
+ "epoch": 4.886221294363256,
+ "grad_norm": 0.8667131662368774,
+ "learning_rate": 4.4785206544640055e-07,
+ "loss": 0.4347,
+ "step": 1563
+ },
+ {
+ "epoch": 4.8893528183716075,
+ "grad_norm": 0.8670737147331238,
+ "learning_rate": 4.453823495051748e-07,
+ "loss": 0.4313,
+ "step": 1564
+ },
+ {
+ "epoch": 4.892484342379959,
+ "grad_norm": 0.8131253719329834,
+ "learning_rate": 4.4291879605194157e-07,
+ "loss": 0.4335,
+ "step": 1565
+ },
+ {
+ "epoch": 4.895615866388309,
+ "grad_norm": 0.9633654356002808,
+ "learning_rate": 4.404614124757242e-07,
+ "loss": 0.4637,
+ "step": 1566
+ },
+ {
+ "epoch": 4.89874739039666,
+ "grad_norm": 0.9058681726455688,
+ "learning_rate": 4.3801020614704174e-07,
+ "loss": 0.4503,
+ "step": 1567
+ },
+ {
+ "epoch": 4.90187891440501,
+ "grad_norm": 0.7923601865768433,
+ "learning_rate": 4.3556518441788405e-07,
+ "loss": 0.4526,
+ "step": 1568
+ },
+ {
+ "epoch": 4.9050104384133615,
+ "grad_norm": 0.7733371257781982,
+ "learning_rate": 4.331263546216913e-07,
+ "loss": 0.437,
+ "step": 1569
+ },
+ {
+ "epoch": 4.908141962421712,
+ "grad_norm": 0.7897522449493408,
+ "learning_rate": 4.306937240733336e-07,
+ "loss": 0.4213,
+ "step": 1570
+ },
+ {
+ "epoch": 4.911273486430063,
+ "grad_norm": 0.7857375741004944,
+ "learning_rate": 4.282673000690865e-07,
+ "loss": 0.4576,
+ "step": 1571
+ },
+ {
+ "epoch": 4.914405010438413,
+ "grad_norm": 0.7968363761901855,
+ "learning_rate": 4.258470898866102e-07,
+ "loss": 0.4383,
+ "step": 1572
+ },
+ {
+ "epoch": 4.917536534446764,
+ "grad_norm": 0.762876033782959,
+ "learning_rate": 4.2343310078492626e-07,
+ "loss": 0.4129,
+ "step": 1573
+ },
+ {
+ "epoch": 4.920668058455115,
+ "grad_norm": 0.9070468544960022,
+ "learning_rate": 4.21025340004399e-07,
+ "loss": 0.3994,
+ "step": 1574
+ },
+ {
+ "epoch": 4.923799582463466,
+ "grad_norm": 0.8299228549003601,
+ "learning_rate": 4.186238147667113e-07,
+ "loss": 0.4461,
+ "step": 1575
+ },
+ {
+ "epoch": 4.926931106471816,
+ "grad_norm": 0.8604512214660645,
+ "learning_rate": 4.162285322748441e-07,
+ "loss": 0.4447,
+ "step": 1576
+ },
+ {
+ "epoch": 4.930062630480167,
+ "grad_norm": 0.8441461324691772,
+ "learning_rate": 4.1383949971305207e-07,
+ "loss": 0.4547,
+ "step": 1577
+ },
+ {
+ "epoch": 4.933194154488517,
+ "grad_norm": 0.7836902737617493,
+ "learning_rate": 4.114567242468473e-07,
+ "loss": 0.4311,
+ "step": 1578
+ },
+ {
+ "epoch": 4.9363256784968685,
+ "grad_norm": 0.7957454919815063,
+ "learning_rate": 4.0908021302297205e-07,
+ "loss": 0.4074,
+ "step": 1579
+ },
+ {
+ "epoch": 4.939457202505219,
+ "grad_norm": 0.772109866142273,
+ "learning_rate": 4.0670997316938216e-07,
+ "loss": 0.4696,
+ "step": 1580
+ },
+ {
+ "epoch": 4.94258872651357,
+ "grad_norm": 1.3906599283218384,
+ "learning_rate": 4.043460117952214e-07,
+ "loss": 0.4379,
+ "step": 1581
+ },
+ {
+ "epoch": 4.945720250521921,
+ "grad_norm": 0.9287625551223755,
+ "learning_rate": 4.0198833599080415e-07,
+ "loss": 0.4576,
+ "step": 1582
+ },
+ {
+ "epoch": 4.948851774530271,
+ "grad_norm": 0.8610300421714783,
+ "learning_rate": 3.99636952827592e-07,
+ "loss": 0.4158,
+ "step": 1583
+ },
+ {
+ "epoch": 4.9519832985386225,
+ "grad_norm": 0.7845875024795532,
+ "learning_rate": 3.972918693581712e-07,
+ "loss": 0.4428,
+ "step": 1584
+ },
+ {
+ "epoch": 4.955114822546973,
+ "grad_norm": 0.8758688569068909,
+ "learning_rate": 3.949530926162354e-07,
+ "loss": 0.4113,
+ "step": 1585
+ },
+ {
+ "epoch": 4.958246346555324,
+ "grad_norm": 0.797717809677124,
+ "learning_rate": 3.926206296165599e-07,
+ "loss": 0.4181,
+ "step": 1586
+ },
+ {
+ "epoch": 4.961377870563674,
+ "grad_norm": 0.7839358448982239,
+ "learning_rate": 3.9029448735498495e-07,
+ "loss": 0.4218,
+ "step": 1587
+ },
+ {
+ "epoch": 4.964509394572025,
+ "grad_norm": 0.8518044352531433,
+ "learning_rate": 3.8797467280839216e-07,
+ "loss": 0.4425,
+ "step": 1588
+ },
+ {
+ "epoch": 4.967640918580376,
+ "grad_norm": 0.7976752519607544,
+ "learning_rate": 3.856611929346837e-07,
+ "loss": 0.4736,
+ "step": 1589
+ },
+ {
+ "epoch": 4.970772442588727,
+ "grad_norm": 0.9000734090805054,
+ "learning_rate": 3.8335405467276184e-07,
+ "loss": 0.4282,
+ "step": 1590
+ },
+ {
+ "epoch": 4.973903966597077,
+ "grad_norm": 0.8424035906791687,
+ "learning_rate": 3.810532649425089e-07,
+ "loss": 0.4443,
+ "step": 1591
+ },
+ {
+ "epoch": 4.977035490605428,
+ "grad_norm": 0.8014306426048279,
+ "learning_rate": 3.7875883064476564e-07,
+ "loss": 0.4617,
+ "step": 1592
+ },
+ {
+ "epoch": 4.980167014613778,
+ "grad_norm": 0.7423905730247498,
+ "learning_rate": 3.76470758661312e-07,
+ "loss": 0.468,
+ "step": 1593
+ },
+ {
+ "epoch": 4.98329853862213,
+ "grad_norm": 0.7726851105690002,
+ "learning_rate": 3.741890558548414e-07,
+ "loss": 0.3824,
+ "step": 1594
+ },
+ {
+ "epoch": 4.98643006263048,
+ "grad_norm": 0.7915946245193481,
+ "learning_rate": 3.719137290689479e-07,
+ "loss": 0.4091,
+ "step": 1595
+ }
+ ],
+ "logging_steps": 1,
+ "max_steps": 1914,
+ "num_input_tokens_seen": 0,
+ "num_train_epochs": 6,
+ "save_steps": 319,
+ "stateful_callbacks": {
+ "TrainerControl": {
+ "args": {
+ "should_epoch_stop": false,
+ "should_evaluate": false,
+ "should_log": false,
+ "should_save": true,
+ "should_training_stop": false
+ },
+ "attributes": {}
+ }
+ },
+ "total_flos": 4.832478095742704e+19,
+ "train_batch_size": 4,
+ "trial_name": null,
+ "trial_params": null
+}
diff --git a/checkpoint-1595/training_args.bin b/checkpoint-1595/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..8067ee9c1c0bc752bdfd00cfcaf1a6e717d2356b
--- /dev/null
+++ b/checkpoint-1595/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c650156a192cae0a2070c4323ee8a93e9b52fb76041d59ae0633b98389585727
+size 7928
diff --git a/checkpoint-1595/zero_to_fp32.py b/checkpoint-1595/zero_to_fp32.py
new file mode 100644
index 0000000000000000000000000000000000000000..24cc342e78d1a006c782b3a4cd68d9ce786d8fd8
--- /dev/null
+++ b/checkpoint-1595/zero_to_fp32.py
@@ -0,0 +1,604 @@
+#!/usr/bin/env python
+
+# Copyright (c) Microsoft Corporation.
+# SPDX-License-Identifier: Apache-2.0
+
+# DeepSpeed Team
+
+# This script extracts fp32 consolidated weights from a zero 1, 2 and 3 DeepSpeed checkpoints. It gets
+# copied into the top level checkpoint dir, so the user can easily do the conversion at any point in
+# the future. Once extracted, the weights don't require DeepSpeed and can be used in any
+# application.
+#
+# example: python zero_to_fp32.py . pytorch_model.bin
+
+import argparse
+import torch
+import glob
+import math
+import os
+import re
+from collections import OrderedDict
+from dataclasses import dataclass
+
+# while this script doesn't use deepspeed to recover data, since the checkpoints are pickled with
+# DeepSpeed data structures it has to be available in the current python environment.
+from deepspeed.utils import logger
+from deepspeed.checkpoint.constants import (DS_VERSION, OPTIMIZER_STATE_DICT, SINGLE_PARTITION_OF_FP32_GROUPS,
+ FP32_FLAT_GROUPS, ZERO_STAGE, PARTITION_COUNT, PARAM_SHAPES, BUFFER_NAMES,
+ FROZEN_PARAM_SHAPES, FROZEN_PARAM_FRAGMENTS)
+
+
+@dataclass
+class zero_model_state:
+ buffers: dict()
+ param_shapes: dict()
+ shared_params: list
+ ds_version: int
+ frozen_param_shapes: dict()
+ frozen_param_fragments: dict()
+
+
+debug = 0
+
+# load to cpu
+device = torch.device('cpu')
+
+
+def atoi(text):
+ return int(text) if text.isdigit() else text
+
+
+def natural_keys(text):
+ '''
+ alist.sort(key=natural_keys) sorts in human order
+ http://nedbatchelder.com/blog/200712/human_sorting.html
+ (See Toothy's implementation in the comments)
+ '''
+ return [atoi(c) for c in re.split(r'(\d+)', text)]
+
+
+def get_model_state_file(checkpoint_dir, zero_stage):
+ if not os.path.isdir(checkpoint_dir):
+ raise FileNotFoundError(f"Directory '{checkpoint_dir}' doesn't exist")
+
+ # there should be only one file
+ if zero_stage <= 2:
+ file = os.path.join(checkpoint_dir, "mp_rank_00_model_states.pt")
+ elif zero_stage == 3:
+ file = os.path.join(checkpoint_dir, "zero_pp_rank_0_mp_rank_00_model_states.pt")
+
+ if not os.path.exists(file):
+ raise FileNotFoundError(f"can't find model states file at '{file}'")
+
+ return file
+
+
+def get_checkpoint_files(checkpoint_dir, glob_pattern):
+ # XXX: need to test that this simple glob rule works for multi-node setup too
+ ckpt_files = sorted(glob.glob(os.path.join(checkpoint_dir, glob_pattern)), key=natural_keys)
+
+ if len(ckpt_files) == 0:
+ raise FileNotFoundError(f"can't find {glob_pattern} files in directory '{checkpoint_dir}'")
+
+ return ckpt_files
+
+
+def get_optim_files(checkpoint_dir):
+ return get_checkpoint_files(checkpoint_dir, "*_optim_states.pt")
+
+
+def get_model_state_files(checkpoint_dir):
+ return get_checkpoint_files(checkpoint_dir, "*_model_states.pt")
+
+
+def parse_model_states(files):
+ zero_model_states = []
+ for file in files:
+ state_dict = torch.load(file, map_location=device)
+
+ if BUFFER_NAMES not in state_dict:
+ raise ValueError(f"{file} is not a model state checkpoint")
+ buffer_names = state_dict[BUFFER_NAMES]
+ if debug:
+ print("Found buffers:", buffer_names)
+
+ # recover just the buffers while restoring them to fp32 if they were saved in fp16
+ buffers = {k: v.float() for k, v in state_dict["module"].items() if k in buffer_names}
+ param_shapes = state_dict[PARAM_SHAPES]
+
+ # collect parameters that are included in param_shapes
+ param_names = []
+ for s in param_shapes:
+ for name in s.keys():
+ param_names.append(name)
+
+ # update with frozen parameters
+ frozen_param_shapes = state_dict.get(FROZEN_PARAM_SHAPES, None)
+ if frozen_param_shapes is not None:
+ if debug:
+ print(f"Found frozen_param_shapes: {frozen_param_shapes}")
+ param_names += list(frozen_param_shapes.keys())
+
+ # handle shared params
+ shared_params = [[k, v] for k, v in state_dict["shared_params"].items()]
+
+ ds_version = state_dict.get(DS_VERSION, None)
+
+ frozen_param_fragments = state_dict.get(FROZEN_PARAM_FRAGMENTS, None)
+
+ z_model_state = zero_model_state(buffers=buffers,
+ param_shapes=param_shapes,
+ shared_params=shared_params,
+ ds_version=ds_version,
+ frozen_param_shapes=frozen_param_shapes,
+ frozen_param_fragments=frozen_param_fragments)
+ zero_model_states.append(z_model_state)
+
+ return zero_model_states
+
+
+def parse_optim_states(files, ds_checkpoint_dir):
+
+ total_files = len(files)
+ state_dicts = []
+ for f in files:
+ state_dict = torch.load(f, map_location=device)
+        # immediately discard the potentially huge optimizer state tensors as we only care for fp32 master weights
+ # and also handle the case where it was already removed by another helper script
+ state_dict["optimizer_state_dict"].pop("optimizer_state_dict", None)
+ state_dicts.append(state_dict)
+
+ if not ZERO_STAGE in state_dicts[0][OPTIMIZER_STATE_DICT]:
+ raise ValueError(f"{files[0]} is not a zero checkpoint")
+ zero_stage = state_dicts[0][OPTIMIZER_STATE_DICT][ZERO_STAGE]
+ world_size = state_dicts[0][OPTIMIZER_STATE_DICT][PARTITION_COUNT]
+
+ # For ZeRO-2 each param group can have different partition_count as data parallelism for expert
+ # parameters can be different from data parallelism for non-expert parameters. So we can just
+ # use the max of the partition_count to get the dp world_size.
+
+ if type(world_size) is list:
+ world_size = max(world_size)
+
+ if world_size != total_files:
+ raise ValueError(
+ f"Expected {world_size} of '*_optim_states.pt' under '{ds_checkpoint_dir}' but found {total_files} files. "
+ "Possibly due to an overwrite of an old checkpoint, or a checkpoint didn't get saved by one or more processes."
+ )
+
+ # the groups are named differently in each stage
+ if zero_stage <= 2:
+ fp32_groups_key = SINGLE_PARTITION_OF_FP32_GROUPS
+ elif zero_stage == 3:
+ fp32_groups_key = FP32_FLAT_GROUPS
+ else:
+ raise ValueError(f"unknown zero stage {zero_stage}")
+
+ if zero_stage <= 2:
+ fp32_flat_groups = [state_dicts[i][OPTIMIZER_STATE_DICT][fp32_groups_key] for i in range(len(state_dicts))]
+ elif zero_stage == 3:
+ # if there is more than one param group, there will be multiple flattened tensors - one
+ # flattened tensor per group - for simplicity merge them into a single tensor
+ #
+ # XXX: could make the script more memory efficient for when there are multiple groups - it
+ # will require matching the sub-lists of param_shapes for each param group flattened tensor
+
+ fp32_flat_groups = [
+ torch.cat(state_dicts[i][OPTIMIZER_STATE_DICT][fp32_groups_key], 0) for i in range(len(state_dicts))
+ ]
+
+ return zero_stage, world_size, fp32_flat_groups
+
+
+def _get_fp32_state_dict_from_zero_checkpoint(ds_checkpoint_dir, exclude_frozen_parameters):
+ """
+ Returns fp32 state_dict reconstructed from ds checkpoint
+
+ Args:
+ - ``ds_checkpoint_dir``: path to the deepspeed checkpoint folder (where the optimizer files are)
+
+ """
+ print(f"Processing zero checkpoint '{ds_checkpoint_dir}'")
+
+ optim_files = get_optim_files(ds_checkpoint_dir)
+ zero_stage, world_size, fp32_flat_groups = parse_optim_states(optim_files, ds_checkpoint_dir)
+ print(f"Detected checkpoint of type zero stage {zero_stage}, world_size: {world_size}")
+
+ model_files = get_model_state_files(ds_checkpoint_dir)
+
+ zero_model_states = parse_model_states(model_files)
+ print(f'Parsing checkpoint created by deepspeed=={zero_model_states[0].ds_version}')
+
+ if zero_stage <= 2:
+ return _get_fp32_state_dict_from_zero2_checkpoint(world_size, fp32_flat_groups, zero_model_states,
+ exclude_frozen_parameters)
+ elif zero_stage == 3:
+ return _get_fp32_state_dict_from_zero3_checkpoint(world_size, fp32_flat_groups, zero_model_states,
+ exclude_frozen_parameters)
+
+
+def _zero2_merge_frozen_params(state_dict, zero_model_states):
+ if zero_model_states[0].frozen_param_shapes is None or len(zero_model_states[0].frozen_param_shapes) == 0:
+ return
+
+ frozen_param_shapes = zero_model_states[0].frozen_param_shapes
+ frozen_param_fragments = zero_model_states[0].frozen_param_fragments
+
+ if debug:
+ num_elem = sum(s.numel() for s in frozen_param_shapes.values())
+ print(f'rank 0: {FROZEN_PARAM_SHAPES}.numel = {num_elem}')
+
+ wanted_params = len(frozen_param_shapes)
+ wanted_numel = sum(s.numel() for s in frozen_param_shapes.values())
+ avail_numel = sum([p.numel() for p in frozen_param_fragments.values()])
+ print(f'Frozen params: Have {avail_numel} numels to process.')
+ print(f'Frozen params: Need {wanted_numel} numels in {wanted_params} params')
+
+ total_params = 0
+ total_numel = 0
+ for name, shape in frozen_param_shapes.items():
+ total_params += 1
+ unpartitioned_numel = shape.numel()
+ total_numel += unpartitioned_numel
+
+ state_dict[name] = frozen_param_fragments[name]
+
+ if debug:
+ print(f"{name} full shape: {shape} unpartitioned numel {unpartitioned_numel} ")
+
+ print(f"Reconstructed Frozen fp32 state dict with {total_params} params {total_numel} elements")
+
+
+def _has_callable(obj, fn):
+ attr = getattr(obj, fn, None)
+ return callable(attr)
+
+
+def _zero2_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states):
+ param_shapes = zero_model_states[0].param_shapes
+
+ # Reconstruction protocol:
+ #
+ # XXX: document this
+
+ if debug:
+ for i in range(world_size):
+ for j in range(len(fp32_flat_groups[0])):
+ print(f"{FP32_FLAT_GROUPS}[{i}][{j}].shape={fp32_flat_groups[i][j].shape}")
+
+ # XXX: memory usage doubles here (zero2)
+ num_param_groups = len(fp32_flat_groups[0])
+ merged_single_partition_of_fp32_groups = []
+ for i in range(num_param_groups):
+ merged_partitions = [sd[i] for sd in fp32_flat_groups]
+ full_single_fp32_vector = torch.cat(merged_partitions, 0)
+ merged_single_partition_of_fp32_groups.append(full_single_fp32_vector)
+ avail_numel = sum(
+ [full_single_fp32_vector.numel() for full_single_fp32_vector in merged_single_partition_of_fp32_groups])
+
+ if debug:
+ wanted_params = sum([len(shapes) for shapes in param_shapes])
+ wanted_numel = sum([sum(shape.numel() for shape in shapes.values()) for shapes in param_shapes])
+ # not asserting if there is a mismatch due to possible padding
+ print(f"Have {avail_numel} numels to process.")
+ print(f"Need {wanted_numel} numels in {wanted_params} params.")
+
+ # params
+ # XXX: for huge models that can't fit into the host's RAM we will have to recode this to support
+ # out-of-core computing solution
+ total_numel = 0
+ total_params = 0
+ for shapes, full_single_fp32_vector in zip(param_shapes, merged_single_partition_of_fp32_groups):
+ offset = 0
+ avail_numel = full_single_fp32_vector.numel()
+ for name, shape in shapes.items():
+
+ unpartitioned_numel = shape.numel() if _has_callable(shape, 'numel') else math.prod(shape)
+ total_numel += unpartitioned_numel
+ total_params += 1
+
+ if debug:
+ print(f"{name} full shape: {shape} unpartitioned numel {unpartitioned_numel} ")
+ state_dict[name] = full_single_fp32_vector.narrow(0, offset, unpartitioned_numel).view(shape)
+ offset += unpartitioned_numel
+
+ # Z2 started to align to 2*world_size to improve nccl performance. Therefore both offset and
+ # avail_numel can differ by anywhere between 0..2*world_size. Due to two unrelated complex
+ # paddings performed in the code it's almost impossible to predict the exact numbers w/o the
+ # live optimizer object, so we are checking that the numbers are within the right range
+ align_to = 2 * world_size
+
+ def zero2_align(x):
+ return align_to * math.ceil(x / align_to)
+
+ if debug:
+ print(f"original offset={offset}, avail_numel={avail_numel}")
+
+ offset = zero2_align(offset)
+ avail_numel = zero2_align(avail_numel)
+
+ if debug:
+ print(f"aligned offset={offset}, avail_numel={avail_numel}")
+
+ # Sanity check
+ if offset != avail_numel:
+ raise ValueError(f"consumed {offset} numels out of {avail_numel} - something is wrong")
+
+ print(f"Reconstructed fp32 state dict with {total_params} params {total_numel} elements")
+
+
+def _get_fp32_state_dict_from_zero2_checkpoint(world_size, fp32_flat_groups, zero_model_states,
+ exclude_frozen_parameters):
+ state_dict = OrderedDict()
+
+ # buffers
+ buffers = zero_model_states[0].buffers
+ state_dict.update(buffers)
+ if debug:
+ print(f"added {len(buffers)} buffers")
+
+ if not exclude_frozen_parameters:
+ _zero2_merge_frozen_params(state_dict, zero_model_states)
+
+ _zero2_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states)
+
+ # recover shared parameters
+ for pair in zero_model_states[0].shared_params:
+ if pair[1] in state_dict:
+ state_dict[pair[0]] = state_dict[pair[1]]
+
+ return state_dict
+
+
+def zero3_partitioned_param_info(unpartitioned_numel, world_size):
+ remainder = unpartitioned_numel % world_size
+ padding_numel = (world_size - remainder) if remainder else 0
+ partitioned_numel = math.ceil(unpartitioned_numel / world_size)
+ return partitioned_numel, padding_numel
+
+
+def _zero3_merge_frozen_params(state_dict, world_size, zero_model_states):
+ if zero_model_states[0].frozen_param_shapes is None or len(zero_model_states[0].frozen_param_shapes) == 0:
+ return
+
+ if debug:
+ for i in range(world_size):
+ num_elem = sum(s.numel() for s in zero_model_states[i].frozen_param_fragments.values())
+ print(f'rank {i}: {FROZEN_PARAM_SHAPES}.numel = {num_elem}')
+
+ frozen_param_shapes = zero_model_states[0].frozen_param_shapes
+ wanted_params = len(frozen_param_shapes)
+ wanted_numel = sum(s.numel() for s in frozen_param_shapes.values())
+ avail_numel = sum([p.numel() for p in zero_model_states[0].frozen_param_fragments.values()]) * world_size
+ print(f'Frozen params: Have {avail_numel} numels to process.')
+ print(f'Frozen params: Need {wanted_numel} numels in {wanted_params} params')
+
+ total_params = 0
+ total_numel = 0
+ for name, shape in zero_model_states[0].frozen_param_shapes.items():
+ total_params += 1
+ unpartitioned_numel = shape.numel()
+ total_numel += unpartitioned_numel
+
+ param_frags = tuple(model_state.frozen_param_fragments[name] for model_state in zero_model_states)
+ state_dict[name] = torch.cat(param_frags, 0).narrow(0, 0, unpartitioned_numel).view(shape)
+
+ partitioned_numel, partitioned_padding_numel = zero3_partitioned_param_info(unpartitioned_numel, world_size)
+
+ if debug:
+ print(
+ f"Frozen params: {total_params} {name} full shape: {shape} partition0 numel={partitioned_numel} partitioned_padding_numel={partitioned_padding_numel}"
+ )
+
+ print(f"Reconstructed Frozen fp32 state dict with {total_params} params {total_numel} elements")
+
+
+def _zero3_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states):
+ param_shapes = zero_model_states[0].param_shapes
+ avail_numel = fp32_flat_groups[0].numel() * world_size
+ # Reconstruction protocol: For zero3 we need to zip the partitions together at boundary of each
+ # param, re-consolidating each param, while dealing with padding if any
+
+ # merge list of dicts, preserving order
+ param_shapes = {k: v for d in param_shapes for k, v in d.items()}
+
+ if debug:
+ for i in range(world_size):
+ print(f"{FP32_FLAT_GROUPS}[{i}].shape={fp32_flat_groups[i].shape}")
+
+ wanted_params = len(param_shapes)
+ wanted_numel = sum(shape.numel() for shape in param_shapes.values())
+ # not asserting if there is a mismatch due to possible padding
+ avail_numel = fp32_flat_groups[0].numel() * world_size
+ print(f"Trainable params: Have {avail_numel} numels to process.")
+ print(f"Trainable params: Need {wanted_numel} numels in {wanted_params} params.")
+
+ # params
+ # XXX: for huge models that can't fit into the host's RAM we will have to recode this to support
+ # out-of-core computing solution
+ offset = 0
+ total_numel = 0
+ total_params = 0
+ for name, shape in param_shapes.items():
+
+ unpartitioned_numel = shape.numel()
+ total_numel += unpartitioned_numel
+ total_params += 1
+
+ partitioned_numel, partitioned_padding_numel = zero3_partitioned_param_info(unpartitioned_numel, world_size)
+
+ if debug:
+ print(
+ f"Trainable params: {total_params} {name} full shape: {shape} partition0 numel={partitioned_numel} partitioned_padding_numel={partitioned_padding_numel}"
+ )
+
+ # XXX: memory usage doubles here
+ state_dict[name] = torch.cat(
+ tuple(fp32_flat_groups[i].narrow(0, offset, partitioned_numel) for i in range(world_size)),
+ 0).narrow(0, 0, unpartitioned_numel).view(shape)
+ offset += partitioned_numel
+
+ offset *= world_size
+
+ # Sanity check
+ if offset != avail_numel:
+ raise ValueError(f"consumed {offset} numels out of {avail_numel} - something is wrong")
+
+ print(f"Reconstructed Trainable fp32 state dict with {total_params} params {total_numel} elements")
+
+
+def _get_fp32_state_dict_from_zero3_checkpoint(world_size, fp32_flat_groups, zero_model_states,
+ exclude_frozen_parameters):
+ state_dict = OrderedDict()
+
+ # buffers
+ buffers = zero_model_states[0].buffers
+ state_dict.update(buffers)
+ if debug:
+ print(f"added {len(buffers)} buffers")
+
+ if not exclude_frozen_parameters:
+ _zero3_merge_frozen_params(state_dict, world_size, zero_model_states)
+
+ _zero3_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states)
+
+ # recover shared parameters
+ for pair in zero_model_states[0].shared_params:
+ if pair[1] in state_dict:
+ state_dict[pair[0]] = state_dict[pair[1]]
+
+ return state_dict
+
+
+def get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag=None, exclude_frozen_parameters=False):
+ """
+ Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated state_dict that can be loaded with
+ ``load_state_dict()`` and used for training without DeepSpeed or shared with others, for example
+ via a model hub.
+
+ Args:
+ - ``checkpoint_dir``: path to the desired checkpoint folder
+ - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in 'latest' file. e.g., ``global_step14``
+ - ``exclude_frozen_parameters``: exclude frozen parameters
+
+ Returns:
+ - pytorch ``state_dict``
+
+ Note: this approach may not work if your application doesn't have sufficient free CPU memory and
+ you may need to use the offline approach using the ``zero_to_fp32.py`` script that is saved with
+ the checkpoint.
+
+ A typical usage might be ::
+
+ from deepspeed.utils.zero_to_fp32 import get_fp32_state_dict_from_zero_checkpoint
+ # do the training and checkpoint saving
+ state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir) # already on cpu
+ model = model.cpu() # move to cpu
+ model.load_state_dict(state_dict)
+ # submit to model hub or save the model to share with others
+
+ In this example the ``model`` will no longer be usable in the deepspeed context of the same
+ application. i.e. you will need to re-initialize the deepspeed engine, since
+ ``model.load_state_dict(state_dict)`` will remove all the deepspeed magic from it.
+
+ If you want it all done for you, use ``load_state_dict_from_zero_checkpoint`` instead.
+
+ """
+ if tag is None:
+ latest_path = os.path.join(checkpoint_dir, 'latest')
+ if os.path.isfile(latest_path):
+ with open(latest_path, 'r') as fd:
+ tag = fd.read().strip()
+ else:
+ raise ValueError(f"Unable to find 'latest' file at {latest_path}")
+
+ ds_checkpoint_dir = os.path.join(checkpoint_dir, tag)
+
+ if not os.path.isdir(ds_checkpoint_dir):
+ raise FileNotFoundError(f"Directory '{ds_checkpoint_dir}' doesn't exist")
+
+ return _get_fp32_state_dict_from_zero_checkpoint(ds_checkpoint_dir, exclude_frozen_parameters)
+
+
+def convert_zero_checkpoint_to_fp32_state_dict(checkpoint_dir, output_file, tag=None, exclude_frozen_parameters=False):
+ """
+ Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated ``state_dict`` file that can be
+ loaded with ``torch.load(file)`` + ``load_state_dict()`` and used for training without DeepSpeed.
+
+ Args:
+ - ``checkpoint_dir``: path to the desired checkpoint folder. (one that contains the tag-folder, like ``global_step14``)
+ - ``output_file``: path to the pytorch fp32 state_dict output file (e.g. path/pytorch_model.bin)
+ - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in the file named ``latest`` in the checkpoint folder, e.g., ``global_step14``
+ - ``exclude_frozen_parameters``: exclude frozen parameters
+ """
+
+ state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag, exclude_frozen_parameters)
+ print(f"Saving fp32 state dict to {output_file}")
+ torch.save(state_dict, output_file)
+
+
+def load_state_dict_from_zero_checkpoint(model, checkpoint_dir, tag=None):
+ """
+ 1. Put the provided model to cpu
+ 2. Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated ``state_dict``
+ 3. Load it into the provided model
+
+ Args:
+ - ``model``: the model object to update
+ - ``checkpoint_dir``: path to the desired checkpoint folder. (one that contains the tag-folder, like ``global_step14``)
+ - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in the file named ``latest`` in the checkpoint folder, e.g., ``global_step14``
+
+ Returns:
+        - ``model``: modified model
+
+ Make sure you have plenty of CPU memory available before you call this function. If you don't
+ have enough use the ``zero_to_fp32.py`` utility to do the conversion. You will find it
+ conveniently placed for you in the checkpoint folder.
+
+ A typical usage might be ::
+
+ from deepspeed.utils.zero_to_fp32 import load_state_dict_from_zero_checkpoint
+ model = load_state_dict_from_zero_checkpoint(trainer.model, checkpoint_dir)
+ # submit to model hub or save the model to share with others
+
+ Note, that once this was run, the ``model`` will no longer be usable in the deepspeed context
+ of the same application. i.e. you will need to re-initialize the deepspeed engine, since
+ ``model.load_state_dict(state_dict)`` will remove all the deepspeed magic from it.
+
+ """
+ logger.info(f"Extracting fp32 weights")
+ state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag)
+
+ logger.info(f"Overwriting model with fp32 weights")
+ model = model.cpu()
+ model.load_state_dict(state_dict, strict=False)
+
+ return model
+
+
+if __name__ == "__main__":
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument("checkpoint_dir",
+ type=str,
+ help="path to the desired checkpoint folder, e.g., path/checkpoint-12")
+ parser.add_argument(
+ "output_file",
+ type=str,
+ help="path to the pytorch fp32 state_dict output file (e.g. path/checkpoint-12/pytorch_model.bin)")
+ parser.add_argument("-t",
+ "--tag",
+ type=str,
+ default=None,
+ help="checkpoint tag used as a unique identifier for checkpoint. e.g., global_step1")
+ parser.add_argument("--exclude_frozen_parameters", action='store_true', help="exclude frozen parameters")
+ parser.add_argument("-d", "--debug", action='store_true', help="enable debug")
+ args = parser.parse_args()
+
+ debug = args.debug
+
+ convert_zero_checkpoint_to_fp32_state_dict(args.checkpoint_dir,
+ args.output_file,
+ tag=args.tag,
+ exclude_frozen_parameters=args.exclude_frozen_parameters)
diff --git a/checkpoint-1914/README.md b/checkpoint-1914/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..f4a3934800eeb082a0cb833d7b6af4f68eed3615
--- /dev/null
+++ b/checkpoint-1914/README.md
@@ -0,0 +1,202 @@
+---
+base_model: nvidia/Llama-3_3-Nemotron-Super-49B-v1
+library_name: peft
+---
+
+# Model Card for Model ID
+
+
+
+
+
+## Model Details
+
+### Model Description
+
+
+
+
+
+- **Developed by:** [More Information Needed]
+- **Funded by [optional]:** [More Information Needed]
+- **Shared by [optional]:** [More Information Needed]
+- **Model type:** [More Information Needed]
+- **Language(s) (NLP):** [More Information Needed]
+- **License:** [More Information Needed]
+- **Finetuned from model [optional]:** [More Information Needed]
+
+### Model Sources [optional]
+
+
+
+- **Repository:** [More Information Needed]
+- **Paper [optional]:** [More Information Needed]
+- **Demo [optional]:** [More Information Needed]
+
+## Uses
+
+
+
+### Direct Use
+
+
+
+[More Information Needed]
+
+### Downstream Use [optional]
+
+
+
+[More Information Needed]
+
+### Out-of-Scope Use
+
+
+
+[More Information Needed]
+
+## Bias, Risks, and Limitations
+
+
+
+[More Information Needed]
+
+### Recommendations
+
+
+
+Users (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations.
+
+## How to Get Started with the Model
+
+Use the code below to get started with the model.
+
+[More Information Needed]
+
+## Training Details
+
+### Training Data
+
+
+
+[More Information Needed]
+
+### Training Procedure
+
+
+
+#### Preprocessing [optional]
+
+[More Information Needed]
+
+
+#### Training Hyperparameters
+
+- **Training regime:** [More Information Needed]
+
+#### Speeds, Sizes, Times [optional]
+
+
+
+[More Information Needed]
+
+## Evaluation
+
+
+
+### Testing Data, Factors & Metrics
+
+#### Testing Data
+
+
+
+[More Information Needed]
+
+#### Factors
+
+
+
+[More Information Needed]
+
+#### Metrics
+
+
+
+[More Information Needed]
+
+### Results
+
+[More Information Needed]
+
+#### Summary
+
+
+
+## Model Examination [optional]
+
+
+
+[More Information Needed]
+
+## Environmental Impact
+
+
+
+Carbon emissions can be estimated using the [Machine Learning Impact calculator](https://mlco2.github.io/impact#compute) presented in [Lacoste et al. (2019)](https://arxiv.org/abs/1910.09700).
+
+- **Hardware Type:** [More Information Needed]
+- **Hours used:** [More Information Needed]
+- **Cloud Provider:** [More Information Needed]
+- **Compute Region:** [More Information Needed]
+- **Carbon Emitted:** [More Information Needed]
+
+## Technical Specifications [optional]
+
+### Model Architecture and Objective
+
+[More Information Needed]
+
+### Compute Infrastructure
+
+[More Information Needed]
+
+#### Hardware
+
+[More Information Needed]
+
+#### Software
+
+[More Information Needed]
+
+## Citation [optional]
+
+
+
+**BibTeX:**
+
+[More Information Needed]
+
+**APA:**
+
+[More Information Needed]
+
+## Glossary [optional]
+
+
+
+[More Information Needed]
+
+## More Information [optional]
+
+[More Information Needed]
+
+## Model Card Authors [optional]
+
+[More Information Needed]
+
+## Model Card Contact
+
+[More Information Needed]
+### Framework versions
+
+- PEFT 0.15.0
\ No newline at end of file
diff --git a/checkpoint-1914/adapter_config.json b/checkpoint-1914/adapter_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..1edb33780e2306c6b19fd727be8e9b8b35f237c4
--- /dev/null
+++ b/checkpoint-1914/adapter_config.json
@@ -0,0 +1,42 @@
+{
+ "alpha_pattern": {},
+ "auto_mapping": null,
+ "base_model_name_or_path": "nvidia/Llama-3_3-Nemotron-Super-49B-v1",
+ "bias": "none",
+ "corda_config": null,
+ "eva_config": null,
+ "exclude_modules": null,
+ "fan_in_fan_out": null,
+ "inference_mode": true,
+ "init_lora_weights": true,
+ "layer_replication": null,
+ "layers_pattern": null,
+ "layers_to_transform": null,
+ "loftq_config": {},
+ "lora_alpha": 512,
+ "lora_bias": false,
+ "lora_dropout": 0.05,
+ "megatron_config": null,
+ "megatron_core": "megatron.core",
+ "modules_to_save": [
+ "embed_tokens",
+ "lm_head"
+ ],
+ "peft_type": "LORA",
+ "r": 256,
+ "rank_pattern": {},
+ "revision": null,
+ "target_modules": [
+ "gate_proj",
+ "k_proj",
+ "down_proj",
+ "o_proj",
+ "v_proj",
+ "up_proj",
+ "q_proj"
+ ],
+ "task_type": "CAUSAL_LM",
+ "trainable_token_indices": null,
+ "use_dora": false,
+ "use_rslora": false
+}
\ No newline at end of file
diff --git a/checkpoint-1914/adapter_model.safetensors b/checkpoint-1914/adapter_model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..7d16d638ba5f1f0d6e17a6a0445d8d989c8ed984
--- /dev/null
+++ b/checkpoint-1914/adapter_model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a8cb98185ecb57fad2b2cd35f0c47cae364129464746319ff1be6d8581c7a3a9
+size 9016826528
diff --git a/checkpoint-1914/global_step1910/bf16_zero_pp_rank_0_mp_rank_00_optim_states.pt b/checkpoint-1914/global_step1910/bf16_zero_pp_rank_0_mp_rank_00_optim_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..0bdf3673ea7afbd7b9f53dff3f76cdd1c715d052
--- /dev/null
+++ b/checkpoint-1914/global_step1910/bf16_zero_pp_rank_0_mp_rank_00_optim_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d69c4a6a0d43cfa67e8b570fd1fd08d5a97fbcdbbd74c051b9246440c74c3a4e
+size 27050164444
diff --git a/checkpoint-1914/global_step1910/bf16_zero_pp_rank_1_mp_rank_00_optim_states.pt b/checkpoint-1914/global_step1910/bf16_zero_pp_rank_1_mp_rank_00_optim_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..d472eae6fe89d7db48ba709625dcc331dc9031a7
--- /dev/null
+++ b/checkpoint-1914/global_step1910/bf16_zero_pp_rank_1_mp_rank_00_optim_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:55607f1577a54d3930469747c642afe4e3b759952305d0b7ccc74ffdc9ff4d18
+size 27050169884
diff --git a/checkpoint-1914/global_step1910/mp_rank_00_model_states.pt b/checkpoint-1914/global_step1910/mp_rank_00_model_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..a820bbbcb6acfb8adba9a85c54a1d58fb399e496
--- /dev/null
+++ b/checkpoint-1914/global_step1910/mp_rank_00_model_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2febc93df50ccbf08ff22419a2955756d96439de06bfba0e7145eea9b610077f
+size 9776788601
diff --git a/checkpoint-1914/latest b/checkpoint-1914/latest
new file mode 100644
index 0000000000000000000000000000000000000000..44970e72d9272500910003990fd0742fc709640f
--- /dev/null
+++ b/checkpoint-1914/latest
@@ -0,0 +1 @@
+global_step1910
\ No newline at end of file
diff --git a/checkpoint-1914/rng_state_0.pth b/checkpoint-1914/rng_state_0.pth
new file mode 100644
index 0000000000000000000000000000000000000000..20f781331db199355d352dd623f2cfa1bd655144
--- /dev/null
+++ b/checkpoint-1914/rng_state_0.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c76958bd6fb582aa7128013f6648323584e6359a68e13537634a12b640e33b3b
+size 14512
diff --git a/checkpoint-1914/rng_state_1.pth b/checkpoint-1914/rng_state_1.pth
new file mode 100644
index 0000000000000000000000000000000000000000..6b6d09621a9f9c529ded1d3367386d4b7dbfa3d6
--- /dev/null
+++ b/checkpoint-1914/rng_state_1.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0468a880821a2b59a3405b34372a9bfe1bffa49d6f45e70fe1a332aaf2506fd4
+size 14512
diff --git a/checkpoint-1914/scheduler.pt b/checkpoint-1914/scheduler.pt
new file mode 100644
index 0000000000000000000000000000000000000000..ce8debe0515e0d91db0a76ec2fd8617dad934be3
--- /dev/null
+++ b/checkpoint-1914/scheduler.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:518502dfa64b1b4e7140cd5c1d3de6ab9c448c69ea1aa3d6aa99b8b0f388bb09
+size 1064
diff --git a/checkpoint-1914/special_tokens_map.json b/checkpoint-1914/special_tokens_map.json
new file mode 100644
index 0000000000000000000000000000000000000000..278b7f0f84be865c4687700ee7b3c63d89a51e18
--- /dev/null
+++ b/checkpoint-1914/special_tokens_map.json
@@ -0,0 +1,23 @@
+{
+ "bos_token": {
+ "content": "<|begin_of_text|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "<|eot_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": {
+ "content": "<|end_of_text|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+}
diff --git a/checkpoint-1914/tokenizer.json b/checkpoint-1914/tokenizer.json
new file mode 100644
index 0000000000000000000000000000000000000000..1c1d8d5c9024994f1d3b00f9662b8dd89ca13cf2
--- /dev/null
+++ b/checkpoint-1914/tokenizer.json
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6b9e4e7fb171f92fd137b777cc2714bf87d11576700a1dcd7a399e7bbe39537b
+size 17209920
diff --git a/checkpoint-1914/tokenizer_config.json b/checkpoint-1914/tokenizer_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..edd01b980c1db496ea102a51c972ee8f5d1a2c74
--- /dev/null
+++ b/checkpoint-1914/tokenizer_config.json
@@ -0,0 +1,2064 @@
+{
+ "added_tokens_decoder": {
+ "128000": {
+ "content": "<|begin_of_text|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128001": {
+ "content": "<|end_of_text|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128002": {
+ "content": "<|reserved_special_token_0|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128003": {
+ "content": "<|reserved_special_token_1|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128004": {
+ "content": "<|finetune_right_pad_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128005": {
+ "content": "<|reserved_special_token_2|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128006": {
+ "content": "<|start_header_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128007": {
+ "content": "<|end_header_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128008": {
+ "content": "<|eom_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128009": {
+ "content": "<|eot_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128010": {
+ "content": "<|python_tag|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128011": {
+ "content": "<|reserved_special_token_3|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128012": {
+ "content": "<|reserved_special_token_4|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128013": {
+ "content": "<|reserved_special_token_5|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128014": {
+ "content": "<|reserved_special_token_6|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128015": {
+ "content": "<|reserved_special_token_7|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128016": {
+ "content": "<|reserved_special_token_8|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128017": {
+ "content": "<|reserved_special_token_9|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128018": {
+ "content": "<|reserved_special_token_10|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128019": {
+ "content": "<|reserved_special_token_11|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128020": {
+ "content": "<|reserved_special_token_12|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128021": {
+ "content": "<|reserved_special_token_13|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128022": {
+ "content": "<|reserved_special_token_14|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128023": {
+ "content": "<|reserved_special_token_15|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128024": {
+ "content": "<|reserved_special_token_16|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128025": {
+ "content": "<|reserved_special_token_17|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128026": {
+ "content": "<|reserved_special_token_18|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128027": {
+ "content": "<|reserved_special_token_19|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128028": {
+ "content": "<|reserved_special_token_20|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128029": {
+ "content": "<|reserved_special_token_21|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128030": {
+ "content": "<|reserved_special_token_22|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128031": {
+ "content": "<|reserved_special_token_23|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128032": {
+ "content": "<|reserved_special_token_24|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128033": {
+ "content": "<|reserved_special_token_25|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128034": {
+ "content": "<|reserved_special_token_26|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128035": {
+ "content": "<|reserved_special_token_27|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128036": {
+ "content": "<|reserved_special_token_28|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128037": {
+ "content": "<|reserved_special_token_29|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128038": {
+ "content": "<|reserved_special_token_30|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128039": {
+ "content": "<|reserved_special_token_31|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128040": {
+ "content": "<|reserved_special_token_32|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128041": {
+ "content": "<|reserved_special_token_33|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128042": {
+ "content": "<|reserved_special_token_34|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128043": {
+ "content": "<|reserved_special_token_35|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128044": {
+ "content": "<|reserved_special_token_36|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128045": {
+ "content": "<|reserved_special_token_37|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128046": {
+ "content": "<|reserved_special_token_38|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128047": {
+ "content": "<|reserved_special_token_39|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128048": {
+ "content": "<|reserved_special_token_40|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128049": {
+ "content": "<|reserved_special_token_41|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128050": {
+ "content": "<|reserved_special_token_42|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128051": {
+ "content": "<|reserved_special_token_43|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128052": {
+ "content": "<|reserved_special_token_44|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128053": {
+ "content": "<|reserved_special_token_45|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128054": {
+ "content": "<|reserved_special_token_46|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128055": {
+ "content": "<|reserved_special_token_47|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128056": {
+ "content": "<|reserved_special_token_48|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128057": {
+ "content": "<|reserved_special_token_49|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128058": {
+ "content": "<|reserved_special_token_50|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128059": {
+ "content": "<|reserved_special_token_51|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128060": {
+ "content": "<|reserved_special_token_52|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128061": {
+ "content": "<|reserved_special_token_53|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128062": {
+ "content": "<|reserved_special_token_54|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128063": {
+ "content": "<|reserved_special_token_55|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128064": {
+ "content": "<|reserved_special_token_56|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128065": {
+ "content": "<|reserved_special_token_57|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128066": {
+ "content": "<|reserved_special_token_58|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128067": {
+ "content": "<|reserved_special_token_59|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128068": {
+ "content": "<|reserved_special_token_60|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128069": {
+ "content": "<|reserved_special_token_61|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128070": {
+ "content": "<|reserved_special_token_62|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128071": {
+ "content": "<|reserved_special_token_63|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128072": {
+ "content": "<|reserved_special_token_64|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128073": {
+ "content": "<|reserved_special_token_65|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128074": {
+ "content": "<|reserved_special_token_66|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128075": {
+ "content": "<|reserved_special_token_67|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128076": {
+ "content": "<|reserved_special_token_68|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128077": {
+ "content": "<|reserved_special_token_69|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128078": {
+ "content": "<|reserved_special_token_70|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128079": {
+ "content": "<|reserved_special_token_71|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128080": {
+ "content": "<|reserved_special_token_72|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128081": {
+ "content": "<|reserved_special_token_73|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128082": {
+ "content": "<|reserved_special_token_74|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128083": {
+ "content": "<|reserved_special_token_75|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128084": {
+ "content": "<|reserved_special_token_76|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128085": {
+ "content": "<|reserved_special_token_77|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128086": {
+ "content": "<|reserved_special_token_78|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128087": {
+ "content": "<|reserved_special_token_79|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128088": {
+ "content": "<|reserved_special_token_80|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128089": {
+ "content": "<|reserved_special_token_81|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128090": {
+ "content": "<|reserved_special_token_82|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128091": {
+ "content": "<|reserved_special_token_83|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128092": {
+ "content": "<|reserved_special_token_84|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128093": {
+ "content": "<|reserved_special_token_85|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128094": {
+ "content": "<|reserved_special_token_86|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128095": {
+ "content": "<|reserved_special_token_87|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128096": {
+ "content": "<|reserved_special_token_88|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128097": {
+ "content": "<|reserved_special_token_89|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128098": {
+ "content": "<|reserved_special_token_90|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128099": {
+ "content": "<|reserved_special_token_91|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128100": {
+ "content": "<|reserved_special_token_92|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128101": {
+ "content": "<|reserved_special_token_93|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128102": {
+ "content": "<|reserved_special_token_94|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128103": {
+ "content": "<|reserved_special_token_95|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128104": {
+ "content": "<|reserved_special_token_96|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128105": {
+ "content": "<|reserved_special_token_97|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128106": {
+ "content": "<|reserved_special_token_98|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128107": {
+ "content": "<|reserved_special_token_99|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128108": {
+ "content": "<|reserved_special_token_100|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128109": {
+ "content": "<|reserved_special_token_101|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128110": {
+ "content": "<|reserved_special_token_102|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128111": {
+ "content": "<|reserved_special_token_103|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128112": {
+ "content": "<|reserved_special_token_104|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128113": {
+ "content": "<|reserved_special_token_105|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128114": {
+ "content": "<|reserved_special_token_106|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128115": {
+ "content": "<|reserved_special_token_107|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128116": {
+ "content": "<|reserved_special_token_108|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128117": {
+ "content": "<|reserved_special_token_109|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128118": {
+ "content": "<|reserved_special_token_110|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128119": {
+ "content": "<|reserved_special_token_111|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128120": {
+ "content": "<|reserved_special_token_112|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128121": {
+ "content": "<|reserved_special_token_113|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128122": {
+ "content": "<|reserved_special_token_114|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128123": {
+ "content": "<|reserved_special_token_115|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128124": {
+ "content": "<|reserved_special_token_116|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128125": {
+ "content": "<|reserved_special_token_117|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128126": {
+ "content": "<|reserved_special_token_118|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128127": {
+ "content": "<|reserved_special_token_119|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128128": {
+ "content": "<|reserved_special_token_120|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128129": {
+ "content": "<|reserved_special_token_121|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128130": {
+ "content": "<|reserved_special_token_122|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128131": {
+ "content": "<|reserved_special_token_123|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128132": {
+ "content": "<|reserved_special_token_124|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128133": {
+ "content": "<|reserved_special_token_125|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128134": {
+ "content": "<|reserved_special_token_126|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128135": {
+ "content": "<|reserved_special_token_127|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128136": {
+ "content": "<|reserved_special_token_128|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128137": {
+ "content": "<|reserved_special_token_129|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128138": {
+ "content": "<|reserved_special_token_130|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128139": {
+ "content": "<|reserved_special_token_131|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128140": {
+ "content": "<|reserved_special_token_132|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128141": {
+ "content": "<|reserved_special_token_133|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128142": {
+ "content": "<|reserved_special_token_134|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128143": {
+ "content": "<|reserved_special_token_135|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128144": {
+ "content": "<|reserved_special_token_136|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128145": {
+ "content": "<|reserved_special_token_137|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128146": {
+ "content": "<|reserved_special_token_138|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128147": {
+ "content": "<|reserved_special_token_139|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128148": {
+ "content": "<|reserved_special_token_140|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128149": {
+ "content": "<|reserved_special_token_141|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128150": {
+ "content": "<|reserved_special_token_142|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128151": {
+ "content": "<|reserved_special_token_143|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128152": {
+ "content": "<|reserved_special_token_144|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128153": {
+ "content": "<|reserved_special_token_145|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128154": {
+ "content": "<|reserved_special_token_146|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128155": {
+ "content": "<|reserved_special_token_147|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128156": {
+ "content": "<|reserved_special_token_148|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128157": {
+ "content": "<|reserved_special_token_149|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128158": {
+ "content": "<|reserved_special_token_150|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128159": {
+ "content": "<|reserved_special_token_151|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128160": {
+ "content": "<|reserved_special_token_152|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128161": {
+ "content": "<|reserved_special_token_153|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128162": {
+ "content": "<|reserved_special_token_154|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128163": {
+ "content": "<|reserved_special_token_155|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128164": {
+ "content": "<|reserved_special_token_156|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128165": {
+ "content": "<|reserved_special_token_157|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128166": {
+ "content": "<|reserved_special_token_158|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128167": {
+ "content": "<|reserved_special_token_159|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128168": {
+ "content": "<|reserved_special_token_160|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128169": {
+ "content": "<|reserved_special_token_161|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128170": {
+ "content": "<|reserved_special_token_162|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128171": {
+ "content": "<|reserved_special_token_163|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128172": {
+ "content": "<|reserved_special_token_164|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128173": {
+ "content": "<|reserved_special_token_165|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128174": {
+ "content": "<|reserved_special_token_166|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128175": {
+ "content": "<|reserved_special_token_167|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128176": {
+ "content": "<|reserved_special_token_168|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128177": {
+ "content": "<|reserved_special_token_169|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128178": {
+ "content": "<|reserved_special_token_170|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128179": {
+ "content": "<|reserved_special_token_171|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128180": {
+ "content": "<|reserved_special_token_172|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128181": {
+ "content": "<|reserved_special_token_173|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128182": {
+ "content": "<|reserved_special_token_174|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128183": {
+ "content": "<|reserved_special_token_175|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128184": {
+ "content": "<|reserved_special_token_176|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128185": {
+ "content": "<|reserved_special_token_177|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128186": {
+ "content": "<|reserved_special_token_178|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128187": {
+ "content": "<|reserved_special_token_179|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128188": {
+ "content": "<|reserved_special_token_180|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128189": {
+ "content": "<|reserved_special_token_181|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128190": {
+ "content": "<|reserved_special_token_182|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128191": {
+ "content": "<|reserved_special_token_183|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128192": {
+ "content": "<|reserved_special_token_184|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128193": {
+ "content": "<|reserved_special_token_185|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128194": {
+ "content": "<|reserved_special_token_186|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128195": {
+ "content": "<|reserved_special_token_187|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128196": {
+ "content": "<|reserved_special_token_188|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128197": {
+ "content": "<|reserved_special_token_189|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128198": {
+ "content": "<|reserved_special_token_190|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128199": {
+ "content": "<|reserved_special_token_191|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128200": {
+ "content": "<|reserved_special_token_192|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128201": {
+ "content": "<|reserved_special_token_193|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128202": {
+ "content": "<|reserved_special_token_194|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128203": {
+ "content": "<|reserved_special_token_195|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128204": {
+ "content": "<|reserved_special_token_196|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128205": {
+ "content": "<|reserved_special_token_197|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128206": {
+ "content": "<|reserved_special_token_198|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128207": {
+ "content": "<|reserved_special_token_199|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128208": {
+ "content": "<|reserved_special_token_200|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128209": {
+ "content": "<|reserved_special_token_201|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128210": {
+ "content": "<|reserved_special_token_202|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128211": {
+ "content": "<|reserved_special_token_203|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128212": {
+ "content": "<|reserved_special_token_204|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128213": {
+ "content": "<|reserved_special_token_205|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128214": {
+ "content": "<|reserved_special_token_206|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128215": {
+ "content": "<|reserved_special_token_207|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128216": {
+ "content": "<|reserved_special_token_208|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128217": {
+ "content": "<|reserved_special_token_209|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128218": {
+ "content": "<|reserved_special_token_210|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128219": {
+ "content": "<|reserved_special_token_211|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128220": {
+ "content": "<|reserved_special_token_212|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128221": {
+ "content": "<|reserved_special_token_213|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128222": {
+ "content": "<|reserved_special_token_214|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128223": {
+ "content": "<|reserved_special_token_215|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128224": {
+ "content": "<|reserved_special_token_216|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128225": {
+ "content": "<|reserved_special_token_217|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128226": {
+ "content": "<|reserved_special_token_218|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128227": {
+ "content": "<|reserved_special_token_219|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128228": {
+ "content": "<|reserved_special_token_220|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128229": {
+ "content": "<|reserved_special_token_221|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128230": {
+ "content": "<|reserved_special_token_222|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128231": {
+ "content": "<|reserved_special_token_223|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128232": {
+ "content": "<|reserved_special_token_224|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128233": {
+ "content": "<|reserved_special_token_225|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128234": {
+ "content": "<|reserved_special_token_226|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128235": {
+ "content": "<|reserved_special_token_227|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128236": {
+ "content": "<|reserved_special_token_228|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128237": {
+ "content": "<|reserved_special_token_229|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128238": {
+ "content": "<|reserved_special_token_230|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128239": {
+ "content": "<|reserved_special_token_231|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128240": {
+ "content": "<|reserved_special_token_232|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128241": {
+ "content": "<|reserved_special_token_233|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128242": {
+ "content": "<|reserved_special_token_234|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128243": {
+ "content": "<|reserved_special_token_235|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128244": {
+ "content": "<|reserved_special_token_236|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128245": {
+ "content": "<|reserved_special_token_237|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128246": {
+ "content": "<|reserved_special_token_238|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128247": {
+ "content": "<|reserved_special_token_239|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128248": {
+ "content": "<|reserved_special_token_240|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128249": {
+ "content": "<|reserved_special_token_241|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128250": {
+ "content": "<|reserved_special_token_242|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128251": {
+ "content": "<|reserved_special_token_243|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128252": {
+ "content": "<|reserved_special_token_244|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128253": {
+ "content": "<|reserved_special_token_245|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128254": {
+ "content": "<|reserved_special_token_246|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128255": {
+ "content": "<|reserved_special_token_247|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ }
+ },
+ "bos_token": "<|begin_of_text|>",
+ "chat_template": "{{- bos_token }}{%- if messages[0]['role'] == 'system' %}{%- set system_message = messages[0]['content']|trim %}{%- set messages = messages[1:] %}{%- else %}{%- set system_message = \"\" %}{%- endif %}{{- \"<|start_header_id|>system<|end_header_id|>\\n\\n\" }}{{- system_message }}{{- \"<|eot_id|>\" }}{%- for message in messages %}{%- if message['role'] == 'assistant' and '' in message['content'] %}{%- set content = message['content'].split('')[-1].lstrip() %}{%- else %}{%- set content = message['content'] %}{%- endif %}{{- '<|start_header_id|>' + message['role'] + '<|end_header_id|>\\n\\n' + content | trim + '<|eot_id|>' }}{%- endfor %}{%- if add_generation_prompt %}{{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' }}{%- endif %}",
+ "clean_up_tokenization_spaces": true,
+ "eos_token": "<|eot_id|>",
+ "extra_special_tokens": {},
+ "model_input_names": [
+ "input_ids",
+ "attention_mask"
+ ],
+ "model_max_length": 131072,
+ "pad_token": "<|end_of_text|>",
+ "tokenizer_class": "PreTrainedTokenizer"
+}
diff --git a/checkpoint-1914/trainer_state.json b/checkpoint-1914/trainer_state.json
new file mode 100644
index 0000000000000000000000000000000000000000..52e4d90e074c683cf3ff19209008804a30cc5990
--- /dev/null
+++ b/checkpoint-1914/trainer_state.json
@@ -0,0 +1,13431 @@
+{
+ "best_metric": null,
+ "best_model_checkpoint": null,
+ "epoch": 5.98329853862213,
+ "eval_steps": 500,
+ "global_step": 1914,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ {
+ "epoch": 0.003131524008350731,
+ "grad_norm": 13.917898178100586,
+ "learning_rate": 5.0000000000000004e-08,
+ "loss": 4.1051,
+ "step": 1
+ },
+ {
+ "epoch": 0.006263048016701462,
+ "grad_norm": 17.327869415283203,
+ "learning_rate": 1.0000000000000001e-07,
+ "loss": 4.1048,
+ "step": 2
+ },
+ {
+ "epoch": 0.009394572025052192,
+ "grad_norm": 14.063946723937988,
+ "learning_rate": 1.5000000000000002e-07,
+ "loss": 4.0741,
+ "step": 3
+ },
+ {
+ "epoch": 0.012526096033402923,
+ "grad_norm": 16.817699432373047,
+ "learning_rate": 2.0000000000000002e-07,
+ "loss": 4.2002,
+ "step": 4
+ },
+ {
+ "epoch": 0.015657620041753653,
+ "grad_norm": 14.47036361694336,
+ "learning_rate": 2.5000000000000004e-07,
+ "loss": 4.2652,
+ "step": 5
+ },
+ {
+ "epoch": 0.018789144050104383,
+ "grad_norm": 14.474193572998047,
+ "learning_rate": 3.0000000000000004e-07,
+ "loss": 4.0888,
+ "step": 6
+ },
+ {
+ "epoch": 0.021920668058455117,
+ "grad_norm": 14.865458488464355,
+ "learning_rate": 3.5000000000000004e-07,
+ "loss": 4.0014,
+ "step": 7
+ },
+ {
+ "epoch": 0.025052192066805846,
+ "grad_norm": 15.338888168334961,
+ "learning_rate": 4.0000000000000003e-07,
+ "loss": 4.13,
+ "step": 8
+ },
+ {
+ "epoch": 0.028183716075156576,
+ "grad_norm": 15.154336929321289,
+ "learning_rate": 4.5000000000000003e-07,
+ "loss": 4.2493,
+ "step": 9
+ },
+ {
+ "epoch": 0.031315240083507306,
+ "grad_norm": 15.919597625732422,
+ "learning_rate": 5.000000000000001e-07,
+ "loss": 4.0535,
+ "step": 10
+ },
+ {
+ "epoch": 0.03444676409185804,
+ "grad_norm": 14.981926918029785,
+ "learning_rate": 5.5e-07,
+ "loss": 3.9064,
+ "step": 11
+ },
+ {
+ "epoch": 0.037578288100208766,
+ "grad_norm": 13.36101245880127,
+ "learning_rate": 6.000000000000001e-07,
+ "loss": 4.1939,
+ "step": 12
+ },
+ {
+ "epoch": 0.0407098121085595,
+ "grad_norm": 15.58773422241211,
+ "learning_rate": 6.5e-07,
+ "loss": 4.18,
+ "step": 13
+ },
+ {
+ "epoch": 0.04384133611691023,
+ "grad_norm": 13.560139656066895,
+ "learning_rate": 7.000000000000001e-07,
+ "loss": 3.9414,
+ "step": 14
+ },
+ {
+ "epoch": 0.04697286012526096,
+ "grad_norm": 12.307971954345703,
+ "learning_rate": 7.5e-07,
+ "loss": 3.8836,
+ "step": 15
+ },
+ {
+ "epoch": 0.05010438413361169,
+ "grad_norm": 14.533182144165039,
+ "learning_rate": 8.000000000000001e-07,
+ "loss": 4.1551,
+ "step": 16
+ },
+ {
+ "epoch": 0.05323590814196242,
+ "grad_norm": 13.453729629516602,
+ "learning_rate": 8.500000000000001e-07,
+ "loss": 4.0048,
+ "step": 17
+ },
+ {
+ "epoch": 0.05636743215031315,
+ "grad_norm": 13.45992374420166,
+ "learning_rate": 9.000000000000001e-07,
+ "loss": 4.0745,
+ "step": 18
+ },
+ {
+ "epoch": 0.059498956158663886,
+ "grad_norm": 11.857145309448242,
+ "learning_rate": 9.500000000000001e-07,
+ "loss": 3.9871,
+ "step": 19
+ },
+ {
+ "epoch": 0.06263048016701461,
+ "grad_norm": 11.872294425964355,
+ "learning_rate": 1.0000000000000002e-06,
+ "loss": 3.8959,
+ "step": 20
+ },
+ {
+ "epoch": 0.06576200417536535,
+ "grad_norm": 12.969825744628906,
+ "learning_rate": 1.0500000000000001e-06,
+ "loss": 4.0308,
+ "step": 21
+ },
+ {
+ "epoch": 0.06889352818371608,
+ "grad_norm": 12.33769416809082,
+ "learning_rate": 1.1e-06,
+ "loss": 3.9341,
+ "step": 22
+ },
+ {
+ "epoch": 0.0720250521920668,
+ "grad_norm": 12.669405937194824,
+ "learning_rate": 1.1500000000000002e-06,
+ "loss": 3.8511,
+ "step": 23
+ },
+ {
+ "epoch": 0.07515657620041753,
+ "grad_norm": 10.677213668823242,
+ "learning_rate": 1.2000000000000002e-06,
+ "loss": 3.7764,
+ "step": 24
+ },
+ {
+ "epoch": 0.07828810020876827,
+ "grad_norm": 10.366402626037598,
+ "learning_rate": 1.25e-06,
+ "loss": 3.5291,
+ "step": 25
+ },
+ {
+ "epoch": 0.081419624217119,
+ "grad_norm": 11.211421012878418,
+ "learning_rate": 1.3e-06,
+ "loss": 3.5765,
+ "step": 26
+ },
+ {
+ "epoch": 0.08455114822546973,
+ "grad_norm": 11.313716888427734,
+ "learning_rate": 1.3500000000000002e-06,
+ "loss": 3.4849,
+ "step": 27
+ },
+ {
+ "epoch": 0.08768267223382047,
+ "grad_norm": 10.41294002532959,
+ "learning_rate": 1.4000000000000001e-06,
+ "loss": 3.2653,
+ "step": 28
+ },
+ {
+ "epoch": 0.09081419624217119,
+ "grad_norm": 10.40064525604248,
+ "learning_rate": 1.45e-06,
+ "loss": 3.3384,
+ "step": 29
+ },
+ {
+ "epoch": 0.09394572025052192,
+ "grad_norm": 10.05427074432373,
+ "learning_rate": 1.5e-06,
+ "loss": 3.2257,
+ "step": 30
+ },
+ {
+ "epoch": 0.09707724425887265,
+ "grad_norm": 9.583163261413574,
+ "learning_rate": 1.5500000000000002e-06,
+ "loss": 3.1371,
+ "step": 31
+ },
+ {
+ "epoch": 0.10020876826722339,
+ "grad_norm": 10.09977912902832,
+ "learning_rate": 1.6000000000000001e-06,
+ "loss": 3.0658,
+ "step": 32
+ },
+ {
+ "epoch": 0.10334029227557412,
+ "grad_norm": 9.271486282348633,
+ "learning_rate": 1.6500000000000003e-06,
+ "loss": 2.9693,
+ "step": 33
+ },
+ {
+ "epoch": 0.10647181628392484,
+ "grad_norm": 10.687992095947266,
+ "learning_rate": 1.7000000000000002e-06,
+ "loss": 2.95,
+ "step": 34
+ },
+ {
+ "epoch": 0.10960334029227557,
+ "grad_norm": 8.762290000915527,
+ "learning_rate": 1.75e-06,
+ "loss": 2.8286,
+ "step": 35
+ },
+ {
+ "epoch": 0.1127348643006263,
+ "grad_norm": 10.13785171508789,
+ "learning_rate": 1.8000000000000001e-06,
+ "loss": 2.3664,
+ "step": 36
+ },
+ {
+ "epoch": 0.11586638830897704,
+ "grad_norm": 18.301353454589844,
+ "learning_rate": 1.85e-06,
+ "loss": 2.5533,
+ "step": 37
+ },
+ {
+ "epoch": 0.11899791231732777,
+ "grad_norm": 11.490377426147461,
+ "learning_rate": 1.9000000000000002e-06,
+ "loss": 2.6133,
+ "step": 38
+ },
+ {
+ "epoch": 0.12212943632567849,
+ "grad_norm": 15.614163398742676,
+ "learning_rate": 1.9500000000000004e-06,
+ "loss": 2.3596,
+ "step": 39
+ },
+ {
+ "epoch": 0.12526096033402923,
+ "grad_norm": 17.757442474365234,
+ "learning_rate": 2.0000000000000003e-06,
+ "loss": 2.3491,
+ "step": 40
+ },
+ {
+ "epoch": 0.12839248434237996,
+ "grad_norm": 17.18431854248047,
+ "learning_rate": 2.05e-06,
+ "loss": 2.2361,
+ "step": 41
+ },
+ {
+ "epoch": 0.1315240083507307,
+ "grad_norm": 16.149789810180664,
+ "learning_rate": 2.1000000000000002e-06,
+ "loss": 2.1457,
+ "step": 42
+ },
+ {
+ "epoch": 0.13465553235908143,
+ "grad_norm": 15.256914138793945,
+ "learning_rate": 2.15e-06,
+ "loss": 2.12,
+ "step": 43
+ },
+ {
+ "epoch": 0.13778705636743216,
+ "grad_norm": 15.537406921386719,
+ "learning_rate": 2.2e-06,
+ "loss": 2.1877,
+ "step": 44
+ },
+ {
+ "epoch": 0.1409185803757829,
+ "grad_norm": 7.947713851928711,
+ "learning_rate": 2.25e-06,
+ "loss": 2.1648,
+ "step": 45
+ },
+ {
+ "epoch": 0.1440501043841336,
+ "grad_norm": 8.818676948547363,
+ "learning_rate": 2.3000000000000004e-06,
+ "loss": 2.134,
+ "step": 46
+ },
+ {
+ "epoch": 0.14718162839248433,
+ "grad_norm": 5.175768852233887,
+ "learning_rate": 2.35e-06,
+ "loss": 2.0796,
+ "step": 47
+ },
+ {
+ "epoch": 0.15031315240083507,
+ "grad_norm": 6.750611305236816,
+ "learning_rate": 2.4000000000000003e-06,
+ "loss": 1.9174,
+ "step": 48
+ },
+ {
+ "epoch": 0.1534446764091858,
+ "grad_norm": 6.2147979736328125,
+ "learning_rate": 2.4500000000000003e-06,
+ "loss": 1.8065,
+ "step": 49
+ },
+ {
+ "epoch": 0.15657620041753653,
+ "grad_norm": 13.291611671447754,
+ "learning_rate": 2.5e-06,
+ "loss": 1.7061,
+ "step": 50
+ },
+ {
+ "epoch": 0.15970772442588727,
+ "grad_norm": 7.251201629638672,
+ "learning_rate": 2.55e-06,
+ "loss": 1.7924,
+ "step": 51
+ },
+ {
+ "epoch": 0.162839248434238,
+ "grad_norm": 5.2126054763793945,
+ "learning_rate": 2.6e-06,
+ "loss": 1.6735,
+ "step": 52
+ },
+ {
+ "epoch": 0.16597077244258873,
+ "grad_norm": 5.435528755187988,
+ "learning_rate": 2.6500000000000005e-06,
+ "loss": 1.6265,
+ "step": 53
+ },
+ {
+ "epoch": 0.16910229645093947,
+ "grad_norm": 4.505807399749756,
+ "learning_rate": 2.7000000000000004e-06,
+ "loss": 1.4851,
+ "step": 54
+ },
+ {
+ "epoch": 0.1722338204592902,
+ "grad_norm": 5.128388404846191,
+ "learning_rate": 2.7500000000000004e-06,
+ "loss": 1.5832,
+ "step": 55
+ },
+ {
+ "epoch": 0.17536534446764093,
+ "grad_norm": 16.935827255249023,
+ "learning_rate": 2.8000000000000003e-06,
+ "loss": 1.6553,
+ "step": 56
+ },
+ {
+ "epoch": 0.17849686847599164,
+ "grad_norm": 3.664458990097046,
+ "learning_rate": 2.85e-06,
+ "loss": 1.5,
+ "step": 57
+ },
+ {
+ "epoch": 0.18162839248434237,
+ "grad_norm": 7.763802528381348,
+ "learning_rate": 2.9e-06,
+ "loss": 1.367,
+ "step": 58
+ },
+ {
+ "epoch": 0.1847599164926931,
+ "grad_norm": 3.2216155529022217,
+ "learning_rate": 2.95e-06,
+ "loss": 1.3863,
+ "step": 59
+ },
+ {
+ "epoch": 0.18789144050104384,
+ "grad_norm": 4.384445667266846,
+ "learning_rate": 3e-06,
+ "loss": 1.4247,
+ "step": 60
+ },
+ {
+ "epoch": 0.19102296450939457,
+ "grad_norm": 4.8080878257751465,
+ "learning_rate": 3.05e-06,
+ "loss": 1.3257,
+ "step": 61
+ },
+ {
+ "epoch": 0.1941544885177453,
+ "grad_norm": 4.154761791229248,
+ "learning_rate": 3.1000000000000004e-06,
+ "loss": 1.321,
+ "step": 62
+ },
+ {
+ "epoch": 0.19728601252609604,
+ "grad_norm": 6.4742112159729,
+ "learning_rate": 3.1500000000000003e-06,
+ "loss": 1.2823,
+ "step": 63
+ },
+ {
+ "epoch": 0.20041753653444677,
+ "grad_norm": 2.583422899246216,
+ "learning_rate": 3.2000000000000003e-06,
+ "loss": 1.2136,
+ "step": 64
+ },
+ {
+ "epoch": 0.2035490605427975,
+ "grad_norm": 4.1933488845825195,
+ "learning_rate": 3.2500000000000002e-06,
+ "loss": 1.1855,
+ "step": 65
+ },
+ {
+ "epoch": 0.20668058455114824,
+ "grad_norm": 4.11049747467041,
+ "learning_rate": 3.3000000000000006e-06,
+ "loss": 1.2389,
+ "step": 66
+ },
+ {
+ "epoch": 0.20981210855949894,
+ "grad_norm": 2.264458417892456,
+ "learning_rate": 3.3500000000000005e-06,
+ "loss": 1.0651,
+ "step": 67
+ },
+ {
+ "epoch": 0.21294363256784968,
+ "grad_norm": 2.5408174991607666,
+ "learning_rate": 3.4000000000000005e-06,
+ "loss": 1.1389,
+ "step": 68
+ },
+ {
+ "epoch": 0.2160751565762004,
+ "grad_norm": 7.82421350479126,
+ "learning_rate": 3.45e-06,
+ "loss": 1.0956,
+ "step": 69
+ },
+ {
+ "epoch": 0.21920668058455114,
+ "grad_norm": 3.070939064025879,
+ "learning_rate": 3.5e-06,
+ "loss": 1.0451,
+ "step": 70
+ },
+ {
+ "epoch": 0.22233820459290188,
+ "grad_norm": 2.6310527324676514,
+ "learning_rate": 3.5500000000000003e-06,
+ "loss": 1.0538,
+ "step": 71
+ },
+ {
+ "epoch": 0.2254697286012526,
+ "grad_norm": 7.630155563354492,
+ "learning_rate": 3.6000000000000003e-06,
+ "loss": 1.0052,
+ "step": 72
+ },
+ {
+ "epoch": 0.22860125260960334,
+ "grad_norm": 6.950636863708496,
+ "learning_rate": 3.65e-06,
+ "loss": 1.0473,
+ "step": 73
+ },
+ {
+ "epoch": 0.23173277661795408,
+ "grad_norm": 2.2703945636749268,
+ "learning_rate": 3.7e-06,
+ "loss": 1.0576,
+ "step": 74
+ },
+ {
+ "epoch": 0.2348643006263048,
+ "grad_norm": 3.3817710876464844,
+ "learning_rate": 3.7500000000000005e-06,
+ "loss": 1.0177,
+ "step": 75
+ },
+ {
+ "epoch": 0.23799582463465555,
+ "grad_norm": 7.266414642333984,
+ "learning_rate": 3.8000000000000005e-06,
+ "loss": 1.0645,
+ "step": 76
+ },
+ {
+ "epoch": 0.24112734864300625,
+ "grad_norm": 5.782608509063721,
+ "learning_rate": 3.85e-06,
+ "loss": 1.0162,
+ "step": 77
+ },
+ {
+ "epoch": 0.24425887265135698,
+ "grad_norm": 2.7938575744628906,
+ "learning_rate": 3.900000000000001e-06,
+ "loss": 0.9664,
+ "step": 78
+ },
+ {
+ "epoch": 0.24739039665970772,
+ "grad_norm": 6.681935787200928,
+ "learning_rate": 3.95e-06,
+ "loss": 0.953,
+ "step": 79
+ },
+ {
+ "epoch": 0.25052192066805845,
+ "grad_norm": 2.253279209136963,
+ "learning_rate": 4.000000000000001e-06,
+ "loss": 0.9568,
+ "step": 80
+ },
+ {
+ "epoch": 0.2536534446764092,
+ "grad_norm": 1.4875826835632324,
+ "learning_rate": 4.05e-06,
+ "loss": 0.9448,
+ "step": 81
+ },
+ {
+ "epoch": 0.2567849686847599,
+ "grad_norm": 2.4987940788269043,
+ "learning_rate": 4.1e-06,
+ "loss": 0.9393,
+ "step": 82
+ },
+ {
+ "epoch": 0.2599164926931106,
+ "grad_norm": 4.712948322296143,
+ "learning_rate": 4.15e-06,
+ "loss": 0.9532,
+ "step": 83
+ },
+ {
+ "epoch": 0.2630480167014614,
+ "grad_norm": 6.9030632972717285,
+ "learning_rate": 4.2000000000000004e-06,
+ "loss": 0.96,
+ "step": 84
+ },
+ {
+ "epoch": 0.2661795407098121,
+ "grad_norm": 3.4780967235565186,
+ "learning_rate": 4.25e-06,
+ "loss": 0.8993,
+ "step": 85
+ },
+ {
+ "epoch": 0.26931106471816285,
+ "grad_norm": 1.526064395904541,
+ "learning_rate": 4.3e-06,
+ "loss": 0.9021,
+ "step": 86
+ },
+ {
+ "epoch": 0.27244258872651356,
+ "grad_norm": 10.727686882019043,
+ "learning_rate": 4.350000000000001e-06,
+ "loss": 0.856,
+ "step": 87
+ },
+ {
+ "epoch": 0.2755741127348643,
+ "grad_norm": 12.483160972595215,
+ "learning_rate": 4.4e-06,
+ "loss": 0.9357,
+ "step": 88
+ },
+ {
+ "epoch": 0.278705636743215,
+ "grad_norm": 6.544492244720459,
+ "learning_rate": 4.450000000000001e-06,
+ "loss": 0.9168,
+ "step": 89
+ },
+ {
+ "epoch": 0.2818371607515658,
+ "grad_norm": 1.178139567375183,
+ "learning_rate": 4.5e-06,
+ "loss": 0.8748,
+ "step": 90
+ },
+ {
+ "epoch": 0.2849686847599165,
+ "grad_norm": 1.711506962776184,
+ "learning_rate": 4.5500000000000005e-06,
+ "loss": 0.8425,
+ "step": 91
+ },
+ {
+ "epoch": 0.2881002087682672,
+ "grad_norm": 3.281747341156006,
+ "learning_rate": 4.600000000000001e-06,
+ "loss": 0.8491,
+ "step": 92
+ },
+ {
+ "epoch": 0.29123173277661796,
+ "grad_norm": 2.2964377403259277,
+ "learning_rate": 4.65e-06,
+ "loss": 0.8038,
+ "step": 93
+ },
+ {
+ "epoch": 0.29436325678496866,
+ "grad_norm": 1.959700345993042,
+ "learning_rate": 4.7e-06,
+ "loss": 0.8439,
+ "step": 94
+ },
+ {
+ "epoch": 0.2974947807933194,
+ "grad_norm": 3.979384183883667,
+ "learning_rate": 4.75e-06,
+ "loss": 0.8839,
+ "step": 95
+ },
+ {
+ "epoch": 0.30062630480167013,
+ "grad_norm": 1.4721262454986572,
+ "learning_rate": 4.800000000000001e-06,
+ "loss": 0.845,
+ "step": 96
+ },
+ {
+ "epoch": 0.3037578288100209,
+ "grad_norm": 2.862248659133911,
+ "learning_rate": 4.85e-06,
+ "loss": 0.7748,
+ "step": 97
+ },
+ {
+ "epoch": 0.3068893528183716,
+ "grad_norm": 3.7439088821411133,
+ "learning_rate": 4.9000000000000005e-06,
+ "loss": 0.8145,
+ "step": 98
+ },
+ {
+ "epoch": 0.31002087682672236,
+ "grad_norm": 1.6654618978500366,
+ "learning_rate": 4.95e-06,
+ "loss": 0.8326,
+ "step": 99
+ },
+ {
+ "epoch": 0.31315240083507306,
+ "grad_norm": 7.8437581062316895,
+ "learning_rate": 5e-06,
+ "loss": 0.8666,
+ "step": 100
+ },
+ {
+ "epoch": 0.3162839248434238,
+ "grad_norm": 6.429738521575928,
+ "learning_rate": 4.999996250830422e-06,
+ "loss": 0.836,
+ "step": 101
+ },
+ {
+ "epoch": 0.31941544885177453,
+ "grad_norm": 2.6017794609069824,
+ "learning_rate": 4.9999850033329326e-06,
+ "loss": 0.7785,
+ "step": 102
+ },
+ {
+ "epoch": 0.32254697286012524,
+ "grad_norm": 1.0575449466705322,
+ "learning_rate": 4.999966257541265e-06,
+ "loss": 0.7639,
+ "step": 103
+ },
+ {
+ "epoch": 0.325678496868476,
+ "grad_norm": 2.6932010650634766,
+ "learning_rate": 4.999940013511647e-06,
+ "loss": 0.8214,
+ "step": 104
+ },
+ {
+ "epoch": 0.3288100208768267,
+ "grad_norm": 2.925288438796997,
+ "learning_rate": 4.999906271322792e-06,
+ "loss": 0.8797,
+ "step": 105
+ },
+ {
+ "epoch": 0.33194154488517746,
+ "grad_norm": 1.3570607900619507,
+ "learning_rate": 4.9998650310759035e-06,
+ "loss": 0.792,
+ "step": 106
+ },
+ {
+ "epoch": 0.33507306889352817,
+ "grad_norm": 5.126713752746582,
+ "learning_rate": 4.999816292894676e-06,
+ "loss": 0.8352,
+ "step": 107
+ },
+ {
+ "epoch": 0.33820459290187893,
+ "grad_norm": 1.8966432809829712,
+ "learning_rate": 4.99976005692529e-06,
+ "loss": 0.7663,
+ "step": 108
+ },
+ {
+ "epoch": 0.34133611691022964,
+ "grad_norm": 1.3100829124450684,
+ "learning_rate": 4.999696323336418e-06,
+ "loss": 0.771,
+ "step": 109
+ },
+ {
+ "epoch": 0.3444676409185804,
+ "grad_norm": 2.4025354385375977,
+ "learning_rate": 4.999625092319218e-06,
+ "loss": 0.7618,
+ "step": 110
+ },
+ {
+ "epoch": 0.3475991649269311,
+ "grad_norm": 1.130232810974121,
+ "learning_rate": 4.999546364087334e-06,
+ "loss": 0.7705,
+ "step": 111
+ },
+ {
+ "epoch": 0.35073068893528186,
+ "grad_norm": 3.430262327194214,
+ "learning_rate": 4.999460138876901e-06,
+ "loss": 0.77,
+ "step": 112
+ },
+ {
+ "epoch": 0.35386221294363257,
+ "grad_norm": 1.1272103786468506,
+ "learning_rate": 4.999366416946536e-06,
+ "loss": 0.7133,
+ "step": 113
+ },
+ {
+ "epoch": 0.3569937369519833,
+ "grad_norm": 1.1740471124649048,
+ "learning_rate": 4.999265198577342e-06,
+ "loss": 0.7684,
+ "step": 114
+ },
+ {
+ "epoch": 0.36012526096033404,
+ "grad_norm": 1.3138248920440674,
+ "learning_rate": 4.999156484072907e-06,
+ "loss": 0.7888,
+ "step": 115
+ },
+ {
+ "epoch": 0.36325678496868474,
+ "grad_norm": 1.061711311340332,
+ "learning_rate": 4.999040273759304e-06,
+ "loss": 0.7484,
+ "step": 116
+ },
+ {
+ "epoch": 0.3663883089770355,
+ "grad_norm": 1.4682390689849854,
+ "learning_rate": 4.998916567985083e-06,
+ "loss": 0.7296,
+ "step": 117
+ },
+ {
+ "epoch": 0.3695198329853862,
+ "grad_norm": 2.884068250656128,
+ "learning_rate": 4.998785367121284e-06,
+ "loss": 0.7662,
+ "step": 118
+ },
+ {
+ "epoch": 0.37265135699373697,
+ "grad_norm": 0.9812761545181274,
+ "learning_rate": 4.9986466715614205e-06,
+ "loss": 0.7307,
+ "step": 119
+ },
+ {
+ "epoch": 0.3757828810020877,
+ "grad_norm": 2.2237496376037598,
+ "learning_rate": 4.998500481721484e-06,
+ "loss": 0.6761,
+ "step": 120
+ },
+ {
+ "epoch": 0.37891440501043844,
+ "grad_norm": 1.4004178047180176,
+ "learning_rate": 4.998346798039952e-06,
+ "loss": 0.7505,
+ "step": 121
+ },
+ {
+ "epoch": 0.38204592901878914,
+ "grad_norm": 5.54975700378418,
+ "learning_rate": 4.99818562097777e-06,
+ "loss": 0.7615,
+ "step": 122
+ },
+ {
+ "epoch": 0.38517745302713985,
+ "grad_norm": 6.17140531539917,
+ "learning_rate": 4.9980169510183624e-06,
+ "loss": 0.7002,
+ "step": 123
+ },
+ {
+ "epoch": 0.3883089770354906,
+ "grad_norm": 4.974380016326904,
+ "learning_rate": 4.997840788667628e-06,
+ "loss": 0.7449,
+ "step": 124
+ },
+ {
+ "epoch": 0.3914405010438413,
+ "grad_norm": 1.4133399724960327,
+ "learning_rate": 4.997657134453937e-06,
+ "loss": 0.7442,
+ "step": 125
+ },
+ {
+ "epoch": 0.3945720250521921,
+ "grad_norm": 1.868915319442749,
+ "learning_rate": 4.9974659889281295e-06,
+ "loss": 0.7104,
+ "step": 126
+ },
+ {
+ "epoch": 0.3977035490605428,
+ "grad_norm": 1.2599350214004517,
+ "learning_rate": 4.997267352663514e-06,
+ "loss": 0.7385,
+ "step": 127
+ },
+ {
+ "epoch": 0.40083507306889354,
+ "grad_norm": 1.4353641271591187,
+ "learning_rate": 4.997061226255869e-06,
+ "loss": 0.7081,
+ "step": 128
+ },
+ {
+ "epoch": 0.40396659707724425,
+ "grad_norm": 3.2492141723632812,
+ "learning_rate": 4.996847610323437e-06,
+ "loss": 0.7859,
+ "step": 129
+ },
+ {
+ "epoch": 0.407098121085595,
+ "grad_norm": 9.599719047546387,
+ "learning_rate": 4.996626505506923e-06,
+ "loss": 0.7241,
+ "step": 130
+ },
+ {
+ "epoch": 0.4102296450939457,
+ "grad_norm": 10.053650856018066,
+ "learning_rate": 4.996397912469494e-06,
+ "loss": 0.6841,
+ "step": 131
+ },
+ {
+ "epoch": 0.4133611691022965,
+ "grad_norm": 1.323876976966858,
+ "learning_rate": 4.996161831896777e-06,
+ "loss": 0.7317,
+ "step": 132
+ },
+ {
+ "epoch": 0.4164926931106472,
+ "grad_norm": 1.4180598258972168,
+ "learning_rate": 4.9959182644968594e-06,
+ "loss": 0.692,
+ "step": 133
+ },
+ {
+ "epoch": 0.4196242171189979,
+ "grad_norm": 1.2194396257400513,
+ "learning_rate": 4.99566721100028e-06,
+ "loss": 0.7068,
+ "step": 134
+ },
+ {
+ "epoch": 0.42275574112734865,
+ "grad_norm": 1.0984960794448853,
+ "learning_rate": 4.995408672160031e-06,
+ "loss": 0.6946,
+ "step": 135
+ },
+ {
+ "epoch": 0.42588726513569936,
+ "grad_norm": 1.9341071844100952,
+ "learning_rate": 4.995142648751561e-06,
+ "loss": 0.7467,
+ "step": 136
+ },
+ {
+ "epoch": 0.4290187891440501,
+ "grad_norm": 1.9960932731628418,
+ "learning_rate": 4.9948691415727594e-06,
+ "loss": 0.7379,
+ "step": 137
+ },
+ {
+ "epoch": 0.4321503131524008,
+ "grad_norm": 0.8743917942047119,
+ "learning_rate": 4.994588151443968e-06,
+ "loss": 0.66,
+ "step": 138
+ },
+ {
+ "epoch": 0.4352818371607516,
+ "grad_norm": 0.8655261993408203,
+ "learning_rate": 4.99429967920797e-06,
+ "loss": 0.6646,
+ "step": 139
+ },
+ {
+ "epoch": 0.4384133611691023,
+ "grad_norm": 5.462070941925049,
+ "learning_rate": 4.994003725729992e-06,
+ "loss": 0.643,
+ "step": 140
+ },
+ {
+ "epoch": 0.44154488517745305,
+ "grad_norm": 2.1401469707489014,
+ "learning_rate": 4.993700291897695e-06,
+ "loss": 0.6639,
+ "step": 141
+ },
+ {
+ "epoch": 0.44467640918580376,
+ "grad_norm": 1.8219833374023438,
+ "learning_rate": 4.9933893786211815e-06,
+ "loss": 0.6673,
+ "step": 142
+ },
+ {
+ "epoch": 0.44780793319415446,
+ "grad_norm": 1.641079306602478,
+ "learning_rate": 4.993070986832984e-06,
+ "loss": 0.658,
+ "step": 143
+ },
+ {
+ "epoch": 0.4509394572025052,
+ "grad_norm": 1.1739819049835205,
+ "learning_rate": 4.992745117488066e-06,
+ "loss": 0.6826,
+ "step": 144
+ },
+ {
+ "epoch": 0.45407098121085593,
+ "grad_norm": 2.309185743331909,
+ "learning_rate": 4.9924117715638185e-06,
+ "loss": 0.6536,
+ "step": 145
+ },
+ {
+ "epoch": 0.4572025052192067,
+ "grad_norm": 1.09304940700531,
+ "learning_rate": 4.99207095006006e-06,
+ "loss": 0.721,
+ "step": 146
+ },
+ {
+ "epoch": 0.4603340292275574,
+ "grad_norm": 0.9056984186172485,
+ "learning_rate": 4.991722653999025e-06,
+ "loss": 0.7019,
+ "step": 147
+ },
+ {
+ "epoch": 0.46346555323590816,
+ "grad_norm": 1.8440625667572021,
+ "learning_rate": 4.991366884425374e-06,
+ "loss": 0.707,
+ "step": 148
+ },
+ {
+ "epoch": 0.46659707724425886,
+ "grad_norm": 1.2244676351547241,
+ "learning_rate": 4.991003642406177e-06,
+ "loss": 0.6407,
+ "step": 149
+ },
+ {
+ "epoch": 0.4697286012526096,
+ "grad_norm": 0.9258589744567871,
+ "learning_rate": 4.99063292903092e-06,
+ "loss": 0.6954,
+ "step": 150
+ },
+ {
+ "epoch": 0.47286012526096033,
+ "grad_norm": 4.176390647888184,
+ "learning_rate": 4.990254745411496e-06,
+ "loss": 0.6812,
+ "step": 151
+ },
+ {
+ "epoch": 0.4759916492693111,
+ "grad_norm": 1.4322530031204224,
+ "learning_rate": 4.989869092682205e-06,
+ "loss": 0.6808,
+ "step": 152
+ },
+ {
+ "epoch": 0.4791231732776618,
+ "grad_norm": 0.8017717003822327,
+ "learning_rate": 4.989475971999748e-06,
+ "loss": 0.687,
+ "step": 153
+ },
+ {
+ "epoch": 0.4822546972860125,
+ "grad_norm": 1.5641374588012695,
+ "learning_rate": 4.989075384543228e-06,
+ "loss": 0.6599,
+ "step": 154
+ },
+ {
+ "epoch": 0.48538622129436326,
+ "grad_norm": 1.1522141695022583,
+ "learning_rate": 4.98866733151414e-06,
+ "loss": 0.6546,
+ "step": 155
+ },
+ {
+ "epoch": 0.48851774530271397,
+ "grad_norm": 0.8593171238899231,
+ "learning_rate": 4.988251814136372e-06,
+ "loss": 0.6857,
+ "step": 156
+ },
+ {
+ "epoch": 0.49164926931106473,
+ "grad_norm": 2.668159246444702,
+ "learning_rate": 4.9878288336562e-06,
+ "loss": 0.661,
+ "step": 157
+ },
+ {
+ "epoch": 0.49478079331941544,
+ "grad_norm": 0.9953671097755432,
+ "learning_rate": 4.987398391342285e-06,
+ "loss": 0.6512,
+ "step": 158
+ },
+ {
+ "epoch": 0.4979123173277662,
+ "grad_norm": 1.042872667312622,
+ "learning_rate": 4.986960488485667e-06,
+ "loss": 0.6311,
+ "step": 159
+ },
+ {
+ "epoch": 0.5010438413361169,
+ "grad_norm": 0.9070663452148438,
+ "learning_rate": 4.9865151263997645e-06,
+ "loss": 0.675,
+ "step": 160
+ },
+ {
+ "epoch": 0.5041753653444676,
+ "grad_norm": 0.8460433483123779,
+ "learning_rate": 4.986062306420367e-06,
+ "loss": 0.6635,
+ "step": 161
+ },
+ {
+ "epoch": 0.5073068893528184,
+ "grad_norm": 1.2639834880828857,
+ "learning_rate": 4.985602029905635e-06,
+ "loss": 0.6327,
+ "step": 162
+ },
+ {
+ "epoch": 0.5104384133611691,
+ "grad_norm": 0.8775074481964111,
+ "learning_rate": 4.985134298236091e-06,
+ "loss": 0.644,
+ "step": 163
+ },
+ {
+ "epoch": 0.5135699373695198,
+ "grad_norm": 1.2031961679458618,
+ "learning_rate": 4.98465911281462e-06,
+ "loss": 0.6254,
+ "step": 164
+ },
+ {
+ "epoch": 0.5167014613778705,
+ "grad_norm": 0.892494797706604,
+ "learning_rate": 4.984176475066463e-06,
+ "loss": 0.7122,
+ "step": 165
+ },
+ {
+ "epoch": 0.5198329853862212,
+ "grad_norm": 2.7122485637664795,
+ "learning_rate": 4.983686386439212e-06,
+ "loss": 0.6679,
+ "step": 166
+ },
+ {
+ "epoch": 0.5229645093945721,
+ "grad_norm": 0.9344426989555359,
+ "learning_rate": 4.983188848402806e-06,
+ "loss": 0.6319,
+ "step": 167
+ },
+ {
+ "epoch": 0.5260960334029228,
+ "grad_norm": 1.4093577861785889,
+ "learning_rate": 4.982683862449531e-06,
+ "loss": 0.6425,
+ "step": 168
+ },
+ {
+ "epoch": 0.5292275574112735,
+ "grad_norm": 1.1285009384155273,
+ "learning_rate": 4.982171430094007e-06,
+ "loss": 0.6298,
+ "step": 169
+ },
+ {
+ "epoch": 0.5323590814196242,
+ "grad_norm": 1.952778935432434,
+ "learning_rate": 4.981651552873193e-06,
+ "loss": 0.7066,
+ "step": 170
+ },
+ {
+ "epoch": 0.535490605427975,
+ "grad_norm": 5.133765697479248,
+ "learning_rate": 4.981124232346374e-06,
+ "loss": 0.6634,
+ "step": 171
+ },
+ {
+ "epoch": 0.5386221294363257,
+ "grad_norm": 0.9770542979240417,
+ "learning_rate": 4.980589470095161e-06,
+ "loss": 0.7121,
+ "step": 172
+ },
+ {
+ "epoch": 0.5417536534446764,
+ "grad_norm": 0.8414323925971985,
+ "learning_rate": 4.980047267723487e-06,
+ "loss": 0.6397,
+ "step": 173
+ },
+ {
+ "epoch": 0.5448851774530271,
+ "grad_norm": 1.9173879623413086,
+ "learning_rate": 4.979497626857596e-06,
+ "loss": 0.6228,
+ "step": 174
+ },
+ {
+ "epoch": 0.5480167014613778,
+ "grad_norm": 1.0823363065719604,
+ "learning_rate": 4.978940549146048e-06,
+ "loss": 0.6475,
+ "step": 175
+ },
+ {
+ "epoch": 0.5511482254697286,
+ "grad_norm": 3.715353488922119,
+ "learning_rate": 4.978376036259706e-06,
+ "loss": 0.7127,
+ "step": 176
+ },
+ {
+ "epoch": 0.5542797494780793,
+ "grad_norm": 0.981584370136261,
+ "learning_rate": 4.9778040898917325e-06,
+ "loss": 0.6468,
+ "step": 177
+ },
+ {
+ "epoch": 0.55741127348643,
+ "grad_norm": 1.70566987991333,
+ "learning_rate": 4.977224711757587e-06,
+ "loss": 0.6476,
+ "step": 178
+ },
+ {
+ "epoch": 0.5605427974947808,
+ "grad_norm": 0.9217923283576965,
+ "learning_rate": 4.976637903595019e-06,
+ "loss": 0.6731,
+ "step": 179
+ },
+ {
+ "epoch": 0.5636743215031316,
+ "grad_norm": 0.8994677662849426,
+ "learning_rate": 4.976043667164063e-06,
+ "loss": 0.6562,
+ "step": 180
+ },
+ {
+ "epoch": 0.5668058455114823,
+ "grad_norm": 1.1613017320632935,
+ "learning_rate": 4.975442004247034e-06,
+ "loss": 0.6417,
+ "step": 181
+ },
+ {
+ "epoch": 0.569937369519833,
+ "grad_norm": 1.6041977405548096,
+ "learning_rate": 4.974832916648521e-06,
+ "loss": 0.6029,
+ "step": 182
+ },
+ {
+ "epoch": 0.5730688935281837,
+ "grad_norm": 1.7978405952453613,
+ "learning_rate": 4.974216406195383e-06,
+ "loss": 0.6269,
+ "step": 183
+ },
+ {
+ "epoch": 0.5762004175365344,
+ "grad_norm": 1.6021920442581177,
+ "learning_rate": 4.973592474736739e-06,
+ "loss": 0.6149,
+ "step": 184
+ },
+ {
+ "epoch": 0.5793319415448852,
+ "grad_norm": 0.8973568677902222,
+ "learning_rate": 4.972961124143971e-06,
+ "loss": 0.6648,
+ "step": 185
+ },
+ {
+ "epoch": 0.5824634655532359,
+ "grad_norm": 1.9432591199874878,
+ "learning_rate": 4.972322356310711e-06,
+ "loss": 0.6299,
+ "step": 186
+ },
+ {
+ "epoch": 0.5855949895615866,
+ "grad_norm": 4.457028388977051,
+ "learning_rate": 4.971676173152839e-06,
+ "loss": 0.656,
+ "step": 187
+ },
+ {
+ "epoch": 0.5887265135699373,
+ "grad_norm": 2.0989716053009033,
+ "learning_rate": 4.971022576608473e-06,
+ "loss": 0.6539,
+ "step": 188
+ },
+ {
+ "epoch": 0.5918580375782881,
+ "grad_norm": 1.0646967887878418,
+ "learning_rate": 4.97036156863797e-06,
+ "loss": 0.6727,
+ "step": 189
+ },
+ {
+ "epoch": 0.5949895615866388,
+ "grad_norm": 1.6522265672683716,
+ "learning_rate": 4.969693151223914e-06,
+ "loss": 0.6643,
+ "step": 190
+ },
+ {
+ "epoch": 0.5981210855949896,
+ "grad_norm": 1.7503505945205688,
+ "learning_rate": 4.969017326371115e-06,
+ "loss": 0.6402,
+ "step": 191
+ },
+ {
+ "epoch": 0.6012526096033403,
+ "grad_norm": 1.2341989278793335,
+ "learning_rate": 4.968334096106597e-06,
+ "loss": 0.6413,
+ "step": 192
+ },
+ {
+ "epoch": 0.6043841336116911,
+ "grad_norm": 3.089054584503174,
+ "learning_rate": 4.967643462479597e-06,
+ "loss": 0.6825,
+ "step": 193
+ },
+ {
+ "epoch": 0.6075156576200418,
+ "grad_norm": 2.711623430252075,
+ "learning_rate": 4.966945427561557e-06,
+ "loss": 0.65,
+ "step": 194
+ },
+ {
+ "epoch": 0.6106471816283925,
+ "grad_norm": 4.641184329986572,
+ "learning_rate": 4.966239993446118e-06,
+ "loss": 0.6229,
+ "step": 195
+ },
+ {
+ "epoch": 0.6137787056367432,
+ "grad_norm": 1.7984074354171753,
+ "learning_rate": 4.965527162249114e-06,
+ "loss": 0.6473,
+ "step": 196
+ },
+ {
+ "epoch": 0.6169102296450939,
+ "grad_norm": 1.1643115282058716,
+ "learning_rate": 4.964806936108566e-06,
+ "loss": 0.6404,
+ "step": 197
+ },
+ {
+ "epoch": 0.6200417536534447,
+ "grad_norm": 2.1877920627593994,
+ "learning_rate": 4.9640793171846725e-06,
+ "loss": 0.6185,
+ "step": 198
+ },
+ {
+ "epoch": 0.6231732776617954,
+ "grad_norm": 1.7970566749572754,
+ "learning_rate": 4.963344307659807e-06,
+ "loss": 0.634,
+ "step": 199
+ },
+ {
+ "epoch": 0.6263048016701461,
+ "grad_norm": 1.6014361381530762,
+ "learning_rate": 4.96260190973851e-06,
+ "loss": 0.6562,
+ "step": 200
+ },
+ {
+ "epoch": 0.6294363256784968,
+ "grad_norm": 0.8743320107460022,
+ "learning_rate": 4.961852125647482e-06,
+ "loss": 0.6133,
+ "step": 201
+ },
+ {
+ "epoch": 0.6325678496868476,
+ "grad_norm": 1.9526551961898804,
+ "learning_rate": 4.961094957635578e-06,
+ "loss": 0.6451,
+ "step": 202
+ },
+ {
+ "epoch": 0.6356993736951984,
+ "grad_norm": 3.6597347259521484,
+ "learning_rate": 4.960330407973798e-06,
+ "loss": 0.6386,
+ "step": 203
+ },
+ {
+ "epoch": 0.6388308977035491,
+ "grad_norm": 1.7180207967758179,
+ "learning_rate": 4.959558478955283e-06,
+ "loss": 0.6688,
+ "step": 204
+ },
+ {
+ "epoch": 0.6419624217118998,
+ "grad_norm": 0.9058470129966736,
+ "learning_rate": 4.958779172895308e-06,
+ "loss": 0.6161,
+ "step": 205
+ },
+ {
+ "epoch": 0.6450939457202505,
+ "grad_norm": 1.0031033754348755,
+ "learning_rate": 4.957992492131274e-06,
+ "loss": 0.6437,
+ "step": 206
+ },
+ {
+ "epoch": 0.6482254697286013,
+ "grad_norm": 1.5846725702285767,
+ "learning_rate": 4.9571984390226985e-06,
+ "loss": 0.6332,
+ "step": 207
+ },
+ {
+ "epoch": 0.651356993736952,
+ "grad_norm": 1.9951609373092651,
+ "learning_rate": 4.956397015951215e-06,
+ "loss": 0.636,
+ "step": 208
+ },
+ {
+ "epoch": 0.6544885177453027,
+ "grad_norm": 1.4122583866119385,
+ "learning_rate": 4.95558822532056e-06,
+ "loss": 0.6586,
+ "step": 209
+ },
+ {
+ "epoch": 0.6576200417536534,
+ "grad_norm": 1.2243481874465942,
+ "learning_rate": 4.954772069556568e-06,
+ "loss": 0.6313,
+ "step": 210
+ },
+ {
+ "epoch": 0.6607515657620042,
+ "grad_norm": 0.8756356835365295,
+ "learning_rate": 4.953948551107164e-06,
+ "loss": 0.6406,
+ "step": 211
+ },
+ {
+ "epoch": 0.6638830897703549,
+ "grad_norm": 2.9979734420776367,
+ "learning_rate": 4.953117672442356e-06,
+ "loss": 0.5803,
+ "step": 212
+ },
+ {
+ "epoch": 0.6670146137787056,
+ "grad_norm": 2.1859359741210938,
+ "learning_rate": 4.952279436054229e-06,
+ "loss": 0.6607,
+ "step": 213
+ },
+ {
+ "epoch": 0.6701461377870563,
+ "grad_norm": 0.6929755806922913,
+ "learning_rate": 4.9514338444569346e-06,
+ "loss": 0.5989,
+ "step": 214
+ },
+ {
+ "epoch": 0.673277661795407,
+ "grad_norm": 1.0361783504486084,
+ "learning_rate": 4.950580900186685e-06,
+ "loss": 0.6654,
+ "step": 215
+ },
+ {
+ "epoch": 0.6764091858037579,
+ "grad_norm": 1.210898518562317,
+ "learning_rate": 4.9497206058017475e-06,
+ "loss": 0.6213,
+ "step": 216
+ },
+ {
+ "epoch": 0.6795407098121086,
+ "grad_norm": 1.200990080833435,
+ "learning_rate": 4.948852963882434e-06,
+ "loss": 0.6654,
+ "step": 217
+ },
+ {
+ "epoch": 0.6826722338204593,
+ "grad_norm": 1.481831669807434,
+ "learning_rate": 4.947977977031093e-06,
+ "loss": 0.6474,
+ "step": 218
+ },
+ {
+ "epoch": 0.68580375782881,
+ "grad_norm": 0.9883334636688232,
+ "learning_rate": 4.947095647872103e-06,
+ "loss": 0.6735,
+ "step": 219
+ },
+ {
+ "epoch": 0.6889352818371608,
+ "grad_norm": 0.7436536550521851,
+ "learning_rate": 4.946205979051868e-06,
+ "loss": 0.6456,
+ "step": 220
+ },
+ {
+ "epoch": 0.6920668058455115,
+ "grad_norm": 0.9057570099830627,
+ "learning_rate": 4.945308973238802e-06,
+ "loss": 0.6228,
+ "step": 221
+ },
+ {
+ "epoch": 0.6951983298538622,
+ "grad_norm": 1.341081142425537,
+ "learning_rate": 4.944404633123324e-06,
+ "loss": 0.6417,
+ "step": 222
+ },
+ {
+ "epoch": 0.6983298538622129,
+ "grad_norm": 0.7958157062530518,
+ "learning_rate": 4.943492961417859e-06,
+ "loss": 0.6494,
+ "step": 223
+ },
+ {
+ "epoch": 0.7014613778705637,
+ "grad_norm": 1.216025471687317,
+ "learning_rate": 4.9425739608568106e-06,
+ "loss": 0.6566,
+ "step": 224
+ },
+ {
+ "epoch": 0.7045929018789144,
+ "grad_norm": 0.9774854779243469,
+ "learning_rate": 4.9416476341965735e-06,
+ "loss": 0.6171,
+ "step": 225
+ },
+ {
+ "epoch": 0.7077244258872651,
+ "grad_norm": 2.1562681198120117,
+ "learning_rate": 4.940713984215512e-06,
+ "loss": 0.629,
+ "step": 226
+ },
+ {
+ "epoch": 0.7108559498956158,
+ "grad_norm": 1.9521286487579346,
+ "learning_rate": 4.9397730137139556e-06,
+ "loss": 0.6475,
+ "step": 227
+ },
+ {
+ "epoch": 0.7139874739039666,
+ "grad_norm": 1.5749104022979736,
+ "learning_rate": 4.9388247255141895e-06,
+ "loss": 0.6053,
+ "step": 228
+ },
+ {
+ "epoch": 0.7171189979123174,
+ "grad_norm": 1.2008254528045654,
+ "learning_rate": 4.937869122460449e-06,
+ "loss": 0.6052,
+ "step": 229
+ },
+ {
+ "epoch": 0.7202505219206681,
+ "grad_norm": 1.0774102210998535,
+ "learning_rate": 4.93690620741891e-06,
+ "loss": 0.6099,
+ "step": 230
+ },
+ {
+ "epoch": 0.7233820459290188,
+ "grad_norm": 1.0929996967315674,
+ "learning_rate": 4.935935983277675e-06,
+ "loss": 0.6363,
+ "step": 231
+ },
+ {
+ "epoch": 0.7265135699373695,
+ "grad_norm": 0.8830653429031372,
+ "learning_rate": 4.934958452946774e-06,
+ "loss": 0.6136,
+ "step": 232
+ },
+ {
+ "epoch": 0.7296450939457203,
+ "grad_norm": 3.591218948364258,
+ "learning_rate": 4.933973619358147e-06,
+ "loss": 0.5962,
+ "step": 233
+ },
+ {
+ "epoch": 0.732776617954071,
+ "grad_norm": 2.5797672271728516,
+ "learning_rate": 4.932981485465643e-06,
+ "loss": 0.6405,
+ "step": 234
+ },
+ {
+ "epoch": 0.7359081419624217,
+ "grad_norm": 1.0467664003372192,
+ "learning_rate": 4.9319820542450025e-06,
+ "loss": 0.6155,
+ "step": 235
+ },
+ {
+ "epoch": 0.7390396659707724,
+ "grad_norm": 0.8099795579910278,
+ "learning_rate": 4.930975328693856e-06,
+ "loss": 0.5615,
+ "step": 236
+ },
+ {
+ "epoch": 0.7421711899791231,
+ "grad_norm": 0.8906702995300293,
+ "learning_rate": 4.92996131183171e-06,
+ "loss": 0.6501,
+ "step": 237
+ },
+ {
+ "epoch": 0.7453027139874739,
+ "grad_norm": 1.0871416330337524,
+ "learning_rate": 4.928940006699944e-06,
+ "loss": 0.6282,
+ "step": 238
+ },
+ {
+ "epoch": 0.7484342379958246,
+ "grad_norm": 1.3209614753723145,
+ "learning_rate": 4.927911416361792e-06,
+ "loss": 0.598,
+ "step": 239
+ },
+ {
+ "epoch": 0.7515657620041754,
+ "grad_norm": 1.2252682447433472,
+ "learning_rate": 4.926875543902344e-06,
+ "loss": 0.6433,
+ "step": 240
+ },
+ {
+ "epoch": 0.7546972860125261,
+ "grad_norm": 1.0569007396697998,
+ "learning_rate": 4.9258323924285285e-06,
+ "loss": 0.5927,
+ "step": 241
+ },
+ {
+ "epoch": 0.7578288100208769,
+ "grad_norm": 0.9309014081954956,
+ "learning_rate": 4.924781965069106e-06,
+ "loss": 0.5927,
+ "step": 242
+ },
+ {
+ "epoch": 0.7609603340292276,
+ "grad_norm": 1.0200378894805908,
+ "learning_rate": 4.923724264974662e-06,
+ "loss": 0.6064,
+ "step": 243
+ },
+ {
+ "epoch": 0.7640918580375783,
+ "grad_norm": 1.0533075332641602,
+ "learning_rate": 4.922659295317593e-06,
+ "loss": 0.6373,
+ "step": 244
+ },
+ {
+ "epoch": 0.767223382045929,
+ "grad_norm": 0.7889382839202881,
+ "learning_rate": 4.921587059292102e-06,
+ "loss": 0.5887,
+ "step": 245
+ },
+ {
+ "epoch": 0.7703549060542797,
+ "grad_norm": 0.7943588495254517,
+ "learning_rate": 4.920507560114183e-06,
+ "loss": 0.593,
+ "step": 246
+ },
+ {
+ "epoch": 0.7734864300626305,
+ "grad_norm": 0.8247205018997192,
+ "learning_rate": 4.919420801021617e-06,
+ "loss": 0.6151,
+ "step": 247
+ },
+ {
+ "epoch": 0.7766179540709812,
+ "grad_norm": 0.9979158043861389,
+ "learning_rate": 4.91832678527396e-06,
+ "loss": 0.6019,
+ "step": 248
+ },
+ {
+ "epoch": 0.7797494780793319,
+ "grad_norm": 0.9346868991851807,
+ "learning_rate": 4.917225516152532e-06,
+ "loss": 0.6098,
+ "step": 249
+ },
+ {
+ "epoch": 0.7828810020876826,
+ "grad_norm": 0.7487881183624268,
+ "learning_rate": 4.916116996960408e-06,
+ "loss": 0.5965,
+ "step": 250
+ },
+ {
+ "epoch": 0.7860125260960334,
+ "grad_norm": 0.821576714515686,
+ "learning_rate": 4.915001231022411e-06,
+ "loss": 0.6483,
+ "step": 251
+ },
+ {
+ "epoch": 0.7891440501043842,
+ "grad_norm": 1.0413196086883545,
+ "learning_rate": 4.913878221685096e-06,
+ "loss": 0.6108,
+ "step": 252
+ },
+ {
+ "epoch": 0.7922755741127349,
+ "grad_norm": 0.9560331702232361,
+ "learning_rate": 4.912747972316745e-06,
+ "loss": 0.5758,
+ "step": 253
+ },
+ {
+ "epoch": 0.7954070981210856,
+ "grad_norm": 0.8964638113975525,
+ "learning_rate": 4.911610486307356e-06,
+ "loss": 0.6432,
+ "step": 254
+ },
+ {
+ "epoch": 0.7985386221294363,
+ "grad_norm": 0.8418346047401428,
+ "learning_rate": 4.910465767068631e-06,
+ "loss": 0.6027,
+ "step": 255
+ },
+ {
+ "epoch": 0.8016701461377871,
+ "grad_norm": 1.792371153831482,
+ "learning_rate": 4.909313818033966e-06,
+ "loss": 0.6198,
+ "step": 256
+ },
+ {
+ "epoch": 0.8048016701461378,
+ "grad_norm": 1.036665439605713,
+ "learning_rate": 4.908154642658446e-06,
+ "loss": 0.6255,
+ "step": 257
+ },
+ {
+ "epoch": 0.8079331941544885,
+ "grad_norm": 0.7592151165008545,
+ "learning_rate": 4.906988244418823e-06,
+ "loss": 0.6035,
+ "step": 258
+ },
+ {
+ "epoch": 0.8110647181628392,
+ "grad_norm": 0.8843073844909668,
+ "learning_rate": 4.90581462681352e-06,
+ "loss": 0.6299,
+ "step": 259
+ },
+ {
+ "epoch": 0.81419624217119,
+ "grad_norm": 0.9489964246749878,
+ "learning_rate": 4.9046337933626086e-06,
+ "loss": 0.5869,
+ "step": 260
+ },
+ {
+ "epoch": 0.8173277661795407,
+ "grad_norm": 0.851691722869873,
+ "learning_rate": 4.903445747607806e-06,
+ "loss": 0.603,
+ "step": 261
+ },
+ {
+ "epoch": 0.8204592901878914,
+ "grad_norm": 1.3722106218338013,
+ "learning_rate": 4.902250493112458e-06,
+ "loss": 0.5939,
+ "step": 262
+ },
+ {
+ "epoch": 0.8235908141962421,
+ "grad_norm": 1.1002827882766724,
+ "learning_rate": 4.901048033461537e-06,
+ "loss": 0.6452,
+ "step": 263
+ },
+ {
+ "epoch": 0.826722338204593,
+ "grad_norm": 0.8428632020950317,
+ "learning_rate": 4.89983837226162e-06,
+ "loss": 0.5956,
+ "step": 264
+ },
+ {
+ "epoch": 0.8298538622129437,
+ "grad_norm": 0.7666584849357605,
+ "learning_rate": 4.898621513140889e-06,
+ "loss": 0.6067,
+ "step": 265
+ },
+ {
+ "epoch": 0.8329853862212944,
+ "grad_norm": 0.8413611054420471,
+ "learning_rate": 4.897397459749113e-06,
+ "loss": 0.5985,
+ "step": 266
+ },
+ {
+ "epoch": 0.8361169102296451,
+ "grad_norm": 2.3374335765838623,
+ "learning_rate": 4.896166215757638e-06,
+ "loss": 0.5885,
+ "step": 267
+ },
+ {
+ "epoch": 0.8392484342379958,
+ "grad_norm": 2.236640214920044,
+ "learning_rate": 4.894927784859377e-06,
+ "loss": 0.6408,
+ "step": 268
+ },
+ {
+ "epoch": 0.8423799582463466,
+ "grad_norm": 0.9715856313705444,
+ "learning_rate": 4.893682170768802e-06,
+ "loss": 0.5954,
+ "step": 269
+ },
+ {
+ "epoch": 0.8455114822546973,
+ "grad_norm": 1.0249912738800049,
+ "learning_rate": 4.892429377221928e-06,
+ "loss": 0.6186,
+ "step": 270
+ },
+ {
+ "epoch": 0.848643006263048,
+ "grad_norm": 1.255426049232483,
+ "learning_rate": 4.891169407976302e-06,
+ "loss": 0.6351,
+ "step": 271
+ },
+ {
+ "epoch": 0.8517745302713987,
+ "grad_norm": 0.9339559674263,
+ "learning_rate": 4.889902266810995e-06,
+ "loss": 0.5944,
+ "step": 272
+ },
+ {
+ "epoch": 0.8549060542797495,
+ "grad_norm": 1.2473429441452026,
+ "learning_rate": 4.888627957526589e-06,
+ "loss": 0.544,
+ "step": 273
+ },
+ {
+ "epoch": 0.8580375782881002,
+ "grad_norm": 1.0589442253112793,
+ "learning_rate": 4.887346483945166e-06,
+ "loss": 0.5543,
+ "step": 274
+ },
+ {
+ "epoch": 0.8611691022964509,
+ "grad_norm": 0.9844024777412415,
+ "learning_rate": 4.886057849910294e-06,
+ "loss": 0.5941,
+ "step": 275
+ },
+ {
+ "epoch": 0.8643006263048016,
+ "grad_norm": 2.88578200340271,
+ "learning_rate": 4.8847620592870196e-06,
+ "loss": 0.6124,
+ "step": 276
+ },
+ {
+ "epoch": 0.8674321503131524,
+ "grad_norm": 0.7496054172515869,
+ "learning_rate": 4.8834591159618524e-06,
+ "loss": 0.6006,
+ "step": 277
+ },
+ {
+ "epoch": 0.8705636743215032,
+ "grad_norm": 0.7403052449226379,
+ "learning_rate": 4.88214902384276e-06,
+ "loss": 0.5911,
+ "step": 278
+ },
+ {
+ "epoch": 0.8736951983298539,
+ "grad_norm": 0.9003771543502808,
+ "learning_rate": 4.880831786859146e-06,
+ "loss": 0.6347,
+ "step": 279
+ },
+ {
+ "epoch": 0.8768267223382046,
+ "grad_norm": 1.0345501899719238,
+ "learning_rate": 4.879507408961847e-06,
+ "loss": 0.6111,
+ "step": 280
+ },
+ {
+ "epoch": 0.8799582463465553,
+ "grad_norm": 1.4385879039764404,
+ "learning_rate": 4.878175894123116e-06,
+ "loss": 0.6454,
+ "step": 281
+ },
+ {
+ "epoch": 0.8830897703549061,
+ "grad_norm": 0.8469482064247131,
+ "learning_rate": 4.8768372463366145e-06,
+ "loss": 0.6163,
+ "step": 282
+ },
+ {
+ "epoch": 0.8862212943632568,
+ "grad_norm": 0.8859589695930481,
+ "learning_rate": 4.875491469617395e-06,
+ "loss": 0.6144,
+ "step": 283
+ },
+ {
+ "epoch": 0.8893528183716075,
+ "grad_norm": 1.8436834812164307,
+ "learning_rate": 4.874138568001895e-06,
+ "loss": 0.6275,
+ "step": 284
+ },
+ {
+ "epoch": 0.8924843423799582,
+ "grad_norm": 0.6646101474761963,
+ "learning_rate": 4.87277854554792e-06,
+ "loss": 0.615,
+ "step": 285
+ },
+ {
+ "epoch": 0.8956158663883089,
+ "grad_norm": 1.0070925951004028,
+ "learning_rate": 4.871411406334633e-06,
+ "loss": 0.5898,
+ "step": 286
+ },
+ {
+ "epoch": 0.8987473903966597,
+ "grad_norm": 0.9785194993019104,
+ "learning_rate": 4.870037154462545e-06,
+ "loss": 0.5992,
+ "step": 287
+ },
+ {
+ "epoch": 0.9018789144050104,
+ "grad_norm": 0.7244889736175537,
+ "learning_rate": 4.868655794053497e-06,
+ "loss": 0.6078,
+ "step": 288
+ },
+ {
+ "epoch": 0.9050104384133612,
+ "grad_norm": 1.4496444463729858,
+ "learning_rate": 4.8672673292506535e-06,
+ "loss": 0.5855,
+ "step": 289
+ },
+ {
+ "epoch": 0.9081419624217119,
+ "grad_norm": 1.8514957427978516,
+ "learning_rate": 4.865871764218486e-06,
+ "loss": 0.5707,
+ "step": 290
+ },
+ {
+ "epoch": 0.9112734864300627,
+ "grad_norm": 0.8439773321151733,
+ "learning_rate": 4.864469103142763e-06,
+ "loss": 0.5562,
+ "step": 291
+ },
+ {
+ "epoch": 0.9144050104384134,
+ "grad_norm": 0.8146086931228638,
+ "learning_rate": 4.8630593502305355e-06,
+ "loss": 0.6161,
+ "step": 292
+ },
+ {
+ "epoch": 0.9175365344467641,
+ "grad_norm": 0.8920315504074097,
+ "learning_rate": 4.861642509710126e-06,
+ "loss": 0.6139,
+ "step": 293
+ },
+ {
+ "epoch": 0.9206680584551148,
+ "grad_norm": 1.4980088472366333,
+ "learning_rate": 4.860218585831116e-06,
+ "loss": 0.6187,
+ "step": 294
+ },
+ {
+ "epoch": 0.9237995824634656,
+ "grad_norm": 0.9910127520561218,
+ "learning_rate": 4.8587875828643285e-06,
+ "loss": 0.5852,
+ "step": 295
+ },
+ {
+ "epoch": 0.9269311064718163,
+ "grad_norm": 0.819600522518158,
+ "learning_rate": 4.857349505101823e-06,
+ "loss": 0.6172,
+ "step": 296
+ },
+ {
+ "epoch": 0.930062630480167,
+ "grad_norm": 1.1059772968292236,
+ "learning_rate": 4.855904356856878e-06,
+ "loss": 0.5868,
+ "step": 297
+ },
+ {
+ "epoch": 0.9331941544885177,
+ "grad_norm": 1.2362196445465088,
+ "learning_rate": 4.854452142463977e-06,
+ "loss": 0.625,
+ "step": 298
+ },
+ {
+ "epoch": 0.9363256784968684,
+ "grad_norm": 0.9956470727920532,
+ "learning_rate": 4.852992866278799e-06,
+ "loss": 0.5923,
+ "step": 299
+ },
+ {
+ "epoch": 0.9394572025052192,
+ "grad_norm": 0.864109218120575,
+ "learning_rate": 4.851526532678203e-06,
+ "loss": 0.6315,
+ "step": 300
+ },
+ {
+ "epoch": 0.94258872651357,
+ "grad_norm": 0.8900614380836487,
+ "learning_rate": 4.850053146060217e-06,
+ "loss": 0.6128,
+ "step": 301
+ },
+ {
+ "epoch": 0.9457202505219207,
+ "grad_norm": 0.927254855632782,
+ "learning_rate": 4.84857271084402e-06,
+ "loss": 0.5955,
+ "step": 302
+ },
+ {
+ "epoch": 0.9488517745302714,
+ "grad_norm": 1.0046517848968506,
+ "learning_rate": 4.847085231469935e-06,
+ "loss": 0.6134,
+ "step": 303
+ },
+ {
+ "epoch": 0.9519832985386222,
+ "grad_norm": 0.734597384929657,
+ "learning_rate": 4.8455907123994125e-06,
+ "loss": 0.5927,
+ "step": 304
+ },
+ {
+ "epoch": 0.9551148225469729,
+ "grad_norm": 0.7338348031044006,
+ "learning_rate": 4.844089158115016e-06,
+ "loss": 0.5897,
+ "step": 305
+ },
+ {
+ "epoch": 0.9582463465553236,
+ "grad_norm": 0.9163988828659058,
+ "learning_rate": 4.8425805731204106e-06,
+ "loss": 0.6051,
+ "step": 306
+ },
+ {
+ "epoch": 0.9613778705636743,
+ "grad_norm": 1.050246238708496,
+ "learning_rate": 4.84106496194035e-06,
+ "loss": 0.5751,
+ "step": 307
+ },
+ {
+ "epoch": 0.964509394572025,
+ "grad_norm": 0.7637603878974915,
+ "learning_rate": 4.83954232912066e-06,
+ "loss": 0.5677,
+ "step": 308
+ },
+ {
+ "epoch": 0.9676409185803758,
+ "grad_norm": 0.7110525965690613,
+ "learning_rate": 4.838012679228229e-06,
+ "loss": 0.6051,
+ "step": 309
+ },
+ {
+ "epoch": 0.9707724425887265,
+ "grad_norm": 0.7662068605422974,
+ "learning_rate": 4.836476016850988e-06,
+ "loss": 0.59,
+ "step": 310
+ },
+ {
+ "epoch": 0.9739039665970772,
+ "grad_norm": 0.8907375335693359,
+ "learning_rate": 4.834932346597906e-06,
+ "loss": 0.5792,
+ "step": 311
+ },
+ {
+ "epoch": 0.9770354906054279,
+ "grad_norm": 0.8939849138259888,
+ "learning_rate": 4.833381673098966e-06,
+ "loss": 0.6062,
+ "step": 312
+ },
+ {
+ "epoch": 0.9801670146137788,
+ "grad_norm": 0.8878788948059082,
+ "learning_rate": 4.8318240010051595e-06,
+ "loss": 0.5694,
+ "step": 313
+ },
+ {
+ "epoch": 0.9832985386221295,
+ "grad_norm": 1.2523870468139648,
+ "learning_rate": 4.830259334988468e-06,
+ "loss": 0.5809,
+ "step": 314
+ },
+ {
+ "epoch": 0.9864300626304802,
+ "grad_norm": 1.0836797952651978,
+ "learning_rate": 4.82868767974185e-06,
+ "loss": 0.5949,
+ "step": 315
+ },
+ {
+ "epoch": 0.9895615866388309,
+ "grad_norm": 0.7985473871231079,
+ "learning_rate": 4.827109039979226e-06,
+ "loss": 0.6057,
+ "step": 316
+ },
+ {
+ "epoch": 0.9926931106471816,
+ "grad_norm": 1.042951226234436,
+ "learning_rate": 4.825523420435469e-06,
+ "loss": 0.6004,
+ "step": 317
+ },
+ {
+ "epoch": 0.9958246346555324,
+ "grad_norm": 0.7845115661621094,
+ "learning_rate": 4.823930825866381e-06,
+ "loss": 0.6161,
+ "step": 318
+ },
+ {
+ "epoch": 0.9989561586638831,
+ "grad_norm": 0.931854784488678,
+ "learning_rate": 4.82233126104869e-06,
+ "loss": 0.5912,
+ "step": 319
+ },
+ {
+ "epoch": 1.0,
+ "grad_norm": 0.931854784488678,
+ "learning_rate": 4.8207247307800275e-06,
+ "loss": 0.1914,
+ "step": 320
+ },
+ {
+ "epoch": 1.0031315240083507,
+ "grad_norm": 0.751028835773468,
+ "learning_rate": 4.819111239878916e-06,
+ "loss": 0.5802,
+ "step": 321
+ },
+ {
+ "epoch": 1.0062630480167014,
+ "grad_norm": 1.4943569898605347,
+ "learning_rate": 4.817490793184758e-06,
+ "loss": 0.613,
+ "step": 322
+ },
+ {
+ "epoch": 1.0093945720250521,
+ "grad_norm": 2.296318531036377,
+ "learning_rate": 4.815863395557816e-06,
+ "loss": 0.5453,
+ "step": 323
+ },
+ {
+ "epoch": 1.0125260960334028,
+ "grad_norm": 0.760101318359375,
+ "learning_rate": 4.814229051879202e-06,
+ "loss": 0.5302,
+ "step": 324
+ },
+ {
+ "epoch": 1.0156576200417538,
+ "grad_norm": 0.8145846128463745,
+ "learning_rate": 4.812587767050861e-06,
+ "loss": 0.5831,
+ "step": 325
+ },
+ {
+ "epoch": 1.0187891440501045,
+ "grad_norm": 0.9169796109199524,
+ "learning_rate": 4.8109395459955565e-06,
+ "loss": 0.5756,
+ "step": 326
+ },
+ {
+ "epoch": 1.0219206680584552,
+ "grad_norm": 0.8791524171829224,
+ "learning_rate": 4.809284393656858e-06,
+ "loss": 0.5988,
+ "step": 327
+ },
+ {
+ "epoch": 1.0250521920668059,
+ "grad_norm": 1.0184170007705688,
+ "learning_rate": 4.807622314999122e-06,
+ "loss": 0.5476,
+ "step": 328
+ },
+ {
+ "epoch": 1.0281837160751566,
+ "grad_norm": 0.8095184564590454,
+ "learning_rate": 4.8059533150074805e-06,
+ "loss": 0.5723,
+ "step": 329
+ },
+ {
+ "epoch": 1.0313152400835073,
+ "grad_norm": 0.7621930241584778,
+ "learning_rate": 4.804277398687826e-06,
+ "loss": 0.5841,
+ "step": 330
+ },
+ {
+ "epoch": 1.034446764091858,
+ "grad_norm": 3.729628324508667,
+ "learning_rate": 4.802594571066791e-06,
+ "loss": 0.5639,
+ "step": 331
+ },
+ {
+ "epoch": 1.0375782881002087,
+ "grad_norm": 1.6502974033355713,
+ "learning_rate": 4.800904837191743e-06,
+ "loss": 0.6024,
+ "step": 332
+ },
+ {
+ "epoch": 1.0407098121085594,
+ "grad_norm": 0.8031198978424072,
+ "learning_rate": 4.799208202130762e-06,
+ "loss": 0.5305,
+ "step": 333
+ },
+ {
+ "epoch": 1.0438413361169103,
+ "grad_norm": 0.939644992351532,
+ "learning_rate": 4.797504670972623e-06,
+ "loss": 0.5446,
+ "step": 334
+ },
+ {
+ "epoch": 1.046972860125261,
+ "grad_norm": 1.0589954853057861,
+ "learning_rate": 4.795794248826789e-06,
+ "loss": 0.5366,
+ "step": 335
+ },
+ {
+ "epoch": 1.0501043841336117,
+ "grad_norm": 0.9089614748954773,
+ "learning_rate": 4.794076940823391e-06,
+ "loss": 0.5795,
+ "step": 336
+ },
+ {
+ "epoch": 1.0532359081419624,
+ "grad_norm": 0.7732561230659485,
+ "learning_rate": 4.792352752113212e-06,
+ "loss": 0.5765,
+ "step": 337
+ },
+ {
+ "epoch": 1.0563674321503131,
+ "grad_norm": 1.811553955078125,
+ "learning_rate": 4.790621687867672e-06,
+ "loss": 0.561,
+ "step": 338
+ },
+ {
+ "epoch": 1.0594989561586639,
+ "grad_norm": 1.1930758953094482,
+ "learning_rate": 4.788883753278813e-06,
+ "loss": 0.5,
+ "step": 339
+ },
+ {
+ "epoch": 1.0626304801670146,
+ "grad_norm": 0.9551813006401062,
+ "learning_rate": 4.787138953559285e-06,
+ "loss": 0.5228,
+ "step": 340
+ },
+ {
+ "epoch": 1.0657620041753653,
+ "grad_norm": 0.9609586596488953,
+ "learning_rate": 4.785387293942329e-06,
+ "loss": 0.5827,
+ "step": 341
+ },
+ {
+ "epoch": 1.068893528183716,
+ "grad_norm": 0.8403449654579163,
+ "learning_rate": 4.783628779681759e-06,
+ "loss": 0.5585,
+ "step": 342
+ },
+ {
+ "epoch": 1.072025052192067,
+ "grad_norm": 0.9108251929283142,
+ "learning_rate": 4.7818634160519496e-06,
+ "loss": 0.6077,
+ "step": 343
+ },
+ {
+ "epoch": 1.0751565762004176,
+ "grad_norm": 0.9476898908615112,
+ "learning_rate": 4.780091208347819e-06,
+ "loss": 0.5493,
+ "step": 344
+ },
+ {
+ "epoch": 1.0782881002087683,
+ "grad_norm": 1.1943707466125488,
+ "learning_rate": 4.778312161884813e-06,
+ "loss": 0.5736,
+ "step": 345
+ },
+ {
+ "epoch": 1.081419624217119,
+ "grad_norm": 3.1342639923095703,
+ "learning_rate": 4.77652628199889e-06,
+ "loss": 0.5765,
+ "step": 346
+ },
+ {
+ "epoch": 1.0845511482254697,
+ "grad_norm": 2.7982125282287598,
+ "learning_rate": 4.7747335740465015e-06,
+ "loss": 0.6003,
+ "step": 347
+ },
+ {
+ "epoch": 1.0876826722338204,
+ "grad_norm": 1.5068914890289307,
+ "learning_rate": 4.7729340434045815e-06,
+ "loss": 0.5033,
+ "step": 348
+ },
+ {
+ "epoch": 1.0908141962421711,
+ "grad_norm": 0.8273429274559021,
+ "learning_rate": 4.771127695470527e-06,
+ "loss": 0.5309,
+ "step": 349
+ },
+ {
+ "epoch": 1.0939457202505218,
+ "grad_norm": 1.104974389076233,
+ "learning_rate": 4.76931453566218e-06,
+ "loss": 0.5244,
+ "step": 350
+ },
+ {
+ "epoch": 1.0970772442588728,
+ "grad_norm": 1.096509337425232,
+ "learning_rate": 4.7674945694178166e-06,
+ "loss": 0.5585,
+ "step": 351
+ },
+ {
+ "epoch": 1.1002087682672235,
+ "grad_norm": 1.0238200426101685,
+ "learning_rate": 4.765667802196127e-06,
+ "loss": 0.5589,
+ "step": 352
+ },
+ {
+ "epoch": 1.1033402922755742,
+ "grad_norm": 0.7515526413917542,
+ "learning_rate": 4.763834239476197e-06,
+ "loss": 0.5304,
+ "step": 353
+ },
+ {
+ "epoch": 1.1064718162839249,
+ "grad_norm": 1.0282566547393799,
+ "learning_rate": 4.761993886757499e-06,
+ "loss": 0.5476,
+ "step": 354
+ },
+ {
+ "epoch": 1.1096033402922756,
+ "grad_norm": 0.9962708950042725,
+ "learning_rate": 4.760146749559868e-06,
+ "loss": 0.5117,
+ "step": 355
+ },
+ {
+ "epoch": 1.1127348643006263,
+ "grad_norm": 0.7851671576499939,
+ "learning_rate": 4.758292833423488e-06,
+ "loss": 0.5542,
+ "step": 356
+ },
+ {
+ "epoch": 1.115866388308977,
+ "grad_norm": 0.8857759237289429,
+ "learning_rate": 4.756432143908876e-06,
+ "loss": 0.544,
+ "step": 357
+ },
+ {
+ "epoch": 1.1189979123173277,
+ "grad_norm": 0.9402740597724915,
+ "learning_rate": 4.7545646865968645e-06,
+ "loss": 0.5656,
+ "step": 358
+ },
+ {
+ "epoch": 1.1221294363256784,
+ "grad_norm": 0.8210407495498657,
+ "learning_rate": 4.752690467088584e-06,
+ "loss": 0.5733,
+ "step": 359
+ },
+ {
+ "epoch": 1.1252609603340291,
+ "grad_norm": 0.795684278011322,
+ "learning_rate": 4.750809491005449e-06,
+ "loss": 0.5678,
+ "step": 360
+ },
+ {
+ "epoch": 1.12839248434238,
+ "grad_norm": 0.8712463974952698,
+ "learning_rate": 4.748921763989139e-06,
+ "loss": 0.5777,
+ "step": 361
+ },
+ {
+ "epoch": 1.1315240083507307,
+ "grad_norm": 0.9810119867324829,
+ "learning_rate": 4.747027291701578e-06,
+ "loss": 0.5511,
+ "step": 362
+ },
+ {
+ "epoch": 1.1346555323590815,
+ "grad_norm": 0.81117844581604,
+ "learning_rate": 4.745126079824926e-06,
+ "loss": 0.5038,
+ "step": 363
+ },
+ {
+ "epoch": 1.1377870563674322,
+ "grad_norm": 0.7631494402885437,
+ "learning_rate": 4.743218134061556e-06,
+ "loss": 0.6272,
+ "step": 364
+ },
+ {
+ "epoch": 1.1409185803757829,
+ "grad_norm": 0.7601696252822876,
+ "learning_rate": 4.741303460134038e-06,
+ "loss": 0.571,
+ "step": 365
+ },
+ {
+ "epoch": 1.1440501043841336,
+ "grad_norm": 1.7977744340896606,
+ "learning_rate": 4.7393820637851205e-06,
+ "loss": 0.538,
+ "step": 366
+ },
+ {
+ "epoch": 1.1471816283924843,
+ "grad_norm": 2.022578001022339,
+ "learning_rate": 4.737453950777718e-06,
+ "loss": 0.5822,
+ "step": 367
+ },
+ {
+ "epoch": 1.150313152400835,
+ "grad_norm": 0.7586764693260193,
+ "learning_rate": 4.735519126894885e-06,
+ "loss": 0.5986,
+ "step": 368
+ },
+ {
+ "epoch": 1.153444676409186,
+ "grad_norm": 0.8970286846160889,
+ "learning_rate": 4.733577597939812e-06,
+ "loss": 0.542,
+ "step": 369
+ },
+ {
+ "epoch": 1.1565762004175366,
+ "grad_norm": 0.8546352982521057,
+ "learning_rate": 4.731629369735793e-06,
+ "loss": 0.5832,
+ "step": 370
+ },
+ {
+ "epoch": 1.1597077244258873,
+ "grad_norm": 0.9266164898872375,
+ "learning_rate": 4.72967444812622e-06,
+ "loss": 0.551,
+ "step": 371
+ },
+ {
+ "epoch": 1.162839248434238,
+ "grad_norm": 1.0413658618927002,
+ "learning_rate": 4.7277128389745595e-06,
+ "loss": 0.5866,
+ "step": 372
+ },
+ {
+ "epoch": 1.1659707724425887,
+ "grad_norm": 0.9312199950218201,
+ "learning_rate": 4.7257445481643334e-06,
+ "loss": 0.5723,
+ "step": 373
+ },
+ {
+ "epoch": 1.1691022964509394,
+ "grad_norm": 0.7389806509017944,
+ "learning_rate": 4.723769581599109e-06,
+ "loss": 0.5209,
+ "step": 374
+ },
+ {
+ "epoch": 1.1722338204592901,
+ "grad_norm": 3.053169012069702,
+ "learning_rate": 4.721787945202472e-06,
+ "loss": 0.6094,
+ "step": 375
+ },
+ {
+ "epoch": 1.1753653444676408,
+ "grad_norm": 1.288589596748352,
+ "learning_rate": 4.719799644918017e-06,
+ "loss": 0.5616,
+ "step": 376
+ },
+ {
+ "epoch": 1.1784968684759916,
+ "grad_norm": 0.7675042152404785,
+ "learning_rate": 4.717804686709323e-06,
+ "loss": 0.4963,
+ "step": 377
+ },
+ {
+ "epoch": 1.1816283924843423,
+ "grad_norm": 0.7246491312980652,
+ "learning_rate": 4.715803076559938e-06,
+ "loss": 0.5273,
+ "step": 378
+ },
+ {
+ "epoch": 1.1847599164926932,
+ "grad_norm": 0.8193361759185791,
+ "learning_rate": 4.713794820473366e-06,
+ "loss": 0.6107,
+ "step": 379
+ },
+ {
+ "epoch": 1.187891440501044,
+ "grad_norm": 0.9498510360717773,
+ "learning_rate": 4.711779924473037e-06,
+ "loss": 0.5421,
+ "step": 380
+ },
+ {
+ "epoch": 1.1910229645093946,
+ "grad_norm": 1.0479756593704224,
+ "learning_rate": 4.709758394602305e-06,
+ "loss": 0.5257,
+ "step": 381
+ },
+ {
+ "epoch": 1.1941544885177453,
+ "grad_norm": 0.907866895198822,
+ "learning_rate": 4.707730236924413e-06,
+ "loss": 0.5289,
+ "step": 382
+ },
+ {
+ "epoch": 1.197286012526096,
+ "grad_norm": 0.8861165642738342,
+ "learning_rate": 4.705695457522488e-06,
+ "loss": 0.5727,
+ "step": 383
+ },
+ {
+ "epoch": 1.2004175365344467,
+ "grad_norm": 0.7467761039733887,
+ "learning_rate": 4.703654062499516e-06,
+ "loss": 0.5602,
+ "step": 384
+ },
+ {
+ "epoch": 1.2035490605427974,
+ "grad_norm": 0.7456198334693909,
+ "learning_rate": 4.701606057978325e-06,
+ "loss": 0.5345,
+ "step": 385
+ },
+ {
+ "epoch": 1.2066805845511483,
+ "grad_norm": 1.9976060390472412,
+ "learning_rate": 4.699551450101571e-06,
+ "loss": 0.5504,
+ "step": 386
+ },
+ {
+ "epoch": 1.209812108559499,
+ "grad_norm": 1.5253807306289673,
+ "learning_rate": 4.697490245031709e-06,
+ "loss": 0.5568,
+ "step": 387
+ },
+ {
+ "epoch": 1.2129436325678498,
+ "grad_norm": 1.0786075592041016,
+ "learning_rate": 4.6954224489509885e-06,
+ "loss": 0.5564,
+ "step": 388
+ },
+ {
+ "epoch": 1.2160751565762005,
+ "grad_norm": 0.8385995030403137,
+ "learning_rate": 4.693348068061422e-06,
+ "loss": 0.5341,
+ "step": 389
+ },
+ {
+ "epoch": 1.2192066805845512,
+ "grad_norm": 0.8184949159622192,
+ "learning_rate": 4.691267108584774e-06,
+ "loss": 0.5614,
+ "step": 390
+ },
+ {
+ "epoch": 1.2223382045929019,
+ "grad_norm": 0.9964898824691772,
+ "learning_rate": 4.68917957676254e-06,
+ "loss": 0.5589,
+ "step": 391
+ },
+ {
+ "epoch": 1.2254697286012526,
+ "grad_norm": 1.0168914794921875,
+ "learning_rate": 4.687085478855931e-06,
+ "loss": 0.5892,
+ "step": 392
+ },
+ {
+ "epoch": 1.2286012526096033,
+ "grad_norm": 0.8841140866279602,
+ "learning_rate": 4.684984821145846e-06,
+ "loss": 0.5327,
+ "step": 393
+ },
+ {
+ "epoch": 1.231732776617954,
+ "grad_norm": 0.834431529045105,
+ "learning_rate": 4.682877609932866e-06,
+ "loss": 0.5594,
+ "step": 394
+ },
+ {
+ "epoch": 1.2348643006263047,
+ "grad_norm": 0.7256641983985901,
+ "learning_rate": 4.6807638515372234e-06,
+ "loss": 0.5443,
+ "step": 395
+ },
+ {
+ "epoch": 1.2379958246346556,
+ "grad_norm": 0.765096127986908,
+ "learning_rate": 4.678643552298788e-06,
+ "loss": 0.5439,
+ "step": 396
+ },
+ {
+ "epoch": 1.2411273486430063,
+ "grad_norm": 0.8760455846786499,
+ "learning_rate": 4.676516718577051e-06,
+ "loss": 0.5485,
+ "step": 397
+ },
+ {
+ "epoch": 1.244258872651357,
+ "grad_norm": 2.7111501693725586,
+ "learning_rate": 4.674383356751099e-06,
+ "loss": 0.5696,
+ "step": 398
+ },
+ {
+ "epoch": 1.2473903966597077,
+ "grad_norm": 1.0521738529205322,
+ "learning_rate": 4.672243473219601e-06,
+ "loss": 0.5503,
+ "step": 399
+ },
+ {
+ "epoch": 1.2505219206680585,
+ "grad_norm": 0.8909669518470764,
+ "learning_rate": 4.670097074400785e-06,
+ "loss": 0.5183,
+ "step": 400
+ },
+ {
+ "epoch": 1.2536534446764092,
+ "grad_norm": 0.7483847737312317,
+ "learning_rate": 4.667944166732424e-06,
+ "loss": 0.5669,
+ "step": 401
+ },
+ {
+ "epoch": 1.2567849686847599,
+ "grad_norm": 1.146997094154358,
+ "learning_rate": 4.665784756671808e-06,
+ "loss": 0.5464,
+ "step": 402
+ },
+ {
+ "epoch": 1.2599164926931106,
+ "grad_norm": 0.8998096585273743,
+ "learning_rate": 4.663618850695733e-06,
+ "loss": 0.5502,
+ "step": 403
+ },
+ {
+ "epoch": 1.2630480167014615,
+ "grad_norm": 0.8882688283920288,
+ "learning_rate": 4.6614464553004795e-06,
+ "loss": 0.5507,
+ "step": 404
+ },
+ {
+ "epoch": 1.2661795407098122,
+ "grad_norm": 0.8310684561729431,
+ "learning_rate": 4.659267577001789e-06,
+ "loss": 0.5164,
+ "step": 405
+ },
+ {
+ "epoch": 1.269311064718163,
+ "grad_norm": 0.9286114573478699,
+ "learning_rate": 4.657082222334851e-06,
+ "loss": 0.4813,
+ "step": 406
+ },
+ {
+ "epoch": 1.2724425887265136,
+ "grad_norm": 1.2394906282424927,
+ "learning_rate": 4.654890397854275e-06,
+ "loss": 0.5837,
+ "step": 407
+ },
+ {
+ "epoch": 1.2755741127348643,
+ "grad_norm": 4.00585412979126,
+ "learning_rate": 4.652692110134079e-06,
+ "loss": 0.5453,
+ "step": 408
+ },
+ {
+ "epoch": 1.278705636743215,
+ "grad_norm": 1.1667803525924683,
+ "learning_rate": 4.650487365767667e-06,
+ "loss": 0.5652,
+ "step": 409
+ },
+ {
+ "epoch": 1.2818371607515657,
+ "grad_norm": 0.9351289868354797,
+ "learning_rate": 4.648276171367807e-06,
+ "loss": 0.5576,
+ "step": 410
+ },
+ {
+ "epoch": 1.2849686847599164,
+ "grad_norm": 0.8107728958129883,
+ "learning_rate": 4.646058533566614e-06,
+ "loss": 0.5821,
+ "step": 411
+ },
+ {
+ "epoch": 1.2881002087682671,
+ "grad_norm": 0.7293011546134949,
+ "learning_rate": 4.643834459015525e-06,
+ "loss": 0.5363,
+ "step": 412
+ },
+ {
+ "epoch": 1.2912317327766178,
+ "grad_norm": 0.7550690770149231,
+ "learning_rate": 4.641603954385289e-06,
+ "loss": 0.53,
+ "step": 413
+ },
+ {
+ "epoch": 1.2943632567849686,
+ "grad_norm": 0.7626177072525024,
+ "learning_rate": 4.639367026365938e-06,
+ "loss": 0.5307,
+ "step": 414
+ },
+ {
+ "epoch": 1.2974947807933195,
+ "grad_norm": 1.0841096639633179,
+ "learning_rate": 4.637123681666769e-06,
+ "loss": 0.5162,
+ "step": 415
+ },
+ {
+ "epoch": 1.3006263048016702,
+ "grad_norm": 0.8814271092414856,
+ "learning_rate": 4.634873927016326e-06,
+ "loss": 0.5369,
+ "step": 416
+ },
+ {
+ "epoch": 1.303757828810021,
+ "grad_norm": 0.7402971982955933,
+ "learning_rate": 4.632617769162378e-06,
+ "loss": 0.5846,
+ "step": 417
+ },
+ {
+ "epoch": 1.3068893528183716,
+ "grad_norm": 0.8106061220169067,
+ "learning_rate": 4.6303552148719e-06,
+ "loss": 0.5289,
+ "step": 418
+ },
+ {
+ "epoch": 1.3100208768267223,
+ "grad_norm": 0.9241361618041992,
+ "learning_rate": 4.628086270931053e-06,
+ "loss": 0.5714,
+ "step": 419
+ },
+ {
+ "epoch": 1.313152400835073,
+ "grad_norm": 0.950332522392273,
+ "learning_rate": 4.625810944145159e-06,
+ "loss": 0.5817,
+ "step": 420
+ },
+ {
+ "epoch": 1.316283924843424,
+ "grad_norm": 0.9037718772888184,
+ "learning_rate": 4.623529241338689e-06,
+ "loss": 0.5902,
+ "step": 421
+ },
+ {
+ "epoch": 1.3194154488517746,
+ "grad_norm": 1.2110658884048462,
+ "learning_rate": 4.621241169355234e-06,
+ "loss": 0.561,
+ "step": 422
+ },
+ {
+ "epoch": 1.3225469728601253,
+ "grad_norm": 0.8582742214202881,
+ "learning_rate": 4.618946735057491e-06,
+ "loss": 0.5003,
+ "step": 423
+ },
+ {
+ "epoch": 1.325678496868476,
+ "grad_norm": 0.9203405976295471,
+ "learning_rate": 4.6166459453272386e-06,
+ "loss": 0.5639,
+ "step": 424
+ },
+ {
+ "epoch": 1.3288100208768268,
+ "grad_norm": 0.933721125125885,
+ "learning_rate": 4.614338807065317e-06,
+ "loss": 0.5766,
+ "step": 425
+ },
+ {
+ "epoch": 1.3319415448851775,
+ "grad_norm": 0.8435131311416626,
+ "learning_rate": 4.612025327191608e-06,
+ "loss": 0.5656,
+ "step": 426
+ },
+ {
+ "epoch": 1.3350730688935282,
+ "grad_norm": 0.795796811580658,
+ "learning_rate": 4.609705512645015e-06,
+ "loss": 0.4996,
+ "step": 427
+ },
+ {
+ "epoch": 1.3382045929018789,
+ "grad_norm": 0.8168228268623352,
+ "learning_rate": 4.6073793703834404e-06,
+ "loss": 0.5465,
+ "step": 428
+ },
+ {
+ "epoch": 1.3413361169102296,
+ "grad_norm": 0.8795569539070129,
+ "learning_rate": 4.605046907383765e-06,
+ "loss": 0.5407,
+ "step": 429
+ },
+ {
+ "epoch": 1.3444676409185803,
+ "grad_norm": 0.8504094481468201,
+ "learning_rate": 4.6027081306418295e-06,
+ "loss": 0.5589,
+ "step": 430
+ },
+ {
+ "epoch": 1.347599164926931,
+ "grad_norm": 1.485202431678772,
+ "learning_rate": 4.600363047172409e-06,
+ "loss": 0.5515,
+ "step": 431
+ },
+ {
+ "epoch": 1.350730688935282,
+ "grad_norm": 1.1156851053237915,
+ "learning_rate": 4.598011664009197e-06,
+ "loss": 0.5681,
+ "step": 432
+ },
+ {
+ "epoch": 1.3538622129436326,
+ "grad_norm": 0.8666876554489136,
+ "learning_rate": 4.595653988204779e-06,
+ "loss": 0.5451,
+ "step": 433
+ },
+ {
+ "epoch": 1.3569937369519833,
+ "grad_norm": 0.8192381858825684,
+ "learning_rate": 4.593290026830619e-06,
+ "loss": 0.5632,
+ "step": 434
+ },
+ {
+ "epoch": 1.360125260960334,
+ "grad_norm": 0.7994804978370667,
+ "learning_rate": 4.590919786977029e-06,
+ "loss": 0.5181,
+ "step": 435
+ },
+ {
+ "epoch": 1.3632567849686847,
+ "grad_norm": 0.8038607835769653,
+ "learning_rate": 4.5885432757531535e-06,
+ "loss": 0.5385,
+ "step": 436
+ },
+ {
+ "epoch": 1.3663883089770354,
+ "grad_norm": 0.7677503824234009,
+ "learning_rate": 4.586160500286948e-06,
+ "loss": 0.5455,
+ "step": 437
+ },
+ {
+ "epoch": 1.3695198329853862,
+ "grad_norm": 0.8293285369873047,
+ "learning_rate": 4.583771467725157e-06,
+ "loss": 0.5401,
+ "step": 438
+ },
+ {
+ "epoch": 1.372651356993737,
+ "grad_norm": 0.8607680797576904,
+ "learning_rate": 4.581376185233289e-06,
+ "loss": 0.5782,
+ "step": 439
+ },
+ {
+ "epoch": 1.3757828810020878,
+ "grad_norm": 0.8847081065177917,
+ "learning_rate": 4.578974659995601e-06,
+ "loss": 0.572,
+ "step": 440
+ },
+ {
+ "epoch": 1.3789144050104385,
+ "grad_norm": 0.7669641971588135,
+ "learning_rate": 4.576566899215075e-06,
+ "loss": 0.5655,
+ "step": 441
+ },
+ {
+ "epoch": 1.3820459290187892,
+ "grad_norm": 0.8514629006385803,
+ "learning_rate": 4.5741529101133904e-06,
+ "loss": 0.5218,
+ "step": 442
+ },
+ {
+ "epoch": 1.38517745302714,
+ "grad_norm": 0.8719842433929443,
+ "learning_rate": 4.5717326999309145e-06,
+ "loss": 0.5579,
+ "step": 443
+ },
+ {
+ "epoch": 1.3883089770354906,
+ "grad_norm": 1.1142809391021729,
+ "learning_rate": 4.569306275926667e-06,
+ "loss": 0.5535,
+ "step": 444
+ },
+ {
+ "epoch": 1.3914405010438413,
+ "grad_norm": 0.7392387986183167,
+ "learning_rate": 4.566873645378309e-06,
+ "loss": 0.5335,
+ "step": 445
+ },
+ {
+ "epoch": 1.394572025052192,
+ "grad_norm": 0.9066658616065979,
+ "learning_rate": 4.564434815582117e-06,
+ "loss": 0.5286,
+ "step": 446
+ },
+ {
+ "epoch": 1.3977035490605427,
+ "grad_norm": 0.8648932576179504,
+ "learning_rate": 4.561989793852959e-06,
+ "loss": 0.5008,
+ "step": 447
+ },
+ {
+ "epoch": 1.4008350730688934,
+ "grad_norm": 0.7768712043762207,
+ "learning_rate": 4.559538587524276e-06,
+ "loss": 0.5727,
+ "step": 448
+ },
+ {
+ "epoch": 1.4039665970772441,
+ "grad_norm": 0.7851182222366333,
+ "learning_rate": 4.557081203948059e-06,
+ "loss": 0.5731,
+ "step": 449
+ },
+ {
+ "epoch": 1.407098121085595,
+ "grad_norm": 0.8959861397743225,
+ "learning_rate": 4.5546176504948255e-06,
+ "loss": 0.5587,
+ "step": 450
+ },
+ {
+ "epoch": 1.4102296450939458,
+ "grad_norm": 1.0538026094436646,
+ "learning_rate": 4.552147934553601e-06,
+ "loss": 0.5808,
+ "step": 451
+ },
+ {
+ "epoch": 1.4133611691022965,
+ "grad_norm": 0.9887629151344299,
+ "learning_rate": 4.54967206353189e-06,
+ "loss": 0.5658,
+ "step": 452
+ },
+ {
+ "epoch": 1.4164926931106472,
+ "grad_norm": 0.9579302072525024,
+ "learning_rate": 4.547190044855663e-06,
+ "loss": 0.5092,
+ "step": 453
+ },
+ {
+ "epoch": 1.4196242171189979,
+ "grad_norm": 0.6993522047996521,
+ "learning_rate": 4.544701885969326e-06,
+ "loss": 0.5233,
+ "step": 454
+ },
+ {
+ "epoch": 1.4227557411273486,
+ "grad_norm": 0.8197568655014038,
+ "learning_rate": 4.542207594335703e-06,
+ "loss": 0.553,
+ "step": 455
+ },
+ {
+ "epoch": 1.4258872651356993,
+ "grad_norm": 2.921947717666626,
+ "learning_rate": 4.53970717743601e-06,
+ "loss": 0.4857,
+ "step": 456
+ },
+ {
+ "epoch": 1.4290187891440502,
+ "grad_norm": 1.3547242879867554,
+ "learning_rate": 4.53720064276984e-06,
+ "loss": 0.5676,
+ "step": 457
+ },
+ {
+ "epoch": 1.432150313152401,
+ "grad_norm": 1.4175567626953125,
+ "learning_rate": 4.534687997855131e-06,
+ "loss": 0.5164,
+ "step": 458
+ },
+ {
+ "epoch": 1.4352818371607516,
+ "grad_norm": 1.378146767616272,
+ "learning_rate": 4.532169250228145e-06,
+ "loss": 0.5429,
+ "step": 459
+ },
+ {
+ "epoch": 1.4384133611691023,
+ "grad_norm": 0.7811698317527771,
+ "learning_rate": 4.529644407443456e-06,
+ "loss": 0.524,
+ "step": 460
+ },
+ {
+ "epoch": 1.441544885177453,
+ "grad_norm": 1.1481678485870361,
+ "learning_rate": 4.527113477073914e-06,
+ "loss": 0.5513,
+ "step": 461
+ },
+ {
+ "epoch": 1.4446764091858038,
+ "grad_norm": 0.8450161218643188,
+ "learning_rate": 4.5245764667106266e-06,
+ "loss": 0.5632,
+ "step": 462
+ },
+ {
+ "epoch": 1.4478079331941545,
+ "grad_norm": 1.1582145690917969,
+ "learning_rate": 4.522033383962941e-06,
+ "loss": 0.5834,
+ "step": 463
+ },
+ {
+ "epoch": 1.4509394572025052,
+ "grad_norm": 1.0403447151184082,
+ "learning_rate": 4.519484236458416e-06,
+ "loss": 0.506,
+ "step": 464
+ },
+ {
+ "epoch": 1.4540709812108559,
+ "grad_norm": 0.7894920706748962,
+ "learning_rate": 4.516929031842799e-06,
+ "loss": 0.5526,
+ "step": 465
+ },
+ {
+ "epoch": 1.4572025052192066,
+ "grad_norm": 0.8092262744903564,
+ "learning_rate": 4.51436777778001e-06,
+ "loss": 0.5619,
+ "step": 466
+ },
+ {
+ "epoch": 1.4603340292275573,
+ "grad_norm": 0.9773806929588318,
+ "learning_rate": 4.511800481952106e-06,
+ "loss": 0.5179,
+ "step": 467
+ },
+ {
+ "epoch": 1.4634655532359082,
+ "grad_norm": 1.018676519393921,
+ "learning_rate": 4.509227152059271e-06,
+ "loss": 0.5415,
+ "step": 468
+ },
+ {
+ "epoch": 1.466597077244259,
+ "grad_norm": 0.7457838654518127,
+ "learning_rate": 4.506647795819784e-06,
+ "loss": 0.5473,
+ "step": 469
+ },
+ {
+ "epoch": 1.4697286012526096,
+ "grad_norm": 0.7826436161994934,
+ "learning_rate": 4.50406242097e-06,
+ "loss": 0.5526,
+ "step": 470
+ },
+ {
+ "epoch": 1.4728601252609603,
+ "grad_norm": 0.9492483139038086,
+ "learning_rate": 4.501471035264328e-06,
+ "loss": 0.5179,
+ "step": 471
+ },
+ {
+ "epoch": 1.475991649269311,
+ "grad_norm": 0.93398517370224,
+ "learning_rate": 4.4988736464752005e-06,
+ "loss": 0.5195,
+ "step": 472
+ },
+ {
+ "epoch": 1.4791231732776617,
+ "grad_norm": 0.8396487832069397,
+ "learning_rate": 4.496270262393061e-06,
+ "loss": 0.5447,
+ "step": 473
+ },
+ {
+ "epoch": 1.4822546972860124,
+ "grad_norm": 0.7450584173202515,
+ "learning_rate": 4.4936608908263315e-06,
+ "loss": 0.5207,
+ "step": 474
+ },
+ {
+ "epoch": 1.4853862212943634,
+ "grad_norm": 0.7887717485427856,
+ "learning_rate": 4.491045539601392e-06,
+ "loss": 0.523,
+ "step": 475
+ },
+ {
+ "epoch": 1.488517745302714,
+ "grad_norm": 1.2051388025283813,
+ "learning_rate": 4.48842421656256e-06,
+ "loss": 0.5402,
+ "step": 476
+ },
+ {
+ "epoch": 1.4916492693110648,
+ "grad_norm": 2.3103389739990234,
+ "learning_rate": 4.485796929572063e-06,
+ "loss": 0.5588,
+ "step": 477
+ },
+ {
+ "epoch": 1.4947807933194155,
+ "grad_norm": 0.7473112344741821,
+ "learning_rate": 4.483163686510016e-06,
+ "loss": 0.5731,
+ "step": 478
+ },
+ {
+ "epoch": 1.4979123173277662,
+ "grad_norm": 0.7545126676559448,
+ "learning_rate": 4.480524495274399e-06,
+ "loss": 0.5536,
+ "step": 479
+ },
+ {
+ "epoch": 1.501043841336117,
+ "grad_norm": 0.7801297903060913,
+ "learning_rate": 4.477879363781033e-06,
+ "loss": 0.5696,
+ "step": 480
+ },
+ {
+ "epoch": 1.5041753653444676,
+ "grad_norm": 0.7740563750267029,
+ "learning_rate": 4.475228299963554e-06,
+ "loss": 0.5526,
+ "step": 481
+ },
+ {
+ "epoch": 1.5073068893528183,
+ "grad_norm": 0.8600060343742371,
+ "learning_rate": 4.4725713117733936e-06,
+ "loss": 0.5051,
+ "step": 482
+ },
+ {
+ "epoch": 1.510438413361169,
+ "grad_norm": 0.6934283971786499,
+ "learning_rate": 4.46990840717975e-06,
+ "loss": 0.5564,
+ "step": 483
+ },
+ {
+ "epoch": 1.5135699373695197,
+ "grad_norm": 0.8927920460700989,
+ "learning_rate": 4.46723959416957e-06,
+ "loss": 0.5529,
+ "step": 484
+ },
+ {
+ "epoch": 1.5167014613778704,
+ "grad_norm": 0.9570988416671753,
+ "learning_rate": 4.464564880747517e-06,
+ "loss": 0.5661,
+ "step": 485
+ },
+ {
+ "epoch": 1.5198329853862211,
+ "grad_norm": 0.7229202389717102,
+ "learning_rate": 4.461884274935956e-06,
+ "loss": 0.5964,
+ "step": 486
+ },
+ {
+ "epoch": 1.522964509394572,
+ "grad_norm": 0.7367239594459534,
+ "learning_rate": 4.4591977847749225e-06,
+ "loss": 0.5455,
+ "step": 487
+ },
+ {
+ "epoch": 1.5260960334029228,
+ "grad_norm": 0.8062120676040649,
+ "learning_rate": 4.456505418322103e-06,
+ "loss": 0.5735,
+ "step": 488
+ },
+ {
+ "epoch": 1.5292275574112735,
+ "grad_norm": 0.8854482769966125,
+ "learning_rate": 4.453807183652808e-06,
+ "loss": 0.5421,
+ "step": 489
+ },
+ {
+ "epoch": 1.5323590814196242,
+ "grad_norm": 0.7518959045410156,
+ "learning_rate": 4.451103088859951e-06,
+ "loss": 0.5083,
+ "step": 490
+ },
+ {
+ "epoch": 1.535490605427975,
+ "grad_norm": 0.8621206879615784,
+ "learning_rate": 4.448393142054016e-06,
+ "loss": 0.4712,
+ "step": 491
+ },
+ {
+ "epoch": 1.5386221294363258,
+ "grad_norm": 1.0618741512298584,
+ "learning_rate": 4.445677351363046e-06,
+ "loss": 0.5808,
+ "step": 492
+ },
+ {
+ "epoch": 1.5417536534446765,
+ "grad_norm": 0.8261345028877258,
+ "learning_rate": 4.442955724932607e-06,
+ "loss": 0.5625,
+ "step": 493
+ },
+ {
+ "epoch": 1.5448851774530272,
+ "grad_norm": 0.7067139744758606,
+ "learning_rate": 4.440228270925772e-06,
+ "loss": 0.5661,
+ "step": 494
+ },
+ {
+ "epoch": 1.548016701461378,
+ "grad_norm": 0.9234416484832764,
+ "learning_rate": 4.437494997523091e-06,
+ "loss": 0.5428,
+ "step": 495
+ },
+ {
+ "epoch": 1.5511482254697286,
+ "grad_norm": 0.9273470044136047,
+ "learning_rate": 4.434755912922567e-06,
+ "loss": 0.5388,
+ "step": 496
+ },
+ {
+ "epoch": 1.5542797494780793,
+ "grad_norm": 1.0163263082504272,
+ "learning_rate": 4.4320110253396345e-06,
+ "loss": 0.5409,
+ "step": 497
+ },
+ {
+ "epoch": 1.55741127348643,
+ "grad_norm": 0.9542096853256226,
+ "learning_rate": 4.429260343007133e-06,
+ "loss": 0.5329,
+ "step": 498
+ },
+ {
+ "epoch": 1.5605427974947808,
+ "grad_norm": 0.8076801896095276,
+ "learning_rate": 4.426503874175283e-06,
+ "loss": 0.5616,
+ "step": 499
+ },
+ {
+ "epoch": 1.5636743215031315,
+ "grad_norm": 1.0063767433166504,
+ "learning_rate": 4.423741627111658e-06,
+ "loss": 0.5369,
+ "step": 500
+ },
+ {
+ "epoch": 1.5668058455114822,
+ "grad_norm": 1.040286898612976,
+ "learning_rate": 4.420973610101166e-06,
+ "loss": 0.5474,
+ "step": 501
+ },
+ {
+ "epoch": 1.5699373695198329,
+ "grad_norm": 0.7832860946655273,
+ "learning_rate": 4.4181998314460164e-06,
+ "loss": 0.5486,
+ "step": 502
+ },
+ {
+ "epoch": 1.5730688935281836,
+ "grad_norm": 0.8162257075309753,
+ "learning_rate": 4.415420299465706e-06,
+ "loss": 0.5054,
+ "step": 503
+ },
+ {
+ "epoch": 1.5762004175365343,
+ "grad_norm": 0.9108433127403259,
+ "learning_rate": 4.4126350224969814e-06,
+ "loss": 0.5399,
+ "step": 504
+ },
+ {
+ "epoch": 1.5793319415448852,
+ "grad_norm": 0.8002520799636841,
+ "learning_rate": 4.409844008893824e-06,
+ "loss": 0.5485,
+ "step": 505
+ },
+ {
+ "epoch": 1.582463465553236,
+ "grad_norm": 0.8543248772621155,
+ "learning_rate": 4.407047267027423e-06,
+ "loss": 0.4984,
+ "step": 506
+ },
+ {
+ "epoch": 1.5855949895615866,
+ "grad_norm": 0.7154155373573303,
+ "learning_rate": 4.404244805286141e-06,
+ "loss": 0.5392,
+ "step": 507
+ },
+ {
+ "epoch": 1.5887265135699373,
+ "grad_norm": 0.818553626537323,
+ "learning_rate": 4.401436632075504e-06,
+ "loss": 0.5178,
+ "step": 508
+ },
+ {
+ "epoch": 1.5918580375782883,
+ "grad_norm": 0.7535017728805542,
+ "learning_rate": 4.398622755818167e-06,
+ "loss": 0.5446,
+ "step": 509
+ },
+ {
+ "epoch": 1.594989561586639,
+ "grad_norm": 0.9328975677490234,
+ "learning_rate": 4.395803184953889e-06,
+ "loss": 0.5546,
+ "step": 510
+ },
+ {
+ "epoch": 1.5981210855949897,
+ "grad_norm": 0.7960026860237122,
+ "learning_rate": 4.392977927939508e-06,
+ "loss": 0.5451,
+ "step": 511
+ },
+ {
+ "epoch": 1.6012526096033404,
+ "grad_norm": 0.9686267971992493,
+ "learning_rate": 4.3901469932489195e-06,
+ "loss": 0.5198,
+ "step": 512
+ },
+ {
+ "epoch": 1.604384133611691,
+ "grad_norm": 0.903137743473053,
+ "learning_rate": 4.387310389373047e-06,
+ "loss": 0.5395,
+ "step": 513
+ },
+ {
+ "epoch": 1.6075156576200418,
+ "grad_norm": 1.0728516578674316,
+ "learning_rate": 4.384468124819816e-06,
+ "loss": 0.5843,
+ "step": 514
+ },
+ {
+ "epoch": 1.6106471816283925,
+ "grad_norm": 1.0245436429977417,
+ "learning_rate": 4.3816202081141345e-06,
+ "loss": 0.5672,
+ "step": 515
+ },
+ {
+ "epoch": 1.6137787056367432,
+ "grad_norm": 0.9672732353210449,
+ "learning_rate": 4.378766647797858e-06,
+ "loss": 0.5369,
+ "step": 516
+ },
+ {
+ "epoch": 1.616910229645094,
+ "grad_norm": 0.9149513840675354,
+ "learning_rate": 4.375907452429774e-06,
+ "loss": 0.4628,
+ "step": 517
+ },
+ {
+ "epoch": 1.6200417536534446,
+ "grad_norm": 0.7543843984603882,
+ "learning_rate": 4.373042630585567e-06,
+ "loss": 0.5344,
+ "step": 518
+ },
+ {
+ "epoch": 1.6231732776617953,
+ "grad_norm": 0.7589017152786255,
+ "learning_rate": 4.370172190857801e-06,
+ "loss": 0.5672,
+ "step": 519
+ },
+ {
+ "epoch": 1.626304801670146,
+ "grad_norm": 0.803040623664856,
+ "learning_rate": 4.367296141855887e-06,
+ "loss": 0.5313,
+ "step": 520
+ },
+ {
+ "epoch": 1.6294363256784967,
+ "grad_norm": 0.8305794596672058,
+ "learning_rate": 4.3644144922060625e-06,
+ "loss": 0.5754,
+ "step": 521
+ },
+ {
+ "epoch": 1.6325678496868476,
+ "grad_norm": 1.0086486339569092,
+ "learning_rate": 4.361527250551361e-06,
+ "loss": 0.5433,
+ "step": 522
+ },
+ {
+ "epoch": 1.6356993736951984,
+ "grad_norm": 0.7217550277709961,
+ "learning_rate": 4.35863442555159e-06,
+ "loss": 0.524,
+ "step": 523
+ },
+ {
+ "epoch": 1.638830897703549,
+ "grad_norm": 0.7788524627685547,
+ "learning_rate": 4.355736025883303e-06,
+ "loss": 0.536,
+ "step": 524
+ },
+ {
+ "epoch": 1.6419624217118998,
+ "grad_norm": 0.8460550904273987,
+ "learning_rate": 4.352832060239774e-06,
+ "loss": 0.5381,
+ "step": 525
+ },
+ {
+ "epoch": 1.6450939457202505,
+ "grad_norm": 0.7571215033531189,
+ "learning_rate": 4.3499225373309675e-06,
+ "loss": 0.541,
+ "step": 526
+ },
+ {
+ "epoch": 1.6482254697286014,
+ "grad_norm": 0.7343226671218872,
+ "learning_rate": 4.347007465883523e-06,
+ "loss": 0.5147,
+ "step": 527
+ },
+ {
+ "epoch": 1.651356993736952,
+ "grad_norm": 0.7271892428398132,
+ "learning_rate": 4.3440868546407165e-06,
+ "loss": 0.5311,
+ "step": 528
+ },
+ {
+ "epoch": 1.6544885177453028,
+ "grad_norm": 0.8166136741638184,
+ "learning_rate": 4.341160712362442e-06,
+ "loss": 0.5379,
+ "step": 529
+ },
+ {
+ "epoch": 1.6576200417536535,
+ "grad_norm": 1.5985233783721924,
+ "learning_rate": 4.338229047825182e-06,
+ "loss": 0.5782,
+ "step": 530
+ },
+ {
+ "epoch": 1.6607515657620042,
+ "grad_norm": 0.7835702896118164,
+ "learning_rate": 4.3352918698219835e-06,
+ "loss": 0.525,
+ "step": 531
+ },
+ {
+ "epoch": 1.663883089770355,
+ "grad_norm": 0.7278687953948975,
+ "learning_rate": 4.332349187162428e-06,
+ "loss": 0.5266,
+ "step": 532
+ },
+ {
+ "epoch": 1.6670146137787056,
+ "grad_norm": 0.8240190148353577,
+ "learning_rate": 4.329401008672608e-06,
+ "loss": 0.5515,
+ "step": 533
+ },
+ {
+ "epoch": 1.6701461377870563,
+ "grad_norm": 0.9447080492973328,
+ "learning_rate": 4.326447343195102e-06,
+ "loss": 0.5596,
+ "step": 534
+ },
+ {
+ "epoch": 1.673277661795407,
+ "grad_norm": 0.7827372550964355,
+ "learning_rate": 4.323488199588944e-06,
+ "loss": 0.5466,
+ "step": 535
+ },
+ {
+ "epoch": 1.6764091858037578,
+ "grad_norm": 0.9252517223358154,
+ "learning_rate": 4.320523586729599e-06,
+ "loss": 0.5433,
+ "step": 536
+ },
+ {
+ "epoch": 1.6795407098121085,
+ "grad_norm": 0.9437504410743713,
+ "learning_rate": 4.317553513508934e-06,
+ "loss": 0.5552,
+ "step": 537
+ },
+ {
+ "epoch": 1.6826722338204592,
+ "grad_norm": 0.8972746133804321,
+ "learning_rate": 4.3145779888351986e-06,
+ "loss": 0.5259,
+ "step": 538
+ },
+ {
+ "epoch": 1.6858037578288099,
+ "grad_norm": 0.8017446994781494,
+ "learning_rate": 4.311597021632988e-06,
+ "loss": 0.5263,
+ "step": 539
+ },
+ {
+ "epoch": 1.6889352818371608,
+ "grad_norm": 0.7875497341156006,
+ "learning_rate": 4.3086106208432235e-06,
+ "loss": 0.5316,
+ "step": 540
+ },
+ {
+ "epoch": 1.6920668058455115,
+ "grad_norm": 0.8204905986785889,
+ "learning_rate": 4.305618795423125e-06,
+ "loss": 0.5506,
+ "step": 541
+ },
+ {
+ "epoch": 1.6951983298538622,
+ "grad_norm": 0.888359785079956,
+ "learning_rate": 4.30262155434618e-06,
+ "loss": 0.4825,
+ "step": 542
+ },
+ {
+ "epoch": 1.698329853862213,
+ "grad_norm": 1.1026058197021484,
+ "learning_rate": 4.29961890660212e-06,
+ "loss": 0.5321,
+ "step": 543
+ },
+ {
+ "epoch": 1.7014613778705638,
+ "grad_norm": 0.7662535905838013,
+ "learning_rate": 4.2966108611968945e-06,
+ "loss": 0.5432,
+ "step": 544
+ },
+ {
+ "epoch": 1.7045929018789145,
+ "grad_norm": 1.1951749324798584,
+ "learning_rate": 4.293597427152641e-06,
+ "loss": 0.5123,
+ "step": 545
+ },
+ {
+ "epoch": 1.7077244258872653,
+ "grad_norm": 1.303183913230896,
+ "learning_rate": 4.290578613507661e-06,
+ "loss": 0.5346,
+ "step": 546
+ },
+ {
+ "epoch": 1.710855949895616,
+ "grad_norm": 0.7653357982635498,
+ "learning_rate": 4.287554429316387e-06,
+ "loss": 0.5397,
+ "step": 547
+ },
+ {
+ "epoch": 1.7139874739039667,
+ "grad_norm": 0.796215295791626,
+ "learning_rate": 4.284524883649366e-06,
+ "loss": 0.5421,
+ "step": 548
+ },
+ {
+ "epoch": 1.7171189979123174,
+ "grad_norm": 0.7599332332611084,
+ "learning_rate": 4.281489985593219e-06,
+ "loss": 0.5289,
+ "step": 549
+ },
+ {
+ "epoch": 1.720250521920668,
+ "grad_norm": 0.8029115796089172,
+ "learning_rate": 4.2784497442506265e-06,
+ "loss": 0.5409,
+ "step": 550
+ },
+ {
+ "epoch": 1.7233820459290188,
+ "grad_norm": 0.7194099426269531,
+ "learning_rate": 4.275404168740291e-06,
+ "loss": 0.5327,
+ "step": 551
+ },
+ {
+ "epoch": 1.7265135699373695,
+ "grad_norm": 0.7960740923881531,
+ "learning_rate": 4.272353268196917e-06,
+ "loss": 0.4896,
+ "step": 552
+ },
+ {
+ "epoch": 1.7296450939457202,
+ "grad_norm": 0.9572116732597351,
+ "learning_rate": 4.269297051771178e-06,
+ "loss": 0.5402,
+ "step": 553
+ },
+ {
+ "epoch": 1.732776617954071,
+ "grad_norm": 1.3604938983917236,
+ "learning_rate": 4.266235528629695e-06,
+ "loss": 0.5792,
+ "step": 554
+ },
+ {
+ "epoch": 1.7359081419624216,
+ "grad_norm": 2.067286729812622,
+ "learning_rate": 4.263168707955002e-06,
+ "loss": 0.5033,
+ "step": 555
+ },
+ {
+ "epoch": 1.7390396659707723,
+ "grad_norm": 0.8031097054481506,
+ "learning_rate": 4.260096598945523e-06,
+ "loss": 0.5117,
+ "step": 556
+ },
+ {
+ "epoch": 1.742171189979123,
+ "grad_norm": 1.0241729021072388,
+ "learning_rate": 4.257019210815546e-06,
+ "loss": 0.5359,
+ "step": 557
+ },
+ {
+ "epoch": 1.745302713987474,
+ "grad_norm": 0.7625218629837036,
+ "learning_rate": 4.25393655279519e-06,
+ "loss": 0.5625,
+ "step": 558
+ },
+ {
+ "epoch": 1.7484342379958246,
+ "grad_norm": 0.8603503704071045,
+ "learning_rate": 4.250848634130381e-06,
+ "loss": 0.5043,
+ "step": 559
+ },
+ {
+ "epoch": 1.7515657620041754,
+ "grad_norm": 0.9543750286102295,
+ "learning_rate": 4.247755464082824e-06,
+ "loss": 0.5364,
+ "step": 560
+ },
+ {
+ "epoch": 1.754697286012526,
+ "grad_norm": 0.9707463979721069,
+ "learning_rate": 4.244657051929973e-06,
+ "loss": 0.5184,
+ "step": 561
+ },
+ {
+ "epoch": 1.757828810020877,
+ "grad_norm": 0.7491432428359985,
+ "learning_rate": 4.241553406965008e-06,
+ "loss": 0.559,
+ "step": 562
+ },
+ {
+ "epoch": 1.7609603340292277,
+ "grad_norm": 0.7444972991943359,
+ "learning_rate": 4.238444538496801e-06,
+ "loss": 0.5327,
+ "step": 563
+ },
+ {
+ "epoch": 1.7640918580375784,
+ "grad_norm": 2.7108678817749023,
+ "learning_rate": 4.235330455849892e-06,
+ "loss": 0.55,
+ "step": 564
+ },
+ {
+ "epoch": 1.767223382045929,
+ "grad_norm": 1.6716049909591675,
+ "learning_rate": 4.232211168364459e-06,
+ "loss": 0.5093,
+ "step": 565
+ },
+ {
+ "epoch": 1.7703549060542798,
+ "grad_norm": 0.7023475170135498,
+ "learning_rate": 4.229086685396295e-06,
+ "loss": 0.569,
+ "step": 566
+ },
+ {
+ "epoch": 1.7734864300626305,
+ "grad_norm": 0.8596265316009521,
+ "learning_rate": 4.225957016316771e-06,
+ "loss": 0.5128,
+ "step": 567
+ },
+ {
+ "epoch": 1.7766179540709812,
+ "grad_norm": 0.8110849857330322,
+ "learning_rate": 4.222822170512816e-06,
+ "loss": 0.5142,
+ "step": 568
+ },
+ {
+ "epoch": 1.779749478079332,
+ "grad_norm": 0.7583725452423096,
+ "learning_rate": 4.219682157386884e-06,
+ "loss": 0.5584,
+ "step": 569
+ },
+ {
+ "epoch": 1.7828810020876826,
+ "grad_norm": 0.787811279296875,
+ "learning_rate": 4.21653698635693e-06,
+ "loss": 0.5068,
+ "step": 570
+ },
+ {
+ "epoch": 1.7860125260960333,
+ "grad_norm": 0.8298993110656738,
+ "learning_rate": 4.213386666856375e-06,
+ "loss": 0.5496,
+ "step": 571
+ },
+ {
+ "epoch": 1.789144050104384,
+ "grad_norm": 0.8999841213226318,
+ "learning_rate": 4.210231208334087e-06,
+ "loss": 0.5454,
+ "step": 572
+ },
+ {
+ "epoch": 1.7922755741127347,
+ "grad_norm": 4.264521598815918,
+ "learning_rate": 4.207070620254345e-06,
+ "loss": 0.5486,
+ "step": 573
+ },
+ {
+ "epoch": 1.7954070981210855,
+ "grad_norm": 0.8517448306083679,
+ "learning_rate": 4.203904912096812e-06,
+ "loss": 0.5566,
+ "step": 574
+ },
+ {
+ "epoch": 1.7985386221294362,
+ "grad_norm": 0.9230182766914368,
+ "learning_rate": 4.200734093356511e-06,
+ "loss": 0.4964,
+ "step": 575
+ },
+ {
+ "epoch": 1.801670146137787,
+ "grad_norm": 1.224039912223816,
+ "learning_rate": 4.197558173543791e-06,
+ "loss": 0.5356,
+ "step": 576
+ },
+ {
+ "epoch": 1.8048016701461378,
+ "grad_norm": 0.9998573660850525,
+ "learning_rate": 4.194377162184301e-06,
+ "loss": 0.5334,
+ "step": 577
+ },
+ {
+ "epoch": 1.8079331941544885,
+ "grad_norm": 0.865521252155304,
+ "learning_rate": 4.191191068818963e-06,
+ "loss": 0.5036,
+ "step": 578
+ },
+ {
+ "epoch": 1.8110647181628392,
+ "grad_norm": 0.8048138618469238,
+ "learning_rate": 4.18799990300394e-06,
+ "loss": 0.4979,
+ "step": 579
+ },
+ {
+ "epoch": 1.8141962421711901,
+ "grad_norm": 0.717815637588501,
+ "learning_rate": 4.184803674310609e-06,
+ "loss": 0.5623,
+ "step": 580
+ },
+ {
+ "epoch": 1.8173277661795408,
+ "grad_norm": 0.8403327465057373,
+ "learning_rate": 4.1816023923255335e-06,
+ "loss": 0.5055,
+ "step": 581
+ },
+ {
+ "epoch": 1.8204592901878915,
+ "grad_norm": 0.7298995852470398,
+ "learning_rate": 4.178396066650432e-06,
+ "loss": 0.5641,
+ "step": 582
+ },
+ {
+ "epoch": 1.8235908141962422,
+ "grad_norm": 0.9469727873802185,
+ "learning_rate": 4.1751847069021516e-06,
+ "loss": 0.5557,
+ "step": 583
+ },
+ {
+ "epoch": 1.826722338204593,
+ "grad_norm": 0.8641784191131592,
+ "learning_rate": 4.1719683227126386e-06,
+ "loss": 0.5153,
+ "step": 584
+ },
+ {
+ "epoch": 1.8298538622129437,
+ "grad_norm": 0.7316668629646301,
+ "learning_rate": 4.168746923728908e-06,
+ "loss": 0.4988,
+ "step": 585
+ },
+ {
+ "epoch": 1.8329853862212944,
+ "grad_norm": 0.8795468807220459,
+ "learning_rate": 4.165520519613017e-06,
+ "loss": 0.5483,
+ "step": 586
+ },
+ {
+ "epoch": 1.836116910229645,
+ "grad_norm": 0.7323560118675232,
+ "learning_rate": 4.162289120042034e-06,
+ "loss": 0.5194,
+ "step": 587
+ },
+ {
+ "epoch": 1.8392484342379958,
+ "grad_norm": 0.8217021822929382,
+ "learning_rate": 4.159052734708013e-06,
+ "loss": 0.532,
+ "step": 588
+ },
+ {
+ "epoch": 1.8423799582463465,
+ "grad_norm": 0.7669674754142761,
+ "learning_rate": 4.155811373317958e-06,
+ "loss": 0.541,
+ "step": 589
+ },
+ {
+ "epoch": 1.8455114822546972,
+ "grad_norm": 0.8312156200408936,
+ "learning_rate": 4.152565045593801e-06,
+ "loss": 0.5298,
+ "step": 590
+ },
+ {
+ "epoch": 1.848643006263048,
+ "grad_norm": 0.8967565298080444,
+ "learning_rate": 4.1493137612723665e-06,
+ "loss": 0.51,
+ "step": 591
+ },
+ {
+ "epoch": 1.8517745302713986,
+ "grad_norm": 0.8706664443016052,
+ "learning_rate": 4.14605753010535e-06,
+ "loss": 0.4941,
+ "step": 592
+ },
+ {
+ "epoch": 1.8549060542797495,
+ "grad_norm": 0.7585753798484802,
+ "learning_rate": 4.14279636185928e-06,
+ "loss": 0.5161,
+ "step": 593
+ },
+ {
+ "epoch": 1.8580375782881002,
+ "grad_norm": 0.7495241165161133,
+ "learning_rate": 4.1395302663154954e-06,
+ "loss": 0.5388,
+ "step": 594
+ },
+ {
+ "epoch": 1.861169102296451,
+ "grad_norm": 1.0746862888336182,
+ "learning_rate": 4.136259253270114e-06,
+ "loss": 0.4976,
+ "step": 595
+ },
+ {
+ "epoch": 1.8643006263048016,
+ "grad_norm": 0.872309684753418,
+ "learning_rate": 4.132983332534e-06,
+ "loss": 0.559,
+ "step": 596
+ },
+ {
+ "epoch": 1.8674321503131524,
+ "grad_norm": 0.8759891986846924,
+ "learning_rate": 4.1297025139327405e-06,
+ "loss": 0.5436,
+ "step": 597
+ },
+ {
+ "epoch": 1.8705636743215033,
+ "grad_norm": 1.1044493913650513,
+ "learning_rate": 4.126416807306611e-06,
+ "loss": 0.5476,
+ "step": 598
+ },
+ {
+ "epoch": 1.873695198329854,
+ "grad_norm": 0.8340442180633545,
+ "learning_rate": 4.123126222510549e-06,
+ "loss": 0.4592,
+ "step": 599
+ },
+ {
+ "epoch": 1.8768267223382047,
+ "grad_norm": 0.8331449031829834,
+ "learning_rate": 4.119830769414123e-06,
+ "loss": 0.5219,
+ "step": 600
+ },
+ {
+ "epoch": 1.8799582463465554,
+ "grad_norm": 1.0862973928451538,
+ "learning_rate": 4.116530457901503e-06,
+ "loss": 0.5159,
+ "step": 601
+ },
+ {
+ "epoch": 1.883089770354906,
+ "grad_norm": 0.8524414300918579,
+ "learning_rate": 4.113225297871431e-06,
+ "loss": 0.5502,
+ "step": 602
+ },
+ {
+ "epoch": 1.8862212943632568,
+ "grad_norm": 1.4945416450500488,
+ "learning_rate": 4.10991529923719e-06,
+ "loss": 0.5627,
+ "step": 603
+ },
+ {
+ "epoch": 1.8893528183716075,
+ "grad_norm": 1.5518157482147217,
+ "learning_rate": 4.10660047192658e-06,
+ "loss": 0.5517,
+ "step": 604
+ },
+ {
+ "epoch": 1.8924843423799582,
+ "grad_norm": 2.56638765335083,
+ "learning_rate": 4.103280825881878e-06,
+ "loss": 0.5422,
+ "step": 605
+ },
+ {
+ "epoch": 1.895615866388309,
+ "grad_norm": 0.867254912853241,
+ "learning_rate": 4.099956371059817e-06,
+ "loss": 0.4991,
+ "step": 606
+ },
+ {
+ "epoch": 1.8987473903966596,
+ "grad_norm": 0.9555892944335938,
+ "learning_rate": 4.096627117431554e-06,
+ "loss": 0.5339,
+ "step": 607
+ },
+ {
+ "epoch": 1.9018789144050103,
+ "grad_norm": 0.7905483245849609,
+ "learning_rate": 4.093293074982638e-06,
+ "loss": 0.5168,
+ "step": 608
+ },
+ {
+ "epoch": 1.905010438413361,
+ "grad_norm": 0.7500227093696594,
+ "learning_rate": 4.089954253712981e-06,
+ "loss": 0.5096,
+ "step": 609
+ },
+ {
+ "epoch": 1.9081419624217117,
+ "grad_norm": 0.8458324074745178,
+ "learning_rate": 4.086610663636828e-06,
+ "loss": 0.5296,
+ "step": 610
+ },
+ {
+ "epoch": 1.9112734864300627,
+ "grad_norm": 0.7392706871032715,
+ "learning_rate": 4.08326231478273e-06,
+ "loss": 0.5305,
+ "step": 611
+ },
+ {
+ "epoch": 1.9144050104384134,
+ "grad_norm": 0.8113343715667725,
+ "learning_rate": 4.079909217193508e-06,
+ "loss": 0.5044,
+ "step": 612
+ },
+ {
+ "epoch": 1.917536534446764,
+ "grad_norm": 0.7637801766395569,
+ "learning_rate": 4.076551380926226e-06,
+ "loss": 0.5298,
+ "step": 613
+ },
+ {
+ "epoch": 1.9206680584551148,
+ "grad_norm": 1.0523375272750854,
+ "learning_rate": 4.073188816052164e-06,
+ "loss": 0.5111,
+ "step": 614
+ },
+ {
+ "epoch": 1.9237995824634657,
+ "grad_norm": 0.8224868774414062,
+ "learning_rate": 4.069821532656781e-06,
+ "loss": 0.5178,
+ "step": 615
+ },
+ {
+ "epoch": 1.9269311064718164,
+ "grad_norm": 0.7270777821540833,
+ "learning_rate": 4.066449540839693e-06,
+ "loss": 0.5307,
+ "step": 616
+ },
+ {
+ "epoch": 1.9300626304801671,
+ "grad_norm": 0.7214602828025818,
+ "learning_rate": 4.063072850714631e-06,
+ "loss": 0.5171,
+ "step": 617
+ },
+ {
+ "epoch": 1.9331941544885178,
+ "grad_norm": 0.7333671450614929,
+ "learning_rate": 4.059691472409426e-06,
+ "loss": 0.56,
+ "step": 618
+ },
+ {
+ "epoch": 1.9363256784968685,
+ "grad_norm": 0.9166824221611023,
+ "learning_rate": 4.056305416065964e-06,
+ "loss": 0.5388,
+ "step": 619
+ },
+ {
+ "epoch": 1.9394572025052192,
+ "grad_norm": 0.7743303775787354,
+ "learning_rate": 4.052914691840167e-06,
+ "loss": 0.5134,
+ "step": 620
+ },
+ {
+ "epoch": 1.94258872651357,
+ "grad_norm": 0.704097330570221,
+ "learning_rate": 4.0495193099019524e-06,
+ "loss": 0.4926,
+ "step": 621
+ },
+ {
+ "epoch": 1.9457202505219207,
+ "grad_norm": 0.8508503437042236,
+ "learning_rate": 4.046119280435212e-06,
+ "loss": 0.5008,
+ "step": 622
+ },
+ {
+ "epoch": 1.9488517745302714,
+ "grad_norm": 0.725933313369751,
+ "learning_rate": 4.042714613637775e-06,
+ "loss": 0.5549,
+ "step": 623
+ },
+ {
+ "epoch": 1.951983298538622,
+ "grad_norm": 0.8919175863265991,
+ "learning_rate": 4.039305319721381e-06,
+ "loss": 0.5183,
+ "step": 624
+ },
+ {
+ "epoch": 1.9551148225469728,
+ "grad_norm": 0.827919065952301,
+ "learning_rate": 4.035891408911644e-06,
+ "loss": 0.5624,
+ "step": 625
+ },
+ {
+ "epoch": 1.9582463465553235,
+ "grad_norm": 0.7415187358856201,
+ "learning_rate": 4.032472891448032e-06,
+ "loss": 0.5454,
+ "step": 626
+ },
+ {
+ "epoch": 1.9613778705636742,
+ "grad_norm": 0.7675788998603821,
+ "learning_rate": 4.029049777583824e-06,
+ "loss": 0.5361,
+ "step": 627
+ },
+ {
+ "epoch": 1.964509394572025,
+ "grad_norm": 0.8464030623435974,
+ "learning_rate": 4.025622077586088e-06,
+ "loss": 0.5295,
+ "step": 628
+ },
+ {
+ "epoch": 1.9676409185803758,
+ "grad_norm": 0.7641633749008179,
+ "learning_rate": 4.022189801735646e-06,
+ "loss": 0.55,
+ "step": 629
+ },
+ {
+ "epoch": 1.9707724425887265,
+ "grad_norm": 0.7813227772712708,
+ "learning_rate": 4.018752960327048e-06,
+ "loss": 0.5587,
+ "step": 630
+ },
+ {
+ "epoch": 1.9739039665970772,
+ "grad_norm": 0.7576701641082764,
+ "learning_rate": 4.015311563668533e-06,
+ "loss": 0.5413,
+ "step": 631
+ },
+ {
+ "epoch": 1.977035490605428,
+ "grad_norm": 0.6949650049209595,
+ "learning_rate": 4.011865622082004e-06,
+ "loss": 0.5344,
+ "step": 632
+ },
+ {
+ "epoch": 1.9801670146137789,
+ "grad_norm": 0.9009145498275757,
+ "learning_rate": 4.008415145902997e-06,
+ "loss": 0.5233,
+ "step": 633
+ },
+ {
+ "epoch": 1.9832985386221296,
+ "grad_norm": 0.7635822892189026,
+ "learning_rate": 4.004960145480651e-06,
+ "loss": 0.4981,
+ "step": 634
+ },
+ {
+ "epoch": 1.9864300626304803,
+ "grad_norm": 0.8916334509849548,
+ "learning_rate": 4.0015006311776685e-06,
+ "loss": 0.5311,
+ "step": 635
+ },
+ {
+ "epoch": 1.989561586638831,
+ "grad_norm": 0.7197673320770264,
+ "learning_rate": 3.998036613370295e-06,
+ "loss": 0.5361,
+ "step": 636
+ },
+ {
+ "epoch": 1.9926931106471817,
+ "grad_norm": 0.8391228914260864,
+ "learning_rate": 3.994568102448284e-06,
+ "loss": 0.5473,
+ "step": 637
+ },
+ {
+ "epoch": 1.9958246346555324,
+ "grad_norm": 0.9371750950813293,
+ "learning_rate": 3.991095108814862e-06,
+ "loss": 0.5303,
+ "step": 638
+ },
+ {
+ "epoch": 1.998956158663883,
+ "grad_norm": 0.8929619789123535,
+ "learning_rate": 3.9876176428867046e-06,
+ "loss": 0.533,
+ "step": 639
+ },
+ {
+ "epoch": 2.0,
+ "grad_norm": 0.8929619789123535,
+ "learning_rate": 3.9841357150938984e-06,
+ "loss": 0.1831,
+ "step": 640
+ },
+ {
+ "epoch": 2.0031315240083507,
+ "grad_norm": 0.8802503347396851,
+ "learning_rate": 3.9806493358799135e-06,
+ "loss": 0.493,
+ "step": 641
+ },
+ {
+ "epoch": 2.0062630480167014,
+ "grad_norm": 0.802759051322937,
+ "learning_rate": 3.977158515701571e-06,
+ "loss": 0.498,
+ "step": 642
+ },
+ {
+ "epoch": 2.009394572025052,
+ "grad_norm": 1.0235401391983032,
+ "learning_rate": 3.973663265029013e-06,
+ "loss": 0.4887,
+ "step": 643
+ },
+ {
+ "epoch": 2.012526096033403,
+ "grad_norm": 0.7219089865684509,
+ "learning_rate": 3.97016359434567e-06,
+ "loss": 0.4628,
+ "step": 644
+ },
+ {
+ "epoch": 2.0156576200417535,
+ "grad_norm": 0.7887073755264282,
+ "learning_rate": 3.966659514148229e-06,
+ "loss": 0.525,
+ "step": 645
+ },
+ {
+ "epoch": 2.0187891440501042,
+ "grad_norm": 0.7960914969444275,
+ "learning_rate": 3.963151034946602e-06,
+ "loss": 0.4643,
+ "step": 646
+ },
+ {
+ "epoch": 2.021920668058455,
+ "grad_norm": 0.7902271151542664,
+ "learning_rate": 3.959638167263895e-06,
+ "loss": 0.4922,
+ "step": 647
+ },
+ {
+ "epoch": 2.0250521920668056,
+ "grad_norm": 0.9501478672027588,
+ "learning_rate": 3.956120921636379e-06,
+ "loss": 0.5285,
+ "step": 648
+ },
+ {
+ "epoch": 2.028183716075157,
+ "grad_norm": 0.9510527849197388,
+ "learning_rate": 3.952599308613454e-06,
+ "loss": 0.4909,
+ "step": 649
+ },
+ {
+ "epoch": 2.0313152400835075,
+ "grad_norm": 0.9408219456672668,
+ "learning_rate": 3.949073338757619e-06,
+ "loss": 0.4912,
+ "step": 650
+ },
+ {
+ "epoch": 2.034446764091858,
+ "grad_norm": 0.7148041725158691,
+ "learning_rate": 3.945543022644441e-06,
+ "loss": 0.4792,
+ "step": 651
+ },
+ {
+ "epoch": 2.037578288100209,
+ "grad_norm": 0.7737464904785156,
+ "learning_rate": 3.942008370862522e-06,
+ "loss": 0.4694,
+ "step": 652
+ },
+ {
+ "epoch": 2.0407098121085596,
+ "grad_norm": 0.8405889868736267,
+ "learning_rate": 3.938469394013472e-06,
+ "loss": 0.5048,
+ "step": 653
+ },
+ {
+ "epoch": 2.0438413361169103,
+ "grad_norm": 0.7896456718444824,
+ "learning_rate": 3.934926102711869e-06,
+ "loss": 0.4882,
+ "step": 654
+ },
+ {
+ "epoch": 2.046972860125261,
+ "grad_norm": 0.9290387034416199,
+ "learning_rate": 3.931378507585231e-06,
+ "loss": 0.503,
+ "step": 655
+ },
+ {
+ "epoch": 2.0501043841336117,
+ "grad_norm": 0.7386118769645691,
+ "learning_rate": 3.927826619273991e-06,
+ "loss": 0.4918,
+ "step": 656
+ },
+ {
+ "epoch": 2.0532359081419624,
+ "grad_norm": 0.9878676533699036,
+ "learning_rate": 3.92427044843145e-06,
+ "loss": 0.4958,
+ "step": 657
+ },
+ {
+ "epoch": 2.056367432150313,
+ "grad_norm": 1.0111151933670044,
+ "learning_rate": 3.92071000572376e-06,
+ "loss": 0.4886,
+ "step": 658
+ },
+ {
+ "epoch": 2.059498956158664,
+ "grad_norm": 0.8612061738967896,
+ "learning_rate": 3.917145301829884e-06,
+ "loss": 0.5216,
+ "step": 659
+ },
+ {
+ "epoch": 2.0626304801670146,
+ "grad_norm": 0.7458518743515015,
+ "learning_rate": 3.913576347441564e-06,
+ "loss": 0.4807,
+ "step": 660
+ },
+ {
+ "epoch": 2.0657620041753653,
+ "grad_norm": 0.7775886058807373,
+ "learning_rate": 3.910003153263294e-06,
+ "loss": 0.4837,
+ "step": 661
+ },
+ {
+ "epoch": 2.068893528183716,
+ "grad_norm": 0.7144196629524231,
+ "learning_rate": 3.906425730012282e-06,
+ "loss": 0.5081,
+ "step": 662
+ },
+ {
+ "epoch": 2.0720250521920667,
+ "grad_norm": 0.844971776008606,
+ "learning_rate": 3.9028440884184215e-06,
+ "loss": 0.474,
+ "step": 663
+ },
+ {
+ "epoch": 2.0751565762004174,
+ "grad_norm": 0.9709283113479614,
+ "learning_rate": 3.899258239224257e-06,
+ "loss": 0.503,
+ "step": 664
+ },
+ {
+ "epoch": 2.078288100208768,
+ "grad_norm": 1.1325515508651733,
+ "learning_rate": 3.895668193184954e-06,
+ "loss": 0.5058,
+ "step": 665
+ },
+ {
+ "epoch": 2.081419624217119,
+ "grad_norm": 0.7211254239082336,
+ "learning_rate": 3.892073961068266e-06,
+ "loss": 0.4982,
+ "step": 666
+ },
+ {
+ "epoch": 2.08455114822547,
+ "grad_norm": 0.8975517153739929,
+ "learning_rate": 3.888475553654502e-06,
+ "loss": 0.4699,
+ "step": 667
+ },
+ {
+ "epoch": 2.0876826722338206,
+ "grad_norm": 0.8270771503448486,
+ "learning_rate": 3.884872981736493e-06,
+ "loss": 0.4586,
+ "step": 668
+ },
+ {
+ "epoch": 2.0908141962421714,
+ "grad_norm": 0.8606625199317932,
+ "learning_rate": 3.881266256119561e-06,
+ "loss": 0.5299,
+ "step": 669
+ },
+ {
+ "epoch": 2.093945720250522,
+ "grad_norm": 0.9013976454734802,
+ "learning_rate": 3.877655387621488e-06,
+ "loss": 0.4887,
+ "step": 670
+ },
+ {
+ "epoch": 2.0970772442588728,
+ "grad_norm": 0.7603903412818909,
+ "learning_rate": 3.8740403870724795e-06,
+ "loss": 0.4992,
+ "step": 671
+ },
+ {
+ "epoch": 2.1002087682672235,
+ "grad_norm": 1.0432350635528564,
+ "learning_rate": 3.870421265315137e-06,
+ "loss": 0.5035,
+ "step": 672
+ },
+ {
+ "epoch": 2.103340292275574,
+ "grad_norm": 0.7727136611938477,
+ "learning_rate": 3.8667980332044195e-06,
+ "loss": 0.5006,
+ "step": 673
+ },
+ {
+ "epoch": 2.106471816283925,
+ "grad_norm": 0.9764307141304016,
+ "learning_rate": 3.863170701607618e-06,
+ "loss": 0.5061,
+ "step": 674
+ },
+ {
+ "epoch": 2.1096033402922756,
+ "grad_norm": 0.747818648815155,
+ "learning_rate": 3.859539281404317e-06,
+ "loss": 0.4761,
+ "step": 675
+ },
+ {
+ "epoch": 2.1127348643006263,
+ "grad_norm": 0.7254915237426758,
+ "learning_rate": 3.855903783486364e-06,
+ "loss": 0.5166,
+ "step": 676
+ },
+ {
+ "epoch": 2.115866388308977,
+ "grad_norm": 0.7678592801094055,
+ "learning_rate": 3.852264218757839e-06,
+ "loss": 0.5122,
+ "step": 677
+ },
+ {
+ "epoch": 2.1189979123173277,
+ "grad_norm": 0.8140144348144531,
+ "learning_rate": 3.8486205981350165e-06,
+ "loss": 0.4551,
+ "step": 678
+ },
+ {
+ "epoch": 2.1221294363256784,
+ "grad_norm": 0.9417359232902527,
+ "learning_rate": 3.844972932546338e-06,
+ "loss": 0.4748,
+ "step": 679
+ },
+ {
+ "epoch": 2.125260960334029,
+ "grad_norm": 0.8035290241241455,
+ "learning_rate": 3.841321232932378e-06,
+ "loss": 0.5079,
+ "step": 680
+ },
+ {
+ "epoch": 2.12839248434238,
+ "grad_norm": 0.8300641775131226,
+ "learning_rate": 3.837665510245809e-06,
+ "loss": 0.5018,
+ "step": 681
+ },
+ {
+ "epoch": 2.1315240083507305,
+ "grad_norm": 0.8293547034263611,
+ "learning_rate": 3.8340057754513715e-06,
+ "loss": 0.5042,
+ "step": 682
+ },
+ {
+ "epoch": 2.1346555323590812,
+ "grad_norm": 0.7780007719993591,
+ "learning_rate": 3.8303420395258365e-06,
+ "loss": 0.5048,
+ "step": 683
+ },
+ {
+ "epoch": 2.137787056367432,
+ "grad_norm": 0.7519420385360718,
+ "learning_rate": 3.8266743134579785e-06,
+ "loss": 0.5108,
+ "step": 684
+ },
+ {
+ "epoch": 2.140918580375783,
+ "grad_norm": 0.7872384190559387,
+ "learning_rate": 3.8230026082485404e-06,
+ "loss": 0.4924,
+ "step": 685
+ },
+ {
+ "epoch": 2.144050104384134,
+ "grad_norm": 0.7479491829872131,
+ "learning_rate": 3.819326934910197e-06,
+ "loss": 0.5184,
+ "step": 686
+ },
+ {
+ "epoch": 2.1471816283924845,
+ "grad_norm": 0.8438299298286438,
+ "learning_rate": 3.815647304467527e-06,
+ "loss": 0.4791,
+ "step": 687
+ },
+ {
+ "epoch": 2.150313152400835,
+ "grad_norm": 0.7923721671104431,
+ "learning_rate": 3.8119637279569773e-06,
+ "loss": 0.5305,
+ "step": 688
+ },
+ {
+ "epoch": 2.153444676409186,
+ "grad_norm": 0.7856534719467163,
+ "learning_rate": 3.80827621642683e-06,
+ "loss": 0.5063,
+ "step": 689
+ },
+ {
+ "epoch": 2.1565762004175366,
+ "grad_norm": 0.8544500470161438,
+ "learning_rate": 3.8045847809371706e-06,
+ "loss": 0.4989,
+ "step": 690
+ },
+ {
+ "epoch": 2.1597077244258873,
+ "grad_norm": 0.865390956401825,
+ "learning_rate": 3.800889432559852e-06,
+ "loss": 0.4931,
+ "step": 691
+ },
+ {
+ "epoch": 2.162839248434238,
+ "grad_norm": 0.9809399247169495,
+ "learning_rate": 3.797190182378466e-06,
+ "loss": 0.4785,
+ "step": 692
+ },
+ {
+ "epoch": 2.1659707724425887,
+ "grad_norm": 0.7954536080360413,
+ "learning_rate": 3.793487041488304e-06,
+ "loss": 0.4847,
+ "step": 693
+ },
+ {
+ "epoch": 2.1691022964509394,
+ "grad_norm": 0.754704475402832,
+ "learning_rate": 3.7897800209963298e-06,
+ "loss": 0.5125,
+ "step": 694
+ },
+ {
+ "epoch": 2.17223382045929,
+ "grad_norm": 0.7319822311401367,
+ "learning_rate": 3.7860691320211414e-06,
+ "loss": 0.477,
+ "step": 695
+ },
+ {
+ "epoch": 2.175365344467641,
+ "grad_norm": 0.8198635578155518,
+ "learning_rate": 3.7823543856929403e-06,
+ "loss": 0.4764,
+ "step": 696
+ },
+ {
+ "epoch": 2.1784968684759916,
+ "grad_norm": 0.708933413028717,
+ "learning_rate": 3.7786357931534987e-06,
+ "loss": 0.4948,
+ "step": 697
+ },
+ {
+ "epoch": 2.1816283924843423,
+ "grad_norm": 0.8493193984031677,
+ "learning_rate": 3.774913365556123e-06,
+ "loss": 0.5271,
+ "step": 698
+ },
+ {
+ "epoch": 2.184759916492693,
+ "grad_norm": 0.7999475002288818,
+ "learning_rate": 3.771187114065622e-06,
+ "loss": 0.4804,
+ "step": 699
+ },
+ {
+ "epoch": 2.1878914405010437,
+ "grad_norm": 0.8366796970367432,
+ "learning_rate": 3.7674570498582776e-06,
+ "loss": 0.457,
+ "step": 700
+ },
+ {
+ "epoch": 2.1910229645093944,
+ "grad_norm": 0.7935530543327332,
+ "learning_rate": 3.7637231841218015e-06,
+ "loss": 0.5001,
+ "step": 701
+ },
+ {
+ "epoch": 2.1941544885177455,
+ "grad_norm": 0.7700153589248657,
+ "learning_rate": 3.7599855280553125e-06,
+ "loss": 0.5091,
+ "step": 702
+ },
+ {
+ "epoch": 2.1972860125260962,
+ "grad_norm": 0.7991652488708496,
+ "learning_rate": 3.756244092869294e-06,
+ "loss": 0.4955,
+ "step": 703
+ },
+ {
+ "epoch": 2.200417536534447,
+ "grad_norm": 0.720051646232605,
+ "learning_rate": 3.752498889785567e-06,
+ "loss": 0.4902,
+ "step": 704
+ },
+ {
+ "epoch": 2.2035490605427976,
+ "grad_norm": 0.7312369346618652,
+ "learning_rate": 3.748749930037252e-06,
+ "loss": 0.4935,
+ "step": 705
+ },
+ {
+ "epoch": 2.2066805845511483,
+ "grad_norm": 0.8418563008308411,
+ "learning_rate": 3.744997224868739e-06,
+ "loss": 0.5186,
+ "step": 706
+ },
+ {
+ "epoch": 2.209812108559499,
+ "grad_norm": 0.8324081301689148,
+ "learning_rate": 3.741240785535649e-06,
+ "loss": 0.482,
+ "step": 707
+ },
+ {
+ "epoch": 2.2129436325678498,
+ "grad_norm": 0.8051855564117432,
+ "learning_rate": 3.737480623304805e-06,
+ "loss": 0.4663,
+ "step": 708
+ },
+ {
+ "epoch": 2.2160751565762005,
+ "grad_norm": 0.9464184641838074,
+ "learning_rate": 3.7337167494541948e-06,
+ "loss": 0.451,
+ "step": 709
+ },
+ {
+ "epoch": 2.219206680584551,
+ "grad_norm": 1.0227075815200806,
+ "learning_rate": 3.729949175272942e-06,
+ "loss": 0.4428,
+ "step": 710
+ },
+ {
+ "epoch": 2.222338204592902,
+ "grad_norm": 0.7930364012718201,
+ "learning_rate": 3.7261779120612633e-06,
+ "loss": 0.5132,
+ "step": 711
+ },
+ {
+ "epoch": 2.2254697286012526,
+ "grad_norm": 0.9033688306808472,
+ "learning_rate": 3.7224029711304444e-06,
+ "loss": 0.476,
+ "step": 712
+ },
+ {
+ "epoch": 2.2286012526096033,
+ "grad_norm": 0.8026887774467468,
+ "learning_rate": 3.7186243638028007e-06,
+ "loss": 0.4959,
+ "step": 713
+ },
+ {
+ "epoch": 2.231732776617954,
+ "grad_norm": 0.9391745328903198,
+ "learning_rate": 3.714842101411642e-06,
+ "loss": 0.4962,
+ "step": 714
+ },
+ {
+ "epoch": 2.2348643006263047,
+ "grad_norm": 0.7774361371994019,
+ "learning_rate": 3.711056195301245e-06,
+ "loss": 0.4748,
+ "step": 715
+ },
+ {
+ "epoch": 2.2379958246346554,
+ "grad_norm": 0.9278722405433655,
+ "learning_rate": 3.7072666568268115e-06,
+ "loss": 0.5074,
+ "step": 716
+ },
+ {
+ "epoch": 2.241127348643006,
+ "grad_norm": 0.771423876285553,
+ "learning_rate": 3.7034734973544406e-06,
+ "loss": 0.5072,
+ "step": 717
+ },
+ {
+ "epoch": 2.244258872651357,
+ "grad_norm": 0.8707448244094849,
+ "learning_rate": 3.6996767282610892e-06,
+ "loss": 0.4851,
+ "step": 718
+ },
+ {
+ "epoch": 2.2473903966597075,
+ "grad_norm": 0.7641019821166992,
+ "learning_rate": 3.695876360934543e-06,
+ "loss": 0.4941,
+ "step": 719
+ },
+ {
+ "epoch": 2.2505219206680582,
+ "grad_norm": 0.7647167444229126,
+ "learning_rate": 3.69207240677338e-06,
+ "loss": 0.5225,
+ "step": 720
+ },
+ {
+ "epoch": 2.2536534446764094,
+ "grad_norm": 0.9108865261077881,
+ "learning_rate": 3.6882648771869345e-06,
+ "loss": 0.454,
+ "step": 721
+ },
+ {
+ "epoch": 2.25678496868476,
+ "grad_norm": 0.86728835105896,
+ "learning_rate": 3.6844537835952666e-06,
+ "loss": 0.4461,
+ "step": 722
+ },
+ {
+ "epoch": 2.259916492693111,
+ "grad_norm": 1.1055282354354858,
+ "learning_rate": 3.6806391374291238e-06,
+ "loss": 0.4618,
+ "step": 723
+ },
+ {
+ "epoch": 2.2630480167014615,
+ "grad_norm": 0.7591858506202698,
+ "learning_rate": 3.6768209501299116e-06,
+ "loss": 0.4901,
+ "step": 724
+ },
+ {
+ "epoch": 2.266179540709812,
+ "grad_norm": 0.7966359257698059,
+ "learning_rate": 3.6729992331496554e-06,
+ "loss": 0.5171,
+ "step": 725
+ },
+ {
+ "epoch": 2.269311064718163,
+ "grad_norm": 0.983161211013794,
+ "learning_rate": 3.6691739979509672e-06,
+ "loss": 0.4949,
+ "step": 726
+ },
+ {
+ "epoch": 2.2724425887265136,
+ "grad_norm": 0.9200037121772766,
+ "learning_rate": 3.6653452560070106e-06,
+ "loss": 0.5234,
+ "step": 727
+ },
+ {
+ "epoch": 2.2755741127348643,
+ "grad_norm": 1.0288461446762085,
+ "learning_rate": 3.6615130188014685e-06,
+ "loss": 0.4713,
+ "step": 728
+ },
+ {
+ "epoch": 2.278705636743215,
+ "grad_norm": 0.7325463891029358,
+ "learning_rate": 3.6576772978285065e-06,
+ "loss": 0.527,
+ "step": 729
+ },
+ {
+ "epoch": 2.2818371607515657,
+ "grad_norm": 1.0045446157455444,
+ "learning_rate": 3.6538381045927395e-06,
+ "loss": 0.5139,
+ "step": 730
+ },
+ {
+ "epoch": 2.2849686847599164,
+ "grad_norm": 0.7391849756240845,
+ "learning_rate": 3.6499954506091963e-06,
+ "loss": 0.4829,
+ "step": 731
+ },
+ {
+ "epoch": 2.288100208768267,
+ "grad_norm": 0.7808229923248291,
+ "learning_rate": 3.646149347403286e-06,
+ "loss": 0.4831,
+ "step": 732
+ },
+ {
+ "epoch": 2.291231732776618,
+ "grad_norm": 0.7056961059570312,
+ "learning_rate": 3.6422998065107628e-06,
+ "loss": 0.5066,
+ "step": 733
+ },
+ {
+ "epoch": 2.2943632567849686,
+ "grad_norm": 0.7498443126678467,
+ "learning_rate": 3.6384468394776935e-06,
+ "loss": 0.4724,
+ "step": 734
+ },
+ {
+ "epoch": 2.2974947807933193,
+ "grad_norm": 0.8511576056480408,
+ "learning_rate": 3.634590457860418e-06,
+ "loss": 0.5286,
+ "step": 735
+ },
+ {
+ "epoch": 2.30062630480167,
+ "grad_norm": 0.873635470867157,
+ "learning_rate": 3.63073067322552e-06,
+ "loss": 0.4751,
+ "step": 736
+ },
+ {
+ "epoch": 2.3037578288100207,
+ "grad_norm": 0.7427377104759216,
+ "learning_rate": 3.626867497149788e-06,
+ "loss": 0.475,
+ "step": 737
+ },
+ {
+ "epoch": 2.306889352818372,
+ "grad_norm": 1.0591017007827759,
+ "learning_rate": 3.623000941220186e-06,
+ "loss": 0.4591,
+ "step": 738
+ },
+ {
+ "epoch": 2.3100208768267225,
+ "grad_norm": 0.8767879009246826,
+ "learning_rate": 3.6191310170338114e-06,
+ "loss": 0.4673,
+ "step": 739
+ },
+ {
+ "epoch": 2.3131524008350732,
+ "grad_norm": 0.9156234860420227,
+ "learning_rate": 3.615257736197866e-06,
+ "loss": 0.4622,
+ "step": 740
+ },
+ {
+ "epoch": 2.316283924843424,
+ "grad_norm": 0.6743756532669067,
+ "learning_rate": 3.611381110329619e-06,
+ "loss": 0.4723,
+ "step": 741
+ },
+ {
+ "epoch": 2.3194154488517746,
+ "grad_norm": 0.8655558228492737,
+ "learning_rate": 3.6075011510563732e-06,
+ "loss": 0.471,
+ "step": 742
+ },
+ {
+ "epoch": 2.3225469728601253,
+ "grad_norm": 0.7652033567428589,
+ "learning_rate": 3.603617870015429e-06,
+ "loss": 0.5155,
+ "step": 743
+ },
+ {
+ "epoch": 2.325678496868476,
+ "grad_norm": 0.7970699667930603,
+ "learning_rate": 3.599731278854049e-06,
+ "loss": 0.4507,
+ "step": 744
+ },
+ {
+ "epoch": 2.3288100208768268,
+ "grad_norm": 0.7538278698921204,
+ "learning_rate": 3.5958413892294253e-06,
+ "loss": 0.5093,
+ "step": 745
+ },
+ {
+ "epoch": 2.3319415448851775,
+ "grad_norm": 0.735996663570404,
+ "learning_rate": 3.5919482128086414e-06,
+ "loss": 0.5008,
+ "step": 746
+ },
+ {
+ "epoch": 2.335073068893528,
+ "grad_norm": 0.7643904685974121,
+ "learning_rate": 3.588051761268642e-06,
+ "loss": 0.5072,
+ "step": 747
+ },
+ {
+ "epoch": 2.338204592901879,
+ "grad_norm": 0.7646260857582092,
+ "learning_rate": 3.584152046296191e-06,
+ "loss": 0.4578,
+ "step": 748
+ },
+ {
+ "epoch": 2.3413361169102296,
+ "grad_norm": 0.7873825430870056,
+ "learning_rate": 3.5802490795878446e-06,
+ "loss": 0.5249,
+ "step": 749
+ },
+ {
+ "epoch": 2.3444676409185803,
+ "grad_norm": 1.095333218574524,
+ "learning_rate": 3.5763428728499095e-06,
+ "loss": 0.4913,
+ "step": 750
+ },
+ {
+ "epoch": 2.347599164926931,
+ "grad_norm": 1.3425395488739014,
+ "learning_rate": 3.5724334377984107e-06,
+ "loss": 0.5317,
+ "step": 751
+ },
+ {
+ "epoch": 2.3507306889352817,
+ "grad_norm": 0.7151113748550415,
+ "learning_rate": 3.568520786159055e-06,
+ "loss": 0.5135,
+ "step": 752
+ },
+ {
+ "epoch": 2.3538622129436324,
+ "grad_norm": 0.8072878122329712,
+ "learning_rate": 3.5646049296672004e-06,
+ "loss": 0.4863,
+ "step": 753
+ },
+ {
+ "epoch": 2.356993736951983,
+ "grad_norm": 0.8040189743041992,
+ "learning_rate": 3.5606858800678123e-06,
+ "loss": 0.4668,
+ "step": 754
+ },
+ {
+ "epoch": 2.3601252609603343,
+ "grad_norm": 0.7749765515327454,
+ "learning_rate": 3.5567636491154385e-06,
+ "loss": 0.4681,
+ "step": 755
+ },
+ {
+ "epoch": 2.3632567849686845,
+ "grad_norm": 0.773013710975647,
+ "learning_rate": 3.5528382485741638e-06,
+ "loss": 0.5012,
+ "step": 756
+ },
+ {
+ "epoch": 2.3663883089770357,
+ "grad_norm": 0.7017714381217957,
+ "learning_rate": 3.5489096902175835e-06,
+ "loss": 0.5019,
+ "step": 757
+ },
+ {
+ "epoch": 2.3695198329853864,
+ "grad_norm": 1.132458209991455,
+ "learning_rate": 3.5449779858287625e-06,
+ "loss": 0.5131,
+ "step": 758
+ },
+ {
+ "epoch": 2.372651356993737,
+ "grad_norm": 0.7624574899673462,
+ "learning_rate": 3.541043147200202e-06,
+ "loss": 0.4856,
+ "step": 759
+ },
+ {
+ "epoch": 2.375782881002088,
+ "grad_norm": 0.9078478217124939,
+ "learning_rate": 3.5371051861338036e-06,
+ "loss": 0.4337,
+ "step": 760
+ },
+ {
+ "epoch": 2.3789144050104385,
+ "grad_norm": 0.8608354330062866,
+ "learning_rate": 3.5331641144408344e-06,
+ "loss": 0.5053,
+ "step": 761
+ },
+ {
+ "epoch": 2.382045929018789,
+ "grad_norm": 0.775047779083252,
+ "learning_rate": 3.529219943941892e-06,
+ "loss": 0.4779,
+ "step": 762
+ },
+ {
+ "epoch": 2.38517745302714,
+ "grad_norm": 0.7775866389274597,
+ "learning_rate": 3.525272686466866e-06,
+ "loss": 0.4979,
+ "step": 763
+ },
+ {
+ "epoch": 2.3883089770354906,
+ "grad_norm": 0.9386464357376099,
+ "learning_rate": 3.521322353854908e-06,
+ "loss": 0.5222,
+ "step": 764
+ },
+ {
+ "epoch": 2.3914405010438413,
+ "grad_norm": 0.874109148979187,
+ "learning_rate": 3.517368957954391e-06,
+ "loss": 0.4681,
+ "step": 765
+ },
+ {
+ "epoch": 2.394572025052192,
+ "grad_norm": 0.824588418006897,
+ "learning_rate": 3.5134125106228766e-06,
+ "loss": 0.4955,
+ "step": 766
+ },
+ {
+ "epoch": 2.3977035490605427,
+ "grad_norm": 0.8790764808654785,
+ "learning_rate": 3.5094530237270774e-06,
+ "loss": 0.4722,
+ "step": 767
+ },
+ {
+ "epoch": 2.4008350730688934,
+ "grad_norm": 1.1399786472320557,
+ "learning_rate": 3.5054905091428253e-06,
+ "loss": 0.4771,
+ "step": 768
+ },
+ {
+ "epoch": 2.403966597077244,
+ "grad_norm": 1.2586532831192017,
+ "learning_rate": 3.50152497875503e-06,
+ "loss": 0.4849,
+ "step": 769
+ },
+ {
+ "epoch": 2.407098121085595,
+ "grad_norm": 0.7706464529037476,
+ "learning_rate": 3.4975564444576487e-06,
+ "loss": 0.477,
+ "step": 770
+ },
+ {
+ "epoch": 2.4102296450939455,
+ "grad_norm": 0.7695909142494202,
+ "learning_rate": 3.4935849181536484e-06,
+ "loss": 0.4695,
+ "step": 771
+ },
+ {
+ "epoch": 2.4133611691022967,
+ "grad_norm": 0.7744433283805847,
+ "learning_rate": 3.489610411754969e-06,
+ "loss": 0.499,
+ "step": 772
+ },
+ {
+ "epoch": 2.416492693110647,
+ "grad_norm": 0.9265744686126709,
+ "learning_rate": 3.48563293718249e-06,
+ "loss": 0.481,
+ "step": 773
+ },
+ {
+ "epoch": 2.419624217118998,
+ "grad_norm": 1.0680506229400635,
+ "learning_rate": 3.481652506365992e-06,
+ "loss": 0.4898,
+ "step": 774
+ },
+ {
+ "epoch": 2.422755741127349,
+ "grad_norm": 0.721493661403656,
+ "learning_rate": 3.477669131244122e-06,
+ "loss": 0.4813,
+ "step": 775
+ },
+ {
+ "epoch": 2.4258872651356995,
+ "grad_norm": 0.7993559837341309,
+ "learning_rate": 3.4736828237643616e-06,
+ "loss": 0.5179,
+ "step": 776
+ },
+ {
+ "epoch": 2.4290187891440502,
+ "grad_norm": 0.8148090839385986,
+ "learning_rate": 3.4696935958829837e-06,
+ "loss": 0.4753,
+ "step": 777
+ },
+ {
+ "epoch": 2.432150313152401,
+ "grad_norm": 0.8006406426429749,
+ "learning_rate": 3.465701459565022e-06,
+ "loss": 0.501,
+ "step": 778
+ },
+ {
+ "epoch": 2.4352818371607516,
+ "grad_norm": 0.9307970404624939,
+ "learning_rate": 3.4617064267842327e-06,
+ "loss": 0.487,
+ "step": 779
+ },
+ {
+ "epoch": 2.4384133611691023,
+ "grad_norm": 0.7192814946174622,
+ "learning_rate": 3.45770850952306e-06,
+ "loss": 0.4769,
+ "step": 780
+ },
+ {
+ "epoch": 2.441544885177453,
+ "grad_norm": 0.7386271953582764,
+ "learning_rate": 3.4537077197726023e-06,
+ "loss": 0.4726,
+ "step": 781
+ },
+ {
+ "epoch": 2.4446764091858038,
+ "grad_norm": 0.8006314039230347,
+ "learning_rate": 3.449704069532567e-06,
+ "loss": 0.494,
+ "step": 782
+ },
+ {
+ "epoch": 2.4478079331941545,
+ "grad_norm": 0.7466752529144287,
+ "learning_rate": 3.4456975708112477e-06,
+ "loss": 0.4778,
+ "step": 783
+ },
+ {
+ "epoch": 2.450939457202505,
+ "grad_norm": 0.8348856568336487,
+ "learning_rate": 3.4416882356254777e-06,
+ "loss": 0.4766,
+ "step": 784
+ },
+ {
+ "epoch": 2.454070981210856,
+ "grad_norm": 0.754851758480072,
+ "learning_rate": 3.4376760760005994e-06,
+ "loss": 0.4673,
+ "step": 785
+ },
+ {
+ "epoch": 2.4572025052192066,
+ "grad_norm": 0.7854018807411194,
+ "learning_rate": 3.433661103970427e-06,
+ "loss": 0.4954,
+ "step": 786
+ },
+ {
+ "epoch": 2.4603340292275573,
+ "grad_norm": 0.7238256931304932,
+ "learning_rate": 3.4296433315772084e-06,
+ "loss": 0.496,
+ "step": 787
+ },
+ {
+ "epoch": 2.463465553235908,
+ "grad_norm": 0.7007659673690796,
+ "learning_rate": 3.4256227708715915e-06,
+ "loss": 0.4793,
+ "step": 788
+ },
+ {
+ "epoch": 2.4665970772442587,
+ "grad_norm": 0.7234371900558472,
+ "learning_rate": 3.421599433912588e-06,
+ "loss": 0.4935,
+ "step": 789
+ },
+ {
+ "epoch": 2.4697286012526094,
+ "grad_norm": 0.7537544965744019,
+ "learning_rate": 3.4175733327675355e-06,
+ "loss": 0.5194,
+ "step": 790
+ },
+ {
+ "epoch": 2.4728601252609606,
+ "grad_norm": 0.7608047127723694,
+ "learning_rate": 3.4135444795120633e-06,
+ "loss": 0.4793,
+ "step": 791
+ },
+ {
+ "epoch": 2.4759916492693113,
+ "grad_norm": 0.7847898006439209,
+ "learning_rate": 3.4095128862300542e-06,
+ "loss": 0.4877,
+ "step": 792
+ },
+ {
+ "epoch": 2.479123173277662,
+ "grad_norm": 0.8002011179924011,
+ "learning_rate": 3.405478565013609e-06,
+ "loss": 0.4927,
+ "step": 793
+ },
+ {
+ "epoch": 2.4822546972860127,
+ "grad_norm": 0.8200219869613647,
+ "learning_rate": 3.401441527963013e-06,
+ "loss": 0.4997,
+ "step": 794
+ },
+ {
+ "epoch": 2.4853862212943634,
+ "grad_norm": 0.7220162749290466,
+ "learning_rate": 3.3974017871866938e-06,
+ "loss": 0.4668,
+ "step": 795
+ },
+ {
+ "epoch": 2.488517745302714,
+ "grad_norm": 0.8022251129150391,
+ "learning_rate": 3.3933593548011912e-06,
+ "loss": 0.5179,
+ "step": 796
+ },
+ {
+ "epoch": 2.491649269311065,
+ "grad_norm": 0.7914465069770813,
+ "learning_rate": 3.389314242931115e-06,
+ "loss": 0.4943,
+ "step": 797
+ },
+ {
+ "epoch": 2.4947807933194155,
+ "grad_norm": 1.1399403810501099,
+ "learning_rate": 3.385266463709116e-06,
+ "loss": 0.4896,
+ "step": 798
+ },
+ {
+ "epoch": 2.497912317327766,
+ "grad_norm": 0.8098909854888916,
+ "learning_rate": 3.38121602927584e-06,
+ "loss": 0.4904,
+ "step": 799
+ },
+ {
+ "epoch": 2.501043841336117,
+ "grad_norm": 0.7434052228927612,
+ "learning_rate": 3.377162951779902e-06,
+ "loss": 0.4864,
+ "step": 800
+ },
+ {
+ "epoch": 2.5041753653444676,
+ "grad_norm": 0.7397809624671936,
+ "learning_rate": 3.3731072433778407e-06,
+ "loss": 0.486,
+ "step": 801
+ },
+ {
+ "epoch": 2.5073068893528183,
+ "grad_norm": 0.99027019739151,
+ "learning_rate": 3.3690489162340867e-06,
+ "loss": 0.5011,
+ "step": 802
+ },
+ {
+ "epoch": 2.510438413361169,
+ "grad_norm": 0.8443610668182373,
+ "learning_rate": 3.3649879825209246e-06,
+ "loss": 0.455,
+ "step": 803
+ },
+ {
+ "epoch": 2.5135699373695197,
+ "grad_norm": 0.755649983882904,
+ "learning_rate": 3.3609244544184604e-06,
+ "loss": 0.4563,
+ "step": 804
+ },
+ {
+ "epoch": 2.5167014613778704,
+ "grad_norm": 0.728018045425415,
+ "learning_rate": 3.3568583441145765e-06,
+ "loss": 0.471,
+ "step": 805
+ },
+ {
+ "epoch": 2.519832985386221,
+ "grad_norm": 0.7777130603790283,
+ "learning_rate": 3.352789663804904e-06,
+ "loss": 0.4667,
+ "step": 806
+ },
+ {
+ "epoch": 2.522964509394572,
+ "grad_norm": 0.7545619606971741,
+ "learning_rate": 3.3487184256927785e-06,
+ "loss": 0.4915,
+ "step": 807
+ },
+ {
+ "epoch": 2.526096033402923,
+ "grad_norm": 0.8374579548835754,
+ "learning_rate": 3.3446446419892127e-06,
+ "loss": 0.485,
+ "step": 808
+ },
+ {
+ "epoch": 2.5292275574112733,
+ "grad_norm": 0.7354666590690613,
+ "learning_rate": 3.340568324912849e-06,
+ "loss": 0.5254,
+ "step": 809
+ },
+ {
+ "epoch": 2.5323590814196244,
+ "grad_norm": 0.7581545114517212,
+ "learning_rate": 3.3364894866899324e-06,
+ "loss": 0.4483,
+ "step": 810
+ },
+ {
+ "epoch": 2.535490605427975,
+ "grad_norm": 0.8077559471130371,
+ "learning_rate": 3.3324081395542662e-06,
+ "loss": 0.5022,
+ "step": 811
+ },
+ {
+ "epoch": 2.538622129436326,
+ "grad_norm": 0.8827865719795227,
+ "learning_rate": 3.3283242957471806e-06,
+ "loss": 0.4909,
+ "step": 812
+ },
+ {
+ "epoch": 2.5417536534446765,
+ "grad_norm": 0.9139482378959656,
+ "learning_rate": 3.3242379675174953e-06,
+ "loss": 0.5205,
+ "step": 813
+ },
+ {
+ "epoch": 2.5448851774530272,
+ "grad_norm": 0.7616812586784363,
+ "learning_rate": 3.3201491671214797e-06,
+ "loss": 0.4744,
+ "step": 814
+ },
+ {
+ "epoch": 2.548016701461378,
+ "grad_norm": 0.987173318862915,
+ "learning_rate": 3.3160579068228183e-06,
+ "loss": 0.4876,
+ "step": 815
+ },
+ {
+ "epoch": 2.5511482254697286,
+ "grad_norm": 1.259137749671936,
+ "learning_rate": 3.311964198892574e-06,
+ "loss": 0.454,
+ "step": 816
+ },
+ {
+ "epoch": 2.5542797494780793,
+ "grad_norm": 0.7866336703300476,
+ "learning_rate": 3.3078680556091513e-06,
+ "loss": 0.5107,
+ "step": 817
+ },
+ {
+ "epoch": 2.55741127348643,
+ "grad_norm": 0.9311352372169495,
+ "learning_rate": 3.303769489258258e-06,
+ "loss": 0.4843,
+ "step": 818
+ },
+ {
+ "epoch": 2.5605427974947808,
+ "grad_norm": 0.8556346893310547,
+ "learning_rate": 3.299668512132872e-06,
+ "loss": 0.5017,
+ "step": 819
+ },
+ {
+ "epoch": 2.5636743215031315,
+ "grad_norm": 2.810598373413086,
+ "learning_rate": 3.2955651365331988e-06,
+ "loss": 0.5223,
+ "step": 820
+ },
+ {
+ "epoch": 2.566805845511482,
+ "grad_norm": 1.0120766162872314,
+ "learning_rate": 3.29145937476664e-06,
+ "loss": 0.4959,
+ "step": 821
+ },
+ {
+ "epoch": 2.569937369519833,
+ "grad_norm": 0.751412034034729,
+ "learning_rate": 3.287351239147752e-06,
+ "loss": 0.4941,
+ "step": 822
+ },
+ {
+ "epoch": 2.5730688935281836,
+ "grad_norm": 1.9308148622512817,
+ "learning_rate": 3.2832407419982136e-06,
+ "loss": 0.4965,
+ "step": 823
+ },
+ {
+ "epoch": 2.5762004175365343,
+ "grad_norm": 0.9215649962425232,
+ "learning_rate": 3.279127895646786e-06,
+ "loss": 0.5071,
+ "step": 824
+ },
+ {
+ "epoch": 2.5793319415448854,
+ "grad_norm": 0.7599574327468872,
+ "learning_rate": 3.2750127124292754e-06,
+ "loss": 0.5191,
+ "step": 825
+ },
+ {
+ "epoch": 2.5824634655532357,
+ "grad_norm": 0.8234940767288208,
+ "learning_rate": 3.270895204688496e-06,
+ "loss": 0.4947,
+ "step": 826
+ },
+ {
+ "epoch": 2.585594989561587,
+ "grad_norm": 0.8401572704315186,
+ "learning_rate": 3.266775384774238e-06,
+ "loss": 0.4547,
+ "step": 827
+ },
+ {
+ "epoch": 2.588726513569937,
+ "grad_norm": 0.8927991986274719,
+ "learning_rate": 3.262653265043223e-06,
+ "loss": 0.4296,
+ "step": 828
+ },
+ {
+ "epoch": 2.5918580375782883,
+ "grad_norm": 0.8009241223335266,
+ "learning_rate": 3.2585288578590716e-06,
+ "loss": 0.4578,
+ "step": 829
+ },
+ {
+ "epoch": 2.594989561586639,
+ "grad_norm": 0.7982021570205688,
+ "learning_rate": 3.2544021755922663e-06,
+ "loss": 0.4961,
+ "step": 830
+ },
+ {
+ "epoch": 2.5981210855949897,
+ "grad_norm": 0.7096095681190491,
+ "learning_rate": 3.2502732306201112e-06,
+ "loss": 0.4975,
+ "step": 831
+ },
+ {
+ "epoch": 2.6012526096033404,
+ "grad_norm": 1.1092045307159424,
+ "learning_rate": 3.246142035326699e-06,
+ "loss": 0.4705,
+ "step": 832
+ },
+ {
+ "epoch": 2.604384133611691,
+ "grad_norm": 0.785799503326416,
+ "learning_rate": 3.24200860210287e-06,
+ "loss": 0.479,
+ "step": 833
+ },
+ {
+ "epoch": 2.607515657620042,
+ "grad_norm": 0.7315773367881775,
+ "learning_rate": 3.2378729433461804e-06,
+ "loss": 0.5036,
+ "step": 834
+ },
+ {
+ "epoch": 2.6106471816283925,
+ "grad_norm": 0.7840189337730408,
+ "learning_rate": 3.233735071460856e-06,
+ "loss": 0.4967,
+ "step": 835
+ },
+ {
+ "epoch": 2.613778705636743,
+ "grad_norm": 0.7186565399169922,
+ "learning_rate": 3.2295949988577655e-06,
+ "loss": 0.4889,
+ "step": 836
+ },
+ {
+ "epoch": 2.616910229645094,
+ "grad_norm": 0.766054093837738,
+ "learning_rate": 3.2254527379543747e-06,
+ "loss": 0.539,
+ "step": 837
+ },
+ {
+ "epoch": 2.6200417536534446,
+ "grad_norm": 0.7705381512641907,
+ "learning_rate": 3.2213083011747165e-06,
+ "loss": 0.4968,
+ "step": 838
+ },
+ {
+ "epoch": 2.6231732776617953,
+ "grad_norm": 1.3530604839324951,
+ "learning_rate": 3.217161700949346e-06,
+ "loss": 0.52,
+ "step": 839
+ },
+ {
+ "epoch": 2.626304801670146,
+ "grad_norm": 0.737389862537384,
+ "learning_rate": 3.2130129497153107e-06,
+ "loss": 0.4823,
+ "step": 840
+ },
+ {
+ "epoch": 2.6294363256784967,
+ "grad_norm": 0.9121193885803223,
+ "learning_rate": 3.2088620599161064e-06,
+ "loss": 0.4592,
+ "step": 841
+ },
+ {
+ "epoch": 2.632567849686848,
+ "grad_norm": 0.8869616389274597,
+ "learning_rate": 3.2047090440016464e-06,
+ "loss": 0.5001,
+ "step": 842
+ },
+ {
+ "epoch": 2.635699373695198,
+ "grad_norm": 0.8447219133377075,
+ "learning_rate": 3.200553914428219e-06,
+ "loss": 0.4969,
+ "step": 843
+ },
+ {
+ "epoch": 2.6388308977035493,
+ "grad_norm": 0.8877657055854797,
+ "learning_rate": 3.1963966836584524e-06,
+ "loss": 0.4718,
+ "step": 844
+ },
+ {
+ "epoch": 2.6419624217118995,
+ "grad_norm": 1.045272946357727,
+ "learning_rate": 3.192237364161277e-06,
+ "loss": 0.4864,
+ "step": 845
+ },
+ {
+ "epoch": 2.6450939457202507,
+ "grad_norm": 0.8485913276672363,
+ "learning_rate": 3.1880759684118876e-06,
+ "loss": 0.4688,
+ "step": 846
+ },
+ {
+ "epoch": 2.6482254697286014,
+ "grad_norm": 0.7328930497169495,
+ "learning_rate": 3.183912508891709e-06,
+ "loss": 0.4728,
+ "step": 847
+ },
+ {
+ "epoch": 2.651356993736952,
+ "grad_norm": 0.7377315759658813,
+ "learning_rate": 3.179746998088351e-06,
+ "loss": 0.4672,
+ "step": 848
+ },
+ {
+ "epoch": 2.654488517745303,
+ "grad_norm": 0.8017002940177917,
+ "learning_rate": 3.1755794484955817e-06,
+ "loss": 0.4884,
+ "step": 849
+ },
+ {
+ "epoch": 2.6576200417536535,
+ "grad_norm": 1.045470952987671,
+ "learning_rate": 3.171409872613278e-06,
+ "loss": 0.4789,
+ "step": 850
+ },
+ {
+ "epoch": 2.6607515657620042,
+ "grad_norm": 0.8823987245559692,
+ "learning_rate": 3.1672382829473997e-06,
+ "loss": 0.5117,
+ "step": 851
+ },
+ {
+ "epoch": 2.663883089770355,
+ "grad_norm": 0.7395204901695251,
+ "learning_rate": 3.163064692009944e-06,
+ "loss": 0.5476,
+ "step": 852
+ },
+ {
+ "epoch": 2.6670146137787056,
+ "grad_norm": 0.7778941988945007,
+ "learning_rate": 3.1588891123189103e-06,
+ "loss": 0.5092,
+ "step": 853
+ },
+ {
+ "epoch": 2.6701461377870563,
+ "grad_norm": 0.8072531819343567,
+ "learning_rate": 3.1547115563982643e-06,
+ "loss": 0.4961,
+ "step": 854
+ },
+ {
+ "epoch": 2.673277661795407,
+ "grad_norm": 0.9018139243125916,
+ "learning_rate": 3.1505320367778993e-06,
+ "loss": 0.4624,
+ "step": 855
+ },
+ {
+ "epoch": 2.6764091858037578,
+ "grad_norm": 0.8554450869560242,
+ "learning_rate": 3.1463505659935957e-06,
+ "loss": 0.4971,
+ "step": 856
+ },
+ {
+ "epoch": 2.6795407098121085,
+ "grad_norm": 0.7727259397506714,
+ "learning_rate": 3.14216715658699e-06,
+ "loss": 0.4544,
+ "step": 857
+ },
+ {
+ "epoch": 2.682672233820459,
+ "grad_norm": 0.9253409504890442,
+ "learning_rate": 3.137981821105529e-06,
+ "loss": 0.4893,
+ "step": 858
+ },
+ {
+ "epoch": 2.68580375782881,
+ "grad_norm": 0.8809456825256348,
+ "learning_rate": 3.1337945721024403e-06,
+ "loss": 0.5242,
+ "step": 859
+ },
+ {
+ "epoch": 2.6889352818371606,
+ "grad_norm": 0.981755256652832,
+ "learning_rate": 3.129605422136689e-06,
+ "loss": 0.4686,
+ "step": 860
+ },
+ {
+ "epoch": 2.6920668058455117,
+ "grad_norm": 1.1278467178344727,
+ "learning_rate": 3.1254143837729412e-06,
+ "loss": 0.4813,
+ "step": 861
+ },
+ {
+ "epoch": 2.695198329853862,
+ "grad_norm": 0.8529123663902283,
+ "learning_rate": 3.1212214695815285e-06,
+ "loss": 0.4723,
+ "step": 862
+ },
+ {
+ "epoch": 2.698329853862213,
+ "grad_norm": 0.7764189839363098,
+ "learning_rate": 3.1170266921384075e-06,
+ "loss": 0.4777,
+ "step": 863
+ },
+ {
+ "epoch": 2.701461377870564,
+ "grad_norm": 0.7364740967750549,
+ "learning_rate": 3.112830064025124e-06,
+ "loss": 0.4975,
+ "step": 864
+ },
+ {
+ "epoch": 2.7045929018789145,
+ "grad_norm": 0.7594549059867859,
+ "learning_rate": 3.108631597828774e-06,
+ "loss": 0.5083,
+ "step": 865
+ },
+ {
+ "epoch": 2.7077244258872653,
+ "grad_norm": 0.7337073683738708,
+ "learning_rate": 3.104431306141968e-06,
+ "loss": 0.4778,
+ "step": 866
+ },
+ {
+ "epoch": 2.710855949895616,
+ "grad_norm": 0.7709932327270508,
+ "learning_rate": 3.1002292015627894e-06,
+ "loss": 0.4754,
+ "step": 867
+ },
+ {
+ "epoch": 2.7139874739039667,
+ "grad_norm": 0.8001313209533691,
+ "learning_rate": 3.0960252966947605e-06,
+ "loss": 0.4489,
+ "step": 868
+ },
+ {
+ "epoch": 2.7171189979123174,
+ "grad_norm": 0.8280592560768127,
+ "learning_rate": 3.091819604146804e-06,
+ "loss": 0.4606,
+ "step": 869
+ },
+ {
+ "epoch": 2.720250521920668,
+ "grad_norm": 0.7463534474372864,
+ "learning_rate": 3.0876121365332024e-06,
+ "loss": 0.5168,
+ "step": 870
+ },
+ {
+ "epoch": 2.723382045929019,
+ "grad_norm": 0.9011222124099731,
+ "learning_rate": 3.0834029064735636e-06,
+ "loss": 0.5163,
+ "step": 871
+ },
+ {
+ "epoch": 2.7265135699373695,
+ "grad_norm": 0.7811456322669983,
+ "learning_rate": 3.0791919265927827e-06,
+ "loss": 0.5004,
+ "step": 872
+ },
+ {
+ "epoch": 2.72964509394572,
+ "grad_norm": 0.9251837134361267,
+ "learning_rate": 3.0749792095210003e-06,
+ "loss": 0.5081,
+ "step": 873
+ },
+ {
+ "epoch": 2.732776617954071,
+ "grad_norm": 0.8347085118293762,
+ "learning_rate": 3.0707647678935695e-06,
+ "loss": 0.4793,
+ "step": 874
+ },
+ {
+ "epoch": 2.7359081419624216,
+ "grad_norm": 0.9766442179679871,
+ "learning_rate": 3.0665486143510153e-06,
+ "loss": 0.493,
+ "step": 875
+ },
+ {
+ "epoch": 2.7390396659707723,
+ "grad_norm": 0.7692548036575317,
+ "learning_rate": 3.0623307615389975e-06,
+ "loss": 0.4874,
+ "step": 876
+ },
+ {
+ "epoch": 2.742171189979123,
+ "grad_norm": 0.7714599370956421,
+ "learning_rate": 3.0581112221082727e-06,
+ "loss": 0.4929,
+ "step": 877
+ },
+ {
+ "epoch": 2.745302713987474,
+ "grad_norm": 0.7797786593437195,
+ "learning_rate": 3.053890008714655e-06,
+ "loss": 0.4359,
+ "step": 878
+ },
+ {
+ "epoch": 2.7484342379958244,
+ "grad_norm": 5.118397235870361,
+ "learning_rate": 3.049667134018981e-06,
+ "loss": 0.4634,
+ "step": 879
+ },
+ {
+ "epoch": 2.7515657620041756,
+ "grad_norm": 0.7684539556503296,
+ "learning_rate": 3.04544261068707e-06,
+ "loss": 0.4688,
+ "step": 880
+ },
+ {
+ "epoch": 2.754697286012526,
+ "grad_norm": 0.8678610920906067,
+ "learning_rate": 3.0412164513896846e-06,
+ "loss": 0.5213,
+ "step": 881
+ },
+ {
+ "epoch": 2.757828810020877,
+ "grad_norm": 0.80293869972229,
+ "learning_rate": 3.0369886688024954e-06,
+ "loss": 0.4392,
+ "step": 882
+ },
+ {
+ "epoch": 2.7609603340292277,
+ "grad_norm": 0.7438644766807556,
+ "learning_rate": 3.0327592756060412e-06,
+ "loss": 0.528,
+ "step": 883
+ },
+ {
+ "epoch": 2.7640918580375784,
+ "grad_norm": 0.7701645493507385,
+ "learning_rate": 3.0285282844856917e-06,
+ "loss": 0.504,
+ "step": 884
+ },
+ {
+ "epoch": 2.767223382045929,
+ "grad_norm": 0.7113856673240662,
+ "learning_rate": 3.024295708131611e-06,
+ "loss": 0.4819,
+ "step": 885
+ },
+ {
+ "epoch": 2.77035490605428,
+ "grad_norm": 1.2697532176971436,
+ "learning_rate": 3.020061559238714e-06,
+ "loss": 0.5009,
+ "step": 886
+ },
+ {
+ "epoch": 2.7734864300626305,
+ "grad_norm": 1.0299439430236816,
+ "learning_rate": 3.015825850506636e-06,
+ "loss": 0.4707,
+ "step": 887
+ },
+ {
+ "epoch": 2.776617954070981,
+ "grad_norm": 0.9703660607337952,
+ "learning_rate": 3.011588594639688e-06,
+ "loss": 0.4102,
+ "step": 888
+ },
+ {
+ "epoch": 2.779749478079332,
+ "grad_norm": 0.7357314825057983,
+ "learning_rate": 3.0073498043468247e-06,
+ "loss": 0.4649,
+ "step": 889
+ },
+ {
+ "epoch": 2.7828810020876826,
+ "grad_norm": 0.7815471291542053,
+ "learning_rate": 3.0031094923415993e-06,
+ "loss": 0.469,
+ "step": 890
+ },
+ {
+ "epoch": 2.7860125260960333,
+ "grad_norm": 0.7856019139289856,
+ "learning_rate": 2.9988676713421318e-06,
+ "loss": 0.4241,
+ "step": 891
+ },
+ {
+ "epoch": 2.789144050104384,
+ "grad_norm": 0.7668167352676392,
+ "learning_rate": 2.994624354071066e-06,
+ "loss": 0.5309,
+ "step": 892
+ },
+ {
+ "epoch": 2.7922755741127347,
+ "grad_norm": 0.7485945820808411,
+ "learning_rate": 2.990379553255535e-06,
+ "loss": 0.5173,
+ "step": 893
+ },
+ {
+ "epoch": 2.7954070981210855,
+ "grad_norm": 0.8065824508666992,
+ "learning_rate": 2.986133281627123e-06,
+ "loss": 0.4995,
+ "step": 894
+ },
+ {
+ "epoch": 2.798538622129436,
+ "grad_norm": 0.7156995534896851,
+ "learning_rate": 2.9818855519218217e-06,
+ "loss": 0.4642,
+ "step": 895
+ },
+ {
+ "epoch": 2.801670146137787,
+ "grad_norm": 0.9115403890609741,
+ "learning_rate": 2.97763637688e-06,
+ "loss": 0.4799,
+ "step": 896
+ },
+ {
+ "epoch": 2.804801670146138,
+ "grad_norm": 0.7466689944267273,
+ "learning_rate": 2.9733857692463584e-06,
+ "loss": 0.4942,
+ "step": 897
+ },
+ {
+ "epoch": 2.8079331941544883,
+ "grad_norm": 0.7484914064407349,
+ "learning_rate": 2.9691337417698974e-06,
+ "loss": 0.4618,
+ "step": 898
+ },
+ {
+ "epoch": 2.8110647181628394,
+ "grad_norm": 0.816704511642456,
+ "learning_rate": 2.9648803072038736e-06,
+ "loss": 0.4748,
+ "step": 899
+ },
+ {
+ "epoch": 2.81419624217119,
+ "grad_norm": 0.7627584934234619,
+ "learning_rate": 2.9606254783057666e-06,
+ "loss": 0.4667,
+ "step": 900
+ },
+ {
+ "epoch": 2.817327766179541,
+ "grad_norm": 0.7341011166572571,
+ "learning_rate": 2.9563692678372342e-06,
+ "loss": 0.4802,
+ "step": 901
+ },
+ {
+ "epoch": 2.8204592901878915,
+ "grad_norm": 1.2541382312774658,
+ "learning_rate": 2.952111688564082e-06,
+ "loss": 0.5231,
+ "step": 902
+ },
+ {
+ "epoch": 2.8235908141962422,
+ "grad_norm": 0.7172819375991821,
+ "learning_rate": 2.9478527532562184e-06,
+ "loss": 0.4488,
+ "step": 903
+ },
+ {
+ "epoch": 2.826722338204593,
+ "grad_norm": 0.774529218673706,
+ "learning_rate": 2.943592474687621e-06,
+ "loss": 0.4964,
+ "step": 904
+ },
+ {
+ "epoch": 2.8298538622129437,
+ "grad_norm": 0.7315672636032104,
+ "learning_rate": 2.939330865636294e-06,
+ "loss": 0.4817,
+ "step": 905
+ },
+ {
+ "epoch": 2.8329853862212944,
+ "grad_norm": 0.7698234915733337,
+ "learning_rate": 2.9350679388842347e-06,
+ "loss": 0.5075,
+ "step": 906
+ },
+ {
+ "epoch": 2.836116910229645,
+ "grad_norm": 0.7717766761779785,
+ "learning_rate": 2.93080370721739e-06,
+ "loss": 0.4789,
+ "step": 907
+ },
+ {
+ "epoch": 2.8392484342379958,
+ "grad_norm": 0.7383570075035095,
+ "learning_rate": 2.926538183425622e-06,
+ "loss": 0.4992,
+ "step": 908
+ },
+ {
+ "epoch": 2.8423799582463465,
+ "grad_norm": 0.7858864068984985,
+ "learning_rate": 2.92227138030267e-06,
+ "loss": 0.4993,
+ "step": 909
+ },
+ {
+ "epoch": 2.845511482254697,
+ "grad_norm": 0.8220369219779968,
+ "learning_rate": 2.9180033106461076e-06,
+ "loss": 0.4929,
+ "step": 910
+ },
+ {
+ "epoch": 2.848643006263048,
+ "grad_norm": 0.7507152557373047,
+ "learning_rate": 2.9137339872573086e-06,
+ "loss": 0.4394,
+ "step": 911
+ },
+ {
+ "epoch": 2.8517745302713986,
+ "grad_norm": 0.7935269474983215,
+ "learning_rate": 2.9094634229414063e-06,
+ "loss": 0.4656,
+ "step": 912
+ },
+ {
+ "epoch": 2.8549060542797493,
+ "grad_norm": 0.9187721610069275,
+ "learning_rate": 2.9051916305072576e-06,
+ "loss": 0.4918,
+ "step": 913
+ },
+ {
+ "epoch": 2.8580375782881005,
+ "grad_norm": 0.8699706792831421,
+ "learning_rate": 2.9009186227674e-06,
+ "loss": 0.5106,
+ "step": 914
+ },
+ {
+ "epoch": 2.8611691022964507,
+ "grad_norm": 0.7175673246383667,
+ "learning_rate": 2.896644412538021e-06,
+ "loss": 0.5105,
+ "step": 915
+ },
+ {
+ "epoch": 2.864300626304802,
+ "grad_norm": 0.8563990592956543,
+ "learning_rate": 2.892369012638909e-06,
+ "loss": 0.4993,
+ "step": 916
+ },
+ {
+ "epoch": 2.867432150313152,
+ "grad_norm": 0.7891882658004761,
+ "learning_rate": 2.8880924358934246e-06,
+ "loss": 0.4983,
+ "step": 917
+ },
+ {
+ "epoch": 2.8705636743215033,
+ "grad_norm": 0.9247110486030579,
+ "learning_rate": 2.8838146951284575e-06,
+ "loss": 0.4789,
+ "step": 918
+ },
+ {
+ "epoch": 2.873695198329854,
+ "grad_norm": 0.7523055672645569,
+ "learning_rate": 2.879535803174387e-06,
+ "loss": 0.4982,
+ "step": 919
+ },
+ {
+ "epoch": 2.8768267223382047,
+ "grad_norm": 0.8096909523010254,
+ "learning_rate": 2.8752557728650467e-06,
+ "loss": 0.4958,
+ "step": 920
+ },
+ {
+ "epoch": 2.8799582463465554,
+ "grad_norm": 2.3476874828338623,
+ "learning_rate": 2.870974617037684e-06,
+ "loss": 0.491,
+ "step": 921
+ },
+ {
+ "epoch": 2.883089770354906,
+ "grad_norm": 0.8388578295707703,
+ "learning_rate": 2.8666923485329224e-06,
+ "loss": 0.5275,
+ "step": 922
+ },
+ {
+ "epoch": 2.886221294363257,
+ "grad_norm": 0.8162729144096375,
+ "learning_rate": 2.8624089801947234e-06,
+ "loss": 0.4776,
+ "step": 923
+ },
+ {
+ "epoch": 2.8893528183716075,
+ "grad_norm": 0.7306103110313416,
+ "learning_rate": 2.858124524870345e-06,
+ "loss": 0.4814,
+ "step": 924
+ },
+ {
+ "epoch": 2.892484342379958,
+ "grad_norm": 0.8736817836761475,
+ "learning_rate": 2.853838995410307e-06,
+ "loss": 0.5097,
+ "step": 925
+ },
+ {
+ "epoch": 2.895615866388309,
+ "grad_norm": 0.7771823406219482,
+ "learning_rate": 2.8495524046683525e-06,
+ "loss": 0.4806,
+ "step": 926
+ },
+ {
+ "epoch": 2.8987473903966596,
+ "grad_norm": 0.9421334862709045,
+ "learning_rate": 2.845264765501404e-06,
+ "loss": 0.5055,
+ "step": 927
+ },
+ {
+ "epoch": 2.9018789144050103,
+ "grad_norm": 0.8403921127319336,
+ "learning_rate": 2.8409760907695314e-06,
+ "loss": 0.4775,
+ "step": 928
+ },
+ {
+ "epoch": 2.905010438413361,
+ "grad_norm": 0.8095362186431885,
+ "learning_rate": 2.836686393335909e-06,
+ "loss": 0.4532,
+ "step": 929
+ },
+ {
+ "epoch": 2.9081419624217117,
+ "grad_norm": 0.7340645790100098,
+ "learning_rate": 2.8323956860667813e-06,
+ "loss": 0.4835,
+ "step": 930
+ },
+ {
+ "epoch": 2.911273486430063,
+ "grad_norm": 0.6970911026000977,
+ "learning_rate": 2.828103981831417e-06,
+ "loss": 0.4999,
+ "step": 931
+ },
+ {
+ "epoch": 2.914405010438413,
+ "grad_norm": 0.8136418461799622,
+ "learning_rate": 2.8238112935020794e-06,
+ "loss": 0.5038,
+ "step": 932
+ },
+ {
+ "epoch": 2.9175365344467643,
+ "grad_norm": 0.9045608043670654,
+ "learning_rate": 2.8195176339539816e-06,
+ "loss": 0.486,
+ "step": 933
+ },
+ {
+ "epoch": 2.9206680584551146,
+ "grad_norm": 1.14940345287323,
+ "learning_rate": 2.815223016065249e-06,
+ "loss": 0.5079,
+ "step": 934
+ },
+ {
+ "epoch": 2.9237995824634657,
+ "grad_norm": 0.7411190867424011,
+ "learning_rate": 2.8109274527168826e-06,
+ "loss": 0.4564,
+ "step": 935
+ },
+ {
+ "epoch": 2.9269311064718164,
+ "grad_norm": 0.8903455138206482,
+ "learning_rate": 2.806630956792719e-06,
+ "loss": 0.451,
+ "step": 936
+ },
+ {
+ "epoch": 2.930062630480167,
+ "grad_norm": 0.7865445017814636,
+ "learning_rate": 2.8023335411793904e-06,
+ "loss": 0.4658,
+ "step": 937
+ },
+ {
+ "epoch": 2.933194154488518,
+ "grad_norm": 0.8185790777206421,
+ "learning_rate": 2.798035218766292e-06,
+ "loss": 0.4776,
+ "step": 938
+ },
+ {
+ "epoch": 2.9363256784968685,
+ "grad_norm": 0.7516276836395264,
+ "learning_rate": 2.793736002445531e-06,
+ "loss": 0.4447,
+ "step": 939
+ },
+ {
+ "epoch": 2.9394572025052192,
+ "grad_norm": 0.738080620765686,
+ "learning_rate": 2.789435905111903e-06,
+ "loss": 0.4832,
+ "step": 940
+ },
+ {
+ "epoch": 2.94258872651357,
+ "grad_norm": 0.7971507906913757,
+ "learning_rate": 2.785134939662843e-06,
+ "loss": 0.4835,
+ "step": 941
+ },
+ {
+ "epoch": 2.9457202505219207,
+ "grad_norm": 0.7529093623161316,
+ "learning_rate": 2.78083311899839e-06,
+ "loss": 0.4759,
+ "step": 942
+ },
+ {
+ "epoch": 2.9488517745302714,
+ "grad_norm": 0.8222358226776123,
+ "learning_rate": 2.7765304560211482e-06,
+ "loss": 0.4365,
+ "step": 943
+ },
+ {
+ "epoch": 2.951983298538622,
+ "grad_norm": 0.729945182800293,
+ "learning_rate": 2.7722269636362462e-06,
+ "loss": 0.5026,
+ "step": 944
+ },
+ {
+ "epoch": 2.9551148225469728,
+ "grad_norm": 0.7287900447845459,
+ "learning_rate": 2.767922654751306e-06,
+ "loss": 0.4916,
+ "step": 945
+ },
+ {
+ "epoch": 2.9582463465553235,
+ "grad_norm": 0.869637131690979,
+ "learning_rate": 2.763617542276391e-06,
+ "loss": 0.5018,
+ "step": 946
+ },
+ {
+ "epoch": 2.961377870563674,
+ "grad_norm": 1.004909634590149,
+ "learning_rate": 2.7593116391239806e-06,
+ "loss": 0.5152,
+ "step": 947
+ },
+ {
+ "epoch": 2.964509394572025,
+ "grad_norm": 0.8263046145439148,
+ "learning_rate": 2.7550049582089235e-06,
+ "loss": 0.5249,
+ "step": 948
+ },
+ {
+ "epoch": 2.9676409185803756,
+ "grad_norm": 0.7963895797729492,
+ "learning_rate": 2.750697512448401e-06,
+ "loss": 0.5084,
+ "step": 949
+ },
+ {
+ "epoch": 2.9707724425887267,
+ "grad_norm": 0.7211249470710754,
+ "learning_rate": 2.7463893147618893e-06,
+ "loss": 0.4691,
+ "step": 950
+ },
+ {
+ "epoch": 2.973903966597077,
+ "grad_norm": 0.8010216951370239,
+ "learning_rate": 2.742080378071118e-06,
+ "loss": 0.5026,
+ "step": 951
+ },
+ {
+ "epoch": 2.977035490605428,
+ "grad_norm": 0.780078649520874,
+ "learning_rate": 2.7377707153000356e-06,
+ "loss": 0.4758,
+ "step": 952
+ },
+ {
+ "epoch": 2.980167014613779,
+ "grad_norm": 0.7728193998336792,
+ "learning_rate": 2.7334603393747684e-06,
+ "loss": 0.488,
+ "step": 953
+ },
+ {
+ "epoch": 2.9832985386221296,
+ "grad_norm": 0.836329996585846,
+ "learning_rate": 2.7291492632235777e-06,
+ "loss": 0.456,
+ "step": 954
+ },
+ {
+ "epoch": 2.9864300626304803,
+ "grad_norm": 0.7241990566253662,
+ "learning_rate": 2.724837499776831e-06,
+ "loss": 0.4953,
+ "step": 955
+ },
+ {
+ "epoch": 2.989561586638831,
+ "grad_norm": 0.7595076560974121,
+ "learning_rate": 2.7205250619669527e-06,
+ "loss": 0.446,
+ "step": 956
+ },
+ {
+ "epoch": 2.9926931106471817,
+ "grad_norm": 0.8177686333656311,
+ "learning_rate": 2.716211962728392e-06,
+ "loss": 0.5057,
+ "step": 957
+ },
+ {
+ "epoch": 2.9958246346555324,
+ "grad_norm": 0.7506977915763855,
+ "learning_rate": 2.71189821499758e-06,
+ "loss": 0.4821,
+ "step": 958
+ },
+ {
+ "epoch": 2.998956158663883,
+ "grad_norm": 0.8085163235664368,
+ "learning_rate": 2.7075838317128943e-06,
+ "loss": 0.5002,
+ "step": 959
+ },
+ {
+ "epoch": 3.0,
+ "grad_norm": 3.3674418926239014,
+ "learning_rate": 2.7032688258146207e-06,
+ "loss": 0.183,
+ "step": 960
+ },
+ {
+ "epoch": 3.0031315240083507,
+ "grad_norm": 1.066100835800171,
+ "learning_rate": 2.698953210244908e-06,
+ "loss": 0.4427,
+ "step": 961
+ },
+ {
+ "epoch": 3.0062630480167014,
+ "grad_norm": 0.7690210938453674,
+ "learning_rate": 2.6946369979477365e-06,
+ "loss": 0.4638,
+ "step": 962
+ },
+ {
+ "epoch": 3.009394572025052,
+ "grad_norm": 0.8114679455757141,
+ "learning_rate": 2.690320201868876e-06,
+ "loss": 0.4373,
+ "step": 963
+ },
+ {
+ "epoch": 3.012526096033403,
+ "grad_norm": 0.7680971622467041,
+ "learning_rate": 2.686002834955847e-06,
+ "loss": 0.4477,
+ "step": 964
+ },
+ {
+ "epoch": 3.0156576200417535,
+ "grad_norm": 0.7194678783416748,
+ "learning_rate": 2.6816849101578808e-06,
+ "loss": 0.455,
+ "step": 965
+ },
+ {
+ "epoch": 3.0187891440501042,
+ "grad_norm": 0.6890467405319214,
+ "learning_rate": 2.6773664404258854e-06,
+ "loss": 0.4246,
+ "step": 966
+ },
+ {
+ "epoch": 3.021920668058455,
+ "grad_norm": 0.8064301013946533,
+ "learning_rate": 2.6730474387123987e-06,
+ "loss": 0.497,
+ "step": 967
+ },
+ {
+ "epoch": 3.0250521920668056,
+ "grad_norm": 0.8164849281311035,
+ "learning_rate": 2.668727917971559e-06,
+ "loss": 0.457,
+ "step": 968
+ },
+ {
+ "epoch": 3.028183716075157,
+ "grad_norm": 0.7793440818786621,
+ "learning_rate": 2.6644078911590565e-06,
+ "loss": 0.4632,
+ "step": 969
+ },
+ {
+ "epoch": 3.0313152400835075,
+ "grad_norm": 0.7208535671234131,
+ "learning_rate": 2.6600873712321033e-06,
+ "loss": 0.4786,
+ "step": 970
+ },
+ {
+ "epoch": 3.034446764091858,
+ "grad_norm": 0.8905500769615173,
+ "learning_rate": 2.655766371149389e-06,
+ "loss": 0.4317,
+ "step": 971
+ },
+ {
+ "epoch": 3.037578288100209,
+ "grad_norm": 0.7537338733673096,
+ "learning_rate": 2.6514449038710418e-06,
+ "loss": 0.4783,
+ "step": 972
+ },
+ {
+ "epoch": 3.0407098121085596,
+ "grad_norm": 0.7901656031608582,
+ "learning_rate": 2.6471229823585937e-06,
+ "loss": 0.4762,
+ "step": 973
+ },
+ {
+ "epoch": 3.0438413361169103,
+ "grad_norm": 0.7427340745925903,
+ "learning_rate": 2.6428006195749373e-06,
+ "loss": 0.4782,
+ "step": 974
+ },
+ {
+ "epoch": 3.046972860125261,
+ "grad_norm": 0.8364680409431458,
+ "learning_rate": 2.6384778284842905e-06,
+ "loss": 0.4551,
+ "step": 975
+ },
+ {
+ "epoch": 3.0501043841336117,
+ "grad_norm": 1.1049500703811646,
+ "learning_rate": 2.634154622052155e-06,
+ "loss": 0.4451,
+ "step": 976
+ },
+ {
+ "epoch": 3.0532359081419624,
+ "grad_norm": 1.0797414779663086,
+ "learning_rate": 2.6298310132452757e-06,
+ "loss": 0.4512,
+ "step": 977
+ },
+ {
+ "epoch": 3.056367432150313,
+ "grad_norm": 0.8832088708877563,
+ "learning_rate": 2.62550701503161e-06,
+ "loss": 0.4569,
+ "step": 978
+ },
+ {
+ "epoch": 3.059498956158664,
+ "grad_norm": 0.748951256275177,
+ "learning_rate": 2.621182640380277e-06,
+ "loss": 0.4535,
+ "step": 979
+ },
+ {
+ "epoch": 3.0626304801670146,
+ "grad_norm": 0.776386022567749,
+ "learning_rate": 2.616857902261529e-06,
+ "loss": 0.4582,
+ "step": 980
+ },
+ {
+ "epoch": 3.0657620041753653,
+ "grad_norm": 0.730057418346405,
+ "learning_rate": 2.6125328136467074e-06,
+ "loss": 0.4581,
+ "step": 981
+ },
+ {
+ "epoch": 3.068893528183716,
+ "grad_norm": 0.7918877005577087,
+ "learning_rate": 2.6082073875082046e-06,
+ "loss": 0.4355,
+ "step": 982
+ },
+ {
+ "epoch": 3.0720250521920667,
+ "grad_norm": 0.803987443447113,
+ "learning_rate": 2.6038816368194265e-06,
+ "loss": 0.455,
+ "step": 983
+ },
+ {
+ "epoch": 3.0751565762004174,
+ "grad_norm": 1.2970365285873413,
+ "learning_rate": 2.599555574554749e-06,
+ "loss": 0.448,
+ "step": 984
+ },
+ {
+ "epoch": 3.078288100208768,
+ "grad_norm": 0.7814194560050964,
+ "learning_rate": 2.595229213689487e-06,
+ "loss": 0.4197,
+ "step": 985
+ },
+ {
+ "epoch": 3.081419624217119,
+ "grad_norm": 0.9865803122520447,
+ "learning_rate": 2.5909025671998483e-06,
+ "loss": 0.4649,
+ "step": 986
+ },
+ {
+ "epoch": 3.08455114822547,
+ "grad_norm": 0.7769168615341187,
+ "learning_rate": 2.586575648062898e-06,
+ "loss": 0.4287,
+ "step": 987
+ },
+ {
+ "epoch": 3.0876826722338206,
+ "grad_norm": 1.454988956451416,
+ "learning_rate": 2.582248469256519e-06,
+ "loss": 0.4548,
+ "step": 988
+ },
+ {
+ "epoch": 3.0908141962421714,
+ "grad_norm": 0.7656151056289673,
+ "learning_rate": 2.577921043759372e-06,
+ "loss": 0.4323,
+ "step": 989
+ },
+ {
+ "epoch": 3.093945720250522,
+ "grad_norm": 0.7989768385887146,
+ "learning_rate": 2.5735933845508598e-06,
+ "loss": 0.4616,
+ "step": 990
+ },
+ {
+ "epoch": 3.0970772442588728,
+ "grad_norm": 0.8711255192756653,
+ "learning_rate": 2.5692655046110855e-06,
+ "loss": 0.4923,
+ "step": 991
+ },
+ {
+ "epoch": 3.1002087682672235,
+ "grad_norm": 0.7333446145057678,
+ "learning_rate": 2.564937416920813e-06,
+ "loss": 0.4297,
+ "step": 992
+ },
+ {
+ "epoch": 3.103340292275574,
+ "grad_norm": 0.7611051201820374,
+ "learning_rate": 2.5606091344614297e-06,
+ "loss": 0.4276,
+ "step": 993
+ },
+ {
+ "epoch": 3.106471816283925,
+ "grad_norm": 0.9742305278778076,
+ "learning_rate": 2.5562806702149083e-06,
+ "loss": 0.4597,
+ "step": 994
+ },
+ {
+ "epoch": 3.1096033402922756,
+ "grad_norm": 0.8043314218521118,
+ "learning_rate": 2.551952037163765e-06,
+ "loss": 0.459,
+ "step": 995
+ },
+ {
+ "epoch": 3.1127348643006263,
+ "grad_norm": 0.7217118740081787,
+ "learning_rate": 2.5476232482910253e-06,
+ "loss": 0.4514,
+ "step": 996
+ },
+ {
+ "epoch": 3.115866388308977,
+ "grad_norm": 1.1410573720932007,
+ "learning_rate": 2.5432943165801765e-06,
+ "loss": 0.4942,
+ "step": 997
+ },
+ {
+ "epoch": 3.1189979123173277,
+ "grad_norm": 0.7188895344734192,
+ "learning_rate": 2.5389652550151416e-06,
+ "loss": 0.4641,
+ "step": 998
+ },
+ {
+ "epoch": 3.1221294363256784,
+ "grad_norm": 0.7223486304283142,
+ "learning_rate": 2.5346360765802276e-06,
+ "loss": 0.4382,
+ "step": 999
+ },
+ {
+ "epoch": 3.125260960334029,
+ "grad_norm": 0.8269757032394409,
+ "learning_rate": 2.5303067942600933e-06,
+ "loss": 0.4502,
+ "step": 1000
+ },
+ {
+ "epoch": 3.12839248434238,
+ "grad_norm": 0.7081235647201538,
+ "learning_rate": 2.5259774210397104e-06,
+ "loss": 0.4389,
+ "step": 1001
+ },
+ {
+ "epoch": 3.1315240083507305,
+ "grad_norm": 0.7493525147438049,
+ "learning_rate": 2.5216479699043224e-06,
+ "loss": 0.4697,
+ "step": 1002
+ },
+ {
+ "epoch": 3.1346555323590812,
+ "grad_norm": 0.7633835077285767,
+ "learning_rate": 2.5173184538394054e-06,
+ "loss": 0.4535,
+ "step": 1003
+ },
+ {
+ "epoch": 3.137787056367432,
+ "grad_norm": 0.7765631079673767,
+ "learning_rate": 2.5129888858306333e-06,
+ "loss": 0.4616,
+ "step": 1004
+ },
+ {
+ "epoch": 3.140918580375783,
+ "grad_norm": 0.7026706337928772,
+ "learning_rate": 2.508659278863832e-06,
+ "loss": 0.4535,
+ "step": 1005
+ },
+ {
+ "epoch": 3.144050104384134,
+ "grad_norm": 0.8816738128662109,
+ "learning_rate": 2.5043296459249466e-06,
+ "loss": 0.4274,
+ "step": 1006
+ },
+ {
+ "epoch": 3.1471816283924845,
+ "grad_norm": 0.8072274327278137,
+ "learning_rate": 2.5e-06,
+ "loss": 0.4566,
+ "step": 1007
+ },
+ {
+ "epoch": 3.150313152400835,
+ "grad_norm": 0.7775781750679016,
+ "learning_rate": 2.4956703540750542e-06,
+ "loss": 0.4248,
+ "step": 1008
+ },
+ {
+ "epoch": 3.153444676409186,
+ "grad_norm": 0.9271306991577148,
+ "learning_rate": 2.4913407211361686e-06,
+ "loss": 0.4251,
+ "step": 1009
+ },
+ {
+ "epoch": 3.1565762004175366,
+ "grad_norm": 0.763867974281311,
+ "learning_rate": 2.487011114169368e-06,
+ "loss": 0.4525,
+ "step": 1010
+ },
+ {
+ "epoch": 3.1597077244258873,
+ "grad_norm": 0.8754820227622986,
+ "learning_rate": 2.4826815461605955e-06,
+ "loss": 0.4524,
+ "step": 1011
+ },
+ {
+ "epoch": 3.162839248434238,
+ "grad_norm": 0.8261513113975525,
+ "learning_rate": 2.4783520300956784e-06,
+ "loss": 0.4303,
+ "step": 1012
+ },
+ {
+ "epoch": 3.1659707724425887,
+ "grad_norm": 0.8629854917526245,
+ "learning_rate": 2.4740225789602905e-06,
+ "loss": 0.4628,
+ "step": 1013
+ },
+ {
+ "epoch": 3.1691022964509394,
+ "grad_norm": 0.7281518578529358,
+ "learning_rate": 2.469693205739907e-06,
+ "loss": 0.461,
+ "step": 1014
+ },
+ {
+ "epoch": 3.17223382045929,
+ "grad_norm": 0.757644772529602,
+ "learning_rate": 2.465363923419774e-06,
+ "loss": 0.4455,
+ "step": 1015
+ },
+ {
+ "epoch": 3.175365344467641,
+ "grad_norm": 0.8021314740180969,
+ "learning_rate": 2.4610347449848592e-06,
+ "loss": 0.4755,
+ "step": 1016
+ },
+ {
+ "epoch": 3.1784968684759916,
+ "grad_norm": 0.7467564940452576,
+ "learning_rate": 2.456705683419824e-06,
+ "loss": 0.4419,
+ "step": 1017
+ },
+ {
+ "epoch": 3.1816283924843423,
+ "grad_norm": 0.8575125336647034,
+ "learning_rate": 2.452376751708976e-06,
+ "loss": 0.4577,
+ "step": 1018
+ },
+ {
+ "epoch": 3.184759916492693,
+ "grad_norm": 0.8101590871810913,
+ "learning_rate": 2.448047962836235e-06,
+ "loss": 0.4546,
+ "step": 1019
+ },
+ {
+ "epoch": 3.1878914405010437,
+ "grad_norm": 0.8146190643310547,
+ "learning_rate": 2.443719329785093e-06,
+ "loss": 0.4219,
+ "step": 1020
+ },
+ {
+ "epoch": 3.1910229645093944,
+ "grad_norm": 1.2800556421279907,
+ "learning_rate": 2.4393908655385708e-06,
+ "loss": 0.4502,
+ "step": 1021
+ },
+ {
+ "epoch": 3.1941544885177455,
+ "grad_norm": 0.7362221479415894,
+ "learning_rate": 2.4350625830791875e-06,
+ "loss": 0.4802,
+ "step": 1022
+ },
+ {
+ "epoch": 3.1972860125260962,
+ "grad_norm": 0.8307201862335205,
+ "learning_rate": 2.430734495388915e-06,
+ "loss": 0.4808,
+ "step": 1023
+ },
+ {
+ "epoch": 3.200417536534447,
+ "grad_norm": 0.8467394709587097,
+ "learning_rate": 2.42640661544914e-06,
+ "loss": 0.4598,
+ "step": 1024
+ },
+ {
+ "epoch": 3.2035490605427976,
+ "grad_norm": 0.9570673108100891,
+ "learning_rate": 2.422078956240629e-06,
+ "loss": 0.4341,
+ "step": 1025
+ },
+ {
+ "epoch": 3.2066805845511483,
+ "grad_norm": 0.7976422309875488,
+ "learning_rate": 2.4177515307434824e-06,
+ "loss": 0.4285,
+ "step": 1026
+ },
+ {
+ "epoch": 3.209812108559499,
+ "grad_norm": 0.8241607546806335,
+ "learning_rate": 2.413424351937103e-06,
+ "loss": 0.4563,
+ "step": 1027
+ },
+ {
+ "epoch": 3.2129436325678498,
+ "grad_norm": 0.7191072702407837,
+ "learning_rate": 2.4090974328001526e-06,
+ "loss": 0.456,
+ "step": 1028
+ },
+ {
+ "epoch": 3.2160751565762005,
+ "grad_norm": 0.8112174868583679,
+ "learning_rate": 2.4047707863105133e-06,
+ "loss": 0.4592,
+ "step": 1029
+ },
+ {
+ "epoch": 3.219206680584551,
+ "grad_norm": 0.7170486450195312,
+ "learning_rate": 2.4004444254452522e-06,
+ "loss": 0.4559,
+ "step": 1030
+ },
+ {
+ "epoch": 3.222338204592902,
+ "grad_norm": 0.7092299461364746,
+ "learning_rate": 2.3961183631805748e-06,
+ "loss": 0.4504,
+ "step": 1031
+ },
+ {
+ "epoch": 3.2254697286012526,
+ "grad_norm": 0.8104662299156189,
+ "learning_rate": 2.391792612491796e-06,
+ "loss": 0.4327,
+ "step": 1032
+ },
+ {
+ "epoch": 3.2286012526096033,
+ "grad_norm": 0.8512858152389526,
+ "learning_rate": 2.387467186353293e-06,
+ "loss": 0.4506,
+ "step": 1033
+ },
+ {
+ "epoch": 3.231732776617954,
+ "grad_norm": 0.7987692952156067,
+ "learning_rate": 2.3831420977384715e-06,
+ "loss": 0.4257,
+ "step": 1034
+ },
+ {
+ "epoch": 3.2348643006263047,
+ "grad_norm": 0.7805537581443787,
+ "learning_rate": 2.3788173596197244e-06,
+ "loss": 0.4692,
+ "step": 1035
+ },
+ {
+ "epoch": 3.2379958246346554,
+ "grad_norm": 0.737304151058197,
+ "learning_rate": 2.374492984968392e-06,
+ "loss": 0.4308,
+ "step": 1036
+ },
+ {
+ "epoch": 3.241127348643006,
+ "grad_norm": 0.8113856315612793,
+ "learning_rate": 2.3701689867547247e-06,
+ "loss": 0.4668,
+ "step": 1037
+ },
+ {
+ "epoch": 3.244258872651357,
+ "grad_norm": 1.507103443145752,
+ "learning_rate": 2.3658453779478464e-06,
+ "loss": 0.4527,
+ "step": 1038
+ },
+ {
+ "epoch": 3.2473903966597075,
+ "grad_norm": 0.7973915338516235,
+ "learning_rate": 2.3615221715157095e-06,
+ "loss": 0.4741,
+ "step": 1039
+ },
+ {
+ "epoch": 3.2505219206680582,
+ "grad_norm": 0.8206940293312073,
+ "learning_rate": 2.3571993804250635e-06,
+ "loss": 0.4582,
+ "step": 1040
+ },
+ {
+ "epoch": 3.2536534446764094,
+ "grad_norm": 0.8519637584686279,
+ "learning_rate": 2.3528770176414076e-06,
+ "loss": 0.4412,
+ "step": 1041
+ },
+ {
+ "epoch": 3.25678496868476,
+ "grad_norm": 0.7542241811752319,
+ "learning_rate": 2.348555096128959e-06,
+ "loss": 0.4861,
+ "step": 1042
+ },
+ {
+ "epoch": 3.259916492693111,
+ "grad_norm": 0.8074842691421509,
+ "learning_rate": 2.3442336288506125e-06,
+ "loss": 0.4707,
+ "step": 1043
+ },
+ {
+ "epoch": 3.2630480167014615,
+ "grad_norm": 0.8164265751838684,
+ "learning_rate": 2.3399126287678975e-06,
+ "loss": 0.4417,
+ "step": 1044
+ },
+ {
+ "epoch": 3.266179540709812,
+ "grad_norm": 0.7689628005027771,
+ "learning_rate": 2.3355921088409435e-06,
+ "loss": 0.4815,
+ "step": 1045
+ },
+ {
+ "epoch": 3.269311064718163,
+ "grad_norm": 0.7709240913391113,
+ "learning_rate": 2.3312720820284423e-06,
+ "loss": 0.4444,
+ "step": 1046
+ },
+ {
+ "epoch": 3.2724425887265136,
+ "grad_norm": 0.7340330481529236,
+ "learning_rate": 2.326952561287602e-06,
+ "loss": 0.4694,
+ "step": 1047
+ },
+ {
+ "epoch": 3.2755741127348643,
+ "grad_norm": 0.7785805463790894,
+ "learning_rate": 2.3226335595741154e-06,
+ "loss": 0.4273,
+ "step": 1048
+ },
+ {
+ "epoch": 3.278705636743215,
+ "grad_norm": 0.9968108534812927,
+ "learning_rate": 2.3183150898421196e-06,
+ "loss": 0.4782,
+ "step": 1049
+ },
+ {
+ "epoch": 3.2818371607515657,
+ "grad_norm": 0.7823553681373596,
+ "learning_rate": 2.3139971650441533e-06,
+ "loss": 0.444,
+ "step": 1050
+ },
+ {
+ "epoch": 3.2849686847599164,
+ "grad_norm": 0.7317377924919128,
+ "learning_rate": 2.3096797981311252e-06,
+ "loss": 0.4692,
+ "step": 1051
+ },
+ {
+ "epoch": 3.288100208768267,
+ "grad_norm": 0.8546518683433533,
+ "learning_rate": 2.3053630020522643e-06,
+ "loss": 0.4632,
+ "step": 1052
+ },
+ {
+ "epoch": 3.291231732776618,
+ "grad_norm": 1.2284396886825562,
+ "learning_rate": 2.301046789755093e-06,
+ "loss": 0.4409,
+ "step": 1053
+ },
+ {
+ "epoch": 3.2943632567849686,
+ "grad_norm": 0.8000460863113403,
+ "learning_rate": 2.2967311741853797e-06,
+ "loss": 0.4456,
+ "step": 1054
+ },
+ {
+ "epoch": 3.2974947807933193,
+ "grad_norm": 0.7689793109893799,
+ "learning_rate": 2.2924161682871053e-06,
+ "loss": 0.45,
+ "step": 1055
+ },
+ {
+ "epoch": 3.30062630480167,
+ "grad_norm": 0.8032956719398499,
+ "learning_rate": 2.288101785002421e-06,
+ "loss": 0.4817,
+ "step": 1056
+ },
+ {
+ "epoch": 3.3037578288100207,
+ "grad_norm": 0.6831309795379639,
+ "learning_rate": 2.283788037271609e-06,
+ "loss": 0.4502,
+ "step": 1057
+ },
+ {
+ "epoch": 3.306889352818372,
+ "grad_norm": 0.8581221103668213,
+ "learning_rate": 2.279474938033048e-06,
+ "loss": 0.4569,
+ "step": 1058
+ },
+ {
+ "epoch": 3.3100208768267225,
+ "grad_norm": 0.7937221527099609,
+ "learning_rate": 2.2751625002231696e-06,
+ "loss": 0.4451,
+ "step": 1059
+ },
+ {
+ "epoch": 3.3131524008350732,
+ "grad_norm": 0.8095264434814453,
+ "learning_rate": 2.270850736776422e-06,
+ "loss": 0.4462,
+ "step": 1060
+ },
+ {
+ "epoch": 3.316283924843424,
+ "grad_norm": 0.9141370058059692,
+ "learning_rate": 2.2665396606252332e-06,
+ "loss": 0.419,
+ "step": 1061
+ },
+ {
+ "epoch": 3.3194154488517746,
+ "grad_norm": 0.8648553490638733,
+ "learning_rate": 2.262229284699965e-06,
+ "loss": 0.4562,
+ "step": 1062
+ },
+ {
+ "epoch": 3.3225469728601253,
+ "grad_norm": 0.7716917395591736,
+ "learning_rate": 2.2579196219288825e-06,
+ "loss": 0.4734,
+ "step": 1063
+ },
+ {
+ "epoch": 3.325678496868476,
+ "grad_norm": 0.8074535727500916,
+ "learning_rate": 2.2536106852381116e-06,
+ "loss": 0.4272,
+ "step": 1064
+ },
+ {
+ "epoch": 3.3288100208768268,
+ "grad_norm": 0.8989127278327942,
+ "learning_rate": 2.249302487551599e-06,
+ "loss": 0.4728,
+ "step": 1065
+ },
+ {
+ "epoch": 3.3319415448851775,
+ "grad_norm": 0.7662765383720398,
+ "learning_rate": 2.2449950417910777e-06,
+ "loss": 0.4457,
+ "step": 1066
+ },
+ {
+ "epoch": 3.335073068893528,
+ "grad_norm": 0.7789275050163269,
+ "learning_rate": 2.24068836087602e-06,
+ "loss": 0.3919,
+ "step": 1067
+ },
+ {
+ "epoch": 3.338204592901879,
+ "grad_norm": 0.7359098196029663,
+ "learning_rate": 2.2363824577236097e-06,
+ "loss": 0.4876,
+ "step": 1068
+ },
+ {
+ "epoch": 3.3413361169102296,
+ "grad_norm": 0.9960948824882507,
+ "learning_rate": 2.232077345248695e-06,
+ "loss": 0.4894,
+ "step": 1069
+ },
+ {
+ "epoch": 3.3444676409185803,
+ "grad_norm": 0.7446064352989197,
+ "learning_rate": 2.2277730363637537e-06,
+ "loss": 0.4765,
+ "step": 1070
+ },
+ {
+ "epoch": 3.347599164926931,
+ "grad_norm": 0.7674328088760376,
+ "learning_rate": 2.2234695439788534e-06,
+ "loss": 0.4468,
+ "step": 1071
+ },
+ {
+ "epoch": 3.3507306889352817,
+ "grad_norm": 0.8957347869873047,
+ "learning_rate": 2.2191668810016105e-06,
+ "loss": 0.4733,
+ "step": 1072
+ },
+ {
+ "epoch": 3.3538622129436324,
+ "grad_norm": 0.9110277891159058,
+ "learning_rate": 2.2148650603371573e-06,
+ "loss": 0.4399,
+ "step": 1073
+ },
+ {
+ "epoch": 3.356993736951983,
+ "grad_norm": 0.9158220291137695,
+ "learning_rate": 2.2105640948880976e-06,
+ "loss": 0.4609,
+ "step": 1074
+ },
+ {
+ "epoch": 3.3601252609603343,
+ "grad_norm": 0.7630184888839722,
+ "learning_rate": 2.206263997554469e-06,
+ "loss": 0.4674,
+ "step": 1075
+ },
+ {
+ "epoch": 3.3632567849686845,
+ "grad_norm": 0.7975273728370667,
+ "learning_rate": 2.20196478123371e-06,
+ "loss": 0.4478,
+ "step": 1076
+ },
+ {
+ "epoch": 3.3663883089770357,
+ "grad_norm": 0.8825351595878601,
+ "learning_rate": 2.19766645882061e-06,
+ "loss": 0.4687,
+ "step": 1077
+ },
+ {
+ "epoch": 3.3695198329853864,
+ "grad_norm": 0.8907671570777893,
+ "learning_rate": 2.1933690432072817e-06,
+ "loss": 0.4223,
+ "step": 1078
+ },
+ {
+ "epoch": 3.372651356993737,
+ "grad_norm": 0.7449545860290527,
+ "learning_rate": 2.189072547283118e-06,
+ "loss": 0.4595,
+ "step": 1079
+ },
+ {
+ "epoch": 3.375782881002088,
+ "grad_norm": 0.8460972309112549,
+ "learning_rate": 2.184776983934751e-06,
+ "loss": 0.4443,
+ "step": 1080
+ },
+ {
+ "epoch": 3.3789144050104385,
+ "grad_norm": 0.7524845600128174,
+ "learning_rate": 2.1804823660460196e-06,
+ "loss": 0.4235,
+ "step": 1081
+ },
+ {
+ "epoch": 3.382045929018789,
+ "grad_norm": 0.8448389768600464,
+ "learning_rate": 2.176188706497921e-06,
+ "loss": 0.4387,
+ "step": 1082
+ },
+ {
+ "epoch": 3.38517745302714,
+ "grad_norm": 0.7701981663703918,
+ "learning_rate": 2.1718960181685838e-06,
+ "loss": 0.4257,
+ "step": 1083
+ },
+ {
+ "epoch": 3.3883089770354906,
+ "grad_norm": 0.8178983330726624,
+ "learning_rate": 2.167604313933219e-06,
+ "loss": 0.4983,
+ "step": 1084
+ },
+ {
+ "epoch": 3.3914405010438413,
+ "grad_norm": 0.7477235198020935,
+ "learning_rate": 2.163313606664091e-06,
+ "loss": 0.4559,
+ "step": 1085
+ },
+ {
+ "epoch": 3.394572025052192,
+ "grad_norm": 0.8127962350845337,
+ "learning_rate": 2.1590239092304694e-06,
+ "loss": 0.453,
+ "step": 1086
+ },
+ {
+ "epoch": 3.3977035490605427,
+ "grad_norm": 0.7462339997291565,
+ "learning_rate": 2.1547352344985966e-06,
+ "loss": 0.4697,
+ "step": 1087
+ },
+ {
+ "epoch": 3.4008350730688934,
+ "grad_norm": 0.9641384482383728,
+ "learning_rate": 2.1504475953316483e-06,
+ "loss": 0.4495,
+ "step": 1088
+ },
+ {
+ "epoch": 3.403966597077244,
+ "grad_norm": 0.7612512707710266,
+ "learning_rate": 2.146161004589693e-06,
+ "loss": 0.4579,
+ "step": 1089
+ },
+ {
+ "epoch": 3.407098121085595,
+ "grad_norm": 0.7547829747200012,
+ "learning_rate": 2.141875475129655e-06,
+ "loss": 0.4334,
+ "step": 1090
+ },
+ {
+ "epoch": 3.4102296450939455,
+ "grad_norm": 0.8036953806877136,
+ "learning_rate": 2.137591019805278e-06,
+ "loss": 0.4466,
+ "step": 1091
+ },
+ {
+ "epoch": 3.4133611691022967,
+ "grad_norm": 0.7319284081459045,
+ "learning_rate": 2.1333076514670784e-06,
+ "loss": 0.4942,
+ "step": 1092
+ },
+ {
+ "epoch": 3.416492693110647,
+ "grad_norm": 0.8278589248657227,
+ "learning_rate": 2.1290253829623165e-06,
+ "loss": 0.4554,
+ "step": 1093
+ },
+ {
+ "epoch": 3.419624217118998,
+ "grad_norm": 0.733059287071228,
+ "learning_rate": 2.124744227134954e-06,
+ "loss": 0.4187,
+ "step": 1094
+ },
+ {
+ "epoch": 3.422755741127349,
+ "grad_norm": 0.8222727179527283,
+ "learning_rate": 2.1204641968256136e-06,
+ "loss": 0.4587,
+ "step": 1095
+ },
+ {
+ "epoch": 3.4258872651356995,
+ "grad_norm": 0.8296732902526855,
+ "learning_rate": 2.1161853048715438e-06,
+ "loss": 0.4868,
+ "step": 1096
+ },
+ {
+ "epoch": 3.4290187891440502,
+ "grad_norm": 0.7309690713882446,
+ "learning_rate": 2.1119075641065758e-06,
+ "loss": 0.4594,
+ "step": 1097
+ },
+ {
+ "epoch": 3.432150313152401,
+ "grad_norm": 1.4901788234710693,
+ "learning_rate": 2.1076309873610916e-06,
+ "loss": 0.4216,
+ "step": 1098
+ },
+ {
+ "epoch": 3.4352818371607516,
+ "grad_norm": 0.7993581891059875,
+ "learning_rate": 2.1033555874619794e-06,
+ "loss": 0.4842,
+ "step": 1099
+ },
+ {
+ "epoch": 3.4384133611691023,
+ "grad_norm": 0.8846752643585205,
+ "learning_rate": 2.0990813772325995e-06,
+ "loss": 0.4395,
+ "step": 1100
+ },
+ {
+ "epoch": 3.441544885177453,
+ "grad_norm": 1.0796778202056885,
+ "learning_rate": 2.0948083694927436e-06,
+ "loss": 0.4573,
+ "step": 1101
+ },
+ {
+ "epoch": 3.4446764091858038,
+ "grad_norm": 0.74623042345047,
+ "learning_rate": 2.090536577058595e-06,
+ "loss": 0.4563,
+ "step": 1102
+ },
+ {
+ "epoch": 3.4478079331941545,
+ "grad_norm": 0.8245521783828735,
+ "learning_rate": 2.086266012742692e-06,
+ "loss": 0.4433,
+ "step": 1103
+ },
+ {
+ "epoch": 3.450939457202505,
+ "grad_norm": 0.8003777265548706,
+ "learning_rate": 2.081996689353893e-06,
+ "loss": 0.4599,
+ "step": 1104
+ },
+ {
+ "epoch": 3.454070981210856,
+ "grad_norm": 0.8309001922607422,
+ "learning_rate": 2.0777286196973302e-06,
+ "loss": 0.485,
+ "step": 1105
+ },
+ {
+ "epoch": 3.4572025052192066,
+ "grad_norm": 0.8299122452735901,
+ "learning_rate": 2.0734618165743782e-06,
+ "loss": 0.4685,
+ "step": 1106
+ },
+ {
+ "epoch": 3.4603340292275573,
+ "grad_norm": 0.9347029328346252,
+ "learning_rate": 2.069196292782611e-06,
+ "loss": 0.4615,
+ "step": 1107
+ },
+ {
+ "epoch": 3.463465553235908,
+ "grad_norm": 0.7146593332290649,
+ "learning_rate": 2.064932061115766e-06,
+ "loss": 0.4433,
+ "step": 1108
+ },
+ {
+ "epoch": 3.4665970772442587,
+ "grad_norm": 0.7674420475959778,
+ "learning_rate": 2.0606691343637063e-06,
+ "loss": 0.4444,
+ "step": 1109
+ },
+ {
+ "epoch": 3.4697286012526094,
+ "grad_norm": 0.7925504446029663,
+ "learning_rate": 2.05640752531238e-06,
+ "loss": 0.4631,
+ "step": 1110
+ },
+ {
+ "epoch": 3.4728601252609606,
+ "grad_norm": 0.7755677700042725,
+ "learning_rate": 2.0521472467437825e-06,
+ "loss": 0.4709,
+ "step": 1111
+ },
+ {
+ "epoch": 3.4759916492693113,
+ "grad_norm": 0.8535795211791992,
+ "learning_rate": 2.0478883114359187e-06,
+ "loss": 0.442,
+ "step": 1112
+ },
+ {
+ "epoch": 3.479123173277662,
+ "grad_norm": 0.723953127861023,
+ "learning_rate": 2.043630732162767e-06,
+ "loss": 0.4782,
+ "step": 1113
+ },
+ {
+ "epoch": 3.4822546972860127,
+ "grad_norm": 0.7817316651344299,
+ "learning_rate": 2.0393745216942343e-06,
+ "loss": 0.4841,
+ "step": 1114
+ },
+ {
+ "epoch": 3.4853862212943634,
+ "grad_norm": 0.8878781795501709,
+ "learning_rate": 2.0351196927961268e-06,
+ "loss": 0.4673,
+ "step": 1115
+ },
+ {
+ "epoch": 3.488517745302714,
+ "grad_norm": 0.8645241856575012,
+ "learning_rate": 2.030866258230104e-06,
+ "loss": 0.432,
+ "step": 1116
+ },
+ {
+ "epoch": 3.491649269311065,
+ "grad_norm": 0.7294583320617676,
+ "learning_rate": 2.026614230753643e-06,
+ "loss": 0.4683,
+ "step": 1117
+ },
+ {
+ "epoch": 3.4947807933194155,
+ "grad_norm": 0.7412407994270325,
+ "learning_rate": 2.022363623120001e-06,
+ "loss": 0.4523,
+ "step": 1118
+ },
+ {
+ "epoch": 3.497912317327766,
+ "grad_norm": 0.8559291362762451,
+ "learning_rate": 2.0181144480781787e-06,
+ "loss": 0.4309,
+ "step": 1119
+ },
+ {
+ "epoch": 3.501043841336117,
+ "grad_norm": 0.7442825436592102,
+ "learning_rate": 2.0138667183728775e-06,
+ "loss": 0.4096,
+ "step": 1120
+ },
+ {
+ "epoch": 3.5041753653444676,
+ "grad_norm": 0.7605662941932678,
+ "learning_rate": 2.0096204467444645e-06,
+ "loss": 0.4404,
+ "step": 1121
+ },
+ {
+ "epoch": 3.5073068893528183,
+ "grad_norm": 0.7984277009963989,
+ "learning_rate": 2.005375645928935e-06,
+ "loss": 0.4661,
+ "step": 1122
+ },
+ {
+ "epoch": 3.510438413361169,
+ "grad_norm": 1.1044552326202393,
+ "learning_rate": 2.001132328657869e-06,
+ "loss": 0.4185,
+ "step": 1123
+ },
+ {
+ "epoch": 3.5135699373695197,
+ "grad_norm": 0.8210328817367554,
+ "learning_rate": 1.996890507658401e-06,
+ "loss": 0.4746,
+ "step": 1124
+ },
+ {
+ "epoch": 3.5167014613778704,
+ "grad_norm": 0.7302148342132568,
+ "learning_rate": 1.9926501956531758e-06,
+ "loss": 0.4333,
+ "step": 1125
+ },
+ {
+ "epoch": 3.519832985386221,
+ "grad_norm": 0.7713826894760132,
+ "learning_rate": 1.9884114053603114e-06,
+ "loss": 0.4485,
+ "step": 1126
+ },
+ {
+ "epoch": 3.522964509394572,
+ "grad_norm": 0.7386549711227417,
+ "learning_rate": 1.984174149493365e-06,
+ "loss": 0.4678,
+ "step": 1127
+ },
+ {
+ "epoch": 3.526096033402923,
+ "grad_norm": 0.8006004095077515,
+ "learning_rate": 1.979938440761287e-06,
+ "loss": 0.4755,
+ "step": 1128
+ },
+ {
+ "epoch": 3.5292275574112733,
+ "grad_norm": 1.0635333061218262,
+ "learning_rate": 1.97570429186839e-06,
+ "loss": 0.4985,
+ "step": 1129
+ },
+ {
+ "epoch": 3.5323590814196244,
+ "grad_norm": 0.7470075488090515,
+ "learning_rate": 1.9714717155143083e-06,
+ "loss": 0.4307,
+ "step": 1130
+ },
+ {
+ "epoch": 3.535490605427975,
+ "grad_norm": 0.8314558863639832,
+ "learning_rate": 1.967240724393959e-06,
+ "loss": 0.4894,
+ "step": 1131
+ },
+ {
+ "epoch": 3.538622129436326,
+ "grad_norm": 0.761443018913269,
+ "learning_rate": 1.963011331197506e-06,
+ "loss": 0.4653,
+ "step": 1132
+ },
+ {
+ "epoch": 3.5417536534446765,
+ "grad_norm": 0.7483212351799011,
+ "learning_rate": 1.9587835486103163e-06,
+ "loss": 0.4456,
+ "step": 1133
+ },
+ {
+ "epoch": 3.5448851774530272,
+ "grad_norm": 0.7794159054756165,
+ "learning_rate": 1.9545573893129306e-06,
+ "loss": 0.4707,
+ "step": 1134
+ },
+ {
+ "epoch": 3.548016701461378,
+ "grad_norm": 0.8016185760498047,
+ "learning_rate": 1.950332865981019e-06,
+ "loss": 0.4547,
+ "step": 1135
+ },
+ {
+ "epoch": 3.5511482254697286,
+ "grad_norm": 0.8089869618415833,
+ "learning_rate": 1.9461099912853453e-06,
+ "loss": 0.4499,
+ "step": 1136
+ },
+ {
+ "epoch": 3.5542797494780793,
+ "grad_norm": 0.7774782180786133,
+ "learning_rate": 1.9418887778917286e-06,
+ "loss": 0.4531,
+ "step": 1137
+ },
+ {
+ "epoch": 3.55741127348643,
+ "grad_norm": 0.7793645262718201,
+ "learning_rate": 1.937669238461003e-06,
+ "loss": 0.4553,
+ "step": 1138
+ },
+ {
+ "epoch": 3.5605427974947808,
+ "grad_norm": 0.8139959573745728,
+ "learning_rate": 1.933451385648985e-06,
+ "loss": 0.458,
+ "step": 1139
+ },
+ {
+ "epoch": 3.5636743215031315,
+ "grad_norm": 0.7517053484916687,
+ "learning_rate": 1.929235232106431e-06,
+ "loss": 0.4779,
+ "step": 1140
+ },
+ {
+ "epoch": 3.566805845511482,
+ "grad_norm": 0.8851562142372131,
+ "learning_rate": 1.925020790479e-06,
+ "loss": 0.425,
+ "step": 1141
+ },
+ {
+ "epoch": 3.569937369519833,
+ "grad_norm": 0.8129401803016663,
+ "learning_rate": 1.920808073407218e-06,
+ "loss": 0.4616,
+ "step": 1142
+ },
+ {
+ "epoch": 3.5730688935281836,
+ "grad_norm": 0.7110117077827454,
+ "learning_rate": 1.916597093526437e-06,
+ "loss": 0.4748,
+ "step": 1143
+ },
+ {
+ "epoch": 3.5762004175365343,
+ "grad_norm": 0.8268555402755737,
+ "learning_rate": 1.912387863466798e-06,
+ "loss": 0.4752,
+ "step": 1144
+ },
+ {
+ "epoch": 3.5793319415448854,
+ "grad_norm": 1.1036733388900757,
+ "learning_rate": 1.9081803958531967e-06,
+ "loss": 0.4879,
+ "step": 1145
+ },
+ {
+ "epoch": 3.5824634655532357,
+ "grad_norm": 0.8561109304428101,
+ "learning_rate": 1.9039747033052395e-06,
+ "loss": 0.4409,
+ "step": 1146
+ },
+ {
+ "epoch": 3.585594989561587,
+ "grad_norm": 0.7597541809082031,
+ "learning_rate": 1.8997707984372119e-06,
+ "loss": 0.4518,
+ "step": 1147
+ },
+ {
+ "epoch": 3.588726513569937,
+ "grad_norm": 0.7225353121757507,
+ "learning_rate": 1.8955686938580329e-06,
+ "loss": 0.4735,
+ "step": 1148
+ },
+ {
+ "epoch": 3.5918580375782883,
+ "grad_norm": 0.9409791231155396,
+ "learning_rate": 1.8913684021712264e-06,
+ "loss": 0.4249,
+ "step": 1149
+ },
+ {
+ "epoch": 3.594989561586639,
+ "grad_norm": 0.8757275342941284,
+ "learning_rate": 1.8871699359748763e-06,
+ "loss": 0.4479,
+ "step": 1150
+ },
+ {
+ "epoch": 3.5981210855949897,
+ "grad_norm": 0.8090003728866577,
+ "learning_rate": 1.882973307861593e-06,
+ "loss": 0.4846,
+ "step": 1151
+ },
+ {
+ "epoch": 3.6012526096033404,
+ "grad_norm": 0.7568825483322144,
+ "learning_rate": 1.8787785304184726e-06,
+ "loss": 0.4301,
+ "step": 1152
+ },
+ {
+ "epoch": 3.604384133611691,
+ "grad_norm": 0.8233815431594849,
+ "learning_rate": 1.8745856162270592e-06,
+ "loss": 0.4838,
+ "step": 1153
+ },
+ {
+ "epoch": 3.607515657620042,
+ "grad_norm": 0.7817628979682922,
+ "learning_rate": 1.8703945778633121e-06,
+ "loss": 0.4669,
+ "step": 1154
+ },
+ {
+ "epoch": 3.6106471816283925,
+ "grad_norm": 0.821304202079773,
+ "learning_rate": 1.8662054278975605e-06,
+ "loss": 0.4536,
+ "step": 1155
+ },
+ {
+ "epoch": 3.613778705636743,
+ "grad_norm": 0.9304089546203613,
+ "learning_rate": 1.8620181788944712e-06,
+ "loss": 0.4489,
+ "step": 1156
+ },
+ {
+ "epoch": 3.616910229645094,
+ "grad_norm": 0.8202670216560364,
+ "learning_rate": 1.8578328434130114e-06,
+ "loss": 0.4309,
+ "step": 1157
+ },
+ {
+ "epoch": 3.6200417536534446,
+ "grad_norm": 0.8890257477760315,
+ "learning_rate": 1.8536494340064051e-06,
+ "loss": 0.4736,
+ "step": 1158
+ },
+ {
+ "epoch": 3.6231732776617953,
+ "grad_norm": 0.7940590381622314,
+ "learning_rate": 1.8494679632221013e-06,
+ "loss": 0.4468,
+ "step": 1159
+ },
+ {
+ "epoch": 3.626304801670146,
+ "grad_norm": 0.8388273119926453,
+ "learning_rate": 1.845288443601736e-06,
+ "loss": 0.4753,
+ "step": 1160
+ },
+ {
+ "epoch": 3.6294363256784967,
+ "grad_norm": 0.9392285346984863,
+ "learning_rate": 1.84111088768109e-06,
+ "loss": 0.4736,
+ "step": 1161
+ },
+ {
+ "epoch": 3.632567849686848,
+ "grad_norm": 0.7411681413650513,
+ "learning_rate": 1.8369353079900576e-06,
+ "loss": 0.4303,
+ "step": 1162
+ },
+ {
+ "epoch": 3.635699373695198,
+ "grad_norm": 0.8722569942474365,
+ "learning_rate": 1.8327617170526014e-06,
+ "loss": 0.4604,
+ "step": 1163
+ },
+ {
+ "epoch": 3.6388308977035493,
+ "grad_norm": 0.786891758441925,
+ "learning_rate": 1.8285901273867229e-06,
+ "loss": 0.4756,
+ "step": 1164
+ },
+ {
+ "epoch": 3.6419624217118995,
+ "grad_norm": 0.8159083724021912,
+ "learning_rate": 1.824420551504419e-06,
+ "loss": 0.4675,
+ "step": 1165
+ },
+ {
+ "epoch": 3.6450939457202507,
+ "grad_norm": 0.8271334767341614,
+ "learning_rate": 1.8202530019116487e-06,
+ "loss": 0.4311,
+ "step": 1166
+ },
+ {
+ "epoch": 3.6482254697286014,
+ "grad_norm": 0.7617189288139343,
+ "learning_rate": 1.816087491108292e-06,
+ "loss": 0.4522,
+ "step": 1167
+ },
+ {
+ "epoch": 3.651356993736952,
+ "grad_norm": 0.7248172760009766,
+ "learning_rate": 1.8119240315881126e-06,
+ "loss": 0.465,
+ "step": 1168
+ },
+ {
+ "epoch": 3.654488517745303,
+ "grad_norm": 0.8606911897659302,
+ "learning_rate": 1.8077626358387235e-06,
+ "loss": 0.4524,
+ "step": 1169
+ },
+ {
+ "epoch": 3.6576200417536535,
+ "grad_norm": 0.8571308851242065,
+ "learning_rate": 1.8036033163415484e-06,
+ "loss": 0.4625,
+ "step": 1170
+ },
+ {
+ "epoch": 3.6607515657620042,
+ "grad_norm": 0.7239511609077454,
+ "learning_rate": 1.7994460855717812e-06,
+ "loss": 0.5025,
+ "step": 1171
+ },
+ {
+ "epoch": 3.663883089770355,
+ "grad_norm": 0.7958929538726807,
+ "learning_rate": 1.7952909559983544e-06,
+ "loss": 0.4382,
+ "step": 1172
+ },
+ {
+ "epoch": 3.6670146137787056,
+ "grad_norm": 0.7920124530792236,
+ "learning_rate": 1.7911379400838947e-06,
+ "loss": 0.4393,
+ "step": 1173
+ },
+ {
+ "epoch": 3.6701461377870563,
+ "grad_norm": 0.8072578310966492,
+ "learning_rate": 1.7869870502846903e-06,
+ "loss": 0.4627,
+ "step": 1174
+ },
+ {
+ "epoch": 3.673277661795407,
+ "grad_norm": 0.8586218357086182,
+ "learning_rate": 1.7828382990506543e-06,
+ "loss": 0.4456,
+ "step": 1175
+ },
+ {
+ "epoch": 3.6764091858037578,
+ "grad_norm": 0.8741613030433655,
+ "learning_rate": 1.7786916988252845e-06,
+ "loss": 0.4613,
+ "step": 1176
+ },
+ {
+ "epoch": 3.6795407098121085,
+ "grad_norm": 0.7691352367401123,
+ "learning_rate": 1.774547262045626e-06,
+ "loss": 0.4641,
+ "step": 1177
+ },
+ {
+ "epoch": 3.682672233820459,
+ "grad_norm": 0.7866089940071106,
+ "learning_rate": 1.7704050011422357e-06,
+ "loss": 0.4308,
+ "step": 1178
+ },
+ {
+ "epoch": 3.68580375782881,
+ "grad_norm": 0.9934884309768677,
+ "learning_rate": 1.7662649285391447e-06,
+ "loss": 0.4434,
+ "step": 1179
+ },
+ {
+ "epoch": 3.6889352818371606,
+ "grad_norm": 0.794385552406311,
+ "learning_rate": 1.7621270566538204e-06,
+ "loss": 0.4481,
+ "step": 1180
+ },
+ {
+ "epoch": 3.6920668058455117,
+ "grad_norm": 0.7573548555374146,
+ "learning_rate": 1.7579913978971296e-06,
+ "loss": 0.4525,
+ "step": 1181
+ },
+ {
+ "epoch": 3.695198329853862,
+ "grad_norm": 0.7073976993560791,
+ "learning_rate": 1.7538579646733023e-06,
+ "loss": 0.4214,
+ "step": 1182
+ },
+ {
+ "epoch": 3.698329853862213,
+ "grad_norm": 0.8009579181671143,
+ "learning_rate": 1.7497267693798902e-06,
+ "loss": 0.4542,
+ "step": 1183
+ },
+ {
+ "epoch": 3.701461377870564,
+ "grad_norm": 1.4488778114318848,
+ "learning_rate": 1.7455978244077348e-06,
+ "loss": 0.443,
+ "step": 1184
+ },
+ {
+ "epoch": 3.7045929018789145,
+ "grad_norm": 1.0529266595840454,
+ "learning_rate": 1.7414711421409292e-06,
+ "loss": 0.4908,
+ "step": 1185
+ },
+ {
+ "epoch": 3.7077244258872653,
+ "grad_norm": 0.757431149482727,
+ "learning_rate": 1.7373467349567775e-06,
+ "loss": 0.4454,
+ "step": 1186
+ },
+ {
+ "epoch": 3.710855949895616,
+ "grad_norm": 0.7086379528045654,
+ "learning_rate": 1.733224615225763e-06,
+ "loss": 0.4292,
+ "step": 1187
+ },
+ {
+ "epoch": 3.7139874739039667,
+ "grad_norm": 0.7454110383987427,
+ "learning_rate": 1.7291047953115049e-06,
+ "loss": 0.4421,
+ "step": 1188
+ },
+ {
+ "epoch": 3.7171189979123174,
+ "grad_norm": 0.804027795791626,
+ "learning_rate": 1.7249872875707257e-06,
+ "loss": 0.4819,
+ "step": 1189
+ },
+ {
+ "epoch": 3.720250521920668,
+ "grad_norm": 0.8159645199775696,
+ "learning_rate": 1.7208721043532146e-06,
+ "loss": 0.4628,
+ "step": 1190
+ },
+ {
+ "epoch": 3.723382045929019,
+ "grad_norm": 0.8451672196388245,
+ "learning_rate": 1.7167592580017866e-06,
+ "loss": 0.4773,
+ "step": 1191
+ },
+ {
+ "epoch": 3.7265135699373695,
+ "grad_norm": 0.920553982257843,
+ "learning_rate": 1.7126487608522492e-06,
+ "loss": 0.4787,
+ "step": 1192
+ },
+ {
+ "epoch": 3.72964509394572,
+ "grad_norm": 0.9169708490371704,
+ "learning_rate": 1.7085406252333613e-06,
+ "loss": 0.4543,
+ "step": 1193
+ },
+ {
+ "epoch": 3.732776617954071,
+ "grad_norm": 0.7245096564292908,
+ "learning_rate": 1.7044348634668023e-06,
+ "loss": 0.4599,
+ "step": 1194
+ },
+ {
+ "epoch": 3.7359081419624216,
+ "grad_norm": 0.835832417011261,
+ "learning_rate": 1.7003314878671284e-06,
+ "loss": 0.4794,
+ "step": 1195
+ },
+ {
+ "epoch": 3.7390396659707723,
+ "grad_norm": 0.8455896973609924,
+ "learning_rate": 1.696230510741742e-06,
+ "loss": 0.4329,
+ "step": 1196
+ },
+ {
+ "epoch": 3.742171189979123,
+ "grad_norm": 0.743611752986908,
+ "learning_rate": 1.692131944390849e-06,
+ "loss": 0.4455,
+ "step": 1197
+ },
+ {
+ "epoch": 3.745302713987474,
+ "grad_norm": 1.1334915161132812,
+ "learning_rate": 1.6880358011074272e-06,
+ "loss": 0.4673,
+ "step": 1198
+ },
+ {
+ "epoch": 3.7484342379958244,
+ "grad_norm": 0.6935724020004272,
+ "learning_rate": 1.6839420931771828e-06,
+ "loss": 0.4686,
+ "step": 1199
+ },
+ {
+ "epoch": 3.7515657620041756,
+ "grad_norm": 0.962006151676178,
+ "learning_rate": 1.6798508328785213e-06,
+ "loss": 0.4474,
+ "step": 1200
+ },
+ {
+ "epoch": 3.754697286012526,
+ "grad_norm": 1.1499714851379395,
+ "learning_rate": 1.6757620324825047e-06,
+ "loss": 0.4659,
+ "step": 1201
+ },
+ {
+ "epoch": 3.757828810020877,
+ "grad_norm": 0.7689645886421204,
+ "learning_rate": 1.6716757042528192e-06,
+ "loss": 0.4552,
+ "step": 1202
+ },
+ {
+ "epoch": 3.7609603340292277,
+ "grad_norm": 0.7482030391693115,
+ "learning_rate": 1.6675918604457352e-06,
+ "loss": 0.4747,
+ "step": 1203
+ },
+ {
+ "epoch": 3.7640918580375784,
+ "grad_norm": 0.7727032899856567,
+ "learning_rate": 1.6635105133100686e-06,
+ "loss": 0.4508,
+ "step": 1204
+ },
+ {
+ "epoch": 3.767223382045929,
+ "grad_norm": 0.8722149133682251,
+ "learning_rate": 1.6594316750871514e-06,
+ "loss": 0.4685,
+ "step": 1205
+ },
+ {
+ "epoch": 3.77035490605428,
+ "grad_norm": 0.771304726600647,
+ "learning_rate": 1.6553553580107884e-06,
+ "loss": 0.4418,
+ "step": 1206
+ },
+ {
+ "epoch": 3.7734864300626305,
+ "grad_norm": 0.767315149307251,
+ "learning_rate": 1.6512815743072214e-06,
+ "loss": 0.4532,
+ "step": 1207
+ },
+ {
+ "epoch": 3.776617954070981,
+ "grad_norm": 0.8825518488883972,
+ "learning_rate": 1.6472103361950976e-06,
+ "loss": 0.468,
+ "step": 1208
+ },
+ {
+ "epoch": 3.779749478079332,
+ "grad_norm": 0.8887981176376343,
+ "learning_rate": 1.6431416558854243e-06,
+ "loss": 0.4264,
+ "step": 1209
+ },
+ {
+ "epoch": 3.7828810020876826,
+ "grad_norm": 0.8399733304977417,
+ "learning_rate": 1.63907554558154e-06,
+ "loss": 0.4405,
+ "step": 1210
+ },
+ {
+ "epoch": 3.7860125260960333,
+ "grad_norm": 0.8112586140632629,
+ "learning_rate": 1.6350120174790751e-06,
+ "loss": 0.445,
+ "step": 1211
+ },
+ {
+ "epoch": 3.789144050104384,
+ "grad_norm": 0.860775351524353,
+ "learning_rate": 1.6309510837659137e-06,
+ "loss": 0.4557,
+ "step": 1212
+ },
+ {
+ "epoch": 3.7922755741127347,
+ "grad_norm": 0.8522343039512634,
+ "learning_rate": 1.626892756622161e-06,
+ "loss": 0.481,
+ "step": 1213
+ },
+ {
+ "epoch": 3.7954070981210855,
+ "grad_norm": 0.7927511930465698,
+ "learning_rate": 1.6228370482200988e-06,
+ "loss": 0.4328,
+ "step": 1214
+ },
+ {
+ "epoch": 3.798538622129436,
+ "grad_norm": 0.7350064516067505,
+ "learning_rate": 1.6187839707241604e-06,
+ "loss": 0.4604,
+ "step": 1215
+ },
+ {
+ "epoch": 3.801670146137787,
+ "grad_norm": 0.8363698124885559,
+ "learning_rate": 1.6147335362908847e-06,
+ "loss": 0.4271,
+ "step": 1216
+ },
+ {
+ "epoch": 3.804801670146138,
+ "grad_norm": 1.080613613128662,
+ "learning_rate": 1.610685757068885e-06,
+ "loss": 0.447,
+ "step": 1217
+ },
+ {
+ "epoch": 3.8079331941544883,
+ "grad_norm": 1.1507478952407837,
+ "learning_rate": 1.6066406451988104e-06,
+ "loss": 0.4664,
+ "step": 1218
+ },
+ {
+ "epoch": 3.8110647181628394,
+ "grad_norm": 0.7778187990188599,
+ "learning_rate": 1.6025982128133073e-06,
+ "loss": 0.456,
+ "step": 1219
+ },
+ {
+ "epoch": 3.81419624217119,
+ "grad_norm": 0.8383583426475525,
+ "learning_rate": 1.5985584720369876e-06,
+ "loss": 0.4684,
+ "step": 1220
+ },
+ {
+ "epoch": 3.817327766179541,
+ "grad_norm": 0.7743321061134338,
+ "learning_rate": 1.5945214349863914e-06,
+ "loss": 0.4567,
+ "step": 1221
+ },
+ {
+ "epoch": 3.8204592901878915,
+ "grad_norm": 0.8020774126052856,
+ "learning_rate": 1.5904871137699462e-06,
+ "loss": 0.4175,
+ "step": 1222
+ },
+ {
+ "epoch": 3.8235908141962422,
+ "grad_norm": 0.790318489074707,
+ "learning_rate": 1.5864555204879375e-06,
+ "loss": 0.469,
+ "step": 1223
+ },
+ {
+ "epoch": 3.826722338204593,
+ "grad_norm": 0.8583689332008362,
+ "learning_rate": 1.5824266672324652e-06,
+ "loss": 0.4931,
+ "step": 1224
+ },
+ {
+ "epoch": 3.8298538622129437,
+ "grad_norm": 0.7788206934928894,
+ "learning_rate": 1.5784005660874125e-06,
+ "loss": 0.4643,
+ "step": 1225
+ },
+ {
+ "epoch": 3.8329853862212944,
+ "grad_norm": 0.8385717868804932,
+ "learning_rate": 1.574377229128409e-06,
+ "loss": 0.4567,
+ "step": 1226
+ },
+ {
+ "epoch": 3.836116910229645,
+ "grad_norm": 0.8447727560997009,
+ "learning_rate": 1.5703566684227922e-06,
+ "loss": 0.42,
+ "step": 1227
+ },
+ {
+ "epoch": 3.8392484342379958,
+ "grad_norm": 0.7286496758460999,
+ "learning_rate": 1.5663388960295742e-06,
+ "loss": 0.4603,
+ "step": 1228
+ },
+ {
+ "epoch": 3.8423799582463465,
+ "grad_norm": 0.8493947982788086,
+ "learning_rate": 1.562323923999401e-06,
+ "loss": 0.4731,
+ "step": 1229
+ },
+ {
+ "epoch": 3.845511482254697,
+ "grad_norm": 0.8641151785850525,
+ "learning_rate": 1.5583117643745233e-06,
+ "loss": 0.4491,
+ "step": 1230
+ },
+ {
+ "epoch": 3.848643006263048,
+ "grad_norm": 0.9493702054023743,
+ "learning_rate": 1.5543024291887532e-06,
+ "loss": 0.411,
+ "step": 1231
+ },
+ {
+ "epoch": 3.8517745302713986,
+ "grad_norm": 0.7246205806732178,
+ "learning_rate": 1.5502959304674337e-06,
+ "loss": 0.4569,
+ "step": 1232
+ },
+ {
+ "epoch": 3.8549060542797493,
+ "grad_norm": 0.7576872110366821,
+ "learning_rate": 1.5462922802273994e-06,
+ "loss": 0.4548,
+ "step": 1233
+ },
+ {
+ "epoch": 3.8580375782881005,
+ "grad_norm": 0.7710747718811035,
+ "learning_rate": 1.5422914904769404e-06,
+ "loss": 0.447,
+ "step": 1234
+ },
+ {
+ "epoch": 3.8611691022964507,
+ "grad_norm": 0.7661204934120178,
+ "learning_rate": 1.5382935732157677e-06,
+ "loss": 0.4601,
+ "step": 1235
+ },
+ {
+ "epoch": 3.864300626304802,
+ "grad_norm": 0.7133070826530457,
+ "learning_rate": 1.5342985404349788e-06,
+ "loss": 0.4245,
+ "step": 1236
+ },
+ {
+ "epoch": 3.867432150313152,
+ "grad_norm": 0.7716459631919861,
+ "learning_rate": 1.5303064041170163e-06,
+ "loss": 0.4543,
+ "step": 1237
+ },
+ {
+ "epoch": 3.8705636743215033,
+ "grad_norm": 0.737501859664917,
+ "learning_rate": 1.5263171762356388e-06,
+ "loss": 0.4405,
+ "step": 1238
+ },
+ {
+ "epoch": 3.873695198329854,
+ "grad_norm": 0.7885998487472534,
+ "learning_rate": 1.5223308687558786e-06,
+ "loss": 0.4412,
+ "step": 1239
+ },
+ {
+ "epoch": 3.8768267223382047,
+ "grad_norm": 0.7516661882400513,
+ "learning_rate": 1.5183474936340092e-06,
+ "loss": 0.4185,
+ "step": 1240
+ },
+ {
+ "epoch": 3.8799582463465554,
+ "grad_norm": 0.8790446519851685,
+ "learning_rate": 1.5143670628175111e-06,
+ "loss": 0.444,
+ "step": 1241
+ },
+ {
+ "epoch": 3.883089770354906,
+ "grad_norm": 0.7695789337158203,
+ "learning_rate": 1.5103895882450315e-06,
+ "loss": 0.4615,
+ "step": 1242
+ },
+ {
+ "epoch": 3.886221294363257,
+ "grad_norm": 0.7859196662902832,
+ "learning_rate": 1.506415081846353e-06,
+ "loss": 0.4379,
+ "step": 1243
+ },
+ {
+ "epoch": 3.8893528183716075,
+ "grad_norm": 0.7771942615509033,
+ "learning_rate": 1.5024435555423522e-06,
+ "loss": 0.5013,
+ "step": 1244
+ },
+ {
+ "epoch": 3.892484342379958,
+ "grad_norm": 0.7415695190429688,
+ "learning_rate": 1.498475021244971e-06,
+ "loss": 0.4861,
+ "step": 1245
+ },
+ {
+ "epoch": 3.895615866388309,
+ "grad_norm": 0.7909391522407532,
+ "learning_rate": 1.4945094908571755e-06,
+ "loss": 0.4599,
+ "step": 1246
+ },
+ {
+ "epoch": 3.8987473903966596,
+ "grad_norm": 0.7749060988426208,
+ "learning_rate": 1.490546976272923e-06,
+ "loss": 0.3986,
+ "step": 1247
+ },
+ {
+ "epoch": 3.9018789144050103,
+ "grad_norm": 0.810681164264679,
+ "learning_rate": 1.4865874893771248e-06,
+ "loss": 0.4495,
+ "step": 1248
+ },
+ {
+ "epoch": 3.905010438413361,
+ "grad_norm": 0.8018531799316406,
+ "learning_rate": 1.4826310420456103e-06,
+ "loss": 0.4426,
+ "step": 1249
+ },
+ {
+ "epoch": 3.9081419624217117,
+ "grad_norm": 0.756064236164093,
+ "learning_rate": 1.4786776461450924e-06,
+ "loss": 0.4474,
+ "step": 1250
+ },
+ {
+ "epoch": 3.911273486430063,
+ "grad_norm": 0.7581740021705627,
+ "learning_rate": 1.4747273135331347e-06,
+ "loss": 0.4494,
+ "step": 1251
+ },
+ {
+ "epoch": 3.914405010438413,
+ "grad_norm": 0.7666076421737671,
+ "learning_rate": 1.4707800560581086e-06,
+ "loss": 0.4593,
+ "step": 1252
+ },
+ {
+ "epoch": 3.9175365344467643,
+ "grad_norm": 0.7339973449707031,
+ "learning_rate": 1.4668358855591664e-06,
+ "loss": 0.4682,
+ "step": 1253
+ },
+ {
+ "epoch": 3.9206680584551146,
+ "grad_norm": 0.8504599928855896,
+ "learning_rate": 1.4628948138661974e-06,
+ "loss": 0.4504,
+ "step": 1254
+ },
+ {
+ "epoch": 3.9237995824634657,
+ "grad_norm": 0.8332642912864685,
+ "learning_rate": 1.4589568527997985e-06,
+ "loss": 0.5024,
+ "step": 1255
+ },
+ {
+ "epoch": 3.9269311064718164,
+ "grad_norm": 0.7813694477081299,
+ "learning_rate": 1.4550220141712384e-06,
+ "loss": 0.4547,
+ "step": 1256
+ },
+ {
+ "epoch": 3.930062630480167,
+ "grad_norm": 0.842258632183075,
+ "learning_rate": 1.451090309782417e-06,
+ "loss": 0.4584,
+ "step": 1257
+ },
+ {
+ "epoch": 3.933194154488518,
+ "grad_norm": 0.8159133791923523,
+ "learning_rate": 1.4471617514258373e-06,
+ "loss": 0.4538,
+ "step": 1258
+ },
+ {
+ "epoch": 3.9363256784968685,
+ "grad_norm": 0.8117021322250366,
+ "learning_rate": 1.4432363508845626e-06,
+ "loss": 0.4315,
+ "step": 1259
+ },
+ {
+ "epoch": 3.9394572025052192,
+ "grad_norm": 0.8087465167045593,
+ "learning_rate": 1.4393141199321881e-06,
+ "loss": 0.4367,
+ "step": 1260
+ },
+ {
+ "epoch": 3.94258872651357,
+ "grad_norm": 0.7954697012901306,
+ "learning_rate": 1.435395070332801e-06,
+ "loss": 0.4515,
+ "step": 1261
+ },
+ {
+ "epoch": 3.9457202505219207,
+ "grad_norm": 0.7305286526679993,
+ "learning_rate": 1.4314792138409454e-06,
+ "loss": 0.4879,
+ "step": 1262
+ },
+ {
+ "epoch": 3.9488517745302714,
+ "grad_norm": 0.8883433938026428,
+ "learning_rate": 1.4275665622015908e-06,
+ "loss": 0.4489,
+ "step": 1263
+ },
+ {
+ "epoch": 3.951983298538622,
+ "grad_norm": 0.8176298141479492,
+ "learning_rate": 1.4236571271500909e-06,
+ "loss": 0.4583,
+ "step": 1264
+ },
+ {
+ "epoch": 3.9551148225469728,
+ "grad_norm": 0.8042430281639099,
+ "learning_rate": 1.4197509204121563e-06,
+ "loss": 0.4277,
+ "step": 1265
+ },
+ {
+ "epoch": 3.9582463465553235,
+ "grad_norm": 0.8153829574584961,
+ "learning_rate": 1.4158479537038095e-06,
+ "loss": 0.4389,
+ "step": 1266
+ },
+ {
+ "epoch": 3.961377870563674,
+ "grad_norm": 0.7908188104629517,
+ "learning_rate": 1.4119482387313588e-06,
+ "loss": 0.4421,
+ "step": 1267
+ },
+ {
+ "epoch": 3.964509394572025,
+ "grad_norm": 0.831758975982666,
+ "learning_rate": 1.4080517871913596e-06,
+ "loss": 0.4308,
+ "step": 1268
+ },
+ {
+ "epoch": 3.9676409185803756,
+ "grad_norm": 0.8191989064216614,
+ "learning_rate": 1.4041586107705758e-06,
+ "loss": 0.4654,
+ "step": 1269
+ },
+ {
+ "epoch": 3.9707724425887267,
+ "grad_norm": 0.9455055594444275,
+ "learning_rate": 1.4002687211459524e-06,
+ "loss": 0.4668,
+ "step": 1270
+ },
+ {
+ "epoch": 3.973903966597077,
+ "grad_norm": 0.9271034002304077,
+ "learning_rate": 1.396382129984572e-06,
+ "loss": 0.4414,
+ "step": 1271
+ },
+ {
+ "epoch": 3.977035490605428,
+ "grad_norm": 0.7652955651283264,
+ "learning_rate": 1.392498848943627e-06,
+ "loss": 0.4575,
+ "step": 1272
+ },
+ {
+ "epoch": 3.980167014613779,
+ "grad_norm": 0.7850046157836914,
+ "learning_rate": 1.3886188896703816e-06,
+ "loss": 0.4554,
+ "step": 1273
+ },
+ {
+ "epoch": 3.9832985386221296,
+ "grad_norm": 0.7194349765777588,
+ "learning_rate": 1.3847422638021357e-06,
+ "loss": 0.437,
+ "step": 1274
+ },
+ {
+ "epoch": 3.9864300626304803,
+ "grad_norm": 0.8726270198822021,
+ "learning_rate": 1.3808689829661899e-06,
+ "loss": 0.4657,
+ "step": 1275
+ },
+ {
+ "epoch": 3.989561586638831,
+ "grad_norm": 0.7741451263427734,
+ "learning_rate": 1.3769990587798146e-06,
+ "loss": 0.3931,
+ "step": 1276
+ },
+ {
+ "epoch": 3.9926931106471817,
+ "grad_norm": 0.9160324931144714,
+ "learning_rate": 1.3731325028502116e-06,
+ "loss": 0.4358,
+ "step": 1277
+ },
+ {
+ "epoch": 3.9958246346555324,
+ "grad_norm": 0.7819761037826538,
+ "learning_rate": 1.3692693267744806e-06,
+ "loss": 0.4767,
+ "step": 1278
+ },
+ {
+ "epoch": 3.998956158663883,
+ "grad_norm": 0.7595860958099365,
+ "learning_rate": 1.365409542139583e-06,
+ "loss": 0.4217,
+ "step": 1279
+ },
+ {
+ "epoch": 4.0,
+ "grad_norm": 0.7595860958099365,
+ "learning_rate": 1.361553160522307e-06,
+ "loss": 0.1478,
+ "step": 1280
+ },
+ {
+ "epoch": 4.003131524008351,
+ "grad_norm": 1.0793898105621338,
+ "learning_rate": 1.357700193489237e-06,
+ "loss": 0.4453,
+ "step": 1281
+ },
+ {
+ "epoch": 4.006263048016701,
+ "grad_norm": 0.8228668570518494,
+ "learning_rate": 1.3538506525967148e-06,
+ "loss": 0.3981,
+ "step": 1282
+ },
+ {
+ "epoch": 4.009394572025053,
+ "grad_norm": 0.7946518063545227,
+ "learning_rate": 1.3500045493908044e-06,
+ "loss": 0.423,
+ "step": 1283
+ },
+ {
+ "epoch": 4.012526096033403,
+ "grad_norm": 0.7924147248268127,
+ "learning_rate": 1.3461618954072614e-06,
+ "loss": 0.4211,
+ "step": 1284
+ },
+ {
+ "epoch": 4.015657620041754,
+ "grad_norm": 0.891967236995697,
+ "learning_rate": 1.3423227021714937e-06,
+ "loss": 0.4365,
+ "step": 1285
+ },
+ {
+ "epoch": 4.018789144050104,
+ "grad_norm": 0.9970533847808838,
+ "learning_rate": 1.3384869811985323e-06,
+ "loss": 0.4053,
+ "step": 1286
+ },
+ {
+ "epoch": 4.021920668058455,
+ "grad_norm": 1.001489281654358,
+ "learning_rate": 1.33465474399299e-06,
+ "loss": 0.4382,
+ "step": 1287
+ },
+ {
+ "epoch": 4.025052192066806,
+ "grad_norm": 0.7584173083305359,
+ "learning_rate": 1.3308260020490332e-06,
+ "loss": 0.441,
+ "step": 1288
+ },
+ {
+ "epoch": 4.028183716075157,
+ "grad_norm": 0.7444385290145874,
+ "learning_rate": 1.327000766850345e-06,
+ "loss": 0.4172,
+ "step": 1289
+ },
+ {
+ "epoch": 4.031315240083507,
+ "grad_norm": 0.8264429569244385,
+ "learning_rate": 1.3231790498700886e-06,
+ "loss": 0.3956,
+ "step": 1290
+ },
+ {
+ "epoch": 4.034446764091858,
+ "grad_norm": 0.7339959740638733,
+ "learning_rate": 1.319360862570877e-06,
+ "loss": 0.4431,
+ "step": 1291
+ },
+ {
+ "epoch": 4.0375782881002085,
+ "grad_norm": 0.9837627410888672,
+ "learning_rate": 1.3155462164047345e-06,
+ "loss": 0.4439,
+ "step": 1292
+ },
+ {
+ "epoch": 4.04070981210856,
+ "grad_norm": 0.8792996406555176,
+ "learning_rate": 1.3117351228130657e-06,
+ "loss": 0.4664,
+ "step": 1293
+ },
+ {
+ "epoch": 4.04384133611691,
+ "grad_norm": 0.8377962112426758,
+ "learning_rate": 1.3079275932266205e-06,
+ "loss": 0.4267,
+ "step": 1294
+ },
+ {
+ "epoch": 4.046972860125261,
+ "grad_norm": 0.755979597568512,
+ "learning_rate": 1.3041236390654577e-06,
+ "loss": 0.4394,
+ "step": 1295
+ },
+ {
+ "epoch": 4.050104384133611,
+ "grad_norm": 0.7994539737701416,
+ "learning_rate": 1.3003232717389114e-06,
+ "loss": 0.4547,
+ "step": 1296
+ },
+ {
+ "epoch": 4.053235908141962,
+ "grad_norm": 0.8308882117271423,
+ "learning_rate": 1.2965265026455598e-06,
+ "loss": 0.408,
+ "step": 1297
+ },
+ {
+ "epoch": 4.056367432150314,
+ "grad_norm": 0.8120949268341064,
+ "learning_rate": 1.292733343173188e-06,
+ "loss": 0.3524,
+ "step": 1298
+ },
+ {
+ "epoch": 4.059498956158664,
+ "grad_norm": 0.7639186978340149,
+ "learning_rate": 1.288943804698755e-06,
+ "loss": 0.4677,
+ "step": 1299
+ },
+ {
+ "epoch": 4.062630480167015,
+ "grad_norm": 0.8439323306083679,
+ "learning_rate": 1.2851578985883586e-06,
+ "loss": 0.436,
+ "step": 1300
+ },
+ {
+ "epoch": 4.065762004175365,
+ "grad_norm": 0.7270680069923401,
+ "learning_rate": 1.2813756361972001e-06,
+ "loss": 0.4519,
+ "step": 1301
+ },
+ {
+ "epoch": 4.068893528183716,
+ "grad_norm": 0.7746233940124512,
+ "learning_rate": 1.2775970288695554e-06,
+ "loss": 0.4503,
+ "step": 1302
+ },
+ {
+ "epoch": 4.072025052192067,
+ "grad_norm": 0.7725366950035095,
+ "learning_rate": 1.2738220879387376e-06,
+ "loss": 0.4467,
+ "step": 1303
+ },
+ {
+ "epoch": 4.075156576200418,
+ "grad_norm": 0.7371346950531006,
+ "learning_rate": 1.2700508247270597e-06,
+ "loss": 0.4144,
+ "step": 1304
+ },
+ {
+ "epoch": 4.078288100208768,
+ "grad_norm": 0.778403639793396,
+ "learning_rate": 1.2662832505458057e-06,
+ "loss": 0.4345,
+ "step": 1305
+ },
+ {
+ "epoch": 4.081419624217119,
+ "grad_norm": 0.8683139085769653,
+ "learning_rate": 1.2625193766951957e-06,
+ "loss": 0.4345,
+ "step": 1306
+ },
+ {
+ "epoch": 4.0845511482254695,
+ "grad_norm": 0.7849321365356445,
+ "learning_rate": 1.2587592144643513e-06,
+ "loss": 0.4505,
+ "step": 1307
+ },
+ {
+ "epoch": 4.087682672233821,
+ "grad_norm": 0.9351589679718018,
+ "learning_rate": 1.2550027751312618e-06,
+ "loss": 0.4173,
+ "step": 1308
+ },
+ {
+ "epoch": 4.090814196242171,
+ "grad_norm": 0.7938134074211121,
+ "learning_rate": 1.2512500699627489e-06,
+ "loss": 0.4061,
+ "step": 1309
+ },
+ {
+ "epoch": 4.093945720250522,
+ "grad_norm": 0.8015687465667725,
+ "learning_rate": 1.2475011102144337e-06,
+ "loss": 0.423,
+ "step": 1310
+ },
+ {
+ "epoch": 4.097077244258872,
+ "grad_norm": 0.8477723598480225,
+ "learning_rate": 1.2437559071307062e-06,
+ "loss": 0.4475,
+ "step": 1311
+ },
+ {
+ "epoch": 4.1002087682672235,
+ "grad_norm": 0.7643985152244568,
+ "learning_rate": 1.2400144719446885e-06,
+ "loss": 0.4613,
+ "step": 1312
+ },
+ {
+ "epoch": 4.103340292275574,
+ "grad_norm": 0.7612454295158386,
+ "learning_rate": 1.2362768158781985e-06,
+ "loss": 0.4333,
+ "step": 1313
+ },
+ {
+ "epoch": 4.106471816283925,
+ "grad_norm": 0.9039669036865234,
+ "learning_rate": 1.2325429501417232e-06,
+ "loss": 0.4091,
+ "step": 1314
+ },
+ {
+ "epoch": 4.109603340292275,
+ "grad_norm": 0.8423281908035278,
+ "learning_rate": 1.228812885934378e-06,
+ "loss": 0.4482,
+ "step": 1315
+ },
+ {
+ "epoch": 4.112734864300626,
+ "grad_norm": 0.8980326652526855,
+ "learning_rate": 1.2250866344438782e-06,
+ "loss": 0.4354,
+ "step": 1316
+ },
+ {
+ "epoch": 4.115866388308977,
+ "grad_norm": 0.7539350986480713,
+ "learning_rate": 1.221364206846502e-06,
+ "loss": 0.4332,
+ "step": 1317
+ },
+ {
+ "epoch": 4.118997912317328,
+ "grad_norm": 0.855925977230072,
+ "learning_rate": 1.2176456143070597e-06,
+ "loss": 0.4198,
+ "step": 1318
+ },
+ {
+ "epoch": 4.122129436325679,
+ "grad_norm": 0.7506774663925171,
+ "learning_rate": 1.2139308679788594e-06,
+ "loss": 0.4484,
+ "step": 1319
+ },
+ {
+ "epoch": 4.125260960334029,
+ "grad_norm": 0.820524275302887,
+ "learning_rate": 1.2102199790036709e-06,
+ "loss": 0.4208,
+ "step": 1320
+ },
+ {
+ "epoch": 4.12839248434238,
+ "grad_norm": 0.8255232572555542,
+ "learning_rate": 1.2065129585116968e-06,
+ "loss": 0.4431,
+ "step": 1321
+ },
+ {
+ "epoch": 4.1315240083507305,
+ "grad_norm": 0.7454650402069092,
+ "learning_rate": 1.2028098176215347e-06,
+ "loss": 0.3833,
+ "step": 1322
+ },
+ {
+ "epoch": 4.134655532359082,
+ "grad_norm": 0.7500002980232239,
+ "learning_rate": 1.199110567440148e-06,
+ "loss": 0.4486,
+ "step": 1323
+ },
+ {
+ "epoch": 4.137787056367432,
+ "grad_norm": 0.783704400062561,
+ "learning_rate": 1.1954152190628304e-06,
+ "loss": 0.4535,
+ "step": 1324
+ },
+ {
+ "epoch": 4.140918580375783,
+ "grad_norm": 0.7665114402770996,
+ "learning_rate": 1.1917237835731713e-06,
+ "loss": 0.4575,
+ "step": 1325
+ },
+ {
+ "epoch": 4.144050104384133,
+ "grad_norm": 0.7992947697639465,
+ "learning_rate": 1.1880362720430235e-06,
+ "loss": 0.4294,
+ "step": 1326
+ },
+ {
+ "epoch": 4.1471816283924845,
+ "grad_norm": 0.8273386359214783,
+ "learning_rate": 1.1843526955324736e-06,
+ "loss": 0.4578,
+ "step": 1327
+ },
+ {
+ "epoch": 4.150313152400835,
+ "grad_norm": 0.7992604970932007,
+ "learning_rate": 1.1806730650898029e-06,
+ "loss": 0.4125,
+ "step": 1328
+ },
+ {
+ "epoch": 4.153444676409186,
+ "grad_norm": 0.7935183644294739,
+ "learning_rate": 1.1769973917514612e-06,
+ "loss": 0.4537,
+ "step": 1329
+ },
+ {
+ "epoch": 4.156576200417536,
+ "grad_norm": 0.8761199116706848,
+ "learning_rate": 1.173325686542022e-06,
+ "loss": 0.4433,
+ "step": 1330
+ },
+ {
+ "epoch": 4.159707724425887,
+ "grad_norm": 0.7764279246330261,
+ "learning_rate": 1.1696579604741643e-06,
+ "loss": 0.4165,
+ "step": 1331
+ },
+ {
+ "epoch": 4.162839248434238,
+ "grad_norm": 0.9479373693466187,
+ "learning_rate": 1.165994224548629e-06,
+ "loss": 0.42,
+ "step": 1332
+ },
+ {
+ "epoch": 4.165970772442589,
+ "grad_norm": 1.1482913494110107,
+ "learning_rate": 1.162334489754191e-06,
+ "loss": 0.4545,
+ "step": 1333
+ },
+ {
+ "epoch": 4.16910229645094,
+ "grad_norm": 0.825964093208313,
+ "learning_rate": 1.1586787670676228e-06,
+ "loss": 0.4423,
+ "step": 1334
+ },
+ {
+ "epoch": 4.17223382045929,
+ "grad_norm": 0.8352368474006653,
+ "learning_rate": 1.1550270674536626e-06,
+ "loss": 0.4509,
+ "step": 1335
+ },
+ {
+ "epoch": 4.175365344467641,
+ "grad_norm": 0.8211585879325867,
+ "learning_rate": 1.1513794018649846e-06,
+ "loss": 0.4482,
+ "step": 1336
+ },
+ {
+ "epoch": 4.1784968684759916,
+ "grad_norm": 0.7863073945045471,
+ "learning_rate": 1.1477357812421628e-06,
+ "loss": 0.4318,
+ "step": 1337
+ },
+ {
+ "epoch": 4.181628392484343,
+ "grad_norm": 0.8023143410682678,
+ "learning_rate": 1.1440962165136366e-06,
+ "loss": 0.4269,
+ "step": 1338
+ },
+ {
+ "epoch": 4.184759916492693,
+ "grad_norm": 0.8101564049720764,
+ "learning_rate": 1.1404607185956843e-06,
+ "loss": 0.4284,
+ "step": 1339
+ },
+ {
+ "epoch": 4.187891440501044,
+ "grad_norm": 0.755865216255188,
+ "learning_rate": 1.1368292983923826e-06,
+ "loss": 0.4195,
+ "step": 1340
+ },
+ {
+ "epoch": 4.191022964509394,
+ "grad_norm": 0.7991048097610474,
+ "learning_rate": 1.1332019667955805e-06,
+ "loss": 0.3774,
+ "step": 1341
+ },
+ {
+ "epoch": 4.1941544885177455,
+ "grad_norm": 0.7407388091087341,
+ "learning_rate": 1.1295787346848641e-06,
+ "loss": 0.4321,
+ "step": 1342
+ },
+ {
+ "epoch": 4.197286012526096,
+ "grad_norm": 0.8769527673721313,
+ "learning_rate": 1.1259596129275205e-06,
+ "loss": 0.4349,
+ "step": 1343
+ },
+ {
+ "epoch": 4.200417536534447,
+ "grad_norm": 0.7908845543861389,
+ "learning_rate": 1.1223446123785128e-06,
+ "loss": 0.4595,
+ "step": 1344
+ },
+ {
+ "epoch": 4.203549060542797,
+ "grad_norm": 0.810578465461731,
+ "learning_rate": 1.1187337438804394e-06,
+ "loss": 0.4464,
+ "step": 1345
+ },
+ {
+ "epoch": 4.206680584551148,
+ "grad_norm": 0.8122991323471069,
+ "learning_rate": 1.1151270182635082e-06,
+ "loss": 0.4437,
+ "step": 1346
+ },
+ {
+ "epoch": 4.209812108559499,
+ "grad_norm": 0.8065986633300781,
+ "learning_rate": 1.1115244463454988e-06,
+ "loss": 0.4198,
+ "step": 1347
+ },
+ {
+ "epoch": 4.21294363256785,
+ "grad_norm": 0.7652766108512878,
+ "learning_rate": 1.107926038931734e-06,
+ "loss": 0.4141,
+ "step": 1348
+ },
+ {
+ "epoch": 4.2160751565762,
+ "grad_norm": 0.7936646938323975,
+ "learning_rate": 1.1043318068150457e-06,
+ "loss": 0.43,
+ "step": 1349
+ },
+ {
+ "epoch": 4.219206680584551,
+ "grad_norm": 0.8732025027275085,
+ "learning_rate": 1.1007417607757435e-06,
+ "loss": 0.4243,
+ "step": 1350
+ },
+ {
+ "epoch": 4.222338204592901,
+ "grad_norm": 0.8516861200332642,
+ "learning_rate": 1.0971559115815795e-06,
+ "loss": 0.4352,
+ "step": 1351
+ },
+ {
+ "epoch": 4.225469728601253,
+ "grad_norm": 0.7985629439353943,
+ "learning_rate": 1.093574269987718e-06,
+ "loss": 0.4193,
+ "step": 1352
+ },
+ {
+ "epoch": 4.228601252609604,
+ "grad_norm": 0.7350221872329712,
+ "learning_rate": 1.0899968467367056e-06,
+ "loss": 0.4466,
+ "step": 1353
+ },
+ {
+ "epoch": 4.231732776617954,
+ "grad_norm": 0.7626602649688721,
+ "learning_rate": 1.086423652558436e-06,
+ "loss": 0.399,
+ "step": 1354
+ },
+ {
+ "epoch": 4.234864300626305,
+ "grad_norm": 0.7596367597579956,
+ "learning_rate": 1.082854698170117e-06,
+ "loss": 0.4023,
+ "step": 1355
+ },
+ {
+ "epoch": 4.237995824634655,
+ "grad_norm": 0.7802876830101013,
+ "learning_rate": 1.0792899942762406e-06,
+ "loss": 0.4351,
+ "step": 1356
+ },
+ {
+ "epoch": 4.241127348643007,
+ "grad_norm": 1.0809392929077148,
+ "learning_rate": 1.0757295515685504e-06,
+ "loss": 0.4271,
+ "step": 1357
+ },
+ {
+ "epoch": 4.244258872651357,
+ "grad_norm": 0.8581835031509399,
+ "learning_rate": 1.0721733807260094e-06,
+ "loss": 0.4332,
+ "step": 1358
+ },
+ {
+ "epoch": 4.247390396659708,
+ "grad_norm": 0.884496808052063,
+ "learning_rate": 1.0686214924147686e-06,
+ "loss": 0.4317,
+ "step": 1359
+ },
+ {
+ "epoch": 4.250521920668058,
+ "grad_norm": 0.7651242017745972,
+ "learning_rate": 1.0650738972881325e-06,
+ "loss": 0.4614,
+ "step": 1360
+ },
+ {
+ "epoch": 4.253653444676409,
+ "grad_norm": 0.7798812985420227,
+ "learning_rate": 1.0615306059865286e-06,
+ "loss": 0.4088,
+ "step": 1361
+ },
+ {
+ "epoch": 4.25678496868476,
+ "grad_norm": 0.9011983871459961,
+ "learning_rate": 1.0579916291374776e-06,
+ "loss": 0.4242,
+ "step": 1362
+ },
+ {
+ "epoch": 4.259916492693111,
+ "grad_norm": 0.7490668892860413,
+ "learning_rate": 1.0544569773555602e-06,
+ "loss": 0.4369,
+ "step": 1363
+ },
+ {
+ "epoch": 4.263048016701461,
+ "grad_norm": 0.7936168909072876,
+ "learning_rate": 1.0509266612423819e-06,
+ "loss": 0.4321,
+ "step": 1364
+ },
+ {
+ "epoch": 4.266179540709812,
+ "grad_norm": 0.8428751826286316,
+ "learning_rate": 1.047400691386547e-06,
+ "loss": 0.4391,
+ "step": 1365
+ },
+ {
+ "epoch": 4.2693110647181625,
+ "grad_norm": 0.7458098530769348,
+ "learning_rate": 1.0438790783636213e-06,
+ "loss": 0.4789,
+ "step": 1366
+ },
+ {
+ "epoch": 4.272442588726514,
+ "grad_norm": 0.8869938254356384,
+ "learning_rate": 1.0403618327361056e-06,
+ "loss": 0.4181,
+ "step": 1367
+ },
+ {
+ "epoch": 4.275574112734864,
+ "grad_norm": 0.7711350321769714,
+ "learning_rate": 1.0368489650533989e-06,
+ "loss": 0.4334,
+ "step": 1368
+ },
+ {
+ "epoch": 4.278705636743215,
+ "grad_norm": 0.7643832564353943,
+ "learning_rate": 1.0333404858517712e-06,
+ "loss": 0.4444,
+ "step": 1369
+ },
+ {
+ "epoch": 4.281837160751566,
+ "grad_norm": 0.925422191619873,
+ "learning_rate": 1.02983640565433e-06,
+ "loss": 0.4134,
+ "step": 1370
+ },
+ {
+ "epoch": 4.284968684759916,
+ "grad_norm": 1.4223377704620361,
+ "learning_rate": 1.0263367349709866e-06,
+ "loss": 0.4441,
+ "step": 1371
+ },
+ {
+ "epoch": 4.288100208768268,
+ "grad_norm": 0.9713383316993713,
+ "learning_rate": 1.0228414842984297e-06,
+ "loss": 0.418,
+ "step": 1372
+ },
+ {
+ "epoch": 4.291231732776618,
+ "grad_norm": 0.8378648161888123,
+ "learning_rate": 1.0193506641200874e-06,
+ "loss": 0.4017,
+ "step": 1373
+ },
+ {
+ "epoch": 4.294363256784969,
+ "grad_norm": 0.9343763589859009,
+ "learning_rate": 1.0158642849061018e-06,
+ "loss": 0.4468,
+ "step": 1374
+ },
+ {
+ "epoch": 4.297494780793319,
+ "grad_norm": 0.7863892912864685,
+ "learning_rate": 1.012382357113296e-06,
+ "loss": 0.4235,
+ "step": 1375
+ },
+ {
+ "epoch": 4.30062630480167,
+ "grad_norm": 0.8109874725341797,
+ "learning_rate": 1.0089048911851388e-06,
+ "loss": 0.4356,
+ "step": 1376
+ },
+ {
+ "epoch": 4.303757828810021,
+ "grad_norm": 0.7978389859199524,
+ "learning_rate": 1.005431897551717e-06,
+ "loss": 0.4517,
+ "step": 1377
+ },
+ {
+ "epoch": 4.306889352818372,
+ "grad_norm": 0.8213954567909241,
+ "learning_rate": 1.001963386629705e-06,
+ "loss": 0.4263,
+ "step": 1378
+ },
+ {
+ "epoch": 4.310020876826722,
+ "grad_norm": 0.7367708683013916,
+ "learning_rate": 9.984993688223319e-07,
+ "loss": 0.4467,
+ "step": 1379
+ },
+ {
+ "epoch": 4.313152400835073,
+ "grad_norm": 0.8723400831222534,
+ "learning_rate": 9.950398545193508e-07,
+ "loss": 0.4517,
+ "step": 1380
+ },
+ {
+ "epoch": 4.3162839248434235,
+ "grad_norm": 0.7927054166793823,
+ "learning_rate": 9.915848540970033e-07,
+ "loss": 0.4755,
+ "step": 1381
+ },
+ {
+ "epoch": 4.319415448851775,
+ "grad_norm": 0.8598108291625977,
+ "learning_rate": 9.88134377917997e-07,
+ "loss": 0.4539,
+ "step": 1382
+ },
+ {
+ "epoch": 4.322546972860125,
+ "grad_norm": 1.0660723447799683,
+ "learning_rate": 9.84688436331468e-07,
+ "loss": 0.4541,
+ "step": 1383
+ },
+ {
+ "epoch": 4.325678496868476,
+ "grad_norm": 0.840942919254303,
+ "learning_rate": 9.81247039672953e-07,
+ "loss": 0.4239,
+ "step": 1384
+ },
+ {
+ "epoch": 4.328810020876826,
+ "grad_norm": 0.8142675757408142,
+ "learning_rate": 9.778101982643549e-07,
+ "loss": 0.4259,
+ "step": 1385
+ },
+ {
+ "epoch": 4.3319415448851775,
+ "grad_norm": 0.7841728329658508,
+ "learning_rate": 9.743779224139133e-07,
+ "loss": 0.432,
+ "step": 1386
+ },
+ {
+ "epoch": 4.335073068893529,
+ "grad_norm": 0.9642646908760071,
+ "learning_rate": 9.709502224161769e-07,
+ "loss": 0.4423,
+ "step": 1387
+ },
+ {
+ "epoch": 4.338204592901879,
+ "grad_norm": 0.7967872619628906,
+ "learning_rate": 9.675271085519692e-07,
+ "loss": 0.4428,
+ "step": 1388
+ },
+ {
+ "epoch": 4.34133611691023,
+ "grad_norm": 0.7652244567871094,
+ "learning_rate": 9.64108591088356e-07,
+ "loss": 0.4607,
+ "step": 1389
+ },
+ {
+ "epoch": 4.34446764091858,
+ "grad_norm": 0.8806408643722534,
+ "learning_rate": 9.606946802786204e-07,
+ "loss": 0.3859,
+ "step": 1390
+ },
+ {
+ "epoch": 4.347599164926931,
+ "grad_norm": 0.8638584613800049,
+ "learning_rate": 9.572853863622252e-07,
+ "loss": 0.3758,
+ "step": 1391
+ },
+ {
+ "epoch": 4.350730688935282,
+ "grad_norm": 0.7262263894081116,
+ "learning_rate": 9.538807195647882e-07,
+ "loss": 0.4435,
+ "step": 1392
+ },
+ {
+ "epoch": 4.353862212943633,
+ "grad_norm": 0.785175621509552,
+ "learning_rate": 9.504806900980482e-07,
+ "loss": 0.4037,
+ "step": 1393
+ },
+ {
+ "epoch": 4.356993736951983,
+ "grad_norm": 0.8310196995735168,
+ "learning_rate": 9.470853081598338e-07,
+ "loss": 0.4434,
+ "step": 1394
+ },
+ {
+ "epoch": 4.360125260960334,
+ "grad_norm": 0.7547351717948914,
+ "learning_rate": 9.436945839340364e-07,
+ "loss": 0.4318,
+ "step": 1395
+ },
+ {
+ "epoch": 4.3632567849686845,
+ "grad_norm": 0.816460907459259,
+ "learning_rate": 9.403085275905746e-07,
+ "loss": 0.4173,
+ "step": 1396
+ },
+ {
+ "epoch": 4.366388308977036,
+ "grad_norm": 0.8159666657447815,
+ "learning_rate": 9.369271492853696e-07,
+ "loss": 0.4506,
+ "step": 1397
+ },
+ {
+ "epoch": 4.369519832985386,
+ "grad_norm": 0.8234115839004517,
+ "learning_rate": 9.335504591603084e-07,
+ "loss": 0.4186,
+ "step": 1398
+ },
+ {
+ "epoch": 4.372651356993737,
+ "grad_norm": 0.9084867238998413,
+ "learning_rate": 9.301784673432187e-07,
+ "loss": 0.4414,
+ "step": 1399
+ },
+ {
+ "epoch": 4.375782881002087,
+ "grad_norm": 0.7826300263404846,
+ "learning_rate": 9.268111839478369e-07,
+ "loss": 0.4415,
+ "step": 1400
+ },
+ {
+ "epoch": 4.3789144050104385,
+ "grad_norm": 0.7957226634025574,
+ "learning_rate": 9.23448619073774e-07,
+ "loss": 0.4307,
+ "step": 1401
+ },
+ {
+ "epoch": 4.382045929018789,
+ "grad_norm": 0.7768528461456299,
+ "learning_rate": 9.200907828064931e-07,
+ "loss": 0.4107,
+ "step": 1402
+ },
+ {
+ "epoch": 4.38517745302714,
+ "grad_norm": 0.878063440322876,
+ "learning_rate": 9.167376852172702e-07,
+ "loss": 0.4232,
+ "step": 1403
+ },
+ {
+ "epoch": 4.388308977035491,
+ "grad_norm": 0.7961450219154358,
+ "learning_rate": 9.133893363631713e-07,
+ "loss": 0.449,
+ "step": 1404
+ },
+ {
+ "epoch": 4.391440501043841,
+ "grad_norm": 0.8734915256500244,
+ "learning_rate": 9.100457462870197e-07,
+ "loss": 0.4392,
+ "step": 1405
+ },
+ {
+ "epoch": 4.3945720250521925,
+ "grad_norm": 0.7659426331520081,
+ "learning_rate": 9.06706925017363e-07,
+ "loss": 0.4391,
+ "step": 1406
+ },
+ {
+ "epoch": 4.397703549060543,
+ "grad_norm": 0.949674665927887,
+ "learning_rate": 9.033728825684466e-07,
+ "loss": 0.4054,
+ "step": 1407
+ },
+ {
+ "epoch": 4.400835073068894,
+ "grad_norm": 0.8209534287452698,
+ "learning_rate": 9.000436289401832e-07,
+ "loss": 0.3973,
+ "step": 1408
+ },
+ {
+ "epoch": 4.403966597077244,
+ "grad_norm": 0.9697580933570862,
+ "learning_rate": 8.967191741181225e-07,
+ "loss": 0.4228,
+ "step": 1409
+ },
+ {
+ "epoch": 4.407098121085595,
+ "grad_norm": 0.7681871652603149,
+ "learning_rate": 8.933995280734217e-07,
+ "loss": 0.4183,
+ "step": 1410
+ },
+ {
+ "epoch": 4.4102296450939455,
+ "grad_norm": 0.7434380054473877,
+ "learning_rate": 8.900847007628103e-07,
+ "loss": 0.4068,
+ "step": 1411
+ },
+ {
+ "epoch": 4.413361169102297,
+ "grad_norm": 0.9835489988327026,
+ "learning_rate": 8.867747021285697e-07,
+ "loss": 0.4356,
+ "step": 1412
+ },
+ {
+ "epoch": 4.416492693110647,
+ "grad_norm": 0.7727295160293579,
+ "learning_rate": 8.834695420984971e-07,
+ "loss": 0.4201,
+ "step": 1413
+ },
+ {
+ "epoch": 4.419624217118998,
+ "grad_norm": 0.8018465042114258,
+ "learning_rate": 8.801692305858775e-07,
+ "loss": 0.4471,
+ "step": 1414
+ },
+ {
+ "epoch": 4.422755741127348,
+ "grad_norm": 0.8191575407981873,
+ "learning_rate": 8.76873777489452e-07,
+ "loss": 0.4438,
+ "step": 1415
+ },
+ {
+ "epoch": 4.4258872651356995,
+ "grad_norm": 0.7755978107452393,
+ "learning_rate": 8.735831926933896e-07,
+ "loss": 0.4441,
+ "step": 1416
+ },
+ {
+ "epoch": 4.42901878914405,
+ "grad_norm": 0.8469848036766052,
+ "learning_rate": 8.702974860672603e-07,
+ "loss": 0.4134,
+ "step": 1417
+ },
+ {
+ "epoch": 4.432150313152401,
+ "grad_norm": 0.7887343168258667,
+ "learning_rate": 8.67016667466001e-07,
+ "loss": 0.4247,
+ "step": 1418
+ },
+ {
+ "epoch": 4.435281837160751,
+ "grad_norm": 0.7888761758804321,
+ "learning_rate": 8.637407467298867e-07,
+ "loss": 0.4313,
+ "step": 1419
+ },
+ {
+ "epoch": 4.438413361169102,
+ "grad_norm": 0.8109477162361145,
+ "learning_rate": 8.60469733684505e-07,
+ "loss": 0.4472,
+ "step": 1420
+ },
+ {
+ "epoch": 4.4415448851774535,
+ "grad_norm": 0.8876966238021851,
+ "learning_rate": 8.572036381407203e-07,
+ "loss": 0.4395,
+ "step": 1421
+ },
+ {
+ "epoch": 4.444676409185804,
+ "grad_norm": 0.7669360637664795,
+ "learning_rate": 8.539424698946503e-07,
+ "loss": 0.3943,
+ "step": 1422
+ },
+ {
+ "epoch": 4.447807933194154,
+ "grad_norm": 0.7810583710670471,
+ "learning_rate": 8.506862387276338e-07,
+ "loss": 0.4189,
+ "step": 1423
+ },
+ {
+ "epoch": 4.450939457202505,
+ "grad_norm": 0.9363948702812195,
+ "learning_rate": 8.474349544061996e-07,
+ "loss": 0.4245,
+ "step": 1424
+ },
+ {
+ "epoch": 4.454070981210856,
+ "grad_norm": 0.8147290349006653,
+ "learning_rate": 8.441886266820418e-07,
+ "loss": 0.4381,
+ "step": 1425
+ },
+ {
+ "epoch": 4.457202505219207,
+ "grad_norm": 0.9227074384689331,
+ "learning_rate": 8.409472652919873e-07,
+ "loss": 0.4289,
+ "step": 1426
+ },
+ {
+ "epoch": 4.460334029227558,
+ "grad_norm": 0.8162581920623779,
+ "learning_rate": 8.377108799579661e-07,
+ "loss": 0.4565,
+ "step": 1427
+ },
+ {
+ "epoch": 4.463465553235908,
+ "grad_norm": 0.7634908556938171,
+ "learning_rate": 8.344794803869835e-07,
+ "loss": 0.4402,
+ "step": 1428
+ },
+ {
+ "epoch": 4.466597077244259,
+ "grad_norm": 0.7960299253463745,
+ "learning_rate": 8.312530762710924e-07,
+ "loss": 0.4533,
+ "step": 1429
+ },
+ {
+ "epoch": 4.469728601252609,
+ "grad_norm": 0.8322834968566895,
+ "learning_rate": 8.280316772873617e-07,
+ "loss": 0.4282,
+ "step": 1430
+ },
+ {
+ "epoch": 4.4728601252609606,
+ "grad_norm": 0.7285633683204651,
+ "learning_rate": 8.248152930978498e-07,
+ "loss": 0.4287,
+ "step": 1431
+ },
+ {
+ "epoch": 4.475991649269311,
+ "grad_norm": 0.760178804397583,
+ "learning_rate": 8.21603933349569e-07,
+ "loss": 0.4243,
+ "step": 1432
+ },
+ {
+ "epoch": 4.479123173277662,
+ "grad_norm": 0.8069238662719727,
+ "learning_rate": 8.183976076744671e-07,
+ "loss": 0.4352,
+ "step": 1433
+ },
+ {
+ "epoch": 4.482254697286012,
+ "grad_norm": 0.8068020343780518,
+ "learning_rate": 8.151963256893911e-07,
+ "loss": 0.4143,
+ "step": 1434
+ },
+ {
+ "epoch": 4.485386221294363,
+ "grad_norm": 0.7954995036125183,
+ "learning_rate": 8.120000969960606e-07,
+ "loss": 0.4122,
+ "step": 1435
+ },
+ {
+ "epoch": 4.488517745302714,
+ "grad_norm": 0.791763961315155,
+ "learning_rate": 8.088089311810379e-07,
+ "loss": 0.4016,
+ "step": 1436
+ },
+ {
+ "epoch": 4.491649269311065,
+ "grad_norm": 0.8935474157333374,
+ "learning_rate": 8.056228378156994e-07,
+ "loss": 0.4392,
+ "step": 1437
+ },
+ {
+ "epoch": 4.494780793319415,
+ "grad_norm": 0.791764497756958,
+ "learning_rate": 8.024418264562094e-07,
+ "loss": 0.4433,
+ "step": 1438
+ },
+ {
+ "epoch": 4.497912317327766,
+ "grad_norm": 0.8151891827583313,
+ "learning_rate": 7.992659066434899e-07,
+ "loss": 0.4161,
+ "step": 1439
+ },
+ {
+ "epoch": 4.5010438413361165,
+ "grad_norm": 0.9389538168907166,
+ "learning_rate": 7.960950879031884e-07,
+ "loss": 0.4405,
+ "step": 1440
+ },
+ {
+ "epoch": 4.504175365344468,
+ "grad_norm": 0.7620337009429932,
+ "learning_rate": 7.929293797456561e-07,
+ "loss": 0.4443,
+ "step": 1441
+ },
+ {
+ "epoch": 4.507306889352819,
+ "grad_norm": 0.7784422039985657,
+ "learning_rate": 7.897687916659133e-07,
+ "loss": 0.4688,
+ "step": 1442
+ },
+ {
+ "epoch": 4.510438413361169,
+ "grad_norm": 0.759872555732727,
+ "learning_rate": 7.866133331436249e-07,
+ "loss": 0.4349,
+ "step": 1443
+ },
+ {
+ "epoch": 4.51356993736952,
+ "grad_norm": 0.8815323114395142,
+ "learning_rate": 7.834630136430713e-07,
+ "loss": 0.4729,
+ "step": 1444
+ },
+ {
+ "epoch": 4.51670146137787,
+ "grad_norm": 0.7574021816253662,
+ "learning_rate": 7.803178426131161e-07,
+ "loss": 0.4263,
+ "step": 1445
+ },
+ {
+ "epoch": 4.519832985386222,
+ "grad_norm": 0.7910036444664001,
+ "learning_rate": 7.771778294871849e-07,
+ "loss": 0.424,
+ "step": 1446
+ },
+ {
+ "epoch": 4.522964509394572,
+ "grad_norm": 0.8388727903366089,
+ "learning_rate": 7.740429836832295e-07,
+ "loss": 0.4495,
+ "step": 1447
+ },
+ {
+ "epoch": 4.526096033402923,
+ "grad_norm": 0.7924241423606873,
+ "learning_rate": 7.709133146037059e-07,
+ "loss": 0.4157,
+ "step": 1448
+ },
+ {
+ "epoch": 4.529227557411273,
+ "grad_norm": 0.8103229403495789,
+ "learning_rate": 7.677888316355411e-07,
+ "loss": 0.4321,
+ "step": 1449
+ },
+ {
+ "epoch": 4.532359081419624,
+ "grad_norm": 0.8175046443939209,
+ "learning_rate": 7.64669544150109e-07,
+ "loss": 0.4375,
+ "step": 1450
+ },
+ {
+ "epoch": 4.535490605427975,
+ "grad_norm": 0.8193264603614807,
+ "learning_rate": 7.615554615032e-07,
+ "loss": 0.4576,
+ "step": 1451
+ },
+ {
+ "epoch": 4.538622129436326,
+ "grad_norm": 0.7646095156669617,
+ "learning_rate": 7.584465930349924e-07,
+ "loss": 0.4095,
+ "step": 1452
+ },
+ {
+ "epoch": 4.541753653444676,
+ "grad_norm": 0.8025760054588318,
+ "learning_rate": 7.553429480700275e-07,
+ "loss": 0.4387,
+ "step": 1453
+ },
+ {
+ "epoch": 4.544885177453027,
+ "grad_norm": 0.7339401841163635,
+ "learning_rate": 7.522445359171768e-07,
+ "loss": 0.4263,
+ "step": 1454
+ },
+ {
+ "epoch": 4.5480167014613775,
+ "grad_norm": 0.764500617980957,
+ "learning_rate": 7.491513658696189e-07,
+ "loss": 0.4694,
+ "step": 1455
+ },
+ {
+ "epoch": 4.551148225469729,
+ "grad_norm": 0.7974144220352173,
+ "learning_rate": 7.460634472048104e-07,
+ "loss": 0.4335,
+ "step": 1456
+ },
+ {
+ "epoch": 4.554279749478079,
+ "grad_norm": 0.7994454503059387,
+ "learning_rate": 7.429807891844546e-07,
+ "loss": 0.4155,
+ "step": 1457
+ },
+ {
+ "epoch": 4.55741127348643,
+ "grad_norm": 0.8552598357200623,
+ "learning_rate": 7.39903401054477e-07,
+ "loss": 0.428,
+ "step": 1458
+ },
+ {
+ "epoch": 4.560542797494781,
+ "grad_norm": 0.8604694604873657,
+ "learning_rate": 7.368312920449985e-07,
+ "loss": 0.4562,
+ "step": 1459
+ },
+ {
+ "epoch": 4.5636743215031315,
+ "grad_norm": 0.7699970602989197,
+ "learning_rate": 7.337644713703049e-07,
+ "loss": 0.4322,
+ "step": 1460
+ },
+ {
+ "epoch": 4.566805845511483,
+ "grad_norm": 0.9426809549331665,
+ "learning_rate": 7.307029482288227e-07,
+ "loss": 0.387,
+ "step": 1461
+ },
+ {
+ "epoch": 4.569937369519833,
+ "grad_norm": 0.8332602381706238,
+ "learning_rate": 7.276467318030841e-07,
+ "loss": 0.4381,
+ "step": 1462
+ },
+ {
+ "epoch": 4.573068893528184,
+ "grad_norm": 0.7953102588653564,
+ "learning_rate": 7.245958312597095e-07,
+ "loss": 0.3994,
+ "step": 1463
+ },
+ {
+ "epoch": 4.576200417536534,
+ "grad_norm": 0.7888246178627014,
+ "learning_rate": 7.215502557493743e-07,
+ "loss": 0.4191,
+ "step": 1464
+ },
+ {
+ "epoch": 4.579331941544885,
+ "grad_norm": 1.00043523311615,
+ "learning_rate": 7.185100144067816e-07,
+ "loss": 0.4321,
+ "step": 1465
+ },
+ {
+ "epoch": 4.582463465553236,
+ "grad_norm": 0.8209455609321594,
+ "learning_rate": 7.154751163506354e-07,
+ "loss": 0.3841,
+ "step": 1466
+ },
+ {
+ "epoch": 4.585594989561587,
+ "grad_norm": 0.7574297785758972,
+ "learning_rate": 7.124455706836131e-07,
+ "loss": 0.452,
+ "step": 1467
+ },
+ {
+ "epoch": 4.588726513569937,
+ "grad_norm": 0.8509530425071716,
+ "learning_rate": 7.094213864923397e-07,
+ "loss": 0.4225,
+ "step": 1468
+ },
+ {
+ "epoch": 4.591858037578288,
+ "grad_norm": 0.7906093597412109,
+ "learning_rate": 7.064025728473589e-07,
+ "loss": 0.418,
+ "step": 1469
+ },
+ {
+ "epoch": 4.5949895615866385,
+ "grad_norm": 0.7408525943756104,
+ "learning_rate": 7.033891388031056e-07,
+ "loss": 0.4366,
+ "step": 1470
+ },
+ {
+ "epoch": 4.59812108559499,
+ "grad_norm": 0.766342043876648,
+ "learning_rate": 7.003810933978805e-07,
+ "loss": 0.4315,
+ "step": 1471
+ },
+ {
+ "epoch": 4.60125260960334,
+ "grad_norm": 0.7852875590324402,
+ "learning_rate": 6.973784456538207e-07,
+ "loss": 0.4332,
+ "step": 1472
+ },
+ {
+ "epoch": 4.604384133611691,
+ "grad_norm": 0.7975596785545349,
+ "learning_rate": 6.943812045768753e-07,
+ "loss": 0.4563,
+ "step": 1473
+ },
+ {
+ "epoch": 4.607515657620041,
+ "grad_norm": 0.755770742893219,
+ "learning_rate": 6.913893791567767e-07,
+ "loss": 0.417,
+ "step": 1474
+ },
+ {
+ "epoch": 4.6106471816283925,
+ "grad_norm": 0.8083193302154541,
+ "learning_rate": 6.884029783670123e-07,
+ "loss": 0.4285,
+ "step": 1475
+ },
+ {
+ "epoch": 4.613778705636744,
+ "grad_norm": 0.8146732449531555,
+ "learning_rate": 6.854220111648022e-07,
+ "loss": 0.4587,
+ "step": 1476
+ },
+ {
+ "epoch": 4.616910229645094,
+ "grad_norm": 0.7958261370658875,
+ "learning_rate": 6.82446486491066e-07,
+ "loss": 0.4584,
+ "step": 1477
+ },
+ {
+ "epoch": 4.620041753653445,
+ "grad_norm": 0.9063606262207031,
+ "learning_rate": 6.79476413270402e-07,
+ "loss": 0.4056,
+ "step": 1478
+ },
+ {
+ "epoch": 4.623173277661795,
+ "grad_norm": 0.8176311254501343,
+ "learning_rate": 6.76511800411056e-07,
+ "loss": 0.3987,
+ "step": 1479
+ },
+ {
+ "epoch": 4.6263048016701465,
+ "grad_norm": 0.7843388319015503,
+ "learning_rate": 6.735526568048975e-07,
+ "loss": 0.4541,
+ "step": 1480
+ },
+ {
+ "epoch": 4.629436325678497,
+ "grad_norm": 0.7491182684898376,
+ "learning_rate": 6.705989913273922e-07,
+ "loss": 0.4218,
+ "step": 1481
+ },
+ {
+ "epoch": 4.632567849686848,
+ "grad_norm": 0.8836645483970642,
+ "learning_rate": 6.676508128375728e-07,
+ "loss": 0.4153,
+ "step": 1482
+ },
+ {
+ "epoch": 4.635699373695198,
+ "grad_norm": 0.7614302635192871,
+ "learning_rate": 6.647081301780175e-07,
+ "loss": 0.4557,
+ "step": 1483
+ },
+ {
+ "epoch": 4.638830897703549,
+ "grad_norm": 0.8144665360450745,
+ "learning_rate": 6.617709521748181e-07,
+ "loss": 0.4103,
+ "step": 1484
+ },
+ {
+ "epoch": 4.6419624217118995,
+ "grad_norm": 0.757859468460083,
+ "learning_rate": 6.588392876375579e-07,
+ "loss": 0.4498,
+ "step": 1485
+ },
+ {
+ "epoch": 4.645093945720251,
+ "grad_norm": 0.8298365473747253,
+ "learning_rate": 6.559131453592837e-07,
+ "loss": 0.4318,
+ "step": 1486
+ },
+ {
+ "epoch": 4.648225469728601,
+ "grad_norm": 0.9429208636283875,
+ "learning_rate": 6.529925341164781e-07,
+ "loss": 0.4272,
+ "step": 1487
+ },
+ {
+ "epoch": 4.651356993736952,
+ "grad_norm": 0.7761420011520386,
+ "learning_rate": 6.500774626690329e-07,
+ "loss": 0.4224,
+ "step": 1488
+ },
+ {
+ "epoch": 4.654488517745302,
+ "grad_norm": 0.8003484606742859,
+ "learning_rate": 6.471679397602273e-07,
+ "loss": 0.4333,
+ "step": 1489
+ },
+ {
+ "epoch": 4.6576200417536535,
+ "grad_norm": 0.8728703856468201,
+ "learning_rate": 6.44263974116697e-07,
+ "loss": 0.4412,
+ "step": 1490
+ },
+ {
+ "epoch": 4.660751565762004,
+ "grad_norm": 0.7923367023468018,
+ "learning_rate": 6.41365574448411e-07,
+ "loss": 0.4401,
+ "step": 1491
+ },
+ {
+ "epoch": 4.663883089770355,
+ "grad_norm": 0.8110585808753967,
+ "learning_rate": 6.384727494486398e-07,
+ "loss": 0.4152,
+ "step": 1492
+ },
+ {
+ "epoch": 4.667014613778706,
+ "grad_norm": 0.7865241765975952,
+ "learning_rate": 6.355855077939385e-07,
+ "loss": 0.4608,
+ "step": 1493
+ },
+ {
+ "epoch": 4.670146137787056,
+ "grad_norm": 0.8125724196434021,
+ "learning_rate": 6.327038581441136e-07,
+ "loss": 0.419,
+ "step": 1494
+ },
+ {
+ "epoch": 4.673277661795407,
+ "grad_norm": 0.8024583458900452,
+ "learning_rate": 6.298278091422003e-07,
+ "loss": 0.4575,
+ "step": 1495
+ },
+ {
+ "epoch": 4.676409185803758,
+ "grad_norm": 0.7569037675857544,
+ "learning_rate": 6.269573694144343e-07,
+ "loss": 0.4257,
+ "step": 1496
+ },
+ {
+ "epoch": 4.679540709812109,
+ "grad_norm": 0.783733069896698,
+ "learning_rate": 6.240925475702275e-07,
+ "loss": 0.4361,
+ "step": 1497
+ },
+ {
+ "epoch": 4.682672233820459,
+ "grad_norm": 0.7900946736335754,
+ "learning_rate": 6.212333522021424e-07,
+ "loss": 0.4372,
+ "step": 1498
+ },
+ {
+ "epoch": 4.68580375782881,
+ "grad_norm": 0.7943285703659058,
+ "learning_rate": 6.183797918858667e-07,
+ "loss": 0.4337,
+ "step": 1499
+ },
+ {
+ "epoch": 4.688935281837161,
+ "grad_norm": 0.7793418765068054,
+ "learning_rate": 6.155318751801842e-07,
+ "loss": 0.4264,
+ "step": 1500
+ },
+ {
+ "epoch": 4.692066805845512,
+ "grad_norm": 1.0244885683059692,
+ "learning_rate": 6.126896106269533e-07,
+ "loss": 0.446,
+ "step": 1501
+ },
+ {
+ "epoch": 4.695198329853862,
+ "grad_norm": 0.8742548823356628,
+ "learning_rate": 6.098530067510808e-07,
+ "loss": 0.3953,
+ "step": 1502
+ },
+ {
+ "epoch": 4.698329853862213,
+ "grad_norm": 0.7844028472900391,
+ "learning_rate": 6.070220720604919e-07,
+ "loss": 0.402,
+ "step": 1503
+ },
+ {
+ "epoch": 4.701461377870563,
+ "grad_norm": 0.7485696077346802,
+ "learning_rate": 6.041968150461119e-07,
+ "loss": 0.4435,
+ "step": 1504
+ },
+ {
+ "epoch": 4.7045929018789145,
+ "grad_norm": 0.8526521921157837,
+ "learning_rate": 6.01377244181833e-07,
+ "loss": 0.4565,
+ "step": 1505
+ },
+ {
+ "epoch": 4.707724425887265,
+ "grad_norm": 0.7832267880439758,
+ "learning_rate": 5.985633679244957e-07,
+ "loss": 0.4489,
+ "step": 1506
+ },
+ {
+ "epoch": 4.710855949895616,
+ "grad_norm": 0.8092924952507019,
+ "learning_rate": 5.957551947138599e-07,
+ "loss": 0.4011,
+ "step": 1507
+ },
+ {
+ "epoch": 4.713987473903966,
+ "grad_norm": 0.8008314371109009,
+ "learning_rate": 5.92952732972579e-07,
+ "loss": 0.4557,
+ "step": 1508
+ },
+ {
+ "epoch": 4.717118997912317,
+ "grad_norm": 0.8025342226028442,
+ "learning_rate": 5.901559911061758e-07,
+ "loss": 0.38,
+ "step": 1509
+ },
+ {
+ "epoch": 4.7202505219206685,
+ "grad_norm": 0.8290171027183533,
+ "learning_rate": 5.873649775030188e-07,
+ "loss": 0.4019,
+ "step": 1510
+ },
+ {
+ "epoch": 4.723382045929019,
+ "grad_norm": 0.8971850872039795,
+ "learning_rate": 5.845797005342943e-07,
+ "loss": 0.4354,
+ "step": 1511
+ },
+ {
+ "epoch": 4.726513569937369,
+ "grad_norm": 0.8087455630302429,
+ "learning_rate": 5.818001685539843e-07,
+ "loss": 0.4537,
+ "step": 1512
+ },
+ {
+ "epoch": 4.72964509394572,
+ "grad_norm": 0.822287380695343,
+ "learning_rate": 5.790263898988355e-07,
+ "loss": 0.4692,
+ "step": 1513
+ },
+ {
+ "epoch": 4.732776617954071,
+ "grad_norm": 1.007889986038208,
+ "learning_rate": 5.762583728883428e-07,
+ "loss": 0.4317,
+ "step": 1514
+ },
+ {
+ "epoch": 4.735908141962422,
+ "grad_norm": 0.7517583966255188,
+ "learning_rate": 5.734961258247177e-07,
+ "loss": 0.4168,
+ "step": 1515
+ },
+ {
+ "epoch": 4.739039665970773,
+ "grad_norm": 1.1469851732254028,
+ "learning_rate": 5.707396569928675e-07,
+ "loss": 0.4412,
+ "step": 1516
+ },
+ {
+ "epoch": 4.742171189979123,
+ "grad_norm": 0.8889291286468506,
+ "learning_rate": 5.679889746603667e-07,
+ "loss": 0.4133,
+ "step": 1517
+ },
+ {
+ "epoch": 4.745302713987474,
+ "grad_norm": 0.7792209386825562,
+ "learning_rate": 5.652440870774342e-07,
+ "loss": 0.4256,
+ "step": 1518
+ },
+ {
+ "epoch": 4.748434237995824,
+ "grad_norm": 0.7862168550491333,
+ "learning_rate": 5.625050024769099e-07,
+ "loss": 0.395,
+ "step": 1519
+ },
+ {
+ "epoch": 4.751565762004176,
+ "grad_norm": 0.8305843472480774,
+ "learning_rate": 5.597717290742285e-07,
+ "loss": 0.4172,
+ "step": 1520
+ },
+ {
+ "epoch": 4.754697286012526,
+ "grad_norm": 0.8393134474754333,
+ "learning_rate": 5.570442750673932e-07,
+ "loss": 0.4438,
+ "step": 1521
+ },
+ {
+ "epoch": 4.757828810020877,
+ "grad_norm": 0.8481642603874207,
+ "learning_rate": 5.543226486369555e-07,
+ "loss": 0.4251,
+ "step": 1522
+ },
+ {
+ "epoch": 4.760960334029227,
+ "grad_norm": 0.8482305407524109,
+ "learning_rate": 5.516068579459849e-07,
+ "loss": 0.4372,
+ "step": 1523
+ },
+ {
+ "epoch": 4.764091858037578,
+ "grad_norm": 0.7428895235061646,
+ "learning_rate": 5.488969111400502e-07,
+ "loss": 0.4017,
+ "step": 1524
+ },
+ {
+ "epoch": 4.767223382045929,
+ "grad_norm": 0.9066843390464783,
+ "learning_rate": 5.461928163471922e-07,
+ "loss": 0.4263,
+ "step": 1525
+ },
+ {
+ "epoch": 4.77035490605428,
+ "grad_norm": 0.8754342794418335,
+ "learning_rate": 5.434945816778972e-07,
+ "loss": 0.4086,
+ "step": 1526
+ },
+ {
+ "epoch": 4.773486430062631,
+ "grad_norm": 0.7580026388168335,
+ "learning_rate": 5.408022152250783e-07,
+ "loss": 0.4331,
+ "step": 1527
+ },
+ {
+ "epoch": 4.776617954070981,
+ "grad_norm": 0.7960991859436035,
+ "learning_rate": 5.381157250640451e-07,
+ "loss": 0.4482,
+ "step": 1528
+ },
+ {
+ "epoch": 4.7797494780793315,
+ "grad_norm": 0.8199532628059387,
+ "learning_rate": 5.354351192524842e-07,
+ "loss": 0.4558,
+ "step": 1529
+ },
+ {
+ "epoch": 4.782881002087683,
+ "grad_norm": 0.8457065224647522,
+ "learning_rate": 5.327604058304312e-07,
+ "loss": 0.4711,
+ "step": 1530
+ },
+ {
+ "epoch": 4.786012526096034,
+ "grad_norm": 0.9078546762466431,
+ "learning_rate": 5.300915928202499e-07,
+ "loss": 0.4341,
+ "step": 1531
+ },
+ {
+ "epoch": 4.789144050104384,
+ "grad_norm": 0.7782992720603943,
+ "learning_rate": 5.274286882266072e-07,
+ "loss": 0.4115,
+ "step": 1532
+ },
+ {
+ "epoch": 4.792275574112735,
+ "grad_norm": 0.894313395023346,
+ "learning_rate": 5.247717000364463e-07,
+ "loss": 0.4415,
+ "step": 1533
+ },
+ {
+ "epoch": 4.7954070981210855,
+ "grad_norm": 0.8233445882797241,
+ "learning_rate": 5.221206362189682e-07,
+ "loss": 0.4198,
+ "step": 1534
+ },
+ {
+ "epoch": 4.798538622129437,
+ "grad_norm": 0.7654063105583191,
+ "learning_rate": 5.194755047256017e-07,
+ "loss": 0.4364,
+ "step": 1535
+ },
+ {
+ "epoch": 4.801670146137787,
+ "grad_norm": 0.7879475951194763,
+ "learning_rate": 5.168363134899845e-07,
+ "loss": 0.4366,
+ "step": 1536
+ },
+ {
+ "epoch": 4.804801670146138,
+ "grad_norm": 0.7754428386688232,
+ "learning_rate": 5.142030704279377e-07,
+ "loss": 0.4365,
+ "step": 1537
+ },
+ {
+ "epoch": 4.807933194154488,
+ "grad_norm": 0.758696436882019,
+ "learning_rate": 5.11575783437441e-07,
+ "loss": 0.4442,
+ "step": 1538
+ },
+ {
+ "epoch": 4.811064718162839,
+ "grad_norm": 0.7870402336120605,
+ "learning_rate": 5.089544603986085e-07,
+ "loss": 0.4298,
+ "step": 1539
+ },
+ {
+ "epoch": 4.81419624217119,
+ "grad_norm": 0.7607670426368713,
+ "learning_rate": 5.063391091736691e-07,
+ "loss": 0.435,
+ "step": 1540
+ },
+ {
+ "epoch": 4.817327766179541,
+ "grad_norm": 0.8175606727600098,
+ "learning_rate": 5.037297376069388e-07,
+ "loss": 0.4325,
+ "step": 1541
+ },
+ {
+ "epoch": 4.820459290187891,
+ "grad_norm": 0.9083423018455505,
+ "learning_rate": 5.011263535248001e-07,
+ "loss": 0.3847,
+ "step": 1542
+ },
+ {
+ "epoch": 4.823590814196242,
+ "grad_norm": 0.7435231804847717,
+ "learning_rate": 4.985289647356731e-07,
+ "loss": 0.4818,
+ "step": 1543
+ },
+ {
+ "epoch": 4.826722338204593,
+ "grad_norm": 1.5190348625183105,
+ "learning_rate": 4.9593757903e-07,
+ "loss": 0.4068,
+ "step": 1544
+ },
+ {
+ "epoch": 4.829853862212944,
+ "grad_norm": 0.9122394919395447,
+ "learning_rate": 4.933522041802166e-07,
+ "loss": 0.4701,
+ "step": 1545
+ },
+ {
+ "epoch": 4.832985386221294,
+ "grad_norm": 0.9334122538566589,
+ "learning_rate": 4.9077284794073e-07,
+ "loss": 0.4178,
+ "step": 1546
+ },
+ {
+ "epoch": 4.836116910229645,
+ "grad_norm": 0.8561045527458191,
+ "learning_rate": 4.881995180478949e-07,
+ "loss": 0.4373,
+ "step": 1547
+ },
+ {
+ "epoch": 4.839248434237996,
+ "grad_norm": 0.8061811923980713,
+ "learning_rate": 4.85632222219991e-07,
+ "loss": 0.4414,
+ "step": 1548
+ },
+ {
+ "epoch": 4.8423799582463465,
+ "grad_norm": 0.7866719961166382,
+ "learning_rate": 4.830709681572008e-07,
+ "loss": 0.438,
+ "step": 1549
+ },
+ {
+ "epoch": 4.845511482254698,
+ "grad_norm": 0.8633884787559509,
+ "learning_rate": 4.805157635415852e-07,
+ "loss": 0.4639,
+ "step": 1550
+ },
+ {
+ "epoch": 4.848643006263048,
+ "grad_norm": 1.125654935836792,
+ "learning_rate": 4.779666160370596e-07,
+ "loss": 0.4364,
+ "step": 1551
+ },
+ {
+ "epoch": 4.851774530271399,
+ "grad_norm": 0.8371828198432922,
+ "learning_rate": 4.7542353328937473e-07,
+ "loss": 0.4436,
+ "step": 1552
+ },
+ {
+ "epoch": 4.854906054279749,
+ "grad_norm": 0.8166123628616333,
+ "learning_rate": 4.7288652292608736e-07,
+ "loss": 0.4382,
+ "step": 1553
+ },
+ {
+ "epoch": 4.8580375782881005,
+ "grad_norm": 0.8664330244064331,
+ "learning_rate": 4.7035559255654393e-07,
+ "loss": 0.4314,
+ "step": 1554
+ },
+ {
+ "epoch": 4.861169102296451,
+ "grad_norm": 0.8051781058311462,
+ "learning_rate": 4.6783074977185485e-07,
+ "loss": 0.4522,
+ "step": 1555
+ },
+ {
+ "epoch": 4.864300626304802,
+ "grad_norm": 0.7385572791099548,
+ "learning_rate": 4.653120021448701e-07,
+ "loss": 0.4254,
+ "step": 1556
+ },
+ {
+ "epoch": 4.867432150313152,
+ "grad_norm": 0.7310816645622253,
+ "learning_rate": 4.6279935723016033e-07,
+ "loss": 0.4277,
+ "step": 1557
+ },
+ {
+ "epoch": 4.870563674321503,
+ "grad_norm": 0.7682080864906311,
+ "learning_rate": 4.602928225639899e-07,
+ "loss": 0.422,
+ "step": 1558
+ },
+ {
+ "epoch": 4.8736951983298535,
+ "grad_norm": 0.8093141317367554,
+ "learning_rate": 4.577924056642985e-07,
+ "loss": 0.4376,
+ "step": 1559
+ },
+ {
+ "epoch": 4.876826722338205,
+ "grad_norm": 0.8006629943847656,
+ "learning_rate": 4.5529811403067503e-07,
+ "loss": 0.4483,
+ "step": 1560
+ },
+ {
+ "epoch": 4.879958246346555,
+ "grad_norm": 0.7647503614425659,
+ "learning_rate": 4.528099551443377e-07,
+ "loss": 0.418,
+ "step": 1561
+ },
+ {
+ "epoch": 4.883089770354906,
+ "grad_norm": 0.8467709422111511,
+ "learning_rate": 4.503279364681104e-07,
+ "loss": 0.4272,
+ "step": 1562
+ },
+ {
+ "epoch": 4.886221294363256,
+ "grad_norm": 0.8667131662368774,
+ "learning_rate": 4.4785206544640055e-07,
+ "loss": 0.4347,
+ "step": 1563
+ },
+ {
+ "epoch": 4.8893528183716075,
+ "grad_norm": 0.8670737147331238,
+ "learning_rate": 4.453823495051748e-07,
+ "loss": 0.4313,
+ "step": 1564
+ },
+ {
+ "epoch": 4.892484342379959,
+ "grad_norm": 0.8131253719329834,
+ "learning_rate": 4.4291879605194157e-07,
+ "loss": 0.4335,
+ "step": 1565
+ },
+ {
+ "epoch": 4.895615866388309,
+ "grad_norm": 0.9633654356002808,
+ "learning_rate": 4.404614124757242e-07,
+ "loss": 0.4637,
+ "step": 1566
+ },
+ {
+ "epoch": 4.89874739039666,
+ "grad_norm": 0.9058681726455688,
+ "learning_rate": 4.3801020614704174e-07,
+ "loss": 0.4503,
+ "step": 1567
+ },
+ {
+ "epoch": 4.90187891440501,
+ "grad_norm": 0.7923601865768433,
+ "learning_rate": 4.3556518441788405e-07,
+ "loss": 0.4526,
+ "step": 1568
+ },
+ {
+ "epoch": 4.9050104384133615,
+ "grad_norm": 0.7733371257781982,
+ "learning_rate": 4.331263546216913e-07,
+ "loss": 0.437,
+ "step": 1569
+ },
+ {
+ "epoch": 4.908141962421712,
+ "grad_norm": 0.7897522449493408,
+ "learning_rate": 4.306937240733336e-07,
+ "loss": 0.4213,
+ "step": 1570
+ },
+ {
+ "epoch": 4.911273486430063,
+ "grad_norm": 0.7857375741004944,
+ "learning_rate": 4.282673000690865e-07,
+ "loss": 0.4576,
+ "step": 1571
+ },
+ {
+ "epoch": 4.914405010438413,
+ "grad_norm": 0.7968363761901855,
+ "learning_rate": 4.258470898866102e-07,
+ "loss": 0.4383,
+ "step": 1572
+ },
+ {
+ "epoch": 4.917536534446764,
+ "grad_norm": 0.762876033782959,
+ "learning_rate": 4.2343310078492626e-07,
+ "loss": 0.4129,
+ "step": 1573
+ },
+ {
+ "epoch": 4.920668058455115,
+ "grad_norm": 0.9070468544960022,
+ "learning_rate": 4.21025340004399e-07,
+ "loss": 0.3994,
+ "step": 1574
+ },
+ {
+ "epoch": 4.923799582463466,
+ "grad_norm": 0.8299228549003601,
+ "learning_rate": 4.186238147667113e-07,
+ "loss": 0.4461,
+ "step": 1575
+ },
+ {
+ "epoch": 4.926931106471816,
+ "grad_norm": 0.8604512214660645,
+ "learning_rate": 4.162285322748441e-07,
+ "loss": 0.4447,
+ "step": 1576
+ },
+ {
+ "epoch": 4.930062630480167,
+ "grad_norm": 0.8441461324691772,
+ "learning_rate": 4.1383949971305207e-07,
+ "loss": 0.4547,
+ "step": 1577
+ },
+ {
+ "epoch": 4.933194154488517,
+ "grad_norm": 0.7836902737617493,
+ "learning_rate": 4.114567242468473e-07,
+ "loss": 0.4311,
+ "step": 1578
+ },
+ {
+ "epoch": 4.9363256784968685,
+ "grad_norm": 0.7957454919815063,
+ "learning_rate": 4.0908021302297205e-07,
+ "loss": 0.4074,
+ "step": 1579
+ },
+ {
+ "epoch": 4.939457202505219,
+ "grad_norm": 0.772109866142273,
+ "learning_rate": 4.0670997316938216e-07,
+ "loss": 0.4696,
+ "step": 1580
+ },
+ {
+ "epoch": 4.94258872651357,
+ "grad_norm": 1.3906599283218384,
+ "learning_rate": 4.043460117952214e-07,
+ "loss": 0.4379,
+ "step": 1581
+ },
+ {
+ "epoch": 4.945720250521921,
+ "grad_norm": 0.9287625551223755,
+ "learning_rate": 4.0198833599080415e-07,
+ "loss": 0.4576,
+ "step": 1582
+ },
+ {
+ "epoch": 4.948851774530271,
+ "grad_norm": 0.8610300421714783,
+ "learning_rate": 3.99636952827592e-07,
+ "loss": 0.4158,
+ "step": 1583
+ },
+ {
+ "epoch": 4.9519832985386225,
+ "grad_norm": 0.7845875024795532,
+ "learning_rate": 3.972918693581712e-07,
+ "loss": 0.4428,
+ "step": 1584
+ },
+ {
+ "epoch": 4.955114822546973,
+ "grad_norm": 0.8758688569068909,
+ "learning_rate": 3.949530926162354e-07,
+ "loss": 0.4113,
+ "step": 1585
+ },
+ {
+ "epoch": 4.958246346555324,
+ "grad_norm": 0.797717809677124,
+ "learning_rate": 3.926206296165599e-07,
+ "loss": 0.4181,
+ "step": 1586
+ },
+ {
+ "epoch": 4.961377870563674,
+ "grad_norm": 0.7839358448982239,
+ "learning_rate": 3.9029448735498495e-07,
+ "loss": 0.4218,
+ "step": 1587
+ },
+ {
+ "epoch": 4.964509394572025,
+ "grad_norm": 0.8518044352531433,
+ "learning_rate": 3.8797467280839216e-07,
+ "loss": 0.4425,
+ "step": 1588
+ },
+ {
+ "epoch": 4.967640918580376,
+ "grad_norm": 0.7976752519607544,
+ "learning_rate": 3.856611929346837e-07,
+ "loss": 0.4736,
+ "step": 1589
+ },
+ {
+ "epoch": 4.970772442588727,
+ "grad_norm": 0.9000734090805054,
+ "learning_rate": 3.8335405467276184e-07,
+ "loss": 0.4282,
+ "step": 1590
+ },
+ {
+ "epoch": 4.973903966597077,
+ "grad_norm": 0.8424035906791687,
+ "learning_rate": 3.810532649425089e-07,
+ "loss": 0.4443,
+ "step": 1591
+ },
+ {
+ "epoch": 4.977035490605428,
+ "grad_norm": 0.8014306426048279,
+ "learning_rate": 3.7875883064476564e-07,
+ "loss": 0.4617,
+ "step": 1592
+ },
+ {
+ "epoch": 4.980167014613778,
+ "grad_norm": 0.7423905730247498,
+ "learning_rate": 3.76470758661312e-07,
+ "loss": 0.468,
+ "step": 1593
+ },
+ {
+ "epoch": 4.98329853862213,
+ "grad_norm": 0.7726851105690002,
+ "learning_rate": 3.741890558548414e-07,
+ "loss": 0.3824,
+ "step": 1594
+ },
+ {
+ "epoch": 4.98643006263048,
+ "grad_norm": 0.7915946245193481,
+ "learning_rate": 3.719137290689479e-07,
+ "loss": 0.4091,
+ "step": 1595
+ },
+ {
+ "epoch": 4.989561586638831,
+ "grad_norm": 0.8000501394271851,
+ "learning_rate": 3.696447851280999e-07,
+ "loss": 0.4523,
+ "step": 1596
+ },
+ {
+ "epoch": 4.992693110647181,
+ "grad_norm": 0.8696115612983704,
+ "learning_rate": 3.6738223083762235e-07,
+ "loss": 0.4383,
+ "step": 1597
+ },
+ {
+ "epoch": 4.995824634655532,
+ "grad_norm": 0.781044065952301,
+ "learning_rate": 3.65126072983675e-07,
+ "loss": 0.4506,
+ "step": 1598
+ },
+ {
+ "epoch": 4.9989561586638835,
+ "grad_norm": 0.7926247715950012,
+ "learning_rate": 3.628763183332315e-07,
+ "loss": 0.4322,
+ "step": 1599
+ },
+ {
+ "epoch": 5.0,
+ "grad_norm": 0.7926247715950012,
+ "learning_rate": 3.606329736340622e-07,
+ "loss": 0.1457,
+ "step": 1600
+ },
+ {
+ "epoch": 5.003131524008351,
+ "grad_norm": 0.7916404008865356,
+ "learning_rate": 3.583960456147109e-07,
+ "loss": 0.431,
+ "step": 1601
+ },
+ {
+ "epoch": 5.006263048016701,
+ "grad_norm": 0.7794471383094788,
+ "learning_rate": 3.5616554098447496e-07,
+ "loss": 0.4054,
+ "step": 1602
+ },
+ {
+ "epoch": 5.009394572025053,
+ "grad_norm": 0.8006277680397034,
+ "learning_rate": 3.5394146643338735e-07,
+ "loss": 0.4363,
+ "step": 1603
+ },
+ {
+ "epoch": 5.012526096033403,
+ "grad_norm": 0.9790255427360535,
+ "learning_rate": 3.5172382863219277e-07,
+ "loss": 0.4169,
+ "step": 1604
+ },
+ {
+ "epoch": 5.015657620041754,
+ "grad_norm": 1.0182846784591675,
+ "learning_rate": 3.4951263423233265e-07,
+ "loss": 0.4704,
+ "step": 1605
+ },
+ {
+ "epoch": 5.018789144050104,
+ "grad_norm": 0.7919366359710693,
+ "learning_rate": 3.47307889865921e-07,
+ "loss": 0.4567,
+ "step": 1606
+ },
+ {
+ "epoch": 5.021920668058455,
+ "grad_norm": 0.7617084383964539,
+ "learning_rate": 3.4510960214572565e-07,
+ "loss": 0.3865,
+ "step": 1607
+ },
+ {
+ "epoch": 5.025052192066806,
+ "grad_norm": 0.7226077318191528,
+ "learning_rate": 3.429177776651502e-07,
+ "loss": 0.3847,
+ "step": 1608
+ },
+ {
+ "epoch": 5.028183716075157,
+ "grad_norm": 0.9526088833808899,
+ "learning_rate": 3.407324229982112e-07,
+ "loss": 0.4116,
+ "step": 1609
+ },
+ {
+ "epoch": 5.031315240083507,
+ "grad_norm": 0.891015350818634,
+ "learning_rate": 3.385535446995214e-07,
+ "loss": 0.4026,
+ "step": 1610
+ },
+ {
+ "epoch": 5.034446764091858,
+ "grad_norm": 0.8143556714057922,
+ "learning_rate": 3.3638114930426735e-07,
+ "loss": 0.3868,
+ "step": 1611
+ },
+ {
+ "epoch": 5.0375782881002085,
+ "grad_norm": 0.7985808849334717,
+ "learning_rate": 3.3421524332819285e-07,
+ "loss": 0.4176,
+ "step": 1612
+ },
+ {
+ "epoch": 5.04070981210856,
+ "grad_norm": 0.7742509245872498,
+ "learning_rate": 3.320558332675772e-07,
+ "loss": 0.3822,
+ "step": 1613
+ },
+ {
+ "epoch": 5.04384133611691,
+ "grad_norm": 0.8089630603790283,
+ "learning_rate": 3.2990292559921533e-07,
+ "loss": 0.3907,
+ "step": 1614
+ },
+ {
+ "epoch": 5.046972860125261,
+ "grad_norm": 0.7523915767669678,
+ "learning_rate": 3.2775652678039974e-07,
+ "loss": 0.4232,
+ "step": 1615
+ },
+ {
+ "epoch": 5.050104384133611,
+ "grad_norm": 0.852554202079773,
+ "learning_rate": 3.2561664324890134e-07,
+ "loss": 0.4127,
+ "step": 1616
+ },
+ {
+ "epoch": 5.053235908141962,
+ "grad_norm": 0.8473255038261414,
+ "learning_rate": 3.2348328142294936e-07,
+ "loss": 0.4323,
+ "step": 1617
+ },
+ {
+ "epoch": 5.056367432150314,
+ "grad_norm": 0.8101459741592407,
+ "learning_rate": 3.213564477012121e-07,
+ "loss": 0.4208,
+ "step": 1618
+ },
+ {
+ "epoch": 5.059498956158664,
+ "grad_norm": 0.7732040882110596,
+ "learning_rate": 3.192361484627776e-07,
+ "loss": 0.4312,
+ "step": 1619
+ },
+ {
+ "epoch": 5.062630480167015,
+ "grad_norm": 0.7684810757637024,
+ "learning_rate": 3.171223900671344e-07,
+ "loss": 0.4223,
+ "step": 1620
+ },
+ {
+ "epoch": 5.065762004175365,
+ "grad_norm": 1.0066255331039429,
+ "learning_rate": 3.1501517885415364e-07,
+ "loss": 0.4284,
+ "step": 1621
+ },
+ {
+ "epoch": 5.068893528183716,
+ "grad_norm": 0.7965579032897949,
+ "learning_rate": 3.129145211440701e-07,
+ "loss": 0.4041,
+ "step": 1622
+ },
+ {
+ "epoch": 5.072025052192067,
+ "grad_norm": 0.7432489395141602,
+ "learning_rate": 3.1082042323746063e-07,
+ "loss": 0.3852,
+ "step": 1623
+ },
+ {
+ "epoch": 5.075156576200418,
+ "grad_norm": 0.894244909286499,
+ "learning_rate": 3.0873289141522714e-07,
+ "loss": 0.4479,
+ "step": 1624
+ },
+ {
+ "epoch": 5.078288100208768,
+ "grad_norm": 0.7385063767433167,
+ "learning_rate": 3.0665193193857887e-07,
+ "loss": 0.4048,
+ "step": 1625
+ },
+ {
+ "epoch": 5.081419624217119,
+ "grad_norm": 0.9110557436943054,
+ "learning_rate": 3.045775510490115e-07,
+ "loss": 0.4375,
+ "step": 1626
+ },
+ {
+ "epoch": 5.0845511482254695,
+ "grad_norm": 0.7882060408592224,
+ "learning_rate": 3.0250975496829067e-07,
+ "loss": 0.4421,
+ "step": 1627
+ },
+ {
+ "epoch": 5.087682672233821,
+ "grad_norm": 0.9643567800521851,
+ "learning_rate": 3.0044854989843027e-07,
+ "loss": 0.4161,
+ "step": 1628
+ },
+ {
+ "epoch": 5.090814196242171,
+ "grad_norm": 0.8286798000335693,
+ "learning_rate": 2.983939420216753e-07,
+ "loss": 0.4013,
+ "step": 1629
+ },
+ {
+ "epoch": 5.093945720250522,
+ "grad_norm": 0.7984923124313354,
+ "learning_rate": 2.9634593750048515e-07,
+ "loss": 0.3759,
+ "step": 1630
+ },
+ {
+ "epoch": 5.097077244258872,
+ "grad_norm": 0.7450951337814331,
+ "learning_rate": 2.9430454247751344e-07,
+ "loss": 0.4233,
+ "step": 1631
+ },
+ {
+ "epoch": 5.1002087682672235,
+ "grad_norm": 0.7780002355575562,
+ "learning_rate": 2.92269763075588e-07,
+ "loss": 0.4299,
+ "step": 1632
+ },
+ {
+ "epoch": 5.103340292275574,
+ "grad_norm": 0.7606906890869141,
+ "learning_rate": 2.9024160539769614e-07,
+ "loss": 0.42,
+ "step": 1633
+ },
+ {
+ "epoch": 5.106471816283925,
+ "grad_norm": 0.8596094250679016,
+ "learning_rate": 2.882200755269626e-07,
+ "loss": 0.3855,
+ "step": 1634
+ },
+ {
+ "epoch": 5.109603340292275,
+ "grad_norm": 0.7705757021903992,
+ "learning_rate": 2.8620517952663464e-07,
+ "loss": 0.4214,
+ "step": 1635
+ },
+ {
+ "epoch": 5.112734864300626,
+ "grad_norm": 0.9210963845252991,
+ "learning_rate": 2.8419692344006186e-07,
+ "loss": 0.4137,
+ "step": 1636
+ },
+ {
+ "epoch": 5.115866388308977,
+ "grad_norm": 0.8594221472740173,
+ "learning_rate": 2.821953132906777e-07,
+ "loss": 0.4409,
+ "step": 1637
+ },
+ {
+ "epoch": 5.118997912317328,
+ "grad_norm": 0.8145505785942078,
+ "learning_rate": 2.8020035508198374e-07,
+ "loss": 0.4206,
+ "step": 1638
+ },
+ {
+ "epoch": 5.122129436325679,
+ "grad_norm": 0.8511978983879089,
+ "learning_rate": 2.782120547975281e-07,
+ "loss": 0.4069,
+ "step": 1639
+ },
+ {
+ "epoch": 5.125260960334029,
+ "grad_norm": 0.7916121482849121,
+ "learning_rate": 2.762304184008918e-07,
+ "loss": 0.4053,
+ "step": 1640
+ },
+ {
+ "epoch": 5.12839248434238,
+ "grad_norm": 1.0378046035766602,
+ "learning_rate": 2.7425545183566686e-07,
+ "loss": 0.4058,
+ "step": 1641
+ },
+ {
+ "epoch": 5.1315240083507305,
+ "grad_norm": 0.7503740787506104,
+ "learning_rate": 2.722871610254413e-07,
+ "loss": 0.4146,
+ "step": 1642
+ },
+ {
+ "epoch": 5.134655532359082,
+ "grad_norm": 0.8860176801681519,
+ "learning_rate": 2.703255518737802e-07,
+ "loss": 0.4192,
+ "step": 1643
+ },
+ {
+ "epoch": 5.137787056367432,
+ "grad_norm": 0.7693402171134949,
+ "learning_rate": 2.683706302642075e-07,
+ "loss": 0.4222,
+ "step": 1644
+ },
+ {
+ "epoch": 5.140918580375783,
+ "grad_norm": 0.8174161911010742,
+ "learning_rate": 2.6642240206018884e-07,
+ "loss": 0.4267,
+ "step": 1645
+ },
+ {
+ "epoch": 5.144050104384133,
+ "grad_norm": 0.7899668216705322,
+ "learning_rate": 2.644808731051152e-07,
+ "loss": 0.3892,
+ "step": 1646
+ },
+ {
+ "epoch": 5.1471816283924845,
+ "grad_norm": 0.8949608206748962,
+ "learning_rate": 2.625460492222831e-07,
+ "loss": 0.3932,
+ "step": 1647
+ },
+ {
+ "epoch": 5.150313152400835,
+ "grad_norm": 0.841679036617279,
+ "learning_rate": 2.6061793621488e-07,
+ "loss": 0.426,
+ "step": 1648
+ },
+ {
+ "epoch": 5.153444676409186,
+ "grad_norm": 0.8701457977294922,
+ "learning_rate": 2.5869653986596297e-07,
+ "loss": 0.4113,
+ "step": 1649
+ },
+ {
+ "epoch": 5.156576200417536,
+ "grad_norm": 0.8086332678794861,
+ "learning_rate": 2.567818659384441e-07,
+ "loss": 0.4324,
+ "step": 1650
+ },
+ {
+ "epoch": 5.159707724425887,
+ "grad_norm": 0.8881601095199585,
+ "learning_rate": 2.548739201750741e-07,
+ "loss": 0.4159,
+ "step": 1651
+ },
+ {
+ "epoch": 5.162839248434238,
+ "grad_norm": 0.7634018659591675,
+ "learning_rate": 2.529727082984229e-07,
+ "loss": 0.3897,
+ "step": 1652
+ },
+ {
+ "epoch": 5.165970772442589,
+ "grad_norm": 0.8098715543746948,
+ "learning_rate": 2.510782360108624e-07,
+ "loss": 0.42,
+ "step": 1653
+ },
+ {
+ "epoch": 5.16910229645094,
+ "grad_norm": 0.9650408029556274,
+ "learning_rate": 2.491905089945515e-07,
+ "loss": 0.4377,
+ "step": 1654
+ },
+ {
+ "epoch": 5.17223382045929,
+ "grad_norm": 0.8331677317619324,
+ "learning_rate": 2.473095329114164e-07,
+ "loss": 0.4066,
+ "step": 1655
+ },
+ {
+ "epoch": 5.175365344467641,
+ "grad_norm": 0.8330480456352234,
+ "learning_rate": 2.454353134031362e-07,
+ "loss": 0.4159,
+ "step": 1656
+ },
+ {
+ "epoch": 5.1784968684759916,
+ "grad_norm": 0.7608641386032104,
+ "learning_rate": 2.435678560911245e-07,
+ "loss": 0.4265,
+ "step": 1657
+ },
+ {
+ "epoch": 5.181628392484343,
+ "grad_norm": 0.8353298306465149,
+ "learning_rate": 2.417071665765122e-07,
+ "loss": 0.4457,
+ "step": 1658
+ },
+ {
+ "epoch": 5.184759916492693,
+ "grad_norm": 0.8479558825492859,
+ "learning_rate": 2.398532504401324e-07,
+ "loss": 0.4121,
+ "step": 1659
+ },
+ {
+ "epoch": 5.187891440501044,
+ "grad_norm": 0.914951503276825,
+ "learning_rate": 2.3800611324250101e-07,
+ "loss": 0.3989,
+ "step": 1660
+ },
+ {
+ "epoch": 5.191022964509394,
+ "grad_norm": 0.7943673133850098,
+ "learning_rate": 2.3616576052380324e-07,
+ "loss": 0.4235,
+ "step": 1661
+ },
+ {
+ "epoch": 5.1941544885177455,
+ "grad_norm": 0.8725146055221558,
+ "learning_rate": 2.34332197803874e-07,
+ "loss": 0.4337,
+ "step": 1662
+ },
+ {
+ "epoch": 5.197286012526096,
+ "grad_norm": 0.7821521162986755,
+ "learning_rate": 2.325054305821836e-07,
+ "loss": 0.4089,
+ "step": 1663
+ },
+ {
+ "epoch": 5.200417536534447,
+ "grad_norm": 0.8576532602310181,
+ "learning_rate": 2.3068546433782063e-07,
+ "loss": 0.4292,
+ "step": 1664
+ },
+ {
+ "epoch": 5.203549060542797,
+ "grad_norm": 0.9175107479095459,
+ "learning_rate": 2.2887230452947384e-07,
+ "loss": 0.4333,
+ "step": 1665
+ },
+ {
+ "epoch": 5.206680584551148,
+ "grad_norm": 0.8123425245285034,
+ "learning_rate": 2.270659565954189e-07,
+ "loss": 0.448,
+ "step": 1666
+ },
+ {
+ "epoch": 5.209812108559499,
+ "grad_norm": 0.7988130450248718,
+ "learning_rate": 2.2526642595349867e-07,
+ "loss": 0.3868,
+ "step": 1667
+ },
+ {
+ "epoch": 5.21294363256785,
+ "grad_norm": 0.7928944230079651,
+ "learning_rate": 2.234737180011104e-07,
+ "loss": 0.4048,
+ "step": 1668
+ },
+ {
+ "epoch": 5.2160751565762,
+ "grad_norm": 0.8115013837814331,
+ "learning_rate": 2.2168783811518712e-07,
+ "loss": 0.3962,
+ "step": 1669
+ },
+ {
+ "epoch": 5.219206680584551,
+ "grad_norm": 0.8003987073898315,
+ "learning_rate": 2.1990879165218164e-07,
+ "loss": 0.4315,
+ "step": 1670
+ },
+ {
+ "epoch": 5.222338204592901,
+ "grad_norm": 0.8416216373443604,
+ "learning_rate": 2.1813658394805093e-07,
+ "loss": 0.4344,
+ "step": 1671
+ },
+ {
+ "epoch": 5.225469728601253,
+ "grad_norm": 0.8418934941291809,
+ "learning_rate": 2.1637122031824137e-07,
+ "loss": 0.4099,
+ "step": 1672
+ },
+ {
+ "epoch": 5.228601252609604,
+ "grad_norm": 0.907768726348877,
+ "learning_rate": 2.146127060576711e-07,
+ "loss": 0.4316,
+ "step": 1673
+ },
+ {
+ "epoch": 5.231732776617954,
+ "grad_norm": 0.8959410786628723,
+ "learning_rate": 2.1286104644071548e-07,
+ "loss": 0.4037,
+ "step": 1674
+ },
+ {
+ "epoch": 5.234864300626305,
+ "grad_norm": 0.9213347434997559,
+ "learning_rate": 2.1111624672118774e-07,
+ "loss": 0.4207,
+ "step": 1675
+ },
+ {
+ "epoch": 5.237995824634655,
+ "grad_norm": 0.8055871725082397,
+ "learning_rate": 2.0937831213232894e-07,
+ "loss": 0.4432,
+ "step": 1676
+ },
+ {
+ "epoch": 5.241127348643007,
+ "grad_norm": 1.0330266952514648,
+ "learning_rate": 2.076472478867883e-07,
+ "loss": 0.4398,
+ "step": 1677
+ },
+ {
+ "epoch": 5.244258872651357,
+ "grad_norm": 0.8400726914405823,
+ "learning_rate": 2.0592305917660898e-07,
+ "loss": 0.4617,
+ "step": 1678
+ },
+ {
+ "epoch": 5.247390396659708,
+ "grad_norm": 0.8515186905860901,
+ "learning_rate": 2.0420575117321124e-07,
+ "loss": 0.4366,
+ "step": 1679
+ },
+ {
+ "epoch": 5.250521920668058,
+ "grad_norm": 0.7332753539085388,
+ "learning_rate": 2.0249532902737734e-07,
+ "loss": 0.4142,
+ "step": 1680
+ },
+ {
+ "epoch": 5.253653444676409,
+ "grad_norm": 0.7820742130279541,
+ "learning_rate": 2.0079179786923862e-07,
+ "loss": 0.4278,
+ "step": 1681
+ },
+ {
+ "epoch": 5.25678496868476,
+ "grad_norm": 0.7951577305793762,
+ "learning_rate": 1.9909516280825687e-07,
+ "loss": 0.4393,
+ "step": 1682
+ },
+ {
+ "epoch": 5.259916492693111,
+ "grad_norm": 0.9419519305229187,
+ "learning_rate": 1.9740542893320892e-07,
+ "loss": 0.43,
+ "step": 1683
+ },
+ {
+ "epoch": 5.263048016701461,
+ "grad_norm": 0.837685227394104,
+ "learning_rate": 1.9572260131217547e-07,
+ "loss": 0.3954,
+ "step": 1684
+ },
+ {
+ "epoch": 5.266179540709812,
+ "grad_norm": 2.381927013397217,
+ "learning_rate": 1.940466849925199e-07,
+ "loss": 0.4168,
+ "step": 1685
+ },
+ {
+ "epoch": 5.2693110647181625,
+ "grad_norm": 0.8350804448127747,
+ "learning_rate": 1.923776850008785e-07,
+ "loss": 0.4456,
+ "step": 1686
+ },
+ {
+ "epoch": 5.272442588726514,
+ "grad_norm": 0.835779070854187,
+ "learning_rate": 1.9071560634314313e-07,
+ "loss": 0.4207,
+ "step": 1687
+ },
+ {
+ "epoch": 5.275574112734864,
+ "grad_norm": 0.794666588306427,
+ "learning_rate": 1.8906045400444412e-07,
+ "loss": 0.4318,
+ "step": 1688
+ },
+ {
+ "epoch": 5.278705636743215,
+ "grad_norm": 0.780330240726471,
+ "learning_rate": 1.874122329491404e-07,
+ "loss": 0.4024,
+ "step": 1689
+ },
+ {
+ "epoch": 5.281837160751566,
+ "grad_norm": 0.8204572796821594,
+ "learning_rate": 1.8577094812079894e-07,
+ "loss": 0.431,
+ "step": 1690
+ },
+ {
+ "epoch": 5.284968684759916,
+ "grad_norm": 0.8642128705978394,
+ "learning_rate": 1.8413660444218468e-07,
+ "loss": 0.4083,
+ "step": 1691
+ },
+ {
+ "epoch": 5.288100208768268,
+ "grad_norm": 0.818459689617157,
+ "learning_rate": 1.825092068152423e-07,
+ "loss": 0.4101,
+ "step": 1692
+ },
+ {
+ "epoch": 5.291231732776618,
+ "grad_norm": 0.8250467777252197,
+ "learning_rate": 1.8088876012108376e-07,
+ "loss": 0.4261,
+ "step": 1693
+ },
+ {
+ "epoch": 5.294363256784969,
+ "grad_norm": 1.001344919204712,
+ "learning_rate": 1.7927526921997312e-07,
+ "loss": 0.4107,
+ "step": 1694
+ },
+ {
+ "epoch": 5.297494780793319,
+ "grad_norm": 0.8144769668579102,
+ "learning_rate": 1.776687389513107e-07,
+ "loss": 0.42,
+ "step": 1695
+ },
+ {
+ "epoch": 5.30062630480167,
+ "grad_norm": 0.8407689332962036,
+ "learning_rate": 1.7606917413361956e-07,
+ "loss": 0.4483,
+ "step": 1696
+ },
+ {
+ "epoch": 5.303757828810021,
+ "grad_norm": 0.8678401112556458,
+ "learning_rate": 1.7447657956453228e-07,
+ "loss": 0.4815,
+ "step": 1697
+ },
+ {
+ "epoch": 5.306889352818372,
+ "grad_norm": 0.7716734409332275,
+ "learning_rate": 1.7289096002077398e-07,
+ "loss": 0.4267,
+ "step": 1698
+ },
+ {
+ "epoch": 5.310020876826722,
+ "grad_norm": 0.9155471920967102,
+ "learning_rate": 1.713123202581507e-07,
+ "loss": 0.4544,
+ "step": 1699
+ },
+ {
+ "epoch": 5.313152400835073,
+ "grad_norm": 0.7895417213439941,
+ "learning_rate": 1.6974066501153268e-07,
+ "loss": 0.4166,
+ "step": 1700
+ },
+ {
+ "epoch": 5.3162839248434235,
+ "grad_norm": 0.8002907037734985,
+ "learning_rate": 1.681759989948409e-07,
+ "loss": 0.43,
+ "step": 1701
+ },
+ {
+ "epoch": 5.319415448851775,
+ "grad_norm": 0.8847978115081787,
+ "learning_rate": 1.6661832690103435e-07,
+ "loss": 0.4231,
+ "step": 1702
+ },
+ {
+ "epoch": 5.322546972860125,
+ "grad_norm": 0.8419442176818848,
+ "learning_rate": 1.650676534020948e-07,
+ "loss": 0.4103,
+ "step": 1703
+ },
+ {
+ "epoch": 5.325678496868476,
+ "grad_norm": 0.8390375375747681,
+ "learning_rate": 1.6352398314901236e-07,
+ "loss": 0.424,
+ "step": 1704
+ },
+ {
+ "epoch": 5.328810020876826,
+ "grad_norm": 0.8126906156539917,
+ "learning_rate": 1.6198732077177203e-07,
+ "loss": 0.397,
+ "step": 1705
+ },
+ {
+ "epoch": 5.3319415448851775,
+ "grad_norm": 0.7679522633552551,
+ "learning_rate": 1.6045767087934012e-07,
+ "loss": 0.4669,
+ "step": 1706
+ },
+ {
+ "epoch": 5.335073068893529,
+ "grad_norm": 0.8407344818115234,
+ "learning_rate": 1.589350380596505e-07,
+ "loss": 0.4046,
+ "step": 1707
+ },
+ {
+ "epoch": 5.338204592901879,
+ "grad_norm": 0.7537122368812561,
+ "learning_rate": 1.5741942687959006e-07,
+ "loss": 0.4431,
+ "step": 1708
+ },
+ {
+ "epoch": 5.34133611691023,
+ "grad_norm": 0.8342767953872681,
+ "learning_rate": 1.559108418849853e-07,
+ "loss": 0.4134,
+ "step": 1709
+ },
+ {
+ "epoch": 5.34446764091858,
+ "grad_norm": 0.8210666179656982,
+ "learning_rate": 1.5440928760058842e-07,
+ "loss": 0.4029,
+ "step": 1710
+ },
+ {
+ "epoch": 5.347599164926931,
+ "grad_norm": 0.7971834540367126,
+ "learning_rate": 1.5291476853006527e-07,
+ "loss": 0.42,
+ "step": 1711
+ },
+ {
+ "epoch": 5.350730688935282,
+ "grad_norm": 0.7891254425048828,
+ "learning_rate": 1.5142728915598065e-07,
+ "loss": 0.4341,
+ "step": 1712
+ },
+ {
+ "epoch": 5.353862212943633,
+ "grad_norm": 0.8925033211708069,
+ "learning_rate": 1.499468539397836e-07,
+ "loss": 0.4396,
+ "step": 1713
+ },
+ {
+ "epoch": 5.356993736951983,
+ "grad_norm": 0.8453183174133301,
+ "learning_rate": 1.4847346732179717e-07,
+ "loss": 0.4176,
+ "step": 1714
+ },
+ {
+ "epoch": 5.360125260960334,
+ "grad_norm": 0.8093660473823547,
+ "learning_rate": 1.4700713372120172e-07,
+ "loss": 0.3894,
+ "step": 1715
+ },
+ {
+ "epoch": 5.3632567849686845,
+ "grad_norm": 0.7943024635314941,
+ "learning_rate": 1.455478575360239e-07,
+ "loss": 0.3693,
+ "step": 1716
+ },
+ {
+ "epoch": 5.366388308977036,
+ "grad_norm": 0.8238469958305359,
+ "learning_rate": 1.4409564314312313e-07,
+ "loss": 0.4362,
+ "step": 1717
+ },
+ {
+ "epoch": 5.369519832985386,
+ "grad_norm": 0.943553626537323,
+ "learning_rate": 1.4265049489817774e-07,
+ "loss": 0.4461,
+ "step": 1718
+ },
+ {
+ "epoch": 5.372651356993737,
+ "grad_norm": 1.0485329627990723,
+ "learning_rate": 1.412124171356727e-07,
+ "loss": 0.4278,
+ "step": 1719
+ },
+ {
+ "epoch": 5.375782881002087,
+ "grad_norm": 0.807830810546875,
+ "learning_rate": 1.3978141416888497e-07,
+ "loss": 0.406,
+ "step": 1720
+ },
+ {
+ "epoch": 5.3789144050104385,
+ "grad_norm": 0.8568108081817627,
+ "learning_rate": 1.3835749028987405e-07,
+ "loss": 0.449,
+ "step": 1721
+ },
+ {
+ "epoch": 5.382045929018789,
+ "grad_norm": 0.7883656024932861,
+ "learning_rate": 1.3694064976946448e-07,
+ "loss": 0.4199,
+ "step": 1722
+ },
+ {
+ "epoch": 5.38517745302714,
+ "grad_norm": 0.7353293299674988,
+ "learning_rate": 1.355308968572372e-07,
+ "loss": 0.4175,
+ "step": 1723
+ },
+ {
+ "epoch": 5.388308977035491,
+ "grad_norm": 0.7996500134468079,
+ "learning_rate": 1.3412823578151434e-07,
+ "loss": 0.4159,
+ "step": 1724
+ },
+ {
+ "epoch": 5.391440501043841,
+ "grad_norm": 0.7814679145812988,
+ "learning_rate": 1.327326707493473e-07,
+ "loss": 0.4024,
+ "step": 1725
+ },
+ {
+ "epoch": 5.3945720250521925,
+ "grad_norm": 0.7922082543373108,
+ "learning_rate": 1.3134420594650364e-07,
+ "loss": 0.4466,
+ "step": 1726
+ },
+ {
+ "epoch": 5.397703549060543,
+ "grad_norm": 1.122597098350525,
+ "learning_rate": 1.2996284553745575e-07,
+ "loss": 0.4318,
+ "step": 1727
+ },
+ {
+ "epoch": 5.400835073068894,
+ "grad_norm": 0.7850015163421631,
+ "learning_rate": 1.285885936653672e-07,
+ "loss": 0.4284,
+ "step": 1728
+ },
+ {
+ "epoch": 5.403966597077244,
+ "grad_norm": 0.8766868710517883,
+ "learning_rate": 1.2722145445208057e-07,
+ "loss": 0.4119,
+ "step": 1729
+ },
+ {
+ "epoch": 5.407098121085595,
+ "grad_norm": 0.8224661350250244,
+ "learning_rate": 1.2586143199810553e-07,
+ "loss": 0.438,
+ "step": 1730
+ },
+ {
+ "epoch": 5.4102296450939455,
+ "grad_norm": 0.9190678000450134,
+ "learning_rate": 1.245085303826049e-07,
+ "loss": 0.4196,
+ "step": 1731
+ },
+ {
+ "epoch": 5.413361169102297,
+ "grad_norm": 0.8007112145423889,
+ "learning_rate": 1.2316275366338586e-07,
+ "loss": 0.428,
+ "step": 1732
+ },
+ {
+ "epoch": 5.416492693110647,
+ "grad_norm": 0.8234992623329163,
+ "learning_rate": 1.2182410587688403e-07,
+ "loss": 0.4542,
+ "step": 1733
+ },
+ {
+ "epoch": 5.419624217118998,
+ "grad_norm": 0.8217283487319946,
+ "learning_rate": 1.2049259103815353e-07,
+ "loss": 0.4475,
+ "step": 1734
+ },
+ {
+ "epoch": 5.422755741127348,
+ "grad_norm": 0.8184598684310913,
+ "learning_rate": 1.1916821314085448e-07,
+ "loss": 0.4142,
+ "step": 1735
+ },
+ {
+ "epoch": 5.4258872651356995,
+ "grad_norm": 0.7703188061714172,
+ "learning_rate": 1.178509761572405e-07,
+ "loss": 0.4012,
+ "step": 1736
+ },
+ {
+ "epoch": 5.42901878914405,
+ "grad_norm": 0.7523705959320068,
+ "learning_rate": 1.1654088403814712e-07,
+ "loss": 0.3856,
+ "step": 1737
+ },
+ {
+ "epoch": 5.432150313152401,
+ "grad_norm": 0.8626607656478882,
+ "learning_rate": 1.1523794071298139e-07,
+ "loss": 0.3987,
+ "step": 1738
+ },
+ {
+ "epoch": 5.435281837160751,
+ "grad_norm": 0.7535544037818909,
+ "learning_rate": 1.1394215008970672e-07,
+ "loss": 0.4301,
+ "step": 1739
+ },
+ {
+ "epoch": 5.438413361169102,
+ "grad_norm": 0.7843238711357117,
+ "learning_rate": 1.1265351605483477e-07,
+ "loss": 0.3988,
+ "step": 1740
+ },
+ {
+ "epoch": 5.4415448851774535,
+ "grad_norm": 0.750119149684906,
+ "learning_rate": 1.1137204247341077e-07,
+ "loss": 0.394,
+ "step": 1741
+ },
+ {
+ "epoch": 5.444676409185804,
+ "grad_norm": 0.7726014852523804,
+ "learning_rate": 1.1009773318900518e-07,
+ "loss": 0.4411,
+ "step": 1742
+ },
+ {
+ "epoch": 5.447807933194154,
+ "grad_norm": 0.8338330984115601,
+ "learning_rate": 1.0883059202369816e-07,
+ "loss": 0.4115,
+ "step": 1743
+ },
+ {
+ "epoch": 5.450939457202505,
+ "grad_norm": 0.8153620958328247,
+ "learning_rate": 1.0757062277807206e-07,
+ "loss": 0.4479,
+ "step": 1744
+ },
+ {
+ "epoch": 5.454070981210856,
+ "grad_norm": 0.8071407079696655,
+ "learning_rate": 1.0631782923119782e-07,
+ "loss": 0.4279,
+ "step": 1745
+ },
+ {
+ "epoch": 5.457202505219207,
+ "grad_norm": 0.9648112654685974,
+ "learning_rate": 1.050722151406236e-07,
+ "loss": 0.4216,
+ "step": 1746
+ },
+ {
+ "epoch": 5.460334029227558,
+ "grad_norm": 0.8332387804985046,
+ "learning_rate": 1.038337842423634e-07,
+ "loss": 0.4021,
+ "step": 1747
+ },
+ {
+ "epoch": 5.463465553235908,
+ "grad_norm": 0.747900128364563,
+ "learning_rate": 1.0260254025088818e-07,
+ "loss": 0.4268,
+ "step": 1748
+ },
+ {
+ "epoch": 5.466597077244259,
+ "grad_norm": 0.7799843549728394,
+ "learning_rate": 1.013784868591114e-07,
+ "loss": 0.4241,
+ "step": 1749
+ },
+ {
+ "epoch": 5.469728601252609,
+ "grad_norm": 0.8218986988067627,
+ "learning_rate": 1.0016162773838045e-07,
+ "loss": 0.4348,
+ "step": 1750
+ },
+ {
+ "epoch": 5.4728601252609606,
+ "grad_norm": 0.8404920101165771,
+ "learning_rate": 9.895196653846417e-08,
+ "loss": 0.4649,
+ "step": 1751
+ },
+ {
+ "epoch": 5.475991649269311,
+ "grad_norm": 0.8962766528129578,
+ "learning_rate": 9.774950688754197e-08,
+ "loss": 0.4226,
+ "step": 1752
+ },
+ {
+ "epoch": 5.479123173277662,
+ "grad_norm": 0.7923446893692017,
+ "learning_rate": 9.655425239219446e-08,
+ "loss": 0.4602,
+ "step": 1753
+ },
+ {
+ "epoch": 5.482254697286012,
+ "grad_norm": 0.8076177835464478,
+ "learning_rate": 9.536620663739143e-08,
+ "loss": 0.4244,
+ "step": 1754
+ },
+ {
+ "epoch": 5.485386221294363,
+ "grad_norm": 0.8613052368164062,
+ "learning_rate": 9.418537318648057e-08,
+ "loss": 0.4146,
+ "step": 1755
+ },
+ {
+ "epoch": 5.488517745302714,
+ "grad_norm": 0.7825137376785278,
+ "learning_rate": 9.301175558117743e-08,
+ "loss": 0.4217,
+ "step": 1756
+ },
+ {
+ "epoch": 5.491649269311065,
+ "grad_norm": 0.9981064796447754,
+ "learning_rate": 9.184535734155509e-08,
+ "loss": 0.4339,
+ "step": 1757
+ },
+ {
+ "epoch": 5.494780793319415,
+ "grad_norm": 1.1833750009536743,
+ "learning_rate": 9.068618196603401e-08,
+ "loss": 0.4064,
+ "step": 1758
+ },
+ {
+ "epoch": 5.497912317327766,
+ "grad_norm": 0.7761838436126709,
+ "learning_rate": 8.953423293137004e-08,
+ "loss": 0.4289,
+ "step": 1759
+ },
+ {
+ "epoch": 5.5010438413361165,
+ "grad_norm": 0.8973391652107239,
+ "learning_rate": 8.838951369264492e-08,
+ "loss": 0.4289,
+ "step": 1760
+ },
+ {
+ "epoch": 5.504175365344468,
+ "grad_norm": 0.7715950608253479,
+ "learning_rate": 8.725202768325558e-08,
+ "loss": 0.4468,
+ "step": 1761
+ },
+ {
+ "epoch": 5.507306889352819,
+ "grad_norm": 0.7941539883613586,
+ "learning_rate": 8.612177831490459e-08,
+ "loss": 0.4296,
+ "step": 1762
+ },
+ {
+ "epoch": 5.510438413361169,
+ "grad_norm": 0.7699092030525208,
+ "learning_rate": 8.49987689775894e-08,
+ "loss": 0.4121,
+ "step": 1763
+ },
+ {
+ "epoch": 5.51356993736952,
+ "grad_norm": 1.026188850402832,
+ "learning_rate": 8.38830030395918e-08,
+ "loss": 0.4293,
+ "step": 1764
+ },
+ {
+ "epoch": 5.51670146137787,
+ "grad_norm": 0.7858873605728149,
+ "learning_rate": 8.277448384746839e-08,
+ "loss": 0.431,
+ "step": 1765
+ },
+ {
+ "epoch": 5.519832985386222,
+ "grad_norm": 0.7994810342788696,
+ "learning_rate": 8.167321472604017e-08,
+ "loss": 0.4263,
+ "step": 1766
+ },
+ {
+ "epoch": 5.522964509394572,
+ "grad_norm": 0.7905458807945251,
+ "learning_rate": 8.057919897838301e-08,
+ "loss": 0.4203,
+ "step": 1767
+ },
+ {
+ "epoch": 5.526096033402923,
+ "grad_norm": 0.7660766839981079,
+ "learning_rate": 7.949243988581767e-08,
+ "loss": 0.4495,
+ "step": 1768
+ },
+ {
+ "epoch": 5.529227557411273,
+ "grad_norm": 0.8373368382453918,
+ "learning_rate": 7.841294070789873e-08,
+ "loss": 0.4132,
+ "step": 1769
+ },
+ {
+ "epoch": 5.532359081419624,
+ "grad_norm": 0.8575682044029236,
+ "learning_rate": 7.734070468240735e-08,
+ "loss": 0.4139,
+ "step": 1770
+ },
+ {
+ "epoch": 5.535490605427975,
+ "grad_norm": 0.8791792392730713,
+ "learning_rate": 7.627573502533847e-08,
+ "loss": 0.4256,
+ "step": 1771
+ },
+ {
+ "epoch": 5.538622129436326,
+ "grad_norm": 0.7887570261955261,
+ "learning_rate": 7.521803493089453e-08,
+ "loss": 0.4227,
+ "step": 1772
+ },
+ {
+ "epoch": 5.541753653444676,
+ "grad_norm": 0.7709775567054749,
+ "learning_rate": 7.41676075714723e-08,
+ "loss": 0.4127,
+ "step": 1773
+ },
+ {
+ "epoch": 5.544885177453027,
+ "grad_norm": 0.9252105355262756,
+ "learning_rate": 7.312445609765628e-08,
+ "loss": 0.4128,
+ "step": 1774
+ },
+ {
+ "epoch": 5.5480167014613775,
+ "grad_norm": 0.8159840703010559,
+ "learning_rate": 7.20885836382082e-08,
+ "loss": 0.4344,
+ "step": 1775
+ },
+ {
+ "epoch": 5.551148225469729,
+ "grad_norm": 0.8233450651168823,
+ "learning_rate": 7.105999330005691e-08,
+ "loss": 0.4146,
+ "step": 1776
+ },
+ {
+ "epoch": 5.554279749478079,
+ "grad_norm": 0.8269393444061279,
+ "learning_rate": 7.00386881682899e-08,
+ "loss": 0.4379,
+ "step": 1777
+ },
+ {
+ "epoch": 5.55741127348643,
+ "grad_norm": 0.8312438726425171,
+ "learning_rate": 6.90246713061446e-08,
+ "loss": 0.441,
+ "step": 1778
+ },
+ {
+ "epoch": 5.560542797494781,
+ "grad_norm": 0.7896298766136169,
+ "learning_rate": 6.80179457549976e-08,
+ "loss": 0.4102,
+ "step": 1779
+ },
+ {
+ "epoch": 5.5636743215031315,
+ "grad_norm": 0.942116379737854,
+ "learning_rate": 6.701851453435715e-08,
+ "loss": 0.4141,
+ "step": 1780
+ },
+ {
+ "epoch": 5.566805845511483,
+ "grad_norm": 0.8050079941749573,
+ "learning_rate": 6.602638064185262e-08,
+ "loss": 0.4087,
+ "step": 1781
+ },
+ {
+ "epoch": 5.569937369519833,
+ "grad_norm": 0.7318202257156372,
+ "learning_rate": 6.504154705322646e-08,
+ "loss": 0.4179,
+ "step": 1782
+ },
+ {
+ "epoch": 5.573068893528184,
+ "grad_norm": 0.8148002624511719,
+ "learning_rate": 6.406401672232527e-08,
+ "loss": 0.4439,
+ "step": 1783
+ },
+ {
+ "epoch": 5.576200417536534,
+ "grad_norm": 0.8027870655059814,
+ "learning_rate": 6.309379258109093e-08,
+ "loss": 0.4291,
+ "step": 1784
+ },
+ {
+ "epoch": 5.579331941544885,
+ "grad_norm": 0.8059787154197693,
+ "learning_rate": 6.213087753955127e-08,
+ "loss": 0.4184,
+ "step": 1785
+ },
+ {
+ "epoch": 5.582463465553236,
+ "grad_norm": 0.8198132514953613,
+ "learning_rate": 6.117527448581101e-08,
+ "loss": 0.4197,
+ "step": 1786
+ },
+ {
+ "epoch": 5.585594989561587,
+ "grad_norm": 0.8053491115570068,
+ "learning_rate": 6.022698628604523e-08,
+ "loss": 0.4202,
+ "step": 1787
+ },
+ {
+ "epoch": 5.588726513569937,
+ "grad_norm": 0.7784984707832336,
+ "learning_rate": 5.928601578448823e-08,
+ "loss": 0.4169,
+ "step": 1788
+ },
+ {
+ "epoch": 5.591858037578288,
+ "grad_norm": 0.797520101070404,
+ "learning_rate": 5.835236580342657e-08,
+ "loss": 0.3941,
+ "step": 1789
+ },
+ {
+ "epoch": 5.5949895615866385,
+ "grad_norm": 0.8359535932540894,
+ "learning_rate": 5.742603914318995e-08,
+ "loss": 0.4393,
+ "step": 1790
+ },
+ {
+ "epoch": 5.59812108559499,
+ "grad_norm": 0.7943177223205566,
+ "learning_rate": 5.6507038582142574e-08,
+ "loss": 0.3962,
+ "step": 1791
+ },
+ {
+ "epoch": 5.60125260960334,
+ "grad_norm": 0.7825118899345398,
+ "learning_rate": 5.559536687667566e-08,
+ "loss": 0.4247,
+ "step": 1792
+ },
+ {
+ "epoch": 5.604384133611691,
+ "grad_norm": 0.771355390548706,
+ "learning_rate": 5.469102676119942e-08,
+ "loss": 0.4542,
+ "step": 1793
+ },
+ {
+ "epoch": 5.607515657620041,
+ "grad_norm": 0.8478569984436035,
+ "learning_rate": 5.379402094813246e-08,
+ "loss": 0.4225,
+ "step": 1794
+ },
+ {
+ "epoch": 5.6106471816283925,
+ "grad_norm": 1.0207805633544922,
+ "learning_rate": 5.2904352127896855e-08,
+ "loss": 0.433,
+ "step": 1795
+ },
+ {
+ "epoch": 5.613778705636744,
+ "grad_norm": 0.868712842464447,
+ "learning_rate": 5.2022022968907794e-08,
+ "loss": 0.418,
+ "step": 1796
+ },
+ {
+ "epoch": 5.616910229645094,
+ "grad_norm": 0.8502968549728394,
+ "learning_rate": 5.114703611756672e-08,
+ "loss": 0.3906,
+ "step": 1797
+ },
+ {
+ "epoch": 5.620041753653445,
+ "grad_norm": 0.8621317148208618,
+ "learning_rate": 5.027939419825295e-08,
+ "loss": 0.4502,
+ "step": 1798
+ },
+ {
+ "epoch": 5.623173277661795,
+ "grad_norm": 0.7986813187599182,
+ "learning_rate": 4.941909981331539e-08,
+ "loss": 0.4507,
+ "step": 1799
+ },
+ {
+ "epoch": 5.6263048016701465,
+ "grad_norm": 0.7599334716796875,
+ "learning_rate": 4.856615554306637e-08,
+ "loss": 0.4249,
+ "step": 1800
+ },
+ {
+ "epoch": 5.629436325678497,
+ "grad_norm": 0.854983389377594,
+ "learning_rate": 4.7720563945771456e-08,
+ "loss": 0.3809,
+ "step": 1801
+ },
+ {
+ "epoch": 5.632567849686848,
+ "grad_norm": 0.7154690027236938,
+ "learning_rate": 4.688232755764438e-08,
+ "loss": 0.4397,
+ "step": 1802
+ },
+ {
+ "epoch": 5.635699373695198,
+ "grad_norm": 0.7829597592353821,
+ "learning_rate": 4.605144889283652e-08,
+ "loss": 0.4104,
+ "step": 1803
+ },
+ {
+ "epoch": 5.638830897703549,
+ "grad_norm": 0.8672685623168945,
+ "learning_rate": 4.5227930443432485e-08,
+ "loss": 0.397,
+ "step": 1804
+ },
+ {
+ "epoch": 5.6419624217118995,
+ "grad_norm": 0.8553320169448853,
+ "learning_rate": 4.441177467944035e-08,
+ "loss": 0.447,
+ "step": 1805
+ },
+ {
+ "epoch": 5.645093945720251,
+ "grad_norm": 0.8556696176528931,
+ "learning_rate": 4.3602984048785324e-08,
+ "loss": 0.4379,
+ "step": 1806
+ },
+ {
+ "epoch": 5.648225469728601,
+ "grad_norm": 0.8407424688339233,
+ "learning_rate": 4.280156097730192e-08,
+ "loss": 0.4257,
+ "step": 1807
+ },
+ {
+ "epoch": 5.651356993736952,
+ "grad_norm": 0.8491048812866211,
+ "learning_rate": 4.200750786872682e-08,
+ "loss": 0.4181,
+ "step": 1808
+ },
+ {
+ "epoch": 5.654488517745302,
+ "grad_norm": 0.8149058222770691,
+ "learning_rate": 4.1220827104691826e-08,
+ "loss": 0.4265,
+ "step": 1809
+ },
+ {
+ "epoch": 5.6576200417536535,
+ "grad_norm": 0.7961820960044861,
+ "learning_rate": 4.044152104471705e-08,
+ "loss": 0.3905,
+ "step": 1810
+ },
+ {
+ "epoch": 5.660751565762004,
+ "grad_norm": 0.781095564365387,
+ "learning_rate": 3.966959202620274e-08,
+ "loss": 0.4311,
+ "step": 1811
+ },
+ {
+ "epoch": 5.663883089770355,
+ "grad_norm": 0.8125353455543518,
+ "learning_rate": 3.890504236442272e-08,
+ "loss": 0.4112,
+ "step": 1812
+ },
+ {
+ "epoch": 5.667014613778706,
+ "grad_norm": 0.7369498610496521,
+ "learning_rate": 3.814787435251821e-08,
+ "loss": 0.3749,
+ "step": 1813
+ },
+ {
+ "epoch": 5.670146137787056,
+ "grad_norm": 0.7556062936782837,
+ "learning_rate": 3.739809026149066e-08,
+ "loss": 0.4301,
+ "step": 1814
+ },
+ {
+ "epoch": 5.673277661795407,
+ "grad_norm": 0.7914175391197205,
+ "learning_rate": 3.665569234019367e-08,
+ "loss": 0.403,
+ "step": 1815
+ },
+ {
+ "epoch": 5.676409185803758,
+ "grad_norm": 0.8738980293273926,
+ "learning_rate": 3.592068281532857e-08,
+ "loss": 0.4194,
+ "step": 1816
+ },
+ {
+ "epoch": 5.679540709812109,
+ "grad_norm": 0.7967526912689209,
+ "learning_rate": 3.519306389143468e-08,
+ "loss": 0.4199,
+ "step": 1817
+ },
+ {
+ "epoch": 5.682672233820459,
+ "grad_norm": 0.8052618503570557,
+ "learning_rate": 3.4472837750885726e-08,
+ "loss": 0.4301,
+ "step": 1818
+ },
+ {
+ "epoch": 5.68580375782881,
+ "grad_norm": 1.0626697540283203,
+ "learning_rate": 3.376000655388234e-08,
+ "loss": 0.4073,
+ "step": 1819
+ },
+ {
+ "epoch": 5.688935281837161,
+ "grad_norm": 0.8473744988441467,
+ "learning_rate": 3.3054572438443454e-08,
+ "loss": 0.4323,
+ "step": 1820
+ },
+ {
+ "epoch": 5.692066805845512,
+ "grad_norm": 0.785251796245575,
+ "learning_rate": 3.23565375204038e-08,
+ "loss": 0.4214,
+ "step": 1821
+ },
+ {
+ "epoch": 5.695198329853862,
+ "grad_norm": 0.8004971742630005,
+ "learning_rate": 3.1665903893403626e-08,
+ "loss": 0.4119,
+ "step": 1822
+ },
+ {
+ "epoch": 5.698329853862213,
+ "grad_norm": 0.9184899926185608,
+ "learning_rate": 3.098267362888541e-08,
+ "loss": 0.4267,
+ "step": 1823
+ },
+ {
+ "epoch": 5.701461377870563,
+ "grad_norm": 0.807667076587677,
+ "learning_rate": 3.030684877608575e-08,
+ "loss": 0.4301,
+ "step": 1824
+ },
+ {
+ "epoch": 5.7045929018789145,
+ "grad_norm": 0.7712723612785339,
+ "learning_rate": 2.963843136203043e-08,
+ "loss": 0.4182,
+ "step": 1825
+ },
+ {
+ "epoch": 5.707724425887265,
+ "grad_norm": 0.734040379524231,
+ "learning_rate": 2.8977423391527703e-08,
+ "loss": 0.4443,
+ "step": 1826
+ },
+ {
+ "epoch": 5.710855949895616,
+ "grad_norm": 1.1767381429672241,
+ "learning_rate": 2.8323826847162507e-08,
+ "loss": 0.4125,
+ "step": 1827
+ },
+ {
+ "epoch": 5.713987473903966,
+ "grad_norm": 0.8537562489509583,
+ "learning_rate": 2.7677643689289492e-08,
+ "loss": 0.4221,
+ "step": 1828
+ },
+ {
+ "epoch": 5.717118997912317,
+ "grad_norm": 0.7683084011077881,
+ "learning_rate": 2.7038875856029712e-08,
+ "loss": 0.4167,
+ "step": 1829
+ },
+ {
+ "epoch": 5.7202505219206685,
+ "grad_norm": 0.8932932615280151,
+ "learning_rate": 2.6407525263261736e-08,
+ "loss": 0.4277,
+ "step": 1830
+ },
+ {
+ "epoch": 5.723382045929019,
+ "grad_norm": 0.7592349052429199,
+ "learning_rate": 2.5783593804618312e-08,
+ "loss": 0.3938,
+ "step": 1831
+ },
+ {
+ "epoch": 5.726513569937369,
+ "grad_norm": 0.7657082676887512,
+ "learning_rate": 2.516708335147916e-08,
+ "loss": 0.3895,
+ "step": 1832
+ },
+ {
+ "epoch": 5.72964509394572,
+ "grad_norm": 0.8797202110290527,
+ "learning_rate": 2.4557995752965967e-08,
+ "loss": 0.4119,
+ "step": 1833
+ },
+ {
+ "epoch": 5.732776617954071,
+ "grad_norm": 0.7941285967826843,
+ "learning_rate": 2.3956332835937123e-08,
+ "loss": 0.3737,
+ "step": 1834
+ },
+ {
+ "epoch": 5.735908141962422,
+ "grad_norm": 0.7556982040405273,
+ "learning_rate": 2.3362096404981605e-08,
+ "loss": 0.42,
+ "step": 1835
+ },
+ {
+ "epoch": 5.739039665970773,
+ "grad_norm": 0.8625621795654297,
+ "learning_rate": 2.2775288242413705e-08,
+ "loss": 0.4112,
+ "step": 1836
+ },
+ {
+ "epoch": 5.742171189979123,
+ "grad_norm": 0.8050491809844971,
+ "learning_rate": 2.219591010826805e-08,
+ "loss": 0.4222,
+ "step": 1837
+ },
+ {
+ "epoch": 5.745302713987474,
+ "grad_norm": 0.7006460428237915,
+ "learning_rate": 2.1623963740294574e-08,
+ "loss": 0.4187,
+ "step": 1838
+ },
+ {
+ "epoch": 5.748434237995824,
+ "grad_norm": 0.8389616012573242,
+ "learning_rate": 2.105945085395189e-08,
+ "loss": 0.4258,
+ "step": 1839
+ },
+ {
+ "epoch": 5.751565762004176,
+ "grad_norm": 0.8111906051635742,
+ "learning_rate": 2.050237314240422e-08,
+ "loss": 0.4078,
+ "step": 1840
+ },
+ {
+ "epoch": 5.754697286012526,
+ "grad_norm": 0.7622024416923523,
+ "learning_rate": 1.9952732276514174e-08,
+ "loss": 0.448,
+ "step": 1841
+ },
+ {
+ "epoch": 5.757828810020877,
+ "grad_norm": 0.8661556243896484,
+ "learning_rate": 1.941052990483916e-08,
+ "loss": 0.4058,
+ "step": 1842
+ },
+ {
+ "epoch": 5.760960334029227,
+ "grad_norm": 0.8311405181884766,
+ "learning_rate": 1.8875767653626366e-08,
+ "loss": 0.4126,
+ "step": 1843
+ },
+ {
+ "epoch": 5.764091858037578,
+ "grad_norm": 0.90278559923172,
+ "learning_rate": 1.834844712680722e-08,
+ "loss": 0.4541,
+ "step": 1844
+ },
+ {
+ "epoch": 5.767223382045929,
+ "grad_norm": 0.854500949382782,
+ "learning_rate": 1.782856990599269e-08,
+ "loss": 0.4233,
+ "step": 1845
+ },
+ {
+ "epoch": 5.77035490605428,
+ "grad_norm": 1.0461066961288452,
+ "learning_rate": 1.7316137550469625e-08,
+ "loss": 0.4011,
+ "step": 1846
+ },
+ {
+ "epoch": 5.773486430062631,
+ "grad_norm": 0.7584487199783325,
+ "learning_rate": 1.6811151597194153e-08,
+ "loss": 0.4156,
+ "step": 1847
+ },
+ {
+ "epoch": 5.776617954070981,
+ "grad_norm": 0.8103524446487427,
+ "learning_rate": 1.6313613560788865e-08,
+ "loss": 0.437,
+ "step": 1848
+ },
+ {
+ "epoch": 5.7797494780793315,
+ "grad_norm": 0.7988206744194031,
+ "learning_rate": 1.5823524933537283e-08,
+ "loss": 0.4259,
+ "step": 1849
+ },
+ {
+ "epoch": 5.782881002087683,
+ "grad_norm": 0.788215696811676,
+ "learning_rate": 1.5340887185379684e-08,
+ "loss": 0.4377,
+ "step": 1850
+ },
+ {
+ "epoch": 5.786012526096034,
+ "grad_norm": 0.835552990436554,
+ "learning_rate": 1.4865701763908947e-08,
+ "loss": 0.4378,
+ "step": 1851
+ },
+ {
+ "epoch": 5.789144050104384,
+ "grad_norm": 0.8064237236976624,
+ "learning_rate": 1.4397970094364999e-08,
+ "loss": 0.4396,
+ "step": 1852
+ },
+ {
+ "epoch": 5.792275574112735,
+ "grad_norm": 0.8313791155815125,
+ "learning_rate": 1.3937693579632872e-08,
+ "loss": 0.4411,
+ "step": 1853
+ },
+ {
+ "epoch": 5.7954070981210855,
+ "grad_norm": 0.8237503170967102,
+ "learning_rate": 1.3484873600236037e-08,
+ "loss": 0.4385,
+ "step": 1854
+ },
+ {
+ "epoch": 5.798538622129437,
+ "grad_norm": 0.8513125777244568,
+ "learning_rate": 1.3039511514333359e-08,
+ "loss": 0.4298,
+ "step": 1855
+ },
+ {
+ "epoch": 5.801670146137787,
+ "grad_norm": 0.7637052536010742,
+ "learning_rate": 1.2601608657715758e-08,
+ "loss": 0.4116,
+ "step": 1856
+ },
+ {
+ "epoch": 5.804801670146138,
+ "grad_norm": 0.7973178625106812,
+ "learning_rate": 1.2171166343800388e-08,
+ "loss": 0.4333,
+ "step": 1857
+ },
+ {
+ "epoch": 5.807933194154488,
+ "grad_norm": 0.7717419266700745,
+ "learning_rate": 1.1748185863628414e-08,
+ "loss": 0.4138,
+ "step": 1858
+ },
+ {
+ "epoch": 5.811064718162839,
+ "grad_norm": 0.773685097694397,
+ "learning_rate": 1.1332668485860287e-08,
+ "loss": 0.389,
+ "step": 1859
+ },
+ {
+ "epoch": 5.81419624217119,
+ "grad_norm": 0.8590340614318848,
+ "learning_rate": 1.092461545677187e-08,
+ "loss": 0.4393,
+ "step": 1860
+ },
+ {
+ "epoch": 5.817327766179541,
+ "grad_norm": 0.7766469717025757,
+ "learning_rate": 1.0524028000251651e-08,
+ "loss": 0.4546,
+ "step": 1861
+ },
+ {
+ "epoch": 5.820459290187891,
+ "grad_norm": 0.843730628490448,
+ "learning_rate": 1.0130907317795757e-08,
+ "loss": 0.4473,
+ "step": 1862
+ },
+ {
+ "epoch": 5.823590814196242,
+ "grad_norm": 0.7406162619590759,
+ "learning_rate": 9.745254588504615e-09,
+ "loss": 0.4064,
+ "step": 1863
+ },
+ {
+ "epoch": 5.826722338204593,
+ "grad_norm": 0.8189436793327332,
+ "learning_rate": 9.36707096908046e-09,
+ "loss": 0.4095,
+ "step": 1864
+ },
+ {
+ "epoch": 5.829853862212944,
+ "grad_norm": 0.8491302132606506,
+ "learning_rate": 8.996357593823445e-09,
+ "loss": 0.4425,
+ "step": 1865
+ },
+ {
+ "epoch": 5.832985386221294,
+ "grad_norm": 0.8554711937904358,
+ "learning_rate": 8.633115574626649e-09,
+ "loss": 0.43,
+ "step": 1866
+ },
+ {
+ "epoch": 5.836116910229645,
+ "grad_norm": 0.8352468013763428,
+ "learning_rate": 8.277346000974961e-09,
+ "loss": 0.4471,
+ "step": 1867
+ },
+ {
+ "epoch": 5.839248434237996,
+ "grad_norm": 0.7572605609893799,
+ "learning_rate": 7.929049939940924e-09,
+ "loss": 0.3961,
+ "step": 1868
+ },
+ {
+ "epoch": 5.8423799582463465,
+ "grad_norm": 0.7781088948249817,
+ "learning_rate": 7.5882284361814e-09,
+ "loss": 0.4198,
+ "step": 1869
+ },
+ {
+ "epoch": 5.845511482254698,
+ "grad_norm": 0.780062198638916,
+ "learning_rate": 7.254882511934236e-09,
+ "loss": 0.4275,
+ "step": 1870
+ },
+ {
+ "epoch": 5.848643006263048,
+ "grad_norm": 0.7632731795310974,
+ "learning_rate": 6.929013167016052e-09,
+ "loss": 0.448,
+ "step": 1871
+ },
+ {
+ "epoch": 5.851774530271399,
+ "grad_norm": 0.8311516642570496,
+ "learning_rate": 6.610621378818349e-09,
+ "loss": 0.4242,
+ "step": 1872
+ },
+ {
+ "epoch": 5.854906054279749,
+ "grad_norm": 0.790691077709198,
+ "learning_rate": 6.299708102304736e-09,
+ "loss": 0.4362,
+ "step": 1873
+ },
+ {
+ "epoch": 5.8580375782881005,
+ "grad_norm": 0.8050321340560913,
+ "learning_rate": 5.996274270008429e-09,
+ "loss": 0.4145,
+ "step": 1874
+ },
+ {
+ "epoch": 5.861169102296451,
+ "grad_norm": 0.8381393551826477,
+ "learning_rate": 5.700320792029757e-09,
+ "loss": 0.4229,
+ "step": 1875
+ },
+ {
+ "epoch": 5.864300626304802,
+ "grad_norm": 0.8540353775024414,
+ "learning_rate": 5.411848556032273e-09,
+ "loss": 0.4271,
+ "step": 1876
+ },
+ {
+ "epoch": 5.867432150313152,
+ "grad_norm": 0.7595441341400146,
+ "learning_rate": 5.1308584272410925e-09,
+ "loss": 0.4085,
+ "step": 1877
+ },
+ {
+ "epoch": 5.870563674321503,
+ "grad_norm": 0.8113922476768494,
+ "learning_rate": 4.857351248440112e-09,
+ "loss": 0.3848,
+ "step": 1878
+ },
+ {
+ "epoch": 5.8736951983298535,
+ "grad_norm": 0.8255279660224915,
+ "learning_rate": 4.591327839968685e-09,
+ "loss": 0.3989,
+ "step": 1879
+ },
+ {
+ "epoch": 5.876826722338205,
+ "grad_norm": 0.9661470651626587,
+ "learning_rate": 4.332788999720505e-09,
+ "loss": 0.449,
+ "step": 1880
+ },
+ {
+ "epoch": 5.879958246346555,
+ "grad_norm": 0.8207715153694153,
+ "learning_rate": 4.0817355031405606e-09,
+ "loss": 0.4236,
+ "step": 1881
+ },
+ {
+ "epoch": 5.883089770354906,
+ "grad_norm": 0.8630990982055664,
+ "learning_rate": 3.838168103222628e-09,
+ "loss": 0.4362,
+ "step": 1882
+ },
+ {
+ "epoch": 5.886221294363256,
+ "grad_norm": 0.729789137840271,
+ "learning_rate": 3.6020875305067816e-09,
+ "loss": 0.4224,
+ "step": 1883
+ },
+ {
+ "epoch": 5.8893528183716075,
+ "grad_norm": 0.8223000764846802,
+ "learning_rate": 3.373494493078e-09,
+ "loss": 0.4534,
+ "step": 1884
+ },
+ {
+ "epoch": 5.892484342379959,
+ "grad_norm": 0.8721533417701721,
+ "learning_rate": 3.1523896765633945e-09,
+ "loss": 0.4232,
+ "step": 1885
+ },
+ {
+ "epoch": 5.895615866388309,
+ "grad_norm": 0.9347804188728333,
+ "learning_rate": 2.9387737441308183e-09,
+ "loss": 0.4525,
+ "step": 1886
+ },
+ {
+ "epoch": 5.89874739039666,
+ "grad_norm": 0.8137363195419312,
+ "learning_rate": 2.732647336486094e-09,
+ "loss": 0.4049,
+ "step": 1887
+ },
+ {
+ "epoch": 5.90187891440501,
+ "grad_norm": 0.9206838607788086,
+ "learning_rate": 2.534011071871345e-09,
+ "loss": 0.4177,
+ "step": 1888
+ },
+ {
+ "epoch": 5.9050104384133615,
+ "grad_norm": 0.8816237449645996,
+ "learning_rate": 2.342865546063611e-09,
+ "loss": 0.4168,
+ "step": 1889
+ },
+ {
+ "epoch": 5.908141962421712,
+ "grad_norm": 0.7795571088790894,
+ "learning_rate": 2.1592113323720686e-09,
+ "loss": 0.4694,
+ "step": 1890
+ },
+ {
+ "epoch": 5.911273486430063,
+ "grad_norm": 0.8177521228790283,
+ "learning_rate": 1.983048981638036e-09,
+ "loss": 0.4035,
+ "step": 1891
+ },
+ {
+ "epoch": 5.914405010438413,
+ "grad_norm": 0.9472755789756775,
+ "learning_rate": 1.8143790222308055e-09,
+ "loss": 0.4372,
+ "step": 1892
+ },
+ {
+ "epoch": 5.917536534446764,
+ "grad_norm": 0.7970467805862427,
+ "learning_rate": 1.653201960048756e-09,
+ "loss": 0.4002,
+ "step": 1893
+ },
+ {
+ "epoch": 5.920668058455115,
+ "grad_norm": 0.811830461025238,
+ "learning_rate": 1.4995182785157437e-09,
+ "loss": 0.4343,
+ "step": 1894
+ },
+ {
+ "epoch": 5.923799582463466,
+ "grad_norm": 0.8110922574996948,
+ "learning_rate": 1.353328438580548e-09,
+ "loss": 0.4022,
+ "step": 1895
+ },
+ {
+ "epoch": 5.926931106471816,
+ "grad_norm": 0.7824969291687012,
+ "learning_rate": 1.2146328787160378e-09,
+ "loss": 0.4078,
+ "step": 1896
+ },
+ {
+ "epoch": 5.930062630480167,
+ "grad_norm": 0.8015698790550232,
+ "learning_rate": 1.0834320149163969e-09,
+ "loss": 0.4223,
+ "step": 1897
+ },
+ {
+ "epoch": 5.933194154488517,
+ "grad_norm": 1.0146117210388184,
+ "learning_rate": 9.59726240697123e-10,
+ "loss": 0.4295,
+ "step": 1898
+ },
+ {
+ "epoch": 5.9363256784968685,
+ "grad_norm": 0.8403705358505249,
+ "learning_rate": 8.435159270933635e-10,
+ "loss": 0.4097,
+ "step": 1899
+ },
+ {
+ "epoch": 5.939457202505219,
+ "grad_norm": 0.8461981415748596,
+ "learning_rate": 7.348014226588041e-10,
+ "loss": 0.4141,
+ "step": 1900
+ },
+ {
+ "epoch": 5.94258872651357,
+ "grad_norm": 0.750941812992096,
+ "learning_rate": 6.335830534648368e-10,
+ "loss": 0.4372,
+ "step": 1901
+ },
+ {
+ "epoch": 5.945720250521921,
+ "grad_norm": 0.7628970146179199,
+ "learning_rate": 5.3986112309945e-10,
+ "loss": 0.4195,
+ "step": 1902
+ },
+ {
+ "epoch": 5.948851774530271,
+ "grad_norm": 0.7590934038162231,
+ "learning_rate": 4.536359126658396e-10,
+ "loss": 0.4095,
+ "step": 1903
+ },
+ {
+ "epoch": 5.9519832985386225,
+ "grad_norm": 0.8931977152824402,
+ "learning_rate": 3.749076807826879e-10,
+ "loss": 0.4644,
+ "step": 1904
+ },
+ {
+ "epoch": 5.955114822546973,
+ "grad_norm": 1.068721055984497,
+ "learning_rate": 3.036766635822197e-10,
+ "loss": 0.4522,
+ "step": 1905
+ },
+ {
+ "epoch": 5.958246346555324,
+ "grad_norm": 0.7380474805831909,
+ "learning_rate": 2.3994307471020275e-10,
+ "loss": 0.4296,
+ "step": 1906
+ },
+ {
+ "epoch": 5.961377870563674,
+ "grad_norm": 0.765549898147583,
+ "learning_rate": 1.8370710532483738e-10,
+ "loss": 0.4462,
+ "step": 1907
+ },
+ {
+ "epoch": 5.964509394572025,
+ "grad_norm": 0.7848723530769348,
+ "learning_rate": 1.349689240970342e-10,
+ "loss": 0.4282,
+ "step": 1908
+ },
+ {
+ "epoch": 5.967640918580376,
+ "grad_norm": 0.8109093308448792,
+ "learning_rate": 9.372867720874867e-11,
+ "loss": 0.4064,
+ "step": 1909
+ },
+ {
+ "epoch": 5.970772442588727,
+ "grad_norm": 0.8530039191246033,
+ "learning_rate": 5.99864883532586e-11,
+ "loss": 0.437,
+ "step": 1910
+ },
+ {
+ "epoch": 5.973903966597077,
+ "grad_norm": 0.7433366179466248,
+ "learning_rate": 3.3742458734886684e-11,
+ "loss": 0.4156,
+ "step": 1911
+ },
+ {
+ "epoch": 5.977035490605428,
+ "grad_norm": 0.8591077923774719,
+ "learning_rate": 1.499666706844538e-11,
+ "loss": 0.4239,
+ "step": 1912
+ },
+ {
+ "epoch": 5.980167014613778,
+ "grad_norm": 0.8396080732345581,
+ "learning_rate": 3.7491695784042105e-12,
+ "loss": 0.4363,
+ "step": 1913
+ },
+ {
+ "epoch": 5.98329853862213,
+ "grad_norm": 0.775199294090271,
+ "learning_rate": 0.0,
+ "loss": 0.4328,
+ "step": 1914
+ }
+ ],
+ "logging_steps": 1,
+ "max_steps": 1914,
+ "num_input_tokens_seen": 0,
+ "num_train_epochs": 6,
+ "save_steps": 319,
+ "stateful_callbacks": {
+ "TrainerControl": {
+ "args": {
+ "should_epoch_stop": false,
+ "should_evaluate": false,
+ "should_log": false,
+ "should_save": true,
+ "should_training_stop": true
+ },
+ "attributes": {}
+ }
+ },
+ "total_flos": 5.798644914657742e+19,
+ "train_batch_size": 4,
+ "trial_name": null,
+ "trial_params": null
+}
diff --git a/checkpoint-1914/training_args.bin b/checkpoint-1914/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..8067ee9c1c0bc752bdfd00cfcaf1a6e717d2356b
--- /dev/null
+++ b/checkpoint-1914/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c650156a192cae0a2070c4323ee8a93e9b52fb76041d59ae0633b98389585727
+size 7928
diff --git a/checkpoint-1914/zero_to_fp32.py b/checkpoint-1914/zero_to_fp32.py
new file mode 100644
index 0000000000000000000000000000000000000000..24cc342e78d1a006c782b3a4cd68d9ce786d8fd8
--- /dev/null
+++ b/checkpoint-1914/zero_to_fp32.py
@@ -0,0 +1,604 @@
+#!/usr/bin/env python
+
+# Copyright (c) Microsoft Corporation.
+# SPDX-License-Identifier: Apache-2.0
+
+# DeepSpeed Team
+
+# This script extracts fp32 consolidated weights from a zero 1, 2 and 3 DeepSpeed checkpoints. It gets
+# copied into the top level checkpoint dir, so the user can easily do the conversion at any point in
+# the future. Once extracted, the weights don't require DeepSpeed and can be used in any
+# application.
+#
+# example: python zero_to_fp32.py . pytorch_model.bin
+
+import argparse
+import torch
+import glob
+import math
+import os
+import re
+from collections import OrderedDict
+from dataclasses import dataclass
+
+# while this script doesn't use deepspeed to recover data, since the checkpoints are pickled with
+# DeepSpeed data structures it has to be available in the current python environment.
+from deepspeed.utils import logger
+from deepspeed.checkpoint.constants import (DS_VERSION, OPTIMIZER_STATE_DICT, SINGLE_PARTITION_OF_FP32_GROUPS,
+ FP32_FLAT_GROUPS, ZERO_STAGE, PARTITION_COUNT, PARAM_SHAPES, BUFFER_NAMES,
+ FROZEN_PARAM_SHAPES, FROZEN_PARAM_FRAGMENTS)
+
+
+@dataclass
+class zero_model_state:
+ buffers: dict()
+ param_shapes: dict()
+ shared_params: list
+ ds_version: int
+ frozen_param_shapes: dict()
+ frozen_param_fragments: dict()
+
+
+debug = 0
+
+# load to cpu
+device = torch.device('cpu')
+
+
+def atoi(text):
+ return int(text) if text.isdigit() else text
+
+
+def natural_keys(text):
+ '''
+ alist.sort(key=natural_keys) sorts in human order
+ http://nedbatchelder.com/blog/200712/human_sorting.html
+ (See Toothy's implementation in the comments)
+ '''
+ return [atoi(c) for c in re.split(r'(\d+)', text)]
+
+
+def get_model_state_file(checkpoint_dir, zero_stage):
+ if not os.path.isdir(checkpoint_dir):
+ raise FileNotFoundError(f"Directory '{checkpoint_dir}' doesn't exist")
+
+ # there should be only one file
+ if zero_stage <= 2:
+ file = os.path.join(checkpoint_dir, "mp_rank_00_model_states.pt")
+ elif zero_stage == 3:
+ file = os.path.join(checkpoint_dir, "zero_pp_rank_0_mp_rank_00_model_states.pt")
+
+ if not os.path.exists(file):
+ raise FileNotFoundError(f"can't find model states file at '{file}'")
+
+ return file
+
+
+def get_checkpoint_files(checkpoint_dir, glob_pattern):
+ # XXX: need to test that this simple glob rule works for multi-node setup too
+ ckpt_files = sorted(glob.glob(os.path.join(checkpoint_dir, glob_pattern)), key=natural_keys)
+
+ if len(ckpt_files) == 0:
+ raise FileNotFoundError(f"can't find {glob_pattern} files in directory '{checkpoint_dir}'")
+
+ return ckpt_files
+
+
+def get_optim_files(checkpoint_dir):
+ return get_checkpoint_files(checkpoint_dir, "*_optim_states.pt")
+
+
+def get_model_state_files(checkpoint_dir):
+ return get_checkpoint_files(checkpoint_dir, "*_model_states.pt")
+
+
+def parse_model_states(files):
+ zero_model_states = []
+ for file in files:
+ state_dict = torch.load(file, map_location=device)
+
+ if BUFFER_NAMES not in state_dict:
+ raise ValueError(f"{file} is not a model state checkpoint")
+ buffer_names = state_dict[BUFFER_NAMES]
+ if debug:
+ print("Found buffers:", buffer_names)
+
+ # recover just the buffers while restoring them to fp32 if they were saved in fp16
+ buffers = {k: v.float() for k, v in state_dict["module"].items() if k in buffer_names}
+ param_shapes = state_dict[PARAM_SHAPES]
+
+ # collect parameters that are included in param_shapes
+ param_names = []
+ for s in param_shapes:
+ for name in s.keys():
+ param_names.append(name)
+
+ # update with frozen parameters
+ frozen_param_shapes = state_dict.get(FROZEN_PARAM_SHAPES, None)
+ if frozen_param_shapes is not None:
+ if debug:
+ print(f"Found frozen_param_shapes: {frozen_param_shapes}")
+ param_names += list(frozen_param_shapes.keys())
+
+ # handle shared params
+ shared_params = [[k, v] for k, v in state_dict["shared_params"].items()]
+
+ ds_version = state_dict.get(DS_VERSION, None)
+
+ frozen_param_fragments = state_dict.get(FROZEN_PARAM_FRAGMENTS, None)
+
+ z_model_state = zero_model_state(buffers=buffers,
+ param_shapes=param_shapes,
+ shared_params=shared_params,
+ ds_version=ds_version,
+ frozen_param_shapes=frozen_param_shapes,
+ frozen_param_fragments=frozen_param_fragments)
+ zero_model_states.append(z_model_state)
+
+ return zero_model_states
+
+
+def parse_optim_states(files, ds_checkpoint_dir):
+
+ total_files = len(files)
+ state_dicts = []
+ for f in files:
+ state_dict = torch.load(f, map_location=device)
+ # immediately discard the potentially huge 2 optimizer states as we only care for fp32 master weights
+ # and also handle the case where it was already removed by another helper script
+ state_dict["optimizer_state_dict"].pop("optimizer_state_dict", None)
+ state_dicts.append(state_dict)
+
+ if not ZERO_STAGE in state_dicts[0][OPTIMIZER_STATE_DICT]:
+ raise ValueError(f"{files[0]} is not a zero checkpoint")
+ zero_stage = state_dicts[0][OPTIMIZER_STATE_DICT][ZERO_STAGE]
+ world_size = state_dicts[0][OPTIMIZER_STATE_DICT][PARTITION_COUNT]
+
+ # For ZeRO-2 each param group can have different partition_count as data parallelism for expert
+ # parameters can be different from data parallelism for non-expert parameters. So we can just
+ # use the max of the partition_count to get the dp world_size.
+
+ if type(world_size) is list:
+ world_size = max(world_size)
+
+ if world_size != total_files:
+ raise ValueError(
+ f"Expected {world_size} of '*_optim_states.pt' under '{ds_checkpoint_dir}' but found {total_files} files. "
+ "Possibly due to an overwrite of an old checkpoint, or a checkpoint didn't get saved by one or more processes."
+ )
+
+ # the groups are named differently in each stage
+ if zero_stage <= 2:
+ fp32_groups_key = SINGLE_PARTITION_OF_FP32_GROUPS
+ elif zero_stage == 3:
+ fp32_groups_key = FP32_FLAT_GROUPS
+ else:
+ raise ValueError(f"unknown zero stage {zero_stage}")
+
+ if zero_stage <= 2:
+ fp32_flat_groups = [state_dicts[i][OPTIMIZER_STATE_DICT][fp32_groups_key] for i in range(len(state_dicts))]
+ elif zero_stage == 3:
+ # if there is more than one param group, there will be multiple flattened tensors - one
+ # flattened tensor per group - for simplicity merge them into a single tensor
+ #
+ # XXX: could make the script more memory efficient for when there are multiple groups - it
+ # will require matching the sub-lists of param_shapes for each param group flattened tensor
+
+ fp32_flat_groups = [
+ torch.cat(state_dicts[i][OPTIMIZER_STATE_DICT][fp32_groups_key], 0) for i in range(len(state_dicts))
+ ]
+
+ return zero_stage, world_size, fp32_flat_groups
+
+
+def _get_fp32_state_dict_from_zero_checkpoint(ds_checkpoint_dir, exclude_frozen_parameters):
+ """
+ Returns fp32 state_dict reconstructed from ds checkpoint
+
+ Args:
+ - ``ds_checkpoint_dir``: path to the deepspeed checkpoint folder (where the optimizer files are)
+
+ """
+ print(f"Processing zero checkpoint '{ds_checkpoint_dir}'")
+
+ optim_files = get_optim_files(ds_checkpoint_dir)
+ zero_stage, world_size, fp32_flat_groups = parse_optim_states(optim_files, ds_checkpoint_dir)
+ print(f"Detected checkpoint of type zero stage {zero_stage}, world_size: {world_size}")
+
+ model_files = get_model_state_files(ds_checkpoint_dir)
+
+ zero_model_states = parse_model_states(model_files)
+ print(f'Parsing checkpoint created by deepspeed=={zero_model_states[0].ds_version}')
+
+ if zero_stage <= 2:
+ return _get_fp32_state_dict_from_zero2_checkpoint(world_size, fp32_flat_groups, zero_model_states,
+ exclude_frozen_parameters)
+ elif zero_stage == 3:
+ return _get_fp32_state_dict_from_zero3_checkpoint(world_size, fp32_flat_groups, zero_model_states,
+ exclude_frozen_parameters)
+
+
+def _zero2_merge_frozen_params(state_dict, zero_model_states):
+ if zero_model_states[0].frozen_param_shapes is None or len(zero_model_states[0].frozen_param_shapes) == 0:
+ return
+
+ frozen_param_shapes = zero_model_states[0].frozen_param_shapes
+ frozen_param_fragments = zero_model_states[0].frozen_param_fragments
+
+ if debug:
+ num_elem = sum(s.numel() for s in frozen_param_shapes.values())
+ print(f'rank 0: {FROZEN_PARAM_SHAPES}.numel = {num_elem}')
+
+ wanted_params = len(frozen_param_shapes)
+ wanted_numel = sum(s.numel() for s in frozen_param_shapes.values())
+ avail_numel = sum([p.numel() for p in frozen_param_fragments.values()])
+ print(f'Frozen params: Have {avail_numel} numels to process.')
+ print(f'Frozen params: Need {wanted_numel} numels in {wanted_params} params')
+
+ total_params = 0
+ total_numel = 0
+ for name, shape in frozen_param_shapes.items():
+ total_params += 1
+ unpartitioned_numel = shape.numel()
+ total_numel += unpartitioned_numel
+
+ state_dict[name] = frozen_param_fragments[name]
+
+ if debug:
+ print(f"{name} full shape: {shape} unpartitioned numel {unpartitioned_numel} ")
+
+ print(f"Reconstructed Frozen fp32 state dict with {total_params} params {total_numel} elements")
+
+
+def _has_callable(obj, fn):
+ attr = getattr(obj, fn, None)
+ return callable(attr)
+
+
+def _zero2_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states):
+ param_shapes = zero_model_states[0].param_shapes
+
+ # Reconstruction protocol:
+ #
+ # XXX: document this
+
+ if debug:
+ for i in range(world_size):
+ for j in range(len(fp32_flat_groups[0])):
+ print(f"{FP32_FLAT_GROUPS}[{i}][{j}].shape={fp32_flat_groups[i][j].shape}")
+
+ # XXX: memory usage doubles here (zero2)
+ num_param_groups = len(fp32_flat_groups[0])
+ merged_single_partition_of_fp32_groups = []
+ for i in range(num_param_groups):
+ merged_partitions = [sd[i] for sd in fp32_flat_groups]
+ full_single_fp32_vector = torch.cat(merged_partitions, 0)
+ merged_single_partition_of_fp32_groups.append(full_single_fp32_vector)
+ avail_numel = sum(
+ [full_single_fp32_vector.numel() for full_single_fp32_vector in merged_single_partition_of_fp32_groups])
+
+ if debug:
+ wanted_params = sum([len(shapes) for shapes in param_shapes])
+ wanted_numel = sum([sum(shape.numel() for shape in shapes.values()) for shapes in param_shapes])
+ # not asserting if there is a mismatch due to possible padding
+ print(f"Have {avail_numel} numels to process.")
+ print(f"Need {wanted_numel} numels in {wanted_params} params.")
+
+ # params
+ # XXX: for huge models that can't fit into the host's RAM we will have to recode this to support
+ # out-of-core computing solution
+ total_numel = 0
+ total_params = 0
+ for shapes, full_single_fp32_vector in zip(param_shapes, merged_single_partition_of_fp32_groups):
+ offset = 0
+ avail_numel = full_single_fp32_vector.numel()
+ for name, shape in shapes.items():
+
+ unpartitioned_numel = shape.numel() if _has_callable(shape, 'numel') else math.prod(shape)
+ total_numel += unpartitioned_numel
+ total_params += 1
+
+ if debug:
+ print(f"{name} full shape: {shape} unpartitioned numel {unpartitioned_numel} ")
+ state_dict[name] = full_single_fp32_vector.narrow(0, offset, unpartitioned_numel).view(shape)
+ offset += unpartitioned_numel
+
+ # Z2 started to align to 2*world_size to improve nccl performance. Therefore both offset and
+ # avail_numel can differ by anywhere between 0..2*world_size. Due to two unrelated complex
+ # paddings performed in the code it's almost impossible to predict the exact numbers w/o the
+ # live optimizer object, so we are checking that the numbers are within the right range
+ align_to = 2 * world_size
+
+ def zero2_align(x):
+ return align_to * math.ceil(x / align_to)
+
+ if debug:
+ print(f"original offset={offset}, avail_numel={avail_numel}")
+
+ offset = zero2_align(offset)
+ avail_numel = zero2_align(avail_numel)
+
+ if debug:
+ print(f"aligned offset={offset}, avail_numel={avail_numel}")
+
+ # Sanity check
+ if offset != avail_numel:
+ raise ValueError(f"consumed {offset} numels out of {avail_numel} - something is wrong")
+
+ print(f"Reconstructed fp32 state dict with {total_params} params {total_numel} elements")
+
+
+def _get_fp32_state_dict_from_zero2_checkpoint(world_size, fp32_flat_groups, zero_model_states,
+ exclude_frozen_parameters):
+ state_dict = OrderedDict()
+
+ # buffers
+ buffers = zero_model_states[0].buffers
+ state_dict.update(buffers)
+ if debug:
+ print(f"added {len(buffers)} buffers")
+
+ if not exclude_frozen_parameters:
+ _zero2_merge_frozen_params(state_dict, zero_model_states)
+
+ _zero2_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states)
+
+ # recover shared parameters
+ for pair in zero_model_states[0].shared_params:
+ if pair[1] in state_dict:
+ state_dict[pair[0]] = state_dict[pair[1]]
+
+ return state_dict
+
+
+def zero3_partitioned_param_info(unpartitioned_numel, world_size):
+ remainder = unpartitioned_numel % world_size
+ padding_numel = (world_size - remainder) if remainder else 0
+ partitioned_numel = math.ceil(unpartitioned_numel / world_size)
+ return partitioned_numel, padding_numel
+
+
+def _zero3_merge_frozen_params(state_dict, world_size, zero_model_states):
+ if zero_model_states[0].frozen_param_shapes is None or len(zero_model_states[0].frozen_param_shapes) == 0:
+ return
+
+ if debug:
+ for i in range(world_size):
+ num_elem = sum(s.numel() for s in zero_model_states[i].frozen_param_fragments.values())
+ print(f'rank {i}: {FROZEN_PARAM_SHAPES}.numel = {num_elem}')
+
+ frozen_param_shapes = zero_model_states[0].frozen_param_shapes
+ wanted_params = len(frozen_param_shapes)
+ wanted_numel = sum(s.numel() for s in frozen_param_shapes.values())
+ avail_numel = sum([p.numel() for p in zero_model_states[0].frozen_param_fragments.values()]) * world_size
+ print(f'Frozen params: Have {avail_numel} numels to process.')
+ print(f'Frozen params: Need {wanted_numel} numels in {wanted_params} params')
+
+ total_params = 0
+ total_numel = 0
+ for name, shape in zero_model_states[0].frozen_param_shapes.items():
+ total_params += 1
+ unpartitioned_numel = shape.numel()
+ total_numel += unpartitioned_numel
+
+ param_frags = tuple(model_state.frozen_param_fragments[name] for model_state in zero_model_states)
+ state_dict[name] = torch.cat(param_frags, 0).narrow(0, 0, unpartitioned_numel).view(shape)
+
+ partitioned_numel, partitioned_padding_numel = zero3_partitioned_param_info(unpartitioned_numel, world_size)
+
+ if debug:
+ print(
+ f"Frozen params: {total_params} {name} full shape: {shape} partition0 numel={partitioned_numel} partitioned_padding_numel={partitioned_padding_numel}"
+ )
+
+ print(f"Reconstructed Frozen fp32 state dict with {total_params} params {total_numel} elements")
+
+
+def _zero3_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states):
+ param_shapes = zero_model_states[0].param_shapes
+ avail_numel = fp32_flat_groups[0].numel() * world_size
+ # Reconstruction protocol: For zero3 we need to zip the partitions together at boundary of each
+ # param, re-consolidating each param, while dealing with padding if any
+
+ # merge list of dicts, preserving order
+ param_shapes = {k: v for d in param_shapes for k, v in d.items()}
+
+ if debug:
+ for i in range(world_size):
+ print(f"{FP32_FLAT_GROUPS}[{i}].shape={fp32_flat_groups[i].shape}")
+
+ wanted_params = len(param_shapes)
+ wanted_numel = sum(shape.numel() for shape in param_shapes.values())
+ # not asserting if there is a mismatch due to possible padding
+ avail_numel = fp32_flat_groups[0].numel() * world_size
+ print(f"Trainable params: Have {avail_numel} numels to process.")
+ print(f"Trainable params: Need {wanted_numel} numels in {wanted_params} params.")
+
+ # params
+ # XXX: for huge models that can't fit into the host's RAM we will have to recode this to support
+ # out-of-core computing solution
+ offset = 0
+ total_numel = 0
+ total_params = 0
+ for name, shape in param_shapes.items():
+
+ unpartitioned_numel = shape.numel()
+ total_numel += unpartitioned_numel
+ total_params += 1
+
+ partitioned_numel, partitioned_padding_numel = zero3_partitioned_param_info(unpartitioned_numel, world_size)
+
+ if debug:
+ print(
+ f"Trainable params: {total_params} {name} full shape: {shape} partition0 numel={partitioned_numel} partitioned_padding_numel={partitioned_padding_numel}"
+ )
+
+ # XXX: memory usage doubles here
+ state_dict[name] = torch.cat(
+ tuple(fp32_flat_groups[i].narrow(0, offset, partitioned_numel) for i in range(world_size)),
+ 0).narrow(0, 0, unpartitioned_numel).view(shape)
+ offset += partitioned_numel
+
+ offset *= world_size
+
+ # Sanity check
+ if offset != avail_numel:
+ raise ValueError(f"consumed {offset} numels out of {avail_numel} - something is wrong")
+
+ print(f"Reconstructed Trainable fp32 state dict with {total_params} params {total_numel} elements")
+
+
+def _get_fp32_state_dict_from_zero3_checkpoint(world_size, fp32_flat_groups, zero_model_states,
+ exclude_frozen_parameters):
+ state_dict = OrderedDict()
+
+ # buffers
+ buffers = zero_model_states[0].buffers
+ state_dict.update(buffers)
+ if debug:
+ print(f"added {len(buffers)} buffers")
+
+ if not exclude_frozen_parameters:
+ _zero3_merge_frozen_params(state_dict, world_size, zero_model_states)
+
+ _zero3_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states)
+
+ # recover shared parameters
+ for pair in zero_model_states[0].shared_params:
+ if pair[1] in state_dict:
+ state_dict[pair[0]] = state_dict[pair[1]]
+
+ return state_dict
+
+
+def get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag=None, exclude_frozen_parameters=False):
+ """
+ Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated state_dict that can be loaded with
+ ``load_state_dict()`` and used for training without DeepSpeed or shared with others, for example
+ via a model hub.
+
+ Args:
+ - ``checkpoint_dir``: path to the desired checkpoint folder
+ - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in 'latest' file. e.g., ``global_step14``
+ - ``exclude_frozen_parameters``: exclude frozen parameters
+
+ Returns:
+ - pytorch ``state_dict``
+
+ Note: this approach may not work if your application doesn't have sufficient free CPU memory and
+ you may need to use the offline approach using the ``zero_to_fp32.py`` script that is saved with
+ the checkpoint.
+
+ A typical usage might be ::
+
+ from deepspeed.utils.zero_to_fp32 import get_fp32_state_dict_from_zero_checkpoint
+ # do the training and checkpoint saving
+ state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir) # already on cpu
+ model = model.cpu() # move to cpu
+ model.load_state_dict(state_dict)
+ # submit to model hub or save the model to share with others
+
+ In this example the ``model`` will no longer be usable in the deepspeed context of the same
+ application. i.e. you will need to re-initialize the deepspeed engine, since
+ ``model.load_state_dict(state_dict)`` will remove all the deepspeed magic from it.
+
+ If you want it all done for you, use ``load_state_dict_from_zero_checkpoint`` instead.
+
+ """
+ if tag is None:
+ latest_path = os.path.join(checkpoint_dir, 'latest')
+ if os.path.isfile(latest_path):
+ with open(latest_path, 'r') as fd:
+ tag = fd.read().strip()
+ else:
+ raise ValueError(f"Unable to find 'latest' file at {latest_path}")
+
+ ds_checkpoint_dir = os.path.join(checkpoint_dir, tag)
+
+ if not os.path.isdir(ds_checkpoint_dir):
+ raise FileNotFoundError(f"Directory '{ds_checkpoint_dir}' doesn't exist")
+
+ return _get_fp32_state_dict_from_zero_checkpoint(ds_checkpoint_dir, exclude_frozen_parameters)
+
+
+def convert_zero_checkpoint_to_fp32_state_dict(checkpoint_dir, output_file, tag=None, exclude_frozen_parameters=False):
+ """
+ Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated ``state_dict`` file that can be
+ loaded with ``torch.load(file)`` + ``load_state_dict()`` and used for training without DeepSpeed.
+
+ Args:
+ - ``checkpoint_dir``: path to the desired checkpoint folder. (one that contains the tag-folder, like ``global_step14``)
+ - ``output_file``: path to the pytorch fp32 state_dict output file (e.g. path/pytorch_model.bin)
+ - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in the file named ``latest`` in the checkpoint folder, e.g., ``global_step14``
+ - ``exclude_frozen_parameters``: exclude frozen parameters
+ """
+
+ state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag, exclude_frozen_parameters)
+ print(f"Saving fp32 state dict to {output_file}")
+ torch.save(state_dict, output_file)
+
+
+def load_state_dict_from_zero_checkpoint(model, checkpoint_dir, tag=None):
+ """
+ 1. Put the provided model to cpu
+ 2. Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated ``state_dict``
+ 3. Load it into the provided model
+
+ Args:
+ - ``model``: the model object to update
+ - ``checkpoint_dir``: path to the desired checkpoint folder. (one that contains the tag-folder, like ``global_step14``)
+ - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in the file named ``latest`` in the checkpoint folder, e.g., ``global_step14``
+
+ Returns:
+ - ``model`: modified model
+
+ Make sure you have plenty of CPU memory available before you call this function. If you don't
+ have enough use the ``zero_to_fp32.py`` utility to do the conversion. You will find it
+ conveniently placed for you in the checkpoint folder.
+
+ A typical usage might be ::
+
+ from deepspeed.utils.zero_to_fp32 import load_state_dict_from_zero_checkpoint
+ model = load_state_dict_from_zero_checkpoint(trainer.model, checkpoint_dir)
+ # submit to model hub or save the model to share with others
+
+ Note, that once this was run, the ``model`` will no longer be usable in the deepspeed context
+ of the same application. i.e. you will need to re-initialize the deepspeed engine, since
+ ``model.load_state_dict(state_dict)`` will remove all the deepspeed magic from it.
+
+ """
+ logger.info(f"Extracting fp32 weights")
+ state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag)
+
+ logger.info(f"Overwriting model with fp32 weights")
+ model = model.cpu()
+ model.load_state_dict(state_dict, strict=False)
+
+ return model
+
+
+if __name__ == "__main__":
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument("checkpoint_dir",
+ type=str,
+ help="path to the desired checkpoint folder, e.g., path/checkpoint-12")
+ parser.add_argument(
+ "output_file",
+ type=str,
+ help="path to the pytorch fp32 state_dict output file (e.g. path/checkpoint-12/pytorch_model.bin)")
+ parser.add_argument("-t",
+ "--tag",
+ type=str,
+ default=None,
+ help="checkpoint tag used as a unique identifier for checkpoint. e.g., global_step1")
+ parser.add_argument("--exclude_frozen_parameters", action='store_true', help="exclude frozen parameters")
+ parser.add_argument("-d", "--debug", action='store_true', help="enable debug")
+ args = parser.parse_args()
+
+ debug = args.debug
+
+ convert_zero_checkpoint_to_fp32_state_dict(args.checkpoint_dir,
+ args.output_file,
+ tag=args.tag,
+ exclude_frozen_parameters=args.exclude_frozen_parameters)
diff --git a/checkpoint-319/README.md b/checkpoint-319/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..f4a3934800eeb082a0cb833d7b6af4f68eed3615
--- /dev/null
+++ b/checkpoint-319/README.md
@@ -0,0 +1,202 @@
+---
+base_model: nvidia/Llama-3_3-Nemotron-Super-49B-v1
+library_name: peft
+---
+
+# Model Card for Model ID
+
+
+
+
+
+## Model Details
+
+### Model Description
+
+
+
+
+
+- **Developed by:** [More Information Needed]
+- **Funded by [optional]:** [More Information Needed]
+- **Shared by [optional]:** [More Information Needed]
+- **Model type:** [More Information Needed]
+- **Language(s) (NLP):** [More Information Needed]
+- **License:** [More Information Needed]
+- **Finetuned from model [optional]:** [More Information Needed]
+
+### Model Sources [optional]
+
+
+
+- **Repository:** [More Information Needed]
+- **Paper [optional]:** [More Information Needed]
+- **Demo [optional]:** [More Information Needed]
+
+## Uses
+
+
+
+### Direct Use
+
+
+
+[More Information Needed]
+
+### Downstream Use [optional]
+
+
+
+[More Information Needed]
+
+### Out-of-Scope Use
+
+
+
+[More Information Needed]
+
+## Bias, Risks, and Limitations
+
+
+
+[More Information Needed]
+
+### Recommendations
+
+
+
+Users (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations.
+
+## How to Get Started with the Model
+
+Use the code below to get started with the model.
+
+[More Information Needed]
+
+## Training Details
+
+### Training Data
+
+
+
+[More Information Needed]
+
+### Training Procedure
+
+
+
+#### Preprocessing [optional]
+
+[More Information Needed]
+
+
+#### Training Hyperparameters
+
+- **Training regime:** [More Information Needed]
+
+#### Speeds, Sizes, Times [optional]
+
+
+
+[More Information Needed]
+
+## Evaluation
+
+
+
+### Testing Data, Factors & Metrics
+
+#### Testing Data
+
+
+
+[More Information Needed]
+
+#### Factors
+
+
+
+[More Information Needed]
+
+#### Metrics
+
+
+
+[More Information Needed]
+
+### Results
+
+[More Information Needed]
+
+#### Summary
+
+
+
+## Model Examination [optional]
+
+
+
+[More Information Needed]
+
+## Environmental Impact
+
+
+
+Carbon emissions can be estimated using the [Machine Learning Impact calculator](https://mlco2.github.io/impact#compute) presented in [Lacoste et al. (2019)](https://arxiv.org/abs/1910.09700).
+
+- **Hardware Type:** [More Information Needed]
+- **Hours used:** [More Information Needed]
+- **Cloud Provider:** [More Information Needed]
+- **Compute Region:** [More Information Needed]
+- **Carbon Emitted:** [More Information Needed]
+
+## Technical Specifications [optional]
+
+### Model Architecture and Objective
+
+[More Information Needed]
+
+### Compute Infrastructure
+
+[More Information Needed]
+
+#### Hardware
+
+[More Information Needed]
+
+#### Software
+
+[More Information Needed]
+
+## Citation [optional]
+
+
+
+**BibTeX:**
+
+[More Information Needed]
+
+**APA:**
+
+[More Information Needed]
+
+## Glossary [optional]
+
+
+
+[More Information Needed]
+
+## More Information [optional]
+
+[More Information Needed]
+
+## Model Card Authors [optional]
+
+[More Information Needed]
+
+## Model Card Contact
+
+[More Information Needed]
+### Framework versions
+
+- PEFT 0.15.0
\ No newline at end of file
diff --git a/checkpoint-319/adapter_config.json b/checkpoint-319/adapter_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..1edb33780e2306c6b19fd727be8e9b8b35f237c4
--- /dev/null
+++ b/checkpoint-319/adapter_config.json
@@ -0,0 +1,42 @@
+{
+ "alpha_pattern": {},
+ "auto_mapping": null,
+ "base_model_name_or_path": "nvidia/Llama-3_3-Nemotron-Super-49B-v1",
+ "bias": "none",
+ "corda_config": null,
+ "eva_config": null,
+ "exclude_modules": null,
+ "fan_in_fan_out": null,
+ "inference_mode": true,
+ "init_lora_weights": true,
+ "layer_replication": null,
+ "layers_pattern": null,
+ "layers_to_transform": null,
+ "loftq_config": {},
+ "lora_alpha": 512,
+ "lora_bias": false,
+ "lora_dropout": 0.05,
+ "megatron_config": null,
+ "megatron_core": "megatron.core",
+ "modules_to_save": [
+ "embed_tokens",
+ "lm_head"
+ ],
+ "peft_type": "LORA",
+ "r": 256,
+ "rank_pattern": {},
+ "revision": null,
+ "target_modules": [
+ "gate_proj",
+ "k_proj",
+ "down_proj",
+ "o_proj",
+ "v_proj",
+ "up_proj",
+ "q_proj"
+ ],
+ "task_type": "CAUSAL_LM",
+ "trainable_token_indices": null,
+ "use_dora": false,
+ "use_rslora": false
+}
\ No newline at end of file
diff --git a/checkpoint-319/adapter_model.safetensors b/checkpoint-319/adapter_model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..d8f396f36935f54e9ebf74f4fa741d56788b35f7
--- /dev/null
+++ b/checkpoint-319/adapter_model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c1d3e52970fd1c9220ff78f8dcfbe7d50076195a3997fc7cf94457e7972616fe
+size 9016826528
diff --git a/checkpoint-319/global_step319/bf16_zero_pp_rank_0_mp_rank_00_optim_states.pt b/checkpoint-319/global_step319/bf16_zero_pp_rank_0_mp_rank_00_optim_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..92fe5b826731a4c210b62fe28df91b6ef75ef83f
--- /dev/null
+++ b/checkpoint-319/global_step319/bf16_zero_pp_rank_0_mp_rank_00_optim_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ea7c861852a5aa99dba3052911a00ffe96b3172c1a2b2a5dad54f0aac4d02b5e
+size 27050164444
diff --git a/checkpoint-319/global_step319/bf16_zero_pp_rank_1_mp_rank_00_optim_states.pt b/checkpoint-319/global_step319/bf16_zero_pp_rank_1_mp_rank_00_optim_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..3060e2b85be4c68e2bf431dc8059b0342ae145f4
--- /dev/null
+++ b/checkpoint-319/global_step319/bf16_zero_pp_rank_1_mp_rank_00_optim_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b1c237d41ade0cfcb45da2bce873d64ad89d906aaa159c7f712adf225e230add
+size 27050169884
diff --git a/checkpoint-319/global_step319/mp_rank_00_model_states.pt b/checkpoint-319/global_step319/mp_rank_00_model_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..77758dd3eb63a655fec29a2bf149fde2592398a8
--- /dev/null
+++ b/checkpoint-319/global_step319/mp_rank_00_model_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a5b37167ceb09ad4ac9008da28670137f0be4d37889138803cb026f28e59d959
+size 9776788601
diff --git a/checkpoint-319/latest b/checkpoint-319/latest
new file mode 100644
index 0000000000000000000000000000000000000000..ce0a9f612309feaeb884433bac375e628247dd48
--- /dev/null
+++ b/checkpoint-319/latest
@@ -0,0 +1 @@
+global_step319
\ No newline at end of file
diff --git a/checkpoint-319/rng_state_0.pth b/checkpoint-319/rng_state_0.pth
new file mode 100644
index 0000000000000000000000000000000000000000..b75feb823766df9e6ee9c6c1c5c1abc161824288
--- /dev/null
+++ b/checkpoint-319/rng_state_0.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:48d5fdf33d1910923036f72a78ece8e55f970c798de41ce0a723ac98cc6299e6
+size 14512
diff --git a/checkpoint-319/rng_state_1.pth b/checkpoint-319/rng_state_1.pth
new file mode 100644
index 0000000000000000000000000000000000000000..8b6068a4d49244e0256e72ae95d75668111571be
--- /dev/null
+++ b/checkpoint-319/rng_state_1.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:add034b30147f78680605f81076318e2111c9bfc3c9d831bfbcd6a7fd1ffed80
+size 14512
diff --git a/checkpoint-319/scheduler.pt b/checkpoint-319/scheduler.pt
new file mode 100644
index 0000000000000000000000000000000000000000..4cf1ca1199c39d96498b722ffde6e486a7bc41cb
--- /dev/null
+++ b/checkpoint-319/scheduler.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2eb3e60ca57802cfb48a712a9f58abd00a68f442d18b53579c9399f950b437d8
+size 1064
diff --git a/checkpoint-319/special_tokens_map.json b/checkpoint-319/special_tokens_map.json
new file mode 100644
index 0000000000000000000000000000000000000000..278b7f0f84be865c4687700ee7b3c63d89a51e18
--- /dev/null
+++ b/checkpoint-319/special_tokens_map.json
@@ -0,0 +1,23 @@
+{
+ "bos_token": {
+ "content": "<|begin_of_text|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "<|eot_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": {
+ "content": "<|end_of_text|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+}
diff --git a/checkpoint-319/tokenizer.json b/checkpoint-319/tokenizer.json
new file mode 100644
index 0000000000000000000000000000000000000000..1c1d8d5c9024994f1d3b00f9662b8dd89ca13cf2
--- /dev/null
+++ b/checkpoint-319/tokenizer.json
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6b9e4e7fb171f92fd137b777cc2714bf87d11576700a1dcd7a399e7bbe39537b
+size 17209920
diff --git a/checkpoint-319/tokenizer_config.json b/checkpoint-319/tokenizer_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..edd01b980c1db496ea102a51c972ee8f5d1a2c74
--- /dev/null
+++ b/checkpoint-319/tokenizer_config.json
@@ -0,0 +1,2064 @@
+{
+ "added_tokens_decoder": {
+ "128000": {
+ "content": "<|begin_of_text|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128001": {
+ "content": "<|end_of_text|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128002": {
+ "content": "<|reserved_special_token_0|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128003": {
+ "content": "<|reserved_special_token_1|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128004": {
+ "content": "<|finetune_right_pad_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128005": {
+ "content": "<|reserved_special_token_2|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128006": {
+ "content": "<|start_header_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128007": {
+ "content": "<|end_header_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128008": {
+ "content": "<|eom_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128009": {
+ "content": "<|eot_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128010": {
+ "content": "<|python_tag|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128011": {
+ "content": "<|reserved_special_token_3|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128012": {
+ "content": "<|reserved_special_token_4|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128013": {
+ "content": "<|reserved_special_token_5|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128014": {
+ "content": "<|reserved_special_token_6|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128015": {
+ "content": "<|reserved_special_token_7|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128016": {
+ "content": "<|reserved_special_token_8|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128017": {
+ "content": "<|reserved_special_token_9|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128018": {
+ "content": "<|reserved_special_token_10|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128019": {
+ "content": "<|reserved_special_token_11|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128020": {
+ "content": "<|reserved_special_token_12|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128021": {
+ "content": "<|reserved_special_token_13|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128022": {
+ "content": "<|reserved_special_token_14|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128023": {
+ "content": "<|reserved_special_token_15|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128024": {
+ "content": "<|reserved_special_token_16|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128025": {
+ "content": "<|reserved_special_token_17|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128026": {
+ "content": "<|reserved_special_token_18|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128027": {
+ "content": "<|reserved_special_token_19|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128028": {
+ "content": "<|reserved_special_token_20|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128029": {
+ "content": "<|reserved_special_token_21|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128030": {
+ "content": "<|reserved_special_token_22|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128031": {
+ "content": "<|reserved_special_token_23|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128032": {
+ "content": "<|reserved_special_token_24|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128033": {
+ "content": "<|reserved_special_token_25|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128034": {
+ "content": "<|reserved_special_token_26|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128035": {
+ "content": "<|reserved_special_token_27|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128036": {
+ "content": "<|reserved_special_token_28|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128037": {
+ "content": "<|reserved_special_token_29|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128038": {
+ "content": "<|reserved_special_token_30|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128039": {
+ "content": "<|reserved_special_token_31|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128040": {
+ "content": "<|reserved_special_token_32|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128041": {
+ "content": "<|reserved_special_token_33|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128042": {
+ "content": "<|reserved_special_token_34|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128043": {
+ "content": "<|reserved_special_token_35|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128044": {
+ "content": "<|reserved_special_token_36|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128045": {
+ "content": "<|reserved_special_token_37|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128046": {
+ "content": "<|reserved_special_token_38|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128047": {
+ "content": "<|reserved_special_token_39|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128048": {
+ "content": "<|reserved_special_token_40|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128049": {
+ "content": "<|reserved_special_token_41|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128050": {
+ "content": "<|reserved_special_token_42|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128051": {
+ "content": "<|reserved_special_token_43|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128052": {
+ "content": "<|reserved_special_token_44|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128053": {
+ "content": "<|reserved_special_token_45|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128054": {
+ "content": "<|reserved_special_token_46|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128055": {
+ "content": "<|reserved_special_token_47|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128056": {
+ "content": "<|reserved_special_token_48|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128057": {
+ "content": "<|reserved_special_token_49|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128058": {
+ "content": "<|reserved_special_token_50|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128059": {
+ "content": "<|reserved_special_token_51|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128060": {
+ "content": "<|reserved_special_token_52|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128061": {
+ "content": "<|reserved_special_token_53|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128062": {
+ "content": "<|reserved_special_token_54|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128063": {
+ "content": "<|reserved_special_token_55|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128064": {
+ "content": "<|reserved_special_token_56|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128065": {
+ "content": "<|reserved_special_token_57|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128066": {
+ "content": "<|reserved_special_token_58|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128067": {
+ "content": "<|reserved_special_token_59|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128068": {
+ "content": "<|reserved_special_token_60|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128069": {
+ "content": "<|reserved_special_token_61|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128070": {
+ "content": "<|reserved_special_token_62|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128071": {
+ "content": "<|reserved_special_token_63|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128072": {
+ "content": "<|reserved_special_token_64|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128073": {
+ "content": "<|reserved_special_token_65|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128074": {
+ "content": "<|reserved_special_token_66|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128075": {
+ "content": "<|reserved_special_token_67|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128076": {
+ "content": "<|reserved_special_token_68|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128077": {
+ "content": "<|reserved_special_token_69|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128078": {
+ "content": "<|reserved_special_token_70|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128079": {
+ "content": "<|reserved_special_token_71|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128080": {
+ "content": "<|reserved_special_token_72|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128081": {
+ "content": "<|reserved_special_token_73|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128082": {
+ "content": "<|reserved_special_token_74|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128083": {
+ "content": "<|reserved_special_token_75|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128084": {
+ "content": "<|reserved_special_token_76|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128085": {
+ "content": "<|reserved_special_token_77|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128086": {
+ "content": "<|reserved_special_token_78|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128087": {
+ "content": "<|reserved_special_token_79|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128088": {
+ "content": "<|reserved_special_token_80|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128089": {
+ "content": "<|reserved_special_token_81|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128090": {
+ "content": "<|reserved_special_token_82|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128091": {
+ "content": "<|reserved_special_token_83|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128092": {
+ "content": "<|reserved_special_token_84|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128093": {
+ "content": "<|reserved_special_token_85|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128094": {
+ "content": "<|reserved_special_token_86|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128095": {
+ "content": "<|reserved_special_token_87|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128096": {
+ "content": "<|reserved_special_token_88|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128097": {
+ "content": "<|reserved_special_token_89|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128098": {
+ "content": "<|reserved_special_token_90|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128099": {
+ "content": "<|reserved_special_token_91|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128100": {
+ "content": "<|reserved_special_token_92|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128101": {
+ "content": "<|reserved_special_token_93|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128102": {
+ "content": "<|reserved_special_token_94|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128103": {
+ "content": "<|reserved_special_token_95|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128104": {
+ "content": "<|reserved_special_token_96|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128105": {
+ "content": "<|reserved_special_token_97|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128106": {
+ "content": "<|reserved_special_token_98|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128107": {
+ "content": "<|reserved_special_token_99|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128108": {
+ "content": "<|reserved_special_token_100|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128109": {
+ "content": "<|reserved_special_token_101|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128110": {
+ "content": "<|reserved_special_token_102|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128111": {
+ "content": "<|reserved_special_token_103|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128112": {
+ "content": "<|reserved_special_token_104|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128113": {
+ "content": "<|reserved_special_token_105|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128114": {
+ "content": "<|reserved_special_token_106|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128115": {
+ "content": "<|reserved_special_token_107|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128116": {
+ "content": "<|reserved_special_token_108|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128117": {
+ "content": "<|reserved_special_token_109|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128118": {
+ "content": "<|reserved_special_token_110|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128119": {
+ "content": "<|reserved_special_token_111|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128120": {
+ "content": "<|reserved_special_token_112|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128121": {
+ "content": "<|reserved_special_token_113|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128122": {
+ "content": "<|reserved_special_token_114|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128123": {
+ "content": "<|reserved_special_token_115|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128124": {
+ "content": "<|reserved_special_token_116|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128125": {
+ "content": "<|reserved_special_token_117|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128126": {
+ "content": "<|reserved_special_token_118|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128127": {
+ "content": "<|reserved_special_token_119|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128128": {
+ "content": "<|reserved_special_token_120|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128129": {
+ "content": "<|reserved_special_token_121|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128130": {
+ "content": "<|reserved_special_token_122|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128131": {
+ "content": "<|reserved_special_token_123|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128132": {
+ "content": "<|reserved_special_token_124|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128133": {
+ "content": "<|reserved_special_token_125|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128134": {
+ "content": "<|reserved_special_token_126|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128135": {
+ "content": "<|reserved_special_token_127|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128136": {
+ "content": "<|reserved_special_token_128|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128137": {
+ "content": "<|reserved_special_token_129|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128138": {
+ "content": "<|reserved_special_token_130|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128139": {
+ "content": "<|reserved_special_token_131|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128140": {
+ "content": "<|reserved_special_token_132|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128141": {
+ "content": "<|reserved_special_token_133|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128142": {
+ "content": "<|reserved_special_token_134|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128143": {
+ "content": "<|reserved_special_token_135|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128144": {
+ "content": "<|reserved_special_token_136|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128145": {
+ "content": "<|reserved_special_token_137|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128146": {
+ "content": "<|reserved_special_token_138|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128147": {
+ "content": "<|reserved_special_token_139|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128148": {
+ "content": "<|reserved_special_token_140|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128149": {
+ "content": "<|reserved_special_token_141|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128150": {
+ "content": "<|reserved_special_token_142|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128151": {
+ "content": "<|reserved_special_token_143|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128152": {
+ "content": "<|reserved_special_token_144|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128153": {
+ "content": "<|reserved_special_token_145|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128154": {
+ "content": "<|reserved_special_token_146|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128155": {
+ "content": "<|reserved_special_token_147|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128156": {
+ "content": "<|reserved_special_token_148|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128157": {
+ "content": "<|reserved_special_token_149|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128158": {
+ "content": "<|reserved_special_token_150|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128159": {
+ "content": "<|reserved_special_token_151|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128160": {
+ "content": "<|reserved_special_token_152|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128161": {
+ "content": "<|reserved_special_token_153|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128162": {
+ "content": "<|reserved_special_token_154|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128163": {
+ "content": "<|reserved_special_token_155|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128164": {
+ "content": "<|reserved_special_token_156|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128165": {
+ "content": "<|reserved_special_token_157|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128166": {
+ "content": "<|reserved_special_token_158|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128167": {
+ "content": "<|reserved_special_token_159|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128168": {
+ "content": "<|reserved_special_token_160|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128169": {
+ "content": "<|reserved_special_token_161|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128170": {
+ "content": "<|reserved_special_token_162|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128171": {
+ "content": "<|reserved_special_token_163|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128172": {
+ "content": "<|reserved_special_token_164|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128173": {
+ "content": "<|reserved_special_token_165|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128174": {
+ "content": "<|reserved_special_token_166|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128175": {
+ "content": "<|reserved_special_token_167|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128176": {
+ "content": "<|reserved_special_token_168|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128177": {
+ "content": "<|reserved_special_token_169|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128178": {
+ "content": "<|reserved_special_token_170|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128179": {
+ "content": "<|reserved_special_token_171|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128180": {
+ "content": "<|reserved_special_token_172|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128181": {
+ "content": "<|reserved_special_token_173|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128182": {
+ "content": "<|reserved_special_token_174|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128183": {
+ "content": "<|reserved_special_token_175|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128184": {
+ "content": "<|reserved_special_token_176|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128185": {
+ "content": "<|reserved_special_token_177|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128186": {
+ "content": "<|reserved_special_token_178|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128187": {
+ "content": "<|reserved_special_token_179|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128188": {
+ "content": "<|reserved_special_token_180|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128189": {
+ "content": "<|reserved_special_token_181|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128190": {
+ "content": "<|reserved_special_token_182|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128191": {
+ "content": "<|reserved_special_token_183|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128192": {
+ "content": "<|reserved_special_token_184|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128193": {
+ "content": "<|reserved_special_token_185|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128194": {
+ "content": "<|reserved_special_token_186|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128195": {
+ "content": "<|reserved_special_token_187|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128196": {
+ "content": "<|reserved_special_token_188|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128197": {
+ "content": "<|reserved_special_token_189|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128198": {
+ "content": "<|reserved_special_token_190|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128199": {
+ "content": "<|reserved_special_token_191|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128200": {
+ "content": "<|reserved_special_token_192|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128201": {
+ "content": "<|reserved_special_token_193|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128202": {
+ "content": "<|reserved_special_token_194|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128203": {
+ "content": "<|reserved_special_token_195|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128204": {
+ "content": "<|reserved_special_token_196|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128205": {
+ "content": "<|reserved_special_token_197|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128206": {
+ "content": "<|reserved_special_token_198|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128207": {
+ "content": "<|reserved_special_token_199|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128208": {
+ "content": "<|reserved_special_token_200|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128209": {
+ "content": "<|reserved_special_token_201|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128210": {
+ "content": "<|reserved_special_token_202|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128211": {
+ "content": "<|reserved_special_token_203|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128212": {
+ "content": "<|reserved_special_token_204|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128213": {
+ "content": "<|reserved_special_token_205|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128214": {
+ "content": "<|reserved_special_token_206|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128215": {
+ "content": "<|reserved_special_token_207|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128216": {
+ "content": "<|reserved_special_token_208|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128217": {
+ "content": "<|reserved_special_token_209|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128218": {
+ "content": "<|reserved_special_token_210|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128219": {
+ "content": "<|reserved_special_token_211|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128220": {
+ "content": "<|reserved_special_token_212|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128221": {
+ "content": "<|reserved_special_token_213|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128222": {
+ "content": "<|reserved_special_token_214|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128223": {
+ "content": "<|reserved_special_token_215|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128224": {
+ "content": "<|reserved_special_token_216|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128225": {
+ "content": "<|reserved_special_token_217|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128226": {
+ "content": "<|reserved_special_token_218|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128227": {
+ "content": "<|reserved_special_token_219|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128228": {
+ "content": "<|reserved_special_token_220|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128229": {
+ "content": "<|reserved_special_token_221|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128230": {
+ "content": "<|reserved_special_token_222|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128231": {
+ "content": "<|reserved_special_token_223|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128232": {
+ "content": "<|reserved_special_token_224|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128233": {
+ "content": "<|reserved_special_token_225|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128234": {
+ "content": "<|reserved_special_token_226|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128235": {
+ "content": "<|reserved_special_token_227|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128236": {
+ "content": "<|reserved_special_token_228|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128237": {
+ "content": "<|reserved_special_token_229|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128238": {
+ "content": "<|reserved_special_token_230|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128239": {
+ "content": "<|reserved_special_token_231|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128240": {
+ "content": "<|reserved_special_token_232|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128241": {
+ "content": "<|reserved_special_token_233|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128242": {
+ "content": "<|reserved_special_token_234|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128243": {
+ "content": "<|reserved_special_token_235|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128244": {
+ "content": "<|reserved_special_token_236|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128245": {
+ "content": "<|reserved_special_token_237|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128246": {
+ "content": "<|reserved_special_token_238|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128247": {
+ "content": "<|reserved_special_token_239|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128248": {
+ "content": "<|reserved_special_token_240|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128249": {
+ "content": "<|reserved_special_token_241|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128250": {
+ "content": "<|reserved_special_token_242|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128251": {
+ "content": "<|reserved_special_token_243|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128252": {
+ "content": "<|reserved_special_token_244|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128253": {
+ "content": "<|reserved_special_token_245|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128254": {
+ "content": "<|reserved_special_token_246|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128255": {
+ "content": "<|reserved_special_token_247|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ }
+ },
+ "bos_token": "<|begin_of_text|>",
+ "chat_template": "{{- bos_token }}{%- if messages[0]['role'] == 'system' %}{%- set system_message = messages[0]['content']|trim %}{%- set messages = messages[1:] %}{%- else %}{%- set system_message = \"\" %}{%- endif %}{{- \"<|start_header_id|>system<|end_header_id|>\\n\\n\" }}{{- system_message }}{{- \"<|eot_id|>\" }}{%- for message in messages %}{%- if message['role'] == 'assistant' and '' in message['content'] %}{%- set content = message['content'].split('')[-1].lstrip() %}{%- else %}{%- set content = message['content'] %}{%- endif %}{{- '<|start_header_id|>' + message['role'] + '<|end_header_id|>\\n\\n' + content | trim + '<|eot_id|>' }}{%- endfor %}{%- if add_generation_prompt %}{{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' }}{%- endif %}",
+ "clean_up_tokenization_spaces": true,
+ "eos_token": "<|eot_id|>",
+ "extra_special_tokens": {},
+ "model_input_names": [
+ "input_ids",
+ "attention_mask"
+ ],
+ "model_max_length": 131072,
+ "pad_token": "<|end_of_text|>",
+ "tokenizer_class": "PreTrainedTokenizer"
+}
diff --git a/checkpoint-319/trainer_state.json b/checkpoint-319/trainer_state.json
new file mode 100644
index 0000000000000000000000000000000000000000..a8028f671b0dffd593c979d107be62d39698bca4
--- /dev/null
+++ b/checkpoint-319/trainer_state.json
@@ -0,0 +1,2266 @@
+{
+ "best_metric": null,
+ "best_model_checkpoint": null,
+ "epoch": 0.9989561586638831,
+ "eval_steps": 500,
+ "global_step": 319,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ {
+ "epoch": 0.003131524008350731,
+ "grad_norm": 13.917898178100586,
+ "learning_rate": 5.0000000000000004e-08,
+ "loss": 4.1051,
+ "step": 1
+ },
+ {
+ "epoch": 0.006263048016701462,
+ "grad_norm": 17.327869415283203,
+ "learning_rate": 1.0000000000000001e-07,
+ "loss": 4.1048,
+ "step": 2
+ },
+ {
+ "epoch": 0.009394572025052192,
+ "grad_norm": 14.063946723937988,
+ "learning_rate": 1.5000000000000002e-07,
+ "loss": 4.0741,
+ "step": 3
+ },
+ {
+ "epoch": 0.012526096033402923,
+ "grad_norm": 16.817699432373047,
+ "learning_rate": 2.0000000000000002e-07,
+ "loss": 4.2002,
+ "step": 4
+ },
+ {
+ "epoch": 0.015657620041753653,
+ "grad_norm": 14.47036361694336,
+ "learning_rate": 2.5000000000000004e-07,
+ "loss": 4.2652,
+ "step": 5
+ },
+ {
+ "epoch": 0.018789144050104383,
+ "grad_norm": 14.474193572998047,
+ "learning_rate": 3.0000000000000004e-07,
+ "loss": 4.0888,
+ "step": 6
+ },
+ {
+ "epoch": 0.021920668058455117,
+ "grad_norm": 14.865458488464355,
+ "learning_rate": 3.5000000000000004e-07,
+ "loss": 4.0014,
+ "step": 7
+ },
+ {
+ "epoch": 0.025052192066805846,
+ "grad_norm": 15.338888168334961,
+ "learning_rate": 4.0000000000000003e-07,
+ "loss": 4.13,
+ "step": 8
+ },
+ {
+ "epoch": 0.028183716075156576,
+ "grad_norm": 15.154336929321289,
+ "learning_rate": 4.5000000000000003e-07,
+ "loss": 4.2493,
+ "step": 9
+ },
+ {
+ "epoch": 0.031315240083507306,
+ "grad_norm": 15.919597625732422,
+ "learning_rate": 5.000000000000001e-07,
+ "loss": 4.0535,
+ "step": 10
+ },
+ {
+ "epoch": 0.03444676409185804,
+ "grad_norm": 14.981926918029785,
+ "learning_rate": 5.5e-07,
+ "loss": 3.9064,
+ "step": 11
+ },
+ {
+ "epoch": 0.037578288100208766,
+ "grad_norm": 13.36101245880127,
+ "learning_rate": 6.000000000000001e-07,
+ "loss": 4.1939,
+ "step": 12
+ },
+ {
+ "epoch": 0.0407098121085595,
+ "grad_norm": 15.58773422241211,
+ "learning_rate": 6.5e-07,
+ "loss": 4.18,
+ "step": 13
+ },
+ {
+ "epoch": 0.04384133611691023,
+ "grad_norm": 13.560139656066895,
+ "learning_rate": 7.000000000000001e-07,
+ "loss": 3.9414,
+ "step": 14
+ },
+ {
+ "epoch": 0.04697286012526096,
+ "grad_norm": 12.307971954345703,
+ "learning_rate": 7.5e-07,
+ "loss": 3.8836,
+ "step": 15
+ },
+ {
+ "epoch": 0.05010438413361169,
+ "grad_norm": 14.533182144165039,
+ "learning_rate": 8.000000000000001e-07,
+ "loss": 4.1551,
+ "step": 16
+ },
+ {
+ "epoch": 0.05323590814196242,
+ "grad_norm": 13.453729629516602,
+ "learning_rate": 8.500000000000001e-07,
+ "loss": 4.0048,
+ "step": 17
+ },
+ {
+ "epoch": 0.05636743215031315,
+ "grad_norm": 13.45992374420166,
+ "learning_rate": 9.000000000000001e-07,
+ "loss": 4.0745,
+ "step": 18
+ },
+ {
+ "epoch": 0.059498956158663886,
+ "grad_norm": 11.857145309448242,
+ "learning_rate": 9.500000000000001e-07,
+ "loss": 3.9871,
+ "step": 19
+ },
+ {
+ "epoch": 0.06263048016701461,
+ "grad_norm": 11.872294425964355,
+ "learning_rate": 1.0000000000000002e-06,
+ "loss": 3.8959,
+ "step": 20
+ },
+ {
+ "epoch": 0.06576200417536535,
+ "grad_norm": 12.969825744628906,
+ "learning_rate": 1.0500000000000001e-06,
+ "loss": 4.0308,
+ "step": 21
+ },
+ {
+ "epoch": 0.06889352818371608,
+ "grad_norm": 12.33769416809082,
+ "learning_rate": 1.1e-06,
+ "loss": 3.9341,
+ "step": 22
+ },
+ {
+ "epoch": 0.0720250521920668,
+ "grad_norm": 12.669405937194824,
+ "learning_rate": 1.1500000000000002e-06,
+ "loss": 3.8511,
+ "step": 23
+ },
+ {
+ "epoch": 0.07515657620041753,
+ "grad_norm": 10.677213668823242,
+ "learning_rate": 1.2000000000000002e-06,
+ "loss": 3.7764,
+ "step": 24
+ },
+ {
+ "epoch": 0.07828810020876827,
+ "grad_norm": 10.366402626037598,
+ "learning_rate": 1.25e-06,
+ "loss": 3.5291,
+ "step": 25
+ },
+ {
+ "epoch": 0.081419624217119,
+ "grad_norm": 11.211421012878418,
+ "learning_rate": 1.3e-06,
+ "loss": 3.5765,
+ "step": 26
+ },
+ {
+ "epoch": 0.08455114822546973,
+ "grad_norm": 11.313716888427734,
+ "learning_rate": 1.3500000000000002e-06,
+ "loss": 3.4849,
+ "step": 27
+ },
+ {
+ "epoch": 0.08768267223382047,
+ "grad_norm": 10.41294002532959,
+ "learning_rate": 1.4000000000000001e-06,
+ "loss": 3.2653,
+ "step": 28
+ },
+ {
+ "epoch": 0.09081419624217119,
+ "grad_norm": 10.40064525604248,
+ "learning_rate": 1.45e-06,
+ "loss": 3.3384,
+ "step": 29
+ },
+ {
+ "epoch": 0.09394572025052192,
+ "grad_norm": 10.05427074432373,
+ "learning_rate": 1.5e-06,
+ "loss": 3.2257,
+ "step": 30
+ },
+ {
+ "epoch": 0.09707724425887265,
+ "grad_norm": 9.583163261413574,
+ "learning_rate": 1.5500000000000002e-06,
+ "loss": 3.1371,
+ "step": 31
+ },
+ {
+ "epoch": 0.10020876826722339,
+ "grad_norm": 10.09977912902832,
+ "learning_rate": 1.6000000000000001e-06,
+ "loss": 3.0658,
+ "step": 32
+ },
+ {
+ "epoch": 0.10334029227557412,
+ "grad_norm": 9.271486282348633,
+ "learning_rate": 1.6500000000000003e-06,
+ "loss": 2.9693,
+ "step": 33
+ },
+ {
+ "epoch": 0.10647181628392484,
+ "grad_norm": 10.687992095947266,
+ "learning_rate": 1.7000000000000002e-06,
+ "loss": 2.95,
+ "step": 34
+ },
+ {
+ "epoch": 0.10960334029227557,
+ "grad_norm": 8.762290000915527,
+ "learning_rate": 1.75e-06,
+ "loss": 2.8286,
+ "step": 35
+ },
+ {
+ "epoch": 0.1127348643006263,
+ "grad_norm": 10.13785171508789,
+ "learning_rate": 1.8000000000000001e-06,
+ "loss": 2.3664,
+ "step": 36
+ },
+ {
+ "epoch": 0.11586638830897704,
+ "grad_norm": 18.301353454589844,
+ "learning_rate": 1.85e-06,
+ "loss": 2.5533,
+ "step": 37
+ },
+ {
+ "epoch": 0.11899791231732777,
+ "grad_norm": 11.490377426147461,
+ "learning_rate": 1.9000000000000002e-06,
+ "loss": 2.6133,
+ "step": 38
+ },
+ {
+ "epoch": 0.12212943632567849,
+ "grad_norm": 15.614163398742676,
+ "learning_rate": 1.9500000000000004e-06,
+ "loss": 2.3596,
+ "step": 39
+ },
+ {
+ "epoch": 0.12526096033402923,
+ "grad_norm": 17.757442474365234,
+ "learning_rate": 2.0000000000000003e-06,
+ "loss": 2.3491,
+ "step": 40
+ },
+ {
+ "epoch": 0.12839248434237996,
+ "grad_norm": 17.18431854248047,
+ "learning_rate": 2.05e-06,
+ "loss": 2.2361,
+ "step": 41
+ },
+ {
+ "epoch": 0.1315240083507307,
+ "grad_norm": 16.149789810180664,
+ "learning_rate": 2.1000000000000002e-06,
+ "loss": 2.1457,
+ "step": 42
+ },
+ {
+ "epoch": 0.13465553235908143,
+ "grad_norm": 15.256914138793945,
+ "learning_rate": 2.15e-06,
+ "loss": 2.12,
+ "step": 43
+ },
+ {
+ "epoch": 0.13778705636743216,
+ "grad_norm": 15.537406921386719,
+ "learning_rate": 2.2e-06,
+ "loss": 2.1877,
+ "step": 44
+ },
+ {
+ "epoch": 0.1409185803757829,
+ "grad_norm": 7.947713851928711,
+ "learning_rate": 2.25e-06,
+ "loss": 2.1648,
+ "step": 45
+ },
+ {
+ "epoch": 0.1440501043841336,
+ "grad_norm": 8.818676948547363,
+ "learning_rate": 2.3000000000000004e-06,
+ "loss": 2.134,
+ "step": 46
+ },
+ {
+ "epoch": 0.14718162839248433,
+ "grad_norm": 5.175768852233887,
+ "learning_rate": 2.35e-06,
+ "loss": 2.0796,
+ "step": 47
+ },
+ {
+ "epoch": 0.15031315240083507,
+ "grad_norm": 6.750611305236816,
+ "learning_rate": 2.4000000000000003e-06,
+ "loss": 1.9174,
+ "step": 48
+ },
+ {
+ "epoch": 0.1534446764091858,
+ "grad_norm": 6.2147979736328125,
+ "learning_rate": 2.4500000000000003e-06,
+ "loss": 1.8065,
+ "step": 49
+ },
+ {
+ "epoch": 0.15657620041753653,
+ "grad_norm": 13.291611671447754,
+ "learning_rate": 2.5e-06,
+ "loss": 1.7061,
+ "step": 50
+ },
+ {
+ "epoch": 0.15970772442588727,
+ "grad_norm": 7.251201629638672,
+ "learning_rate": 2.55e-06,
+ "loss": 1.7924,
+ "step": 51
+ },
+ {
+ "epoch": 0.162839248434238,
+ "grad_norm": 5.2126054763793945,
+ "learning_rate": 2.6e-06,
+ "loss": 1.6735,
+ "step": 52
+ },
+ {
+ "epoch": 0.16597077244258873,
+ "grad_norm": 5.435528755187988,
+ "learning_rate": 2.6500000000000005e-06,
+ "loss": 1.6265,
+ "step": 53
+ },
+ {
+ "epoch": 0.16910229645093947,
+ "grad_norm": 4.505807399749756,
+ "learning_rate": 2.7000000000000004e-06,
+ "loss": 1.4851,
+ "step": 54
+ },
+ {
+ "epoch": 0.1722338204592902,
+ "grad_norm": 5.128388404846191,
+ "learning_rate": 2.7500000000000004e-06,
+ "loss": 1.5832,
+ "step": 55
+ },
+ {
+ "epoch": 0.17536534446764093,
+ "grad_norm": 16.935827255249023,
+ "learning_rate": 2.8000000000000003e-06,
+ "loss": 1.6553,
+ "step": 56
+ },
+ {
+ "epoch": 0.17849686847599164,
+ "grad_norm": 3.664458990097046,
+ "learning_rate": 2.85e-06,
+ "loss": 1.5,
+ "step": 57
+ },
+ {
+ "epoch": 0.18162839248434237,
+ "grad_norm": 7.763802528381348,
+ "learning_rate": 2.9e-06,
+ "loss": 1.367,
+ "step": 58
+ },
+ {
+ "epoch": 0.1847599164926931,
+ "grad_norm": 3.2216155529022217,
+ "learning_rate": 2.95e-06,
+ "loss": 1.3863,
+ "step": 59
+ },
+ {
+ "epoch": 0.18789144050104384,
+ "grad_norm": 4.384445667266846,
+ "learning_rate": 3e-06,
+ "loss": 1.4247,
+ "step": 60
+ },
+ {
+ "epoch": 0.19102296450939457,
+ "grad_norm": 4.8080878257751465,
+ "learning_rate": 3.05e-06,
+ "loss": 1.3257,
+ "step": 61
+ },
+ {
+ "epoch": 0.1941544885177453,
+ "grad_norm": 4.154761791229248,
+ "learning_rate": 3.1000000000000004e-06,
+ "loss": 1.321,
+ "step": 62
+ },
+ {
+ "epoch": 0.19728601252609604,
+ "grad_norm": 6.4742112159729,
+ "learning_rate": 3.1500000000000003e-06,
+ "loss": 1.2823,
+ "step": 63
+ },
+ {
+ "epoch": 0.20041753653444677,
+ "grad_norm": 2.583422899246216,
+ "learning_rate": 3.2000000000000003e-06,
+ "loss": 1.2136,
+ "step": 64
+ },
+ {
+ "epoch": 0.2035490605427975,
+ "grad_norm": 4.1933488845825195,
+ "learning_rate": 3.2500000000000002e-06,
+ "loss": 1.1855,
+ "step": 65
+ },
+ {
+ "epoch": 0.20668058455114824,
+ "grad_norm": 4.11049747467041,
+ "learning_rate": 3.3000000000000006e-06,
+ "loss": 1.2389,
+ "step": 66
+ },
+ {
+ "epoch": 0.20981210855949894,
+ "grad_norm": 2.264458417892456,
+ "learning_rate": 3.3500000000000005e-06,
+ "loss": 1.0651,
+ "step": 67
+ },
+ {
+ "epoch": 0.21294363256784968,
+ "grad_norm": 2.5408174991607666,
+ "learning_rate": 3.4000000000000005e-06,
+ "loss": 1.1389,
+ "step": 68
+ },
+ {
+ "epoch": 0.2160751565762004,
+ "grad_norm": 7.82421350479126,
+ "learning_rate": 3.45e-06,
+ "loss": 1.0956,
+ "step": 69
+ },
+ {
+ "epoch": 0.21920668058455114,
+ "grad_norm": 3.070939064025879,
+ "learning_rate": 3.5e-06,
+ "loss": 1.0451,
+ "step": 70
+ },
+ {
+ "epoch": 0.22233820459290188,
+ "grad_norm": 2.6310527324676514,
+ "learning_rate": 3.5500000000000003e-06,
+ "loss": 1.0538,
+ "step": 71
+ },
+ {
+ "epoch": 0.2254697286012526,
+ "grad_norm": 7.630155563354492,
+ "learning_rate": 3.6000000000000003e-06,
+ "loss": 1.0052,
+ "step": 72
+ },
+ {
+ "epoch": 0.22860125260960334,
+ "grad_norm": 6.950636863708496,
+ "learning_rate": 3.65e-06,
+ "loss": 1.0473,
+ "step": 73
+ },
+ {
+ "epoch": 0.23173277661795408,
+ "grad_norm": 2.2703945636749268,
+ "learning_rate": 3.7e-06,
+ "loss": 1.0576,
+ "step": 74
+ },
+ {
+ "epoch": 0.2348643006263048,
+ "grad_norm": 3.3817710876464844,
+ "learning_rate": 3.7500000000000005e-06,
+ "loss": 1.0177,
+ "step": 75
+ },
+ {
+ "epoch": 0.23799582463465555,
+ "grad_norm": 7.266414642333984,
+ "learning_rate": 3.8000000000000005e-06,
+ "loss": 1.0645,
+ "step": 76
+ },
+ {
+ "epoch": 0.24112734864300625,
+ "grad_norm": 5.782608509063721,
+ "learning_rate": 3.85e-06,
+ "loss": 1.0162,
+ "step": 77
+ },
+ {
+ "epoch": 0.24425887265135698,
+ "grad_norm": 2.7938575744628906,
+ "learning_rate": 3.900000000000001e-06,
+ "loss": 0.9664,
+ "step": 78
+ },
+ {
+ "epoch": 0.24739039665970772,
+ "grad_norm": 6.681935787200928,
+ "learning_rate": 3.95e-06,
+ "loss": 0.953,
+ "step": 79
+ },
+ {
+ "epoch": 0.25052192066805845,
+ "grad_norm": 2.253279209136963,
+ "learning_rate": 4.000000000000001e-06,
+ "loss": 0.9568,
+ "step": 80
+ },
+ {
+ "epoch": 0.2536534446764092,
+ "grad_norm": 1.4875826835632324,
+ "learning_rate": 4.05e-06,
+ "loss": 0.9448,
+ "step": 81
+ },
+ {
+ "epoch": 0.2567849686847599,
+ "grad_norm": 2.4987940788269043,
+ "learning_rate": 4.1e-06,
+ "loss": 0.9393,
+ "step": 82
+ },
+ {
+ "epoch": 0.2599164926931106,
+ "grad_norm": 4.712948322296143,
+ "learning_rate": 4.15e-06,
+ "loss": 0.9532,
+ "step": 83
+ },
+ {
+ "epoch": 0.2630480167014614,
+ "grad_norm": 6.9030632972717285,
+ "learning_rate": 4.2000000000000004e-06,
+ "loss": 0.96,
+ "step": 84
+ },
+ {
+ "epoch": 0.2661795407098121,
+ "grad_norm": 3.4780967235565186,
+ "learning_rate": 4.25e-06,
+ "loss": 0.8993,
+ "step": 85
+ },
+ {
+ "epoch": 0.26931106471816285,
+ "grad_norm": 1.526064395904541,
+ "learning_rate": 4.3e-06,
+ "loss": 0.9021,
+ "step": 86
+ },
+ {
+ "epoch": 0.27244258872651356,
+ "grad_norm": 10.727686882019043,
+ "learning_rate": 4.350000000000001e-06,
+ "loss": 0.856,
+ "step": 87
+ },
+ {
+ "epoch": 0.2755741127348643,
+ "grad_norm": 12.483160972595215,
+ "learning_rate": 4.4e-06,
+ "loss": 0.9357,
+ "step": 88
+ },
+ {
+ "epoch": 0.278705636743215,
+ "grad_norm": 6.544492244720459,
+ "learning_rate": 4.450000000000001e-06,
+ "loss": 0.9168,
+ "step": 89
+ },
+ {
+ "epoch": 0.2818371607515658,
+ "grad_norm": 1.178139567375183,
+ "learning_rate": 4.5e-06,
+ "loss": 0.8748,
+ "step": 90
+ },
+ {
+ "epoch": 0.2849686847599165,
+ "grad_norm": 1.711506962776184,
+ "learning_rate": 4.5500000000000005e-06,
+ "loss": 0.8425,
+ "step": 91
+ },
+ {
+ "epoch": 0.2881002087682672,
+ "grad_norm": 3.281747341156006,
+ "learning_rate": 4.600000000000001e-06,
+ "loss": 0.8491,
+ "step": 92
+ },
+ {
+ "epoch": 0.29123173277661796,
+ "grad_norm": 2.2964377403259277,
+ "learning_rate": 4.65e-06,
+ "loss": 0.8038,
+ "step": 93
+ },
+ {
+ "epoch": 0.29436325678496866,
+ "grad_norm": 1.959700345993042,
+ "learning_rate": 4.7e-06,
+ "loss": 0.8439,
+ "step": 94
+ },
+ {
+ "epoch": 0.2974947807933194,
+ "grad_norm": 3.979384183883667,
+ "learning_rate": 4.75e-06,
+ "loss": 0.8839,
+ "step": 95
+ },
+ {
+ "epoch": 0.30062630480167013,
+ "grad_norm": 1.4721262454986572,
+ "learning_rate": 4.800000000000001e-06,
+ "loss": 0.845,
+ "step": 96
+ },
+ {
+ "epoch": 0.3037578288100209,
+ "grad_norm": 2.862248659133911,
+ "learning_rate": 4.85e-06,
+ "loss": 0.7748,
+ "step": 97
+ },
+ {
+ "epoch": 0.3068893528183716,
+ "grad_norm": 3.7439088821411133,
+ "learning_rate": 4.9000000000000005e-06,
+ "loss": 0.8145,
+ "step": 98
+ },
+ {
+ "epoch": 0.31002087682672236,
+ "grad_norm": 1.6654618978500366,
+ "learning_rate": 4.95e-06,
+ "loss": 0.8326,
+ "step": 99
+ },
+ {
+ "epoch": 0.31315240083507306,
+ "grad_norm": 7.8437581062316895,
+ "learning_rate": 5e-06,
+ "loss": 0.8666,
+ "step": 100
+ },
+ {
+ "epoch": 0.3162839248434238,
+ "grad_norm": 6.429738521575928,
+ "learning_rate": 4.999996250830422e-06,
+ "loss": 0.836,
+ "step": 101
+ },
+ {
+ "epoch": 0.31941544885177453,
+ "grad_norm": 2.6017794609069824,
+ "learning_rate": 4.9999850033329326e-06,
+ "loss": 0.7785,
+ "step": 102
+ },
+ {
+ "epoch": 0.32254697286012524,
+ "grad_norm": 1.0575449466705322,
+ "learning_rate": 4.999966257541265e-06,
+ "loss": 0.7639,
+ "step": 103
+ },
+ {
+ "epoch": 0.325678496868476,
+ "grad_norm": 2.6932010650634766,
+ "learning_rate": 4.999940013511647e-06,
+ "loss": 0.8214,
+ "step": 104
+ },
+ {
+ "epoch": 0.3288100208768267,
+ "grad_norm": 2.925288438796997,
+ "learning_rate": 4.999906271322792e-06,
+ "loss": 0.8797,
+ "step": 105
+ },
+ {
+ "epoch": 0.33194154488517746,
+ "grad_norm": 1.3570607900619507,
+ "learning_rate": 4.9998650310759035e-06,
+ "loss": 0.792,
+ "step": 106
+ },
+ {
+ "epoch": 0.33507306889352817,
+ "grad_norm": 5.126713752746582,
+ "learning_rate": 4.999816292894676e-06,
+ "loss": 0.8352,
+ "step": 107
+ },
+ {
+ "epoch": 0.33820459290187893,
+ "grad_norm": 1.8966432809829712,
+ "learning_rate": 4.99976005692529e-06,
+ "loss": 0.7663,
+ "step": 108
+ },
+ {
+ "epoch": 0.34133611691022964,
+ "grad_norm": 1.3100829124450684,
+ "learning_rate": 4.999696323336418e-06,
+ "loss": 0.771,
+ "step": 109
+ },
+ {
+ "epoch": 0.3444676409185804,
+ "grad_norm": 2.4025354385375977,
+ "learning_rate": 4.999625092319218e-06,
+ "loss": 0.7618,
+ "step": 110
+ },
+ {
+ "epoch": 0.3475991649269311,
+ "grad_norm": 1.130232810974121,
+ "learning_rate": 4.999546364087334e-06,
+ "loss": 0.7705,
+ "step": 111
+ },
+ {
+ "epoch": 0.35073068893528186,
+ "grad_norm": 3.430262327194214,
+ "learning_rate": 4.999460138876901e-06,
+ "loss": 0.77,
+ "step": 112
+ },
+ {
+ "epoch": 0.35386221294363257,
+ "grad_norm": 1.1272103786468506,
+ "learning_rate": 4.999366416946536e-06,
+ "loss": 0.7133,
+ "step": 113
+ },
+ {
+ "epoch": 0.3569937369519833,
+ "grad_norm": 1.1740471124649048,
+ "learning_rate": 4.999265198577342e-06,
+ "loss": 0.7684,
+ "step": 114
+ },
+ {
+ "epoch": 0.36012526096033404,
+ "grad_norm": 1.3138248920440674,
+ "learning_rate": 4.999156484072907e-06,
+ "loss": 0.7888,
+ "step": 115
+ },
+ {
+ "epoch": 0.36325678496868474,
+ "grad_norm": 1.061711311340332,
+ "learning_rate": 4.999040273759304e-06,
+ "loss": 0.7484,
+ "step": 116
+ },
+ {
+ "epoch": 0.3663883089770355,
+ "grad_norm": 1.4682390689849854,
+ "learning_rate": 4.998916567985083e-06,
+ "loss": 0.7296,
+ "step": 117
+ },
+ {
+ "epoch": 0.3695198329853862,
+ "grad_norm": 2.884068250656128,
+ "learning_rate": 4.998785367121284e-06,
+ "loss": 0.7662,
+ "step": 118
+ },
+ {
+ "epoch": 0.37265135699373697,
+ "grad_norm": 0.9812761545181274,
+ "learning_rate": 4.9986466715614205e-06,
+ "loss": 0.7307,
+ "step": 119
+ },
+ {
+ "epoch": 0.3757828810020877,
+ "grad_norm": 2.2237496376037598,
+ "learning_rate": 4.998500481721484e-06,
+ "loss": 0.6761,
+ "step": 120
+ },
+ {
+ "epoch": 0.37891440501043844,
+ "grad_norm": 1.4004178047180176,
+ "learning_rate": 4.998346798039952e-06,
+ "loss": 0.7505,
+ "step": 121
+ },
+ {
+ "epoch": 0.38204592901878914,
+ "grad_norm": 5.54975700378418,
+ "learning_rate": 4.99818562097777e-06,
+ "loss": 0.7615,
+ "step": 122
+ },
+ {
+ "epoch": 0.38517745302713985,
+ "grad_norm": 6.17140531539917,
+ "learning_rate": 4.9980169510183624e-06,
+ "loss": 0.7002,
+ "step": 123
+ },
+ {
+ "epoch": 0.3883089770354906,
+ "grad_norm": 4.974380016326904,
+ "learning_rate": 4.997840788667628e-06,
+ "loss": 0.7449,
+ "step": 124
+ },
+ {
+ "epoch": 0.3914405010438413,
+ "grad_norm": 1.4133399724960327,
+ "learning_rate": 4.997657134453937e-06,
+ "loss": 0.7442,
+ "step": 125
+ },
+ {
+ "epoch": 0.3945720250521921,
+ "grad_norm": 1.868915319442749,
+ "learning_rate": 4.9974659889281295e-06,
+ "loss": 0.7104,
+ "step": 126
+ },
+ {
+ "epoch": 0.3977035490605428,
+ "grad_norm": 1.2599350214004517,
+ "learning_rate": 4.997267352663514e-06,
+ "loss": 0.7385,
+ "step": 127
+ },
+ {
+ "epoch": 0.40083507306889354,
+ "grad_norm": 1.4353641271591187,
+ "learning_rate": 4.997061226255869e-06,
+ "loss": 0.7081,
+ "step": 128
+ },
+ {
+ "epoch": 0.40396659707724425,
+ "grad_norm": 3.2492141723632812,
+ "learning_rate": 4.996847610323437e-06,
+ "loss": 0.7859,
+ "step": 129
+ },
+ {
+ "epoch": 0.407098121085595,
+ "grad_norm": 9.599719047546387,
+ "learning_rate": 4.996626505506923e-06,
+ "loss": 0.7241,
+ "step": 130
+ },
+ {
+ "epoch": 0.4102296450939457,
+ "grad_norm": 10.053650856018066,
+ "learning_rate": 4.996397912469494e-06,
+ "loss": 0.6841,
+ "step": 131
+ },
+ {
+ "epoch": 0.4133611691022965,
+ "grad_norm": 1.323876976966858,
+ "learning_rate": 4.996161831896777e-06,
+ "loss": 0.7317,
+ "step": 132
+ },
+ {
+ "epoch": 0.4164926931106472,
+ "grad_norm": 1.4180598258972168,
+ "learning_rate": 4.9959182644968594e-06,
+ "loss": 0.692,
+ "step": 133
+ },
+ {
+ "epoch": 0.4196242171189979,
+ "grad_norm": 1.2194396257400513,
+ "learning_rate": 4.99566721100028e-06,
+ "loss": 0.7068,
+ "step": 134
+ },
+ {
+ "epoch": 0.42275574112734865,
+ "grad_norm": 1.0984960794448853,
+ "learning_rate": 4.995408672160031e-06,
+ "loss": 0.6946,
+ "step": 135
+ },
+ {
+ "epoch": 0.42588726513569936,
+ "grad_norm": 1.9341071844100952,
+ "learning_rate": 4.995142648751561e-06,
+ "loss": 0.7467,
+ "step": 136
+ },
+ {
+ "epoch": 0.4290187891440501,
+ "grad_norm": 1.9960932731628418,
+ "learning_rate": 4.9948691415727594e-06,
+ "loss": 0.7379,
+ "step": 137
+ },
+ {
+ "epoch": 0.4321503131524008,
+ "grad_norm": 0.8743917942047119,
+ "learning_rate": 4.994588151443968e-06,
+ "loss": 0.66,
+ "step": 138
+ },
+ {
+ "epoch": 0.4352818371607516,
+ "grad_norm": 0.8655261993408203,
+ "learning_rate": 4.99429967920797e-06,
+ "loss": 0.6646,
+ "step": 139
+ },
+ {
+ "epoch": 0.4384133611691023,
+ "grad_norm": 5.462070941925049,
+ "learning_rate": 4.994003725729992e-06,
+ "loss": 0.643,
+ "step": 140
+ },
+ {
+ "epoch": 0.44154488517745305,
+ "grad_norm": 2.1401469707489014,
+ "learning_rate": 4.993700291897695e-06,
+ "loss": 0.6639,
+ "step": 141
+ },
+ {
+ "epoch": 0.44467640918580376,
+ "grad_norm": 1.8219833374023438,
+ "learning_rate": 4.9933893786211815e-06,
+ "loss": 0.6673,
+ "step": 142
+ },
+ {
+ "epoch": 0.44780793319415446,
+ "grad_norm": 1.641079306602478,
+ "learning_rate": 4.993070986832984e-06,
+ "loss": 0.658,
+ "step": 143
+ },
+ {
+ "epoch": 0.4509394572025052,
+ "grad_norm": 1.1739819049835205,
+ "learning_rate": 4.992745117488066e-06,
+ "loss": 0.6826,
+ "step": 144
+ },
+ {
+ "epoch": 0.45407098121085593,
+ "grad_norm": 2.309185743331909,
+ "learning_rate": 4.9924117715638185e-06,
+ "loss": 0.6536,
+ "step": 145
+ },
+ {
+ "epoch": 0.4572025052192067,
+ "grad_norm": 1.09304940700531,
+ "learning_rate": 4.99207095006006e-06,
+ "loss": 0.721,
+ "step": 146
+ },
+ {
+ "epoch": 0.4603340292275574,
+ "grad_norm": 0.9056984186172485,
+ "learning_rate": 4.991722653999025e-06,
+ "loss": 0.7019,
+ "step": 147
+ },
+ {
+ "epoch": 0.46346555323590816,
+ "grad_norm": 1.8440625667572021,
+ "learning_rate": 4.991366884425374e-06,
+ "loss": 0.707,
+ "step": 148
+ },
+ {
+ "epoch": 0.46659707724425886,
+ "grad_norm": 1.2244676351547241,
+ "learning_rate": 4.991003642406177e-06,
+ "loss": 0.6407,
+ "step": 149
+ },
+ {
+ "epoch": 0.4697286012526096,
+ "grad_norm": 0.9258589744567871,
+ "learning_rate": 4.99063292903092e-06,
+ "loss": 0.6954,
+ "step": 150
+ },
+ {
+ "epoch": 0.47286012526096033,
+ "grad_norm": 4.176390647888184,
+ "learning_rate": 4.990254745411496e-06,
+ "loss": 0.6812,
+ "step": 151
+ },
+ {
+ "epoch": 0.4759916492693111,
+ "grad_norm": 1.4322530031204224,
+ "learning_rate": 4.989869092682205e-06,
+ "loss": 0.6808,
+ "step": 152
+ },
+ {
+ "epoch": 0.4791231732776618,
+ "grad_norm": 0.8017717003822327,
+ "learning_rate": 4.989475971999748e-06,
+ "loss": 0.687,
+ "step": 153
+ },
+ {
+ "epoch": 0.4822546972860125,
+ "grad_norm": 1.5641374588012695,
+ "learning_rate": 4.989075384543228e-06,
+ "loss": 0.6599,
+ "step": 154
+ },
+ {
+ "epoch": 0.48538622129436326,
+ "grad_norm": 1.1522141695022583,
+ "learning_rate": 4.98866733151414e-06,
+ "loss": 0.6546,
+ "step": 155
+ },
+ {
+ "epoch": 0.48851774530271397,
+ "grad_norm": 0.8593171238899231,
+ "learning_rate": 4.988251814136372e-06,
+ "loss": 0.6857,
+ "step": 156
+ },
+ {
+ "epoch": 0.49164926931106473,
+ "grad_norm": 2.668159246444702,
+ "learning_rate": 4.9878288336562e-06,
+ "loss": 0.661,
+ "step": 157
+ },
+ {
+ "epoch": 0.49478079331941544,
+ "grad_norm": 0.9953671097755432,
+ "learning_rate": 4.987398391342285e-06,
+ "loss": 0.6512,
+ "step": 158
+ },
+ {
+ "epoch": 0.4979123173277662,
+ "grad_norm": 1.042872667312622,
+ "learning_rate": 4.986960488485667e-06,
+ "loss": 0.6311,
+ "step": 159
+ },
+ {
+ "epoch": 0.5010438413361169,
+ "grad_norm": 0.9070663452148438,
+ "learning_rate": 4.9865151263997645e-06,
+ "loss": 0.675,
+ "step": 160
+ },
+ {
+ "epoch": 0.5041753653444676,
+ "grad_norm": 0.8460433483123779,
+ "learning_rate": 4.986062306420367e-06,
+ "loss": 0.6635,
+ "step": 161
+ },
+ {
+ "epoch": 0.5073068893528184,
+ "grad_norm": 1.2639834880828857,
+ "learning_rate": 4.985602029905635e-06,
+ "loss": 0.6327,
+ "step": 162
+ },
+ {
+ "epoch": 0.5104384133611691,
+ "grad_norm": 0.8775074481964111,
+ "learning_rate": 4.985134298236091e-06,
+ "loss": 0.644,
+ "step": 163
+ },
+ {
+ "epoch": 0.5135699373695198,
+ "grad_norm": 1.2031961679458618,
+ "learning_rate": 4.98465911281462e-06,
+ "loss": 0.6254,
+ "step": 164
+ },
+ {
+ "epoch": 0.5167014613778705,
+ "grad_norm": 0.892494797706604,
+ "learning_rate": 4.984176475066463e-06,
+ "loss": 0.7122,
+ "step": 165
+ },
+ {
+ "epoch": 0.5198329853862212,
+ "grad_norm": 2.7122485637664795,
+ "learning_rate": 4.983686386439212e-06,
+ "loss": 0.6679,
+ "step": 166
+ },
+ {
+ "epoch": 0.5229645093945721,
+ "grad_norm": 0.9344426989555359,
+ "learning_rate": 4.983188848402806e-06,
+ "loss": 0.6319,
+ "step": 167
+ },
+ {
+ "epoch": 0.5260960334029228,
+ "grad_norm": 1.4093577861785889,
+ "learning_rate": 4.982683862449531e-06,
+ "loss": 0.6425,
+ "step": 168
+ },
+ {
+ "epoch": 0.5292275574112735,
+ "grad_norm": 1.1285009384155273,
+ "learning_rate": 4.982171430094007e-06,
+ "loss": 0.6298,
+ "step": 169
+ },
+ {
+ "epoch": 0.5323590814196242,
+ "grad_norm": 1.952778935432434,
+ "learning_rate": 4.981651552873193e-06,
+ "loss": 0.7066,
+ "step": 170
+ },
+ {
+ "epoch": 0.535490605427975,
+ "grad_norm": 5.133765697479248,
+ "learning_rate": 4.981124232346374e-06,
+ "loss": 0.6634,
+ "step": 171
+ },
+ {
+ "epoch": 0.5386221294363257,
+ "grad_norm": 0.9770542979240417,
+ "learning_rate": 4.980589470095161e-06,
+ "loss": 0.7121,
+ "step": 172
+ },
+ {
+ "epoch": 0.5417536534446764,
+ "grad_norm": 0.8414323925971985,
+ "learning_rate": 4.980047267723487e-06,
+ "loss": 0.6397,
+ "step": 173
+ },
+ {
+ "epoch": 0.5448851774530271,
+ "grad_norm": 1.9173879623413086,
+ "learning_rate": 4.979497626857596e-06,
+ "loss": 0.6228,
+ "step": 174
+ },
+ {
+ "epoch": 0.5480167014613778,
+ "grad_norm": 1.0823363065719604,
+ "learning_rate": 4.978940549146048e-06,
+ "loss": 0.6475,
+ "step": 175
+ },
+ {
+ "epoch": 0.5511482254697286,
+ "grad_norm": 3.715353488922119,
+ "learning_rate": 4.978376036259706e-06,
+ "loss": 0.7127,
+ "step": 176
+ },
+ {
+ "epoch": 0.5542797494780793,
+ "grad_norm": 0.981584370136261,
+ "learning_rate": 4.9778040898917325e-06,
+ "loss": 0.6468,
+ "step": 177
+ },
+ {
+ "epoch": 0.55741127348643,
+ "grad_norm": 1.70566987991333,
+ "learning_rate": 4.977224711757587e-06,
+ "loss": 0.6476,
+ "step": 178
+ },
+ {
+ "epoch": 0.5605427974947808,
+ "grad_norm": 0.9217923283576965,
+ "learning_rate": 4.976637903595019e-06,
+ "loss": 0.6731,
+ "step": 179
+ },
+ {
+ "epoch": 0.5636743215031316,
+ "grad_norm": 0.8994677662849426,
+ "learning_rate": 4.976043667164063e-06,
+ "loss": 0.6562,
+ "step": 180
+ },
+ {
+ "epoch": 0.5668058455114823,
+ "grad_norm": 1.1613017320632935,
+ "learning_rate": 4.975442004247034e-06,
+ "loss": 0.6417,
+ "step": 181
+ },
+ {
+ "epoch": 0.569937369519833,
+ "grad_norm": 1.6041977405548096,
+ "learning_rate": 4.974832916648521e-06,
+ "loss": 0.6029,
+ "step": 182
+ },
+ {
+ "epoch": 0.5730688935281837,
+ "grad_norm": 1.7978405952453613,
+ "learning_rate": 4.974216406195383e-06,
+ "loss": 0.6269,
+ "step": 183
+ },
+ {
+ "epoch": 0.5762004175365344,
+ "grad_norm": 1.6021920442581177,
+ "learning_rate": 4.973592474736739e-06,
+ "loss": 0.6149,
+ "step": 184
+ },
+ {
+ "epoch": 0.5793319415448852,
+ "grad_norm": 0.8973568677902222,
+ "learning_rate": 4.972961124143971e-06,
+ "loss": 0.6648,
+ "step": 185
+ },
+ {
+ "epoch": 0.5824634655532359,
+ "grad_norm": 1.9432591199874878,
+ "learning_rate": 4.972322356310711e-06,
+ "loss": 0.6299,
+ "step": 186
+ },
+ {
+ "epoch": 0.5855949895615866,
+ "grad_norm": 4.457028388977051,
+ "learning_rate": 4.971676173152839e-06,
+ "loss": 0.656,
+ "step": 187
+ },
+ {
+ "epoch": 0.5887265135699373,
+ "grad_norm": 2.0989716053009033,
+ "learning_rate": 4.971022576608473e-06,
+ "loss": 0.6539,
+ "step": 188
+ },
+ {
+ "epoch": 0.5918580375782881,
+ "grad_norm": 1.0646967887878418,
+ "learning_rate": 4.97036156863797e-06,
+ "loss": 0.6727,
+ "step": 189
+ },
+ {
+ "epoch": 0.5949895615866388,
+ "grad_norm": 1.6522265672683716,
+ "learning_rate": 4.969693151223914e-06,
+ "loss": 0.6643,
+ "step": 190
+ },
+ {
+ "epoch": 0.5981210855949896,
+ "grad_norm": 1.7503505945205688,
+ "learning_rate": 4.969017326371115e-06,
+ "loss": 0.6402,
+ "step": 191
+ },
+ {
+ "epoch": 0.6012526096033403,
+ "grad_norm": 1.2341989278793335,
+ "learning_rate": 4.968334096106597e-06,
+ "loss": 0.6413,
+ "step": 192
+ },
+ {
+ "epoch": 0.6043841336116911,
+ "grad_norm": 3.089054584503174,
+ "learning_rate": 4.967643462479597e-06,
+ "loss": 0.6825,
+ "step": 193
+ },
+ {
+ "epoch": 0.6075156576200418,
+ "grad_norm": 2.711623430252075,
+ "learning_rate": 4.966945427561557e-06,
+ "loss": 0.65,
+ "step": 194
+ },
+ {
+ "epoch": 0.6106471816283925,
+ "grad_norm": 4.641184329986572,
+ "learning_rate": 4.966239993446118e-06,
+ "loss": 0.6229,
+ "step": 195
+ },
+ {
+ "epoch": 0.6137787056367432,
+ "grad_norm": 1.7984074354171753,
+ "learning_rate": 4.965527162249114e-06,
+ "loss": 0.6473,
+ "step": 196
+ },
+ {
+ "epoch": 0.6169102296450939,
+ "grad_norm": 1.1643115282058716,
+ "learning_rate": 4.964806936108566e-06,
+ "loss": 0.6404,
+ "step": 197
+ },
+ {
+ "epoch": 0.6200417536534447,
+ "grad_norm": 2.1877920627593994,
+ "learning_rate": 4.9640793171846725e-06,
+ "loss": 0.6185,
+ "step": 198
+ },
+ {
+ "epoch": 0.6231732776617954,
+ "grad_norm": 1.7970566749572754,
+ "learning_rate": 4.963344307659807e-06,
+ "loss": 0.634,
+ "step": 199
+ },
+ {
+ "epoch": 0.6263048016701461,
+ "grad_norm": 1.6014361381530762,
+ "learning_rate": 4.96260190973851e-06,
+ "loss": 0.6562,
+ "step": 200
+ },
+ {
+ "epoch": 0.6294363256784968,
+ "grad_norm": 0.8743320107460022,
+ "learning_rate": 4.961852125647482e-06,
+ "loss": 0.6133,
+ "step": 201
+ },
+ {
+ "epoch": 0.6325678496868476,
+ "grad_norm": 1.9526551961898804,
+ "learning_rate": 4.961094957635578e-06,
+ "loss": 0.6451,
+ "step": 202
+ },
+ {
+ "epoch": 0.6356993736951984,
+ "grad_norm": 3.6597347259521484,
+ "learning_rate": 4.960330407973798e-06,
+ "loss": 0.6386,
+ "step": 203
+ },
+ {
+ "epoch": 0.6388308977035491,
+ "grad_norm": 1.7180207967758179,
+ "learning_rate": 4.959558478955283e-06,
+ "loss": 0.6688,
+ "step": 204
+ },
+ {
+ "epoch": 0.6419624217118998,
+ "grad_norm": 0.9058470129966736,
+ "learning_rate": 4.958779172895308e-06,
+ "loss": 0.6161,
+ "step": 205
+ },
+ {
+ "epoch": 0.6450939457202505,
+ "grad_norm": 1.0031033754348755,
+ "learning_rate": 4.957992492131274e-06,
+ "loss": 0.6437,
+ "step": 206
+ },
+ {
+ "epoch": 0.6482254697286013,
+ "grad_norm": 1.5846725702285767,
+ "learning_rate": 4.9571984390226985e-06,
+ "loss": 0.6332,
+ "step": 207
+ },
+ {
+ "epoch": 0.651356993736952,
+ "grad_norm": 1.9951609373092651,
+ "learning_rate": 4.956397015951215e-06,
+ "loss": 0.636,
+ "step": 208
+ },
+ {
+ "epoch": 0.6544885177453027,
+ "grad_norm": 1.4122583866119385,
+ "learning_rate": 4.95558822532056e-06,
+ "loss": 0.6586,
+ "step": 209
+ },
+ {
+ "epoch": 0.6576200417536534,
+ "grad_norm": 1.2243481874465942,
+ "learning_rate": 4.954772069556568e-06,
+ "loss": 0.6313,
+ "step": 210
+ },
+ {
+ "epoch": 0.6607515657620042,
+ "grad_norm": 0.8756356835365295,
+ "learning_rate": 4.953948551107164e-06,
+ "loss": 0.6406,
+ "step": 211
+ },
+ {
+ "epoch": 0.6638830897703549,
+ "grad_norm": 2.9979734420776367,
+ "learning_rate": 4.953117672442356e-06,
+ "loss": 0.5803,
+ "step": 212
+ },
+ {
+ "epoch": 0.6670146137787056,
+ "grad_norm": 2.1859359741210938,
+ "learning_rate": 4.952279436054229e-06,
+ "loss": 0.6607,
+ "step": 213
+ },
+ {
+ "epoch": 0.6701461377870563,
+ "grad_norm": 0.6929755806922913,
+ "learning_rate": 4.9514338444569346e-06,
+ "loss": 0.5989,
+ "step": 214
+ },
+ {
+ "epoch": 0.673277661795407,
+ "grad_norm": 1.0361783504486084,
+ "learning_rate": 4.950580900186685e-06,
+ "loss": 0.6654,
+ "step": 215
+ },
+ {
+ "epoch": 0.6764091858037579,
+ "grad_norm": 1.210898518562317,
+ "learning_rate": 4.9497206058017475e-06,
+ "loss": 0.6213,
+ "step": 216
+ },
+ {
+ "epoch": 0.6795407098121086,
+ "grad_norm": 1.200990080833435,
+ "learning_rate": 4.948852963882434e-06,
+ "loss": 0.6654,
+ "step": 217
+ },
+ {
+ "epoch": 0.6826722338204593,
+ "grad_norm": 1.481831669807434,
+ "learning_rate": 4.947977977031093e-06,
+ "loss": 0.6474,
+ "step": 218
+ },
+ {
+ "epoch": 0.68580375782881,
+ "grad_norm": 0.9883334636688232,
+ "learning_rate": 4.947095647872103e-06,
+ "loss": 0.6735,
+ "step": 219
+ },
+ {
+ "epoch": 0.6889352818371608,
+ "grad_norm": 0.7436536550521851,
+ "learning_rate": 4.946205979051868e-06,
+ "loss": 0.6456,
+ "step": 220
+ },
+ {
+ "epoch": 0.6920668058455115,
+ "grad_norm": 0.9057570099830627,
+ "learning_rate": 4.945308973238802e-06,
+ "loss": 0.6228,
+ "step": 221
+ },
+ {
+ "epoch": 0.6951983298538622,
+ "grad_norm": 1.341081142425537,
+ "learning_rate": 4.944404633123324e-06,
+ "loss": 0.6417,
+ "step": 222
+ },
+ {
+ "epoch": 0.6983298538622129,
+ "grad_norm": 0.7958157062530518,
+ "learning_rate": 4.943492961417859e-06,
+ "loss": 0.6494,
+ "step": 223
+ },
+ {
+ "epoch": 0.7014613778705637,
+ "grad_norm": 1.216025471687317,
+ "learning_rate": 4.9425739608568106e-06,
+ "loss": 0.6566,
+ "step": 224
+ },
+ {
+ "epoch": 0.7045929018789144,
+ "grad_norm": 0.9774854779243469,
+ "learning_rate": 4.9416476341965735e-06,
+ "loss": 0.6171,
+ "step": 225
+ },
+ {
+ "epoch": 0.7077244258872651,
+ "grad_norm": 2.1562681198120117,
+ "learning_rate": 4.940713984215512e-06,
+ "loss": 0.629,
+ "step": 226
+ },
+ {
+ "epoch": 0.7108559498956158,
+ "grad_norm": 1.9521286487579346,
+ "learning_rate": 4.9397730137139556e-06,
+ "loss": 0.6475,
+ "step": 227
+ },
+ {
+ "epoch": 0.7139874739039666,
+ "grad_norm": 1.5749104022979736,
+ "learning_rate": 4.9388247255141895e-06,
+ "loss": 0.6053,
+ "step": 228
+ },
+ {
+ "epoch": 0.7171189979123174,
+ "grad_norm": 1.2008254528045654,
+ "learning_rate": 4.937869122460449e-06,
+ "loss": 0.6052,
+ "step": 229
+ },
+ {
+ "epoch": 0.7202505219206681,
+ "grad_norm": 1.0774102210998535,
+ "learning_rate": 4.93690620741891e-06,
+ "loss": 0.6099,
+ "step": 230
+ },
+ {
+ "epoch": 0.7233820459290188,
+ "grad_norm": 1.0929996967315674,
+ "learning_rate": 4.935935983277675e-06,
+ "loss": 0.6363,
+ "step": 231
+ },
+ {
+ "epoch": 0.7265135699373695,
+ "grad_norm": 0.8830653429031372,
+ "learning_rate": 4.934958452946774e-06,
+ "loss": 0.6136,
+ "step": 232
+ },
+ {
+ "epoch": 0.7296450939457203,
+ "grad_norm": 3.591218948364258,
+ "learning_rate": 4.933973619358147e-06,
+ "loss": 0.5962,
+ "step": 233
+ },
+ {
+ "epoch": 0.732776617954071,
+ "grad_norm": 2.5797672271728516,
+ "learning_rate": 4.932981485465643e-06,
+ "loss": 0.6405,
+ "step": 234
+ },
+ {
+ "epoch": 0.7359081419624217,
+ "grad_norm": 1.0467664003372192,
+ "learning_rate": 4.9319820542450025e-06,
+ "loss": 0.6155,
+ "step": 235
+ },
+ {
+ "epoch": 0.7390396659707724,
+ "grad_norm": 0.8099795579910278,
+ "learning_rate": 4.930975328693856e-06,
+ "loss": 0.5615,
+ "step": 236
+ },
+ {
+ "epoch": 0.7421711899791231,
+ "grad_norm": 0.8906702995300293,
+ "learning_rate": 4.92996131183171e-06,
+ "loss": 0.6501,
+ "step": 237
+ },
+ {
+ "epoch": 0.7453027139874739,
+ "grad_norm": 1.0871416330337524,
+ "learning_rate": 4.928940006699944e-06,
+ "loss": 0.6282,
+ "step": 238
+ },
+ {
+ "epoch": 0.7484342379958246,
+ "grad_norm": 1.3209614753723145,
+ "learning_rate": 4.927911416361792e-06,
+ "loss": 0.598,
+ "step": 239
+ },
+ {
+ "epoch": 0.7515657620041754,
+ "grad_norm": 1.2252682447433472,
+ "learning_rate": 4.926875543902344e-06,
+ "loss": 0.6433,
+ "step": 240
+ },
+ {
+ "epoch": 0.7546972860125261,
+ "grad_norm": 1.0569007396697998,
+ "learning_rate": 4.9258323924285285e-06,
+ "loss": 0.5927,
+ "step": 241
+ },
+ {
+ "epoch": 0.7578288100208769,
+ "grad_norm": 0.9309014081954956,
+ "learning_rate": 4.924781965069106e-06,
+ "loss": 0.5927,
+ "step": 242
+ },
+ {
+ "epoch": 0.7609603340292276,
+ "grad_norm": 1.0200378894805908,
+ "learning_rate": 4.923724264974662e-06,
+ "loss": 0.6064,
+ "step": 243
+ },
+ {
+ "epoch": 0.7640918580375783,
+ "grad_norm": 1.0533075332641602,
+ "learning_rate": 4.922659295317593e-06,
+ "loss": 0.6373,
+ "step": 244
+ },
+ {
+ "epoch": 0.767223382045929,
+ "grad_norm": 0.7889382839202881,
+ "learning_rate": 4.921587059292102e-06,
+ "loss": 0.5887,
+ "step": 245
+ },
+ {
+ "epoch": 0.7703549060542797,
+ "grad_norm": 0.7943588495254517,
+ "learning_rate": 4.920507560114183e-06,
+ "loss": 0.593,
+ "step": 246
+ },
+ {
+ "epoch": 0.7734864300626305,
+ "grad_norm": 0.8247205018997192,
+ "learning_rate": 4.919420801021617e-06,
+ "loss": 0.6151,
+ "step": 247
+ },
+ {
+ "epoch": 0.7766179540709812,
+ "grad_norm": 0.9979158043861389,
+ "learning_rate": 4.91832678527396e-06,
+ "loss": 0.6019,
+ "step": 248
+ },
+ {
+ "epoch": 0.7797494780793319,
+ "grad_norm": 0.9346868991851807,
+ "learning_rate": 4.917225516152532e-06,
+ "loss": 0.6098,
+ "step": 249
+ },
+ {
+ "epoch": 0.7828810020876826,
+ "grad_norm": 0.7487881183624268,
+ "learning_rate": 4.916116996960408e-06,
+ "loss": 0.5965,
+ "step": 250
+ },
+ {
+ "epoch": 0.7860125260960334,
+ "grad_norm": 0.821576714515686,
+ "learning_rate": 4.915001231022411e-06,
+ "loss": 0.6483,
+ "step": 251
+ },
+ {
+ "epoch": 0.7891440501043842,
+ "grad_norm": 1.0413196086883545,
+ "learning_rate": 4.913878221685096e-06,
+ "loss": 0.6108,
+ "step": 252
+ },
+ {
+ "epoch": 0.7922755741127349,
+ "grad_norm": 0.9560331702232361,
+ "learning_rate": 4.912747972316745e-06,
+ "loss": 0.5758,
+ "step": 253
+ },
+ {
+ "epoch": 0.7954070981210856,
+ "grad_norm": 0.8964638113975525,
+ "learning_rate": 4.911610486307356e-06,
+ "loss": 0.6432,
+ "step": 254
+ },
+ {
+ "epoch": 0.7985386221294363,
+ "grad_norm": 0.8418346047401428,
+ "learning_rate": 4.910465767068631e-06,
+ "loss": 0.6027,
+ "step": 255
+ },
+ {
+ "epoch": 0.8016701461377871,
+ "grad_norm": 1.792371153831482,
+ "learning_rate": 4.909313818033966e-06,
+ "loss": 0.6198,
+ "step": 256
+ },
+ {
+ "epoch": 0.8048016701461378,
+ "grad_norm": 1.036665439605713,
+ "learning_rate": 4.908154642658446e-06,
+ "loss": 0.6255,
+ "step": 257
+ },
+ {
+ "epoch": 0.8079331941544885,
+ "grad_norm": 0.7592151165008545,
+ "learning_rate": 4.906988244418823e-06,
+ "loss": 0.6035,
+ "step": 258
+ },
+ {
+ "epoch": 0.8110647181628392,
+ "grad_norm": 0.8843073844909668,
+ "learning_rate": 4.90581462681352e-06,
+ "loss": 0.6299,
+ "step": 259
+ },
+ {
+ "epoch": 0.81419624217119,
+ "grad_norm": 0.9489964246749878,
+ "learning_rate": 4.9046337933626086e-06,
+ "loss": 0.5869,
+ "step": 260
+ },
+ {
+ "epoch": 0.8173277661795407,
+ "grad_norm": 0.851691722869873,
+ "learning_rate": 4.903445747607806e-06,
+ "loss": 0.603,
+ "step": 261
+ },
+ {
+ "epoch": 0.8204592901878914,
+ "grad_norm": 1.3722106218338013,
+ "learning_rate": 4.902250493112458e-06,
+ "loss": 0.5939,
+ "step": 262
+ },
+ {
+ "epoch": 0.8235908141962421,
+ "grad_norm": 1.1002827882766724,
+ "learning_rate": 4.901048033461537e-06,
+ "loss": 0.6452,
+ "step": 263
+ },
+ {
+ "epoch": 0.826722338204593,
+ "grad_norm": 0.8428632020950317,
+ "learning_rate": 4.89983837226162e-06,
+ "loss": 0.5956,
+ "step": 264
+ },
+ {
+ "epoch": 0.8298538622129437,
+ "grad_norm": 0.7666584849357605,
+ "learning_rate": 4.898621513140889e-06,
+ "loss": 0.6067,
+ "step": 265
+ },
+ {
+ "epoch": 0.8329853862212944,
+ "grad_norm": 0.8413611054420471,
+ "learning_rate": 4.897397459749113e-06,
+ "loss": 0.5985,
+ "step": 266
+ },
+ {
+ "epoch": 0.8361169102296451,
+ "grad_norm": 2.3374335765838623,
+ "learning_rate": 4.896166215757638e-06,
+ "loss": 0.5885,
+ "step": 267
+ },
+ {
+ "epoch": 0.8392484342379958,
+ "grad_norm": 2.236640214920044,
+ "learning_rate": 4.894927784859377e-06,
+ "loss": 0.6408,
+ "step": 268
+ },
+ {
+ "epoch": 0.8423799582463466,
+ "grad_norm": 0.9715856313705444,
+ "learning_rate": 4.893682170768802e-06,
+ "loss": 0.5954,
+ "step": 269
+ },
+ {
+ "epoch": 0.8455114822546973,
+ "grad_norm": 1.0249912738800049,
+ "learning_rate": 4.892429377221928e-06,
+ "loss": 0.6186,
+ "step": 270
+ },
+ {
+ "epoch": 0.848643006263048,
+ "grad_norm": 1.255426049232483,
+ "learning_rate": 4.891169407976302e-06,
+ "loss": 0.6351,
+ "step": 271
+ },
+ {
+ "epoch": 0.8517745302713987,
+ "grad_norm": 0.9339559674263,
+ "learning_rate": 4.889902266810995e-06,
+ "loss": 0.5944,
+ "step": 272
+ },
+ {
+ "epoch": 0.8549060542797495,
+ "grad_norm": 1.2473429441452026,
+ "learning_rate": 4.888627957526589e-06,
+ "loss": 0.544,
+ "step": 273
+ },
+ {
+ "epoch": 0.8580375782881002,
+ "grad_norm": 1.0589442253112793,
+ "learning_rate": 4.887346483945166e-06,
+ "loss": 0.5543,
+ "step": 274
+ },
+ {
+ "epoch": 0.8611691022964509,
+ "grad_norm": 0.9844024777412415,
+ "learning_rate": 4.886057849910294e-06,
+ "loss": 0.5941,
+ "step": 275
+ },
+ {
+ "epoch": 0.8643006263048016,
+ "grad_norm": 2.88578200340271,
+ "learning_rate": 4.8847620592870196e-06,
+ "loss": 0.6124,
+ "step": 276
+ },
+ {
+ "epoch": 0.8674321503131524,
+ "grad_norm": 0.7496054172515869,
+ "learning_rate": 4.8834591159618524e-06,
+ "loss": 0.6006,
+ "step": 277
+ },
+ {
+ "epoch": 0.8705636743215032,
+ "grad_norm": 0.7403052449226379,
+ "learning_rate": 4.88214902384276e-06,
+ "loss": 0.5911,
+ "step": 278
+ },
+ {
+ "epoch": 0.8736951983298539,
+ "grad_norm": 0.9003771543502808,
+ "learning_rate": 4.880831786859146e-06,
+ "loss": 0.6347,
+ "step": 279
+ },
+ {
+ "epoch": 0.8768267223382046,
+ "grad_norm": 1.0345501899719238,
+ "learning_rate": 4.879507408961847e-06,
+ "loss": 0.6111,
+ "step": 280
+ },
+ {
+ "epoch": 0.8799582463465553,
+ "grad_norm": 1.4385879039764404,
+ "learning_rate": 4.878175894123116e-06,
+ "loss": 0.6454,
+ "step": 281
+ },
+ {
+ "epoch": 0.8830897703549061,
+ "grad_norm": 0.8469482064247131,
+ "learning_rate": 4.8768372463366145e-06,
+ "loss": 0.6163,
+ "step": 282
+ },
+ {
+ "epoch": 0.8862212943632568,
+ "grad_norm": 0.8859589695930481,
+ "learning_rate": 4.875491469617395e-06,
+ "loss": 0.6144,
+ "step": 283
+ },
+ {
+ "epoch": 0.8893528183716075,
+ "grad_norm": 1.8436834812164307,
+ "learning_rate": 4.874138568001895e-06,
+ "loss": 0.6275,
+ "step": 284
+ },
+ {
+ "epoch": 0.8924843423799582,
+ "grad_norm": 0.6646101474761963,
+ "learning_rate": 4.87277854554792e-06,
+ "loss": 0.615,
+ "step": 285
+ },
+ {
+ "epoch": 0.8956158663883089,
+ "grad_norm": 1.0070925951004028,
+ "learning_rate": 4.871411406334633e-06,
+ "loss": 0.5898,
+ "step": 286
+ },
+ {
+ "epoch": 0.8987473903966597,
+ "grad_norm": 0.9785194993019104,
+ "learning_rate": 4.870037154462545e-06,
+ "loss": 0.5992,
+ "step": 287
+ },
+ {
+ "epoch": 0.9018789144050104,
+ "grad_norm": 0.7244889736175537,
+ "learning_rate": 4.868655794053497e-06,
+ "loss": 0.6078,
+ "step": 288
+ },
+ {
+ "epoch": 0.9050104384133612,
+ "grad_norm": 1.4496444463729858,
+ "learning_rate": 4.8672673292506535e-06,
+ "loss": 0.5855,
+ "step": 289
+ },
+ {
+ "epoch": 0.9081419624217119,
+ "grad_norm": 1.8514957427978516,
+ "learning_rate": 4.865871764218486e-06,
+ "loss": 0.5707,
+ "step": 290
+ },
+ {
+ "epoch": 0.9112734864300627,
+ "grad_norm": 0.8439773321151733,
+ "learning_rate": 4.864469103142763e-06,
+ "loss": 0.5562,
+ "step": 291
+ },
+ {
+ "epoch": 0.9144050104384134,
+ "grad_norm": 0.8146086931228638,
+ "learning_rate": 4.8630593502305355e-06,
+ "loss": 0.6161,
+ "step": 292
+ },
+ {
+ "epoch": 0.9175365344467641,
+ "grad_norm": 0.8920315504074097,
+ "learning_rate": 4.861642509710126e-06,
+ "loss": 0.6139,
+ "step": 293
+ },
+ {
+ "epoch": 0.9206680584551148,
+ "grad_norm": 1.4980088472366333,
+ "learning_rate": 4.860218585831116e-06,
+ "loss": 0.6187,
+ "step": 294
+ },
+ {
+ "epoch": 0.9237995824634656,
+ "grad_norm": 0.9910127520561218,
+ "learning_rate": 4.8587875828643285e-06,
+ "loss": 0.5852,
+ "step": 295
+ },
+ {
+ "epoch": 0.9269311064718163,
+ "grad_norm": 0.819600522518158,
+ "learning_rate": 4.857349505101823e-06,
+ "loss": 0.6172,
+ "step": 296
+ },
+ {
+ "epoch": 0.930062630480167,
+ "grad_norm": 1.1059772968292236,
+ "learning_rate": 4.855904356856878e-06,
+ "loss": 0.5868,
+ "step": 297
+ },
+ {
+ "epoch": 0.9331941544885177,
+ "grad_norm": 1.2362196445465088,
+ "learning_rate": 4.854452142463977e-06,
+ "loss": 0.625,
+ "step": 298
+ },
+ {
+ "epoch": 0.9363256784968684,
+ "grad_norm": 0.9956470727920532,
+ "learning_rate": 4.852992866278799e-06,
+ "loss": 0.5923,
+ "step": 299
+ },
+ {
+ "epoch": 0.9394572025052192,
+ "grad_norm": 0.864109218120575,
+ "learning_rate": 4.851526532678203e-06,
+ "loss": 0.6315,
+ "step": 300
+ },
+ {
+ "epoch": 0.94258872651357,
+ "grad_norm": 0.8900614380836487,
+ "learning_rate": 4.850053146060217e-06,
+ "loss": 0.6128,
+ "step": 301
+ },
+ {
+ "epoch": 0.9457202505219207,
+ "grad_norm": 0.927254855632782,
+ "learning_rate": 4.84857271084402e-06,
+ "loss": 0.5955,
+ "step": 302
+ },
+ {
+ "epoch": 0.9488517745302714,
+ "grad_norm": 1.0046517848968506,
+ "learning_rate": 4.847085231469935e-06,
+ "loss": 0.6134,
+ "step": 303
+ },
+ {
+ "epoch": 0.9519832985386222,
+ "grad_norm": 0.734597384929657,
+ "learning_rate": 4.8455907123994125e-06,
+ "loss": 0.5927,
+ "step": 304
+ },
+ {
+ "epoch": 0.9551148225469729,
+ "grad_norm": 0.7338348031044006,
+ "learning_rate": 4.844089158115016e-06,
+ "loss": 0.5897,
+ "step": 305
+ },
+ {
+ "epoch": 0.9582463465553236,
+ "grad_norm": 0.9163988828659058,
+ "learning_rate": 4.8425805731204106e-06,
+ "loss": 0.6051,
+ "step": 306
+ },
+ {
+ "epoch": 0.9613778705636743,
+ "grad_norm": 1.050246238708496,
+ "learning_rate": 4.84106496194035e-06,
+ "loss": 0.5751,
+ "step": 307
+ },
+ {
+ "epoch": 0.964509394572025,
+ "grad_norm": 0.7637603878974915,
+ "learning_rate": 4.83954232912066e-06,
+ "loss": 0.5677,
+ "step": 308
+ },
+ {
+ "epoch": 0.9676409185803758,
+ "grad_norm": 0.7110525965690613,
+ "learning_rate": 4.838012679228229e-06,
+ "loss": 0.6051,
+ "step": 309
+ },
+ {
+ "epoch": 0.9707724425887265,
+ "grad_norm": 0.7662068605422974,
+ "learning_rate": 4.836476016850988e-06,
+ "loss": 0.59,
+ "step": 310
+ },
+ {
+ "epoch": 0.9739039665970772,
+ "grad_norm": 0.8907375335693359,
+ "learning_rate": 4.834932346597906e-06,
+ "loss": 0.5792,
+ "step": 311
+ },
+ {
+ "epoch": 0.9770354906054279,
+ "grad_norm": 0.8939849138259888,
+ "learning_rate": 4.833381673098966e-06,
+ "loss": 0.6062,
+ "step": 312
+ },
+ {
+ "epoch": 0.9801670146137788,
+ "grad_norm": 0.8878788948059082,
+ "learning_rate": 4.8318240010051595e-06,
+ "loss": 0.5694,
+ "step": 313
+ },
+ {
+ "epoch": 0.9832985386221295,
+ "grad_norm": 1.2523870468139648,
+ "learning_rate": 4.830259334988468e-06,
+ "loss": 0.5809,
+ "step": 314
+ },
+ {
+ "epoch": 0.9864300626304802,
+ "grad_norm": 1.0836797952651978,
+ "learning_rate": 4.82868767974185e-06,
+ "loss": 0.5949,
+ "step": 315
+ },
+ {
+ "epoch": 0.9895615866388309,
+ "grad_norm": 0.7985473871231079,
+ "learning_rate": 4.827109039979226e-06,
+ "loss": 0.6057,
+ "step": 316
+ },
+ {
+ "epoch": 0.9926931106471816,
+ "grad_norm": 1.042951226234436,
+ "learning_rate": 4.825523420435469e-06,
+ "loss": 0.6004,
+ "step": 317
+ },
+ {
+ "epoch": 0.9958246346555324,
+ "grad_norm": 0.7845115661621094,
+ "learning_rate": 4.823930825866381e-06,
+ "loss": 0.6161,
+ "step": 318
+ },
+ {
+ "epoch": 0.9989561586638831,
+ "grad_norm": 0.931854784488678,
+ "learning_rate": 4.82233126104869e-06,
+ "loss": 0.5912,
+ "step": 319
+ }
+ ],
+ "logging_steps": 1,
+ "max_steps": 1914,
+ "num_input_tokens_seen": 0,
+ "num_train_epochs": 6,
+ "save_steps": 319,
+ "stateful_callbacks": {
+ "TrainerControl": {
+ "args": {
+ "should_epoch_stop": false,
+ "should_evaluate": false,
+ "should_log": false,
+ "should_save": true,
+ "should_training_stop": false
+ },
+ "attributes": {}
+ }
+ },
+ "total_flos": 9.681902049591034e+18,
+ "train_batch_size": 4,
+ "trial_name": null,
+ "trial_params": null
+}
diff --git a/checkpoint-319/training_args.bin b/checkpoint-319/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..8067ee9c1c0bc752bdfd00cfcaf1a6e717d2356b
--- /dev/null
+++ b/checkpoint-319/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c650156a192cae0a2070c4323ee8a93e9b52fb76041d59ae0633b98389585727
+size 7928
diff --git a/checkpoint-319/zero_to_fp32.py b/checkpoint-319/zero_to_fp32.py
new file mode 100644
index 0000000000000000000000000000000000000000..24cc342e78d1a006c782b3a4cd68d9ce786d8fd8
--- /dev/null
+++ b/checkpoint-319/zero_to_fp32.py
@@ -0,0 +1,604 @@
+#!/usr/bin/env python
+
+# Copyright (c) Microsoft Corporation.
+# SPDX-License-Identifier: Apache-2.0
+
+# DeepSpeed Team
+
+# This script extracts fp32 consolidated weights from a zero 1, 2 and 3 DeepSpeed checkpoints. It gets
+# copied into the top level checkpoint dir, so the user can easily do the conversion at any point in
+# the future. Once extracted, the weights don't require DeepSpeed and can be used in any
+# application.
+#
+# example: python zero_to_fp32.py . pytorch_model.bin
+
+import argparse
+import torch
+import glob
+import math
+import os
+import re
+from collections import OrderedDict
+from dataclasses import dataclass
+
+# while this script doesn't use deepspeed to recover data, since the checkpoints are pickled with
+# DeepSpeed data structures it has to be available in the current python environment.
+from deepspeed.utils import logger
+from deepspeed.checkpoint.constants import (DS_VERSION, OPTIMIZER_STATE_DICT, SINGLE_PARTITION_OF_FP32_GROUPS,
+ FP32_FLAT_GROUPS, ZERO_STAGE, PARTITION_COUNT, PARAM_SHAPES, BUFFER_NAMES,
+ FROZEN_PARAM_SHAPES, FROZEN_PARAM_FRAGMENTS)
+
+
+@dataclass
+class zero_model_state:
+ buffers: dict()
+ param_shapes: dict()
+ shared_params: list
+ ds_version: int
+ frozen_param_shapes: dict()
+ frozen_param_fragments: dict()
+
+
+debug = 0
+
+# load to cpu
+device = torch.device('cpu')
+
+
+def atoi(text):
+ return int(text) if text.isdigit() else text
+
+
+def natural_keys(text):
+ '''
+ alist.sort(key=natural_keys) sorts in human order
+ http://nedbatchelder.com/blog/200712/human_sorting.html
+ (See Toothy's implementation in the comments)
+ '''
+ return [atoi(c) for c in re.split(r'(\d+)', text)]
+
+
+def get_model_state_file(checkpoint_dir, zero_stage):
+ if not os.path.isdir(checkpoint_dir):
+ raise FileNotFoundError(f"Directory '{checkpoint_dir}' doesn't exist")
+
+ # there should be only one file
+ if zero_stage <= 2:
+ file = os.path.join(checkpoint_dir, "mp_rank_00_model_states.pt")
+ elif zero_stage == 3:
+ file = os.path.join(checkpoint_dir, "zero_pp_rank_0_mp_rank_00_model_states.pt")
+
+ if not os.path.exists(file):
+ raise FileNotFoundError(f"can't find model states file at '{file}'")
+
+ return file
+
+
+def get_checkpoint_files(checkpoint_dir, glob_pattern):
+ # XXX: need to test that this simple glob rule works for multi-node setup too
+ ckpt_files = sorted(glob.glob(os.path.join(checkpoint_dir, glob_pattern)), key=natural_keys)
+
+ if len(ckpt_files) == 0:
+ raise FileNotFoundError(f"can't find {glob_pattern} files in directory '{checkpoint_dir}'")
+
+ return ckpt_files
+
+
+def get_optim_files(checkpoint_dir):
+ return get_checkpoint_files(checkpoint_dir, "*_optim_states.pt")
+
+
+def get_model_state_files(checkpoint_dir):
+ return get_checkpoint_files(checkpoint_dir, "*_model_states.pt")
+
+
+def parse_model_states(files):
+ zero_model_states = []
+ for file in files:
+ state_dict = torch.load(file, map_location=device)
+
+ if BUFFER_NAMES not in state_dict:
+ raise ValueError(f"{file} is not a model state checkpoint")
+ buffer_names = state_dict[BUFFER_NAMES]
+ if debug:
+ print("Found buffers:", buffer_names)
+
+ # recover just the buffers while restoring them to fp32 if they were saved in fp16
+ buffers = {k: v.float() for k, v in state_dict["module"].items() if k in buffer_names}
+ param_shapes = state_dict[PARAM_SHAPES]
+
+ # collect parameters that are included in param_shapes
+ param_names = []
+ for s in param_shapes:
+ for name in s.keys():
+ param_names.append(name)
+
+ # update with frozen parameters
+ frozen_param_shapes = state_dict.get(FROZEN_PARAM_SHAPES, None)
+ if frozen_param_shapes is not None:
+ if debug:
+ print(f"Found frozen_param_shapes: {frozen_param_shapes}")
+ param_names += list(frozen_param_shapes.keys())
+
+ # handle shared params
+ shared_params = [[k, v] for k, v in state_dict["shared_params"].items()]
+
+ ds_version = state_dict.get(DS_VERSION, None)
+
+ frozen_param_fragments = state_dict.get(FROZEN_PARAM_FRAGMENTS, None)
+
+ z_model_state = zero_model_state(buffers=buffers,
+ param_shapes=param_shapes,
+ shared_params=shared_params,
+ ds_version=ds_version,
+ frozen_param_shapes=frozen_param_shapes,
+ frozen_param_fragments=frozen_param_fragments)
+ zero_model_states.append(z_model_state)
+
+ return zero_model_states
+
+
+def parse_optim_states(files, ds_checkpoint_dir):
+
+ total_files = len(files)
+ state_dicts = []
+ for f in files:
+ state_dict = torch.load(f, map_location=device)
+ # immediately discard the potentially huge 2 optimizer states as we only care for fp32 master weights
+ # and also handle the case where it was already removed by another helper script
+ state_dict["optimizer_state_dict"].pop("optimizer_state_dict", None)
+ state_dicts.append(state_dict)
+
+ if not ZERO_STAGE in state_dicts[0][OPTIMIZER_STATE_DICT]:
+ raise ValueError(f"{files[0]} is not a zero checkpoint")
+ zero_stage = state_dicts[0][OPTIMIZER_STATE_DICT][ZERO_STAGE]
+ world_size = state_dicts[0][OPTIMIZER_STATE_DICT][PARTITION_COUNT]
+
+ # For ZeRO-2 each param group can have different partition_count as data parallelism for expert
+ # parameters can be different from data parallelism for non-expert parameters. So we can just
+ # use the max of the partition_count to get the dp world_size.
+
+ if type(world_size) is list:
+ world_size = max(world_size)
+
+ if world_size != total_files:
+ raise ValueError(
+ f"Expected {world_size} of '*_optim_states.pt' under '{ds_checkpoint_dir}' but found {total_files} files. "
+ "Possibly due to an overwrite of an old checkpoint, or a checkpoint didn't get saved by one or more processes."
+ )
+
+ # the groups are named differently in each stage
+ if zero_stage <= 2:
+ fp32_groups_key = SINGLE_PARTITION_OF_FP32_GROUPS
+ elif zero_stage == 3:
+ fp32_groups_key = FP32_FLAT_GROUPS
+ else:
+ raise ValueError(f"unknown zero stage {zero_stage}")
+
+ if zero_stage <= 2:
+ fp32_flat_groups = [state_dicts[i][OPTIMIZER_STATE_DICT][fp32_groups_key] for i in range(len(state_dicts))]
+ elif zero_stage == 3:
+ # if there is more than one param group, there will be multiple flattened tensors - one
+ # flattened tensor per group - for simplicity merge them into a single tensor
+ #
+ # XXX: could make the script more memory efficient for when there are multiple groups - it
+ # will require matching the sub-lists of param_shapes for each param group flattened tensor
+
+ fp32_flat_groups = [
+ torch.cat(state_dicts[i][OPTIMIZER_STATE_DICT][fp32_groups_key], 0) for i in range(len(state_dicts))
+ ]
+
+ return zero_stage, world_size, fp32_flat_groups
+
+
+def _get_fp32_state_dict_from_zero_checkpoint(ds_checkpoint_dir, exclude_frozen_parameters):
+ """
+ Returns fp32 state_dict reconstructed from ds checkpoint
+
+ Args:
+ - ``ds_checkpoint_dir``: path to the deepspeed checkpoint folder (where the optimizer files are)
+
+ """
+ print(f"Processing zero checkpoint '{ds_checkpoint_dir}'")
+
+ optim_files = get_optim_files(ds_checkpoint_dir)
+ zero_stage, world_size, fp32_flat_groups = parse_optim_states(optim_files, ds_checkpoint_dir)
+ print(f"Detected checkpoint of type zero stage {zero_stage}, world_size: {world_size}")
+
+ model_files = get_model_state_files(ds_checkpoint_dir)
+
+ zero_model_states = parse_model_states(model_files)
+ print(f'Parsing checkpoint created by deepspeed=={zero_model_states[0].ds_version}')
+
+ if zero_stage <= 2:
+ return _get_fp32_state_dict_from_zero2_checkpoint(world_size, fp32_flat_groups, zero_model_states,
+ exclude_frozen_parameters)
+ elif zero_stage == 3:
+ return _get_fp32_state_dict_from_zero3_checkpoint(world_size, fp32_flat_groups, zero_model_states,
+ exclude_frozen_parameters)
+
+
+def _zero2_merge_frozen_params(state_dict, zero_model_states):
+ if zero_model_states[0].frozen_param_shapes is None or len(zero_model_states[0].frozen_param_shapes) == 0:
+ return
+
+ frozen_param_shapes = zero_model_states[0].frozen_param_shapes
+ frozen_param_fragments = zero_model_states[0].frozen_param_fragments
+
+ if debug:
+ num_elem = sum(s.numel() for s in frozen_param_shapes.values())
+ print(f'rank 0: {FROZEN_PARAM_SHAPES}.numel = {num_elem}')
+
+ wanted_params = len(frozen_param_shapes)
+ wanted_numel = sum(s.numel() for s in frozen_param_shapes.values())
+ avail_numel = sum([p.numel() for p in frozen_param_fragments.values()])
+ print(f'Frozen params: Have {avail_numel} numels to process.')
+ print(f'Frozen params: Need {wanted_numel} numels in {wanted_params} params')
+
+ total_params = 0
+ total_numel = 0
+ for name, shape in frozen_param_shapes.items():
+ total_params += 1
+ unpartitioned_numel = shape.numel()
+ total_numel += unpartitioned_numel
+
+ state_dict[name] = frozen_param_fragments[name]
+
+ if debug:
+ print(f"{name} full shape: {shape} unpartitioned numel {unpartitioned_numel} ")
+
+ print(f"Reconstructed Frozen fp32 state dict with {total_params} params {total_numel} elements")
+
+
+def _has_callable(obj, fn):
+ attr = getattr(obj, fn, None)
+ return callable(attr)
+
+
+def _zero2_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states):
+ param_shapes = zero_model_states[0].param_shapes
+
+ # Reconstruction protocol:
+ #
+ # XXX: document this
+
+ if debug:
+ for i in range(world_size):
+ for j in range(len(fp32_flat_groups[0])):
+ print(f"{FP32_FLAT_GROUPS}[{i}][{j}].shape={fp32_flat_groups[i][j].shape}")
+
+ # XXX: memory usage doubles here (zero2)
+ num_param_groups = len(fp32_flat_groups[0])
+ merged_single_partition_of_fp32_groups = []
+ for i in range(num_param_groups):
+ merged_partitions = [sd[i] for sd in fp32_flat_groups]
+ full_single_fp32_vector = torch.cat(merged_partitions, 0)
+ merged_single_partition_of_fp32_groups.append(full_single_fp32_vector)
+ avail_numel = sum(
+ [full_single_fp32_vector.numel() for full_single_fp32_vector in merged_single_partition_of_fp32_groups])
+
+ if debug:
+ wanted_params = sum([len(shapes) for shapes in param_shapes])
+ wanted_numel = sum([sum(shape.numel() for shape in shapes.values()) for shapes in param_shapes])
+ # not asserting if there is a mismatch due to possible padding
+ print(f"Have {avail_numel} numels to process.")
+ print(f"Need {wanted_numel} numels in {wanted_params} params.")
+
+ # params
+ # XXX: for huge models that can't fit into the host's RAM we will have to recode this to support
+ # out-of-core computing solution
+ total_numel = 0
+ total_params = 0
+ for shapes, full_single_fp32_vector in zip(param_shapes, merged_single_partition_of_fp32_groups):
+ offset = 0
+ avail_numel = full_single_fp32_vector.numel()
+ for name, shape in shapes.items():
+
+ unpartitioned_numel = shape.numel() if _has_callable(shape, 'numel') else math.prod(shape)
+ total_numel += unpartitioned_numel
+ total_params += 1
+
+ if debug:
+ print(f"{name} full shape: {shape} unpartitioned numel {unpartitioned_numel} ")
+ state_dict[name] = full_single_fp32_vector.narrow(0, offset, unpartitioned_numel).view(shape)
+ offset += unpartitioned_numel
+
+ # Z2 started to align to 2*world_size to improve nccl performance. Therefore both offset and
+ # avail_numel can differ by anywhere between 0..2*world_size. Due to two unrelated complex
+ # paddings performed in the code it's almost impossible to predict the exact numbers w/o the
+ # live optimizer object, so we are checking that the numbers are within the right range
+ align_to = 2 * world_size
+
+ def zero2_align(x):
+ return align_to * math.ceil(x / align_to)
+
+ if debug:
+ print(f"original offset={offset}, avail_numel={avail_numel}")
+
+ offset = zero2_align(offset)
+ avail_numel = zero2_align(avail_numel)
+
+ if debug:
+ print(f"aligned offset={offset}, avail_numel={avail_numel}")
+
+ # Sanity check
+ if offset != avail_numel:
+ raise ValueError(f"consumed {offset} numels out of {avail_numel} - something is wrong")
+
+ print(f"Reconstructed fp32 state dict with {total_params} params {total_numel} elements")
+
+
+def _get_fp32_state_dict_from_zero2_checkpoint(world_size, fp32_flat_groups, zero_model_states,
+ exclude_frozen_parameters):
+ state_dict = OrderedDict()
+
+ # buffers
+ buffers = zero_model_states[0].buffers
+ state_dict.update(buffers)
+ if debug:
+ print(f"added {len(buffers)} buffers")
+
+ if not exclude_frozen_parameters:
+ _zero2_merge_frozen_params(state_dict, zero_model_states)
+
+ _zero2_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states)
+
+ # recover shared parameters
+ for pair in zero_model_states[0].shared_params:
+ if pair[1] in state_dict:
+ state_dict[pair[0]] = state_dict[pair[1]]
+
+ return state_dict
+
+
+def zero3_partitioned_param_info(unpartitioned_numel, world_size):
+ remainder = unpartitioned_numel % world_size
+ padding_numel = (world_size - remainder) if remainder else 0
+ partitioned_numel = math.ceil(unpartitioned_numel / world_size)
+ return partitioned_numel, padding_numel
+
+
+def _zero3_merge_frozen_params(state_dict, world_size, zero_model_states):
+ if zero_model_states[0].frozen_param_shapes is None or len(zero_model_states[0].frozen_param_shapes) == 0:
+ return
+
+ if debug:
+ for i in range(world_size):
+ num_elem = sum(s.numel() for s in zero_model_states[i].frozen_param_fragments.values())
+ print(f'rank {i}: {FROZEN_PARAM_SHAPES}.numel = {num_elem}')
+
+ frozen_param_shapes = zero_model_states[0].frozen_param_shapes
+ wanted_params = len(frozen_param_shapes)
+ wanted_numel = sum(s.numel() for s in frozen_param_shapes.values())
+ avail_numel = sum([p.numel() for p in zero_model_states[0].frozen_param_fragments.values()]) * world_size
+ print(f'Frozen params: Have {avail_numel} numels to process.')
+ print(f'Frozen params: Need {wanted_numel} numels in {wanted_params} params')
+
+ total_params = 0
+ total_numel = 0
+ for name, shape in zero_model_states[0].frozen_param_shapes.items():
+ total_params += 1
+ unpartitioned_numel = shape.numel()
+ total_numel += unpartitioned_numel
+
+ param_frags = tuple(model_state.frozen_param_fragments[name] for model_state in zero_model_states)
+ state_dict[name] = torch.cat(param_frags, 0).narrow(0, 0, unpartitioned_numel).view(shape)
+
+ partitioned_numel, partitioned_padding_numel = zero3_partitioned_param_info(unpartitioned_numel, world_size)
+
+ if debug:
+ print(
+ f"Frozen params: {total_params} {name} full shape: {shape} partition0 numel={partitioned_numel} partitioned_padding_numel={partitioned_padding_numel}"
+ )
+
+ print(f"Reconstructed Frozen fp32 state dict with {total_params} params {total_numel} elements")
+
+
+def _zero3_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states):
+ param_shapes = zero_model_states[0].param_shapes
+ avail_numel = fp32_flat_groups[0].numel() * world_size
+ # Reconstruction protocol: For zero3 we need to zip the partitions together at boundary of each
+ # param, re-consolidating each param, while dealing with padding if any
+
+ # merge list of dicts, preserving order
+ param_shapes = {k: v for d in param_shapes for k, v in d.items()}
+
+ if debug:
+ for i in range(world_size):
+ print(f"{FP32_FLAT_GROUPS}[{i}].shape={fp32_flat_groups[i].shape}")
+
+ wanted_params = len(param_shapes)
+ wanted_numel = sum(shape.numel() for shape in param_shapes.values())
+ # not asserting if there is a mismatch due to possible padding
+ avail_numel = fp32_flat_groups[0].numel() * world_size
+ print(f"Trainable params: Have {avail_numel} numels to process.")
+ print(f"Trainable params: Need {wanted_numel} numels in {wanted_params} params.")
+
+ # params
+ # XXX: for huge models that can't fit into the host's RAM we will have to recode this to support
+ # out-of-core computing solution
+ offset = 0
+ total_numel = 0
+ total_params = 0
+ for name, shape in param_shapes.items():
+
+ unpartitioned_numel = shape.numel()
+ total_numel += unpartitioned_numel
+ total_params += 1
+
+ partitioned_numel, partitioned_padding_numel = zero3_partitioned_param_info(unpartitioned_numel, world_size)
+
+ if debug:
+ print(
+ f"Trainable params: {total_params} {name} full shape: {shape} partition0 numel={partitioned_numel} partitioned_padding_numel={partitioned_padding_numel}"
+ )
+
+ # XXX: memory usage doubles here
+ state_dict[name] = torch.cat(
+ tuple(fp32_flat_groups[i].narrow(0, offset, partitioned_numel) for i in range(world_size)),
+ 0).narrow(0, 0, unpartitioned_numel).view(shape)
+ offset += partitioned_numel
+
+ offset *= world_size
+
+ # Sanity check
+ if offset != avail_numel:
+ raise ValueError(f"consumed {offset} numels out of {avail_numel} - something is wrong")
+
+ print(f"Reconstructed Trainable fp32 state dict with {total_params} params {total_numel} elements")
+
+
+def _get_fp32_state_dict_from_zero3_checkpoint(world_size, fp32_flat_groups, zero_model_states,
+ exclude_frozen_parameters):
+ state_dict = OrderedDict()
+
+ # buffers
+ buffers = zero_model_states[0].buffers
+ state_dict.update(buffers)
+ if debug:
+ print(f"added {len(buffers)} buffers")
+
+ if not exclude_frozen_parameters:
+ _zero3_merge_frozen_params(state_dict, world_size, zero_model_states)
+
+ _zero3_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states)
+
+ # recover shared parameters
+ for pair in zero_model_states[0].shared_params:
+ if pair[1] in state_dict:
+ state_dict[pair[0]] = state_dict[pair[1]]
+
+ return state_dict
+
+
+def get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag=None, exclude_frozen_parameters=False):
+ """
+ Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated state_dict that can be loaded with
+ ``load_state_dict()`` and used for training without DeepSpeed or shared with others, for example
+ via a model hub.
+
+ Args:
+ - ``checkpoint_dir``: path to the desired checkpoint folder
+ - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in 'latest' file. e.g., ``global_step14``
+ - ``exclude_frozen_parameters``: exclude frozen parameters
+
+ Returns:
+ - pytorch ``state_dict``
+
+ Note: this approach may not work if your application doesn't have sufficient free CPU memory and
+ you may need to use the offline approach using the ``zero_to_fp32.py`` script that is saved with
+ the checkpoint.
+
+ A typical usage might be ::
+
+ from deepspeed.utils.zero_to_fp32 import get_fp32_state_dict_from_zero_checkpoint
+ # do the training and checkpoint saving
+ state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir) # already on cpu
+ model = model.cpu() # move to cpu
+ model.load_state_dict(state_dict)
+ # submit to model hub or save the model to share with others
+
+ In this example the ``model`` will no longer be usable in the deepspeed context of the same
+ application. i.e. you will need to re-initialize the deepspeed engine, since
+ ``model.load_state_dict(state_dict)`` will remove all the deepspeed magic from it.
+
+ If you want it all done for you, use ``load_state_dict_from_zero_checkpoint`` instead.
+
+ """
+ if tag is None:
+ latest_path = os.path.join(checkpoint_dir, 'latest')
+ if os.path.isfile(latest_path):
+ with open(latest_path, 'r') as fd:
+ tag = fd.read().strip()
+ else:
+ raise ValueError(f"Unable to find 'latest' file at {latest_path}")
+
+ ds_checkpoint_dir = os.path.join(checkpoint_dir, tag)
+
+ if not os.path.isdir(ds_checkpoint_dir):
+ raise FileNotFoundError(f"Directory '{ds_checkpoint_dir}' doesn't exist")
+
+ return _get_fp32_state_dict_from_zero_checkpoint(ds_checkpoint_dir, exclude_frozen_parameters)
+
+
+def convert_zero_checkpoint_to_fp32_state_dict(checkpoint_dir, output_file, tag=None, exclude_frozen_parameters=False):
+ """
+ Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated ``state_dict`` file that can be
+ loaded with ``torch.load(file)`` + ``load_state_dict()`` and used for training without DeepSpeed.
+
+ Args:
+ - ``checkpoint_dir``: path to the desired checkpoint folder. (one that contains the tag-folder, like ``global_step14``)
+ - ``output_file``: path to the pytorch fp32 state_dict output file (e.g. path/pytorch_model.bin)
+ - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in the file named ``latest`` in the checkpoint folder, e.g., ``global_step14``
+ - ``exclude_frozen_parameters``: exclude frozen parameters
+ """
+
+ state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag, exclude_frozen_parameters)
+ print(f"Saving fp32 state dict to {output_file}")
+ torch.save(state_dict, output_file)
+
+
+def load_state_dict_from_zero_checkpoint(model, checkpoint_dir, tag=None):
+ """
+ 1. Put the provided model to cpu
+ 2. Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated ``state_dict``
+ 3. Load it into the provided model
+
+ Args:
+ - ``model``: the model object to update
+ - ``checkpoint_dir``: path to the desired checkpoint folder. (one that contains the tag-folder, like ``global_step14``)
+ - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in the file named ``latest`` in the checkpoint folder, e.g., ``global_step14``
+
+ Returns:
+ - ``model`: modified model
+
+ Make sure you have plenty of CPU memory available before you call this function. If you don't
+ have enough use the ``zero_to_fp32.py`` utility to do the conversion. You will find it
+ conveniently placed for you in the checkpoint folder.
+
+ A typical usage might be ::
+
+ from deepspeed.utils.zero_to_fp32 import load_state_dict_from_zero_checkpoint
+ model = load_state_dict_from_zero_checkpoint(trainer.model, checkpoint_dir)
+ # submit to model hub or save the model to share with others
+
+ Note, that once this was run, the ``model`` will no longer be usable in the deepspeed context
+ of the same application. i.e. you will need to re-initialize the deepspeed engine, since
+ ``model.load_state_dict(state_dict)`` will remove all the deepspeed magic from it.
+
+ """
+ logger.info(f"Extracting fp32 weights")
+ state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag)
+
+ logger.info(f"Overwriting model with fp32 weights")
+ model = model.cpu()
+ model.load_state_dict(state_dict, strict=False)
+
+ return model
+
+
+if __name__ == "__main__":
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument("checkpoint_dir",
+ type=str,
+ help="path to the desired checkpoint folder, e.g., path/checkpoint-12")
+ parser.add_argument(
+ "output_file",
+ type=str,
+ help="path to the pytorch fp32 state_dict output file (e.g. path/checkpoint-12/pytorch_model.bin)")
+ parser.add_argument("-t",
+ "--tag",
+ type=str,
+ default=None,
+ help="checkpoint tag used as a unique identifier for checkpoint. e.g., global_step1")
+ parser.add_argument("--exclude_frozen_parameters", action='store_true', help="exclude frozen parameters")
+ parser.add_argument("-d", "--debug", action='store_true', help="enable debug")
+ args = parser.parse_args()
+
+ debug = args.debug
+
+ convert_zero_checkpoint_to_fp32_state_dict(args.checkpoint_dir,
+ args.output_file,
+ tag=args.tag,
+ exclude_frozen_parameters=args.exclude_frozen_parameters)
diff --git a/checkpoint-638/README.md b/checkpoint-638/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..f4a3934800eeb082a0cb833d7b6af4f68eed3615
--- /dev/null
+++ b/checkpoint-638/README.md
@@ -0,0 +1,202 @@
+---
+base_model: nvidia/Llama-3_3-Nemotron-Super-49B-v1
+library_name: peft
+---
+
+# Model Card for Model ID
+
+
+
+
+
+## Model Details
+
+### Model Description
+
+
+
+
+
+- **Developed by:** [More Information Needed]
+- **Funded by [optional]:** [More Information Needed]
+- **Shared by [optional]:** [More Information Needed]
+- **Model type:** [More Information Needed]
+- **Language(s) (NLP):** [More Information Needed]
+- **License:** [More Information Needed]
+- **Finetuned from model [optional]:** [More Information Needed]
+
+### Model Sources [optional]
+
+
+
+- **Repository:** [More Information Needed]
+- **Paper [optional]:** [More Information Needed]
+- **Demo [optional]:** [More Information Needed]
+
+## Uses
+
+
+
+### Direct Use
+
+
+
+[More Information Needed]
+
+### Downstream Use [optional]
+
+
+
+[More Information Needed]
+
+### Out-of-Scope Use
+
+
+
+[More Information Needed]
+
+## Bias, Risks, and Limitations
+
+
+
+[More Information Needed]
+
+### Recommendations
+
+
+
+Users (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations.
+
+## How to Get Started with the Model
+
+Use the code below to get started with the model.
+
+[More Information Needed]
+
+## Training Details
+
+### Training Data
+
+
+
+[More Information Needed]
+
+### Training Procedure
+
+
+
+#### Preprocessing [optional]
+
+[More Information Needed]
+
+
+#### Training Hyperparameters
+
+- **Training regime:** [More Information Needed]
+
+#### Speeds, Sizes, Times [optional]
+
+
+
+[More Information Needed]
+
+## Evaluation
+
+
+
+### Testing Data, Factors & Metrics
+
+#### Testing Data
+
+
+
+[More Information Needed]
+
+#### Factors
+
+
+
+[More Information Needed]
+
+#### Metrics
+
+
+
+[More Information Needed]
+
+### Results
+
+[More Information Needed]
+
+#### Summary
+
+
+
+## Model Examination [optional]
+
+
+
+[More Information Needed]
+
+## Environmental Impact
+
+
+
+Carbon emissions can be estimated using the [Machine Learning Impact calculator](https://mlco2.github.io/impact#compute) presented in [Lacoste et al. (2019)](https://arxiv.org/abs/1910.09700).
+
+- **Hardware Type:** [More Information Needed]
+- **Hours used:** [More Information Needed]
+- **Cloud Provider:** [More Information Needed]
+- **Compute Region:** [More Information Needed]
+- **Carbon Emitted:** [More Information Needed]
+
+## Technical Specifications [optional]
+
+### Model Architecture and Objective
+
+[More Information Needed]
+
+### Compute Infrastructure
+
+[More Information Needed]
+
+#### Hardware
+
+[More Information Needed]
+
+#### Software
+
+[More Information Needed]
+
+## Citation [optional]
+
+
+
+**BibTeX:**
+
+[More Information Needed]
+
+**APA:**
+
+[More Information Needed]
+
+## Glossary [optional]
+
+
+
+[More Information Needed]
+
+## More Information [optional]
+
+[More Information Needed]
+
+## Model Card Authors [optional]
+
+[More Information Needed]
+
+## Model Card Contact
+
+[More Information Needed]
+### Framework versions
+
+- PEFT 0.15.0
\ No newline at end of file
diff --git a/checkpoint-638/adapter_config.json b/checkpoint-638/adapter_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..1edb33780e2306c6b19fd727be8e9b8b35f237c4
--- /dev/null
+++ b/checkpoint-638/adapter_config.json
@@ -0,0 +1,42 @@
+{
+ "alpha_pattern": {},
+ "auto_mapping": null,
+ "base_model_name_or_path": "nvidia/Llama-3_3-Nemotron-Super-49B-v1",
+ "bias": "none",
+ "corda_config": null,
+ "eva_config": null,
+ "exclude_modules": null,
+ "fan_in_fan_out": null,
+ "inference_mode": true,
+ "init_lora_weights": true,
+ "layer_replication": null,
+ "layers_pattern": null,
+ "layers_to_transform": null,
+ "loftq_config": {},
+ "lora_alpha": 512,
+ "lora_bias": false,
+ "lora_dropout": 0.05,
+ "megatron_config": null,
+ "megatron_core": "megatron.core",
+ "modules_to_save": [
+ "embed_tokens",
+ "lm_head"
+ ],
+ "peft_type": "LORA",
+ "r": 256,
+ "rank_pattern": {},
+ "revision": null,
+ "target_modules": [
+ "gate_proj",
+ "k_proj",
+ "down_proj",
+ "o_proj",
+ "v_proj",
+ "up_proj",
+ "q_proj"
+ ],
+ "task_type": "CAUSAL_LM",
+ "trainable_token_indices": null,
+ "use_dora": false,
+ "use_rslora": false
+}
\ No newline at end of file
diff --git a/checkpoint-638/adapter_model.safetensors b/checkpoint-638/adapter_model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..35842b9651b371450da9928a14475aeb7e3f269c
--- /dev/null
+++ b/checkpoint-638/adapter_model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0812464ec0eceb2dda23eb97599723c8b0355ef227db40ef5b2f930dc4ef6186
+size 9016826528
diff --git a/checkpoint-638/global_step637/bf16_zero_pp_rank_0_mp_rank_00_optim_states.pt b/checkpoint-638/global_step637/bf16_zero_pp_rank_0_mp_rank_00_optim_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..dca4213777557e059dd0d043ae7d375bea6ea395
--- /dev/null
+++ b/checkpoint-638/global_step637/bf16_zero_pp_rank_0_mp_rank_00_optim_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d9173edb4c9ebf323ced262faf58fd939e9af4f3604110f45aa3753c063f5caf
+size 27050164444
diff --git a/checkpoint-638/global_step637/bf16_zero_pp_rank_1_mp_rank_00_optim_states.pt b/checkpoint-638/global_step637/bf16_zero_pp_rank_1_mp_rank_00_optim_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..da98948aa0c11dedf9fe16150a1506186fcc309d
--- /dev/null
+++ b/checkpoint-638/global_step637/bf16_zero_pp_rank_1_mp_rank_00_optim_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a7edd77adf20792561b7de39e823bcff5fdd6be758d10ad71579e00e7b00cb90
+size 27050169884
diff --git a/checkpoint-638/global_step637/mp_rank_00_model_states.pt b/checkpoint-638/global_step637/mp_rank_00_model_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..ecc121bdbbe2508b39af8bbafa231faaa36ea1e7
--- /dev/null
+++ b/checkpoint-638/global_step637/mp_rank_00_model_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ac91ce79a28628551a8979923f07d5118365f62a421e59c08c015b4bdae41290
+size 9776788601
diff --git a/checkpoint-638/latest b/checkpoint-638/latest
new file mode 100644
index 0000000000000000000000000000000000000000..d012d0ce403671314e126e6402c39bcddcfe6480
--- /dev/null
+++ b/checkpoint-638/latest
@@ -0,0 +1 @@
+global_step637
\ No newline at end of file
diff --git a/checkpoint-638/rng_state_0.pth b/checkpoint-638/rng_state_0.pth
new file mode 100644
index 0000000000000000000000000000000000000000..d84c77535f8c7a4c2676f9274073d68c3007aeba
--- /dev/null
+++ b/checkpoint-638/rng_state_0.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4c1befa2d454c450cd89379bc71c69f965ee33aca6ae6a7a757d8f3f6d6f8b70
+size 14512
diff --git a/checkpoint-638/rng_state_1.pth b/checkpoint-638/rng_state_1.pth
new file mode 100644
index 0000000000000000000000000000000000000000..c0de96118a7a564558659b7142cfe50a9a694f2b
--- /dev/null
+++ b/checkpoint-638/rng_state_1.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0e2e65f53006556f6db1a94f122adf6f3b167ecff4df64a9d2980168de08b7e1
+size 14512
diff --git a/checkpoint-638/scheduler.pt b/checkpoint-638/scheduler.pt
new file mode 100644
index 0000000000000000000000000000000000000000..580124aead66a410b0fed61fa136800732c3f226
--- /dev/null
+++ b/checkpoint-638/scheduler.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d8c0b9ca4dbe6069aa5d148004daaceb5003d77e6d5d93cc67757781a96bf7a7
+size 1064
diff --git a/checkpoint-638/special_tokens_map.json b/checkpoint-638/special_tokens_map.json
new file mode 100644
index 0000000000000000000000000000000000000000..278b7f0f84be865c4687700ee7b3c63d89a51e18
--- /dev/null
+++ b/checkpoint-638/special_tokens_map.json
@@ -0,0 +1,23 @@
+{
+ "bos_token": {
+ "content": "<|begin_of_text|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "<|eot_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": {
+ "content": "<|end_of_text|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+}
diff --git a/checkpoint-638/tokenizer.json b/checkpoint-638/tokenizer.json
new file mode 100644
index 0000000000000000000000000000000000000000..1c1d8d5c9024994f1d3b00f9662b8dd89ca13cf2
--- /dev/null
+++ b/checkpoint-638/tokenizer.json
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6b9e4e7fb171f92fd137b777cc2714bf87d11576700a1dcd7a399e7bbe39537b
+size 17209920
diff --git a/checkpoint-638/tokenizer_config.json b/checkpoint-638/tokenizer_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..edd01b980c1db496ea102a51c972ee8f5d1a2c74
--- /dev/null
+++ b/checkpoint-638/tokenizer_config.json
@@ -0,0 +1,2064 @@
+{
+ "added_tokens_decoder": {
+ "128000": {
+ "content": "<|begin_of_text|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128001": {
+ "content": "<|end_of_text|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128002": {
+ "content": "<|reserved_special_token_0|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128003": {
+ "content": "<|reserved_special_token_1|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128004": {
+ "content": "<|finetune_right_pad_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128005": {
+ "content": "<|reserved_special_token_2|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128006": {
+ "content": "<|start_header_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128007": {
+ "content": "<|end_header_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128008": {
+ "content": "<|eom_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128009": {
+ "content": "<|eot_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128010": {
+ "content": "<|python_tag|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128011": {
+ "content": "<|reserved_special_token_3|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128012": {
+ "content": "<|reserved_special_token_4|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128013": {
+ "content": "<|reserved_special_token_5|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128014": {
+ "content": "<|reserved_special_token_6|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128015": {
+ "content": "<|reserved_special_token_7|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128016": {
+ "content": "<|reserved_special_token_8|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128017": {
+ "content": "<|reserved_special_token_9|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128018": {
+ "content": "<|reserved_special_token_10|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128019": {
+ "content": "<|reserved_special_token_11|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128020": {
+ "content": "<|reserved_special_token_12|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128021": {
+ "content": "<|reserved_special_token_13|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128022": {
+ "content": "<|reserved_special_token_14|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128023": {
+ "content": "<|reserved_special_token_15|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128024": {
+ "content": "<|reserved_special_token_16|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128025": {
+ "content": "<|reserved_special_token_17|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128026": {
+ "content": "<|reserved_special_token_18|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128027": {
+ "content": "<|reserved_special_token_19|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128028": {
+ "content": "<|reserved_special_token_20|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128029": {
+ "content": "<|reserved_special_token_21|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128030": {
+ "content": "<|reserved_special_token_22|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128031": {
+ "content": "<|reserved_special_token_23|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128032": {
+ "content": "<|reserved_special_token_24|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128033": {
+ "content": "<|reserved_special_token_25|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128034": {
+ "content": "<|reserved_special_token_26|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128035": {
+ "content": "<|reserved_special_token_27|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128036": {
+ "content": "<|reserved_special_token_28|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128037": {
+ "content": "<|reserved_special_token_29|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128038": {
+ "content": "<|reserved_special_token_30|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128039": {
+ "content": "<|reserved_special_token_31|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128040": {
+ "content": "<|reserved_special_token_32|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128041": {
+ "content": "<|reserved_special_token_33|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128042": {
+ "content": "<|reserved_special_token_34|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128043": {
+ "content": "<|reserved_special_token_35|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128044": {
+ "content": "<|reserved_special_token_36|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128045": {
+ "content": "<|reserved_special_token_37|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128046": {
+ "content": "<|reserved_special_token_38|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128047": {
+ "content": "<|reserved_special_token_39|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128048": {
+ "content": "<|reserved_special_token_40|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128049": {
+ "content": "<|reserved_special_token_41|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128050": {
+ "content": "<|reserved_special_token_42|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128051": {
+ "content": "<|reserved_special_token_43|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128052": {
+ "content": "<|reserved_special_token_44|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128053": {
+ "content": "<|reserved_special_token_45|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128054": {
+ "content": "<|reserved_special_token_46|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128055": {
+ "content": "<|reserved_special_token_47|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128056": {
+ "content": "<|reserved_special_token_48|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128057": {
+ "content": "<|reserved_special_token_49|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128058": {
+ "content": "<|reserved_special_token_50|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128059": {
+ "content": "<|reserved_special_token_51|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128060": {
+ "content": "<|reserved_special_token_52|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128061": {
+ "content": "<|reserved_special_token_53|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128062": {
+ "content": "<|reserved_special_token_54|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128063": {
+ "content": "<|reserved_special_token_55|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128064": {
+ "content": "<|reserved_special_token_56|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128065": {
+ "content": "<|reserved_special_token_57|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128066": {
+ "content": "<|reserved_special_token_58|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128067": {
+ "content": "<|reserved_special_token_59|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128068": {
+ "content": "<|reserved_special_token_60|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128069": {
+ "content": "<|reserved_special_token_61|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128070": {
+ "content": "<|reserved_special_token_62|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128071": {
+ "content": "<|reserved_special_token_63|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128072": {
+ "content": "<|reserved_special_token_64|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128073": {
+ "content": "<|reserved_special_token_65|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128074": {
+ "content": "<|reserved_special_token_66|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128075": {
+ "content": "<|reserved_special_token_67|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128076": {
+ "content": "<|reserved_special_token_68|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128077": {
+ "content": "<|reserved_special_token_69|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128078": {
+ "content": "<|reserved_special_token_70|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128079": {
+ "content": "<|reserved_special_token_71|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128080": {
+ "content": "<|reserved_special_token_72|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128081": {
+ "content": "<|reserved_special_token_73|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128082": {
+ "content": "<|reserved_special_token_74|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128083": {
+ "content": "<|reserved_special_token_75|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128084": {
+ "content": "<|reserved_special_token_76|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128085": {
+ "content": "<|reserved_special_token_77|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128086": {
+ "content": "<|reserved_special_token_78|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128087": {
+ "content": "<|reserved_special_token_79|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128088": {
+ "content": "<|reserved_special_token_80|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128089": {
+ "content": "<|reserved_special_token_81|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128090": {
+ "content": "<|reserved_special_token_82|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128091": {
+ "content": "<|reserved_special_token_83|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128092": {
+ "content": "<|reserved_special_token_84|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128093": {
+ "content": "<|reserved_special_token_85|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128094": {
+ "content": "<|reserved_special_token_86|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128095": {
+ "content": "<|reserved_special_token_87|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128096": {
+ "content": "<|reserved_special_token_88|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128097": {
+ "content": "<|reserved_special_token_89|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128098": {
+ "content": "<|reserved_special_token_90|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128099": {
+ "content": "<|reserved_special_token_91|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128100": {
+ "content": "<|reserved_special_token_92|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128101": {
+ "content": "<|reserved_special_token_93|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128102": {
+ "content": "<|reserved_special_token_94|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128103": {
+ "content": "<|reserved_special_token_95|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128104": {
+ "content": "<|reserved_special_token_96|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128105": {
+ "content": "<|reserved_special_token_97|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128106": {
+ "content": "<|reserved_special_token_98|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128107": {
+ "content": "<|reserved_special_token_99|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128108": {
+ "content": "<|reserved_special_token_100|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128109": {
+ "content": "<|reserved_special_token_101|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128110": {
+ "content": "<|reserved_special_token_102|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128111": {
+ "content": "<|reserved_special_token_103|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128112": {
+ "content": "<|reserved_special_token_104|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128113": {
+ "content": "<|reserved_special_token_105|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128114": {
+ "content": "<|reserved_special_token_106|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128115": {
+ "content": "<|reserved_special_token_107|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128116": {
+ "content": "<|reserved_special_token_108|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128117": {
+ "content": "<|reserved_special_token_109|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128118": {
+ "content": "<|reserved_special_token_110|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128119": {
+ "content": "<|reserved_special_token_111|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128120": {
+ "content": "<|reserved_special_token_112|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128121": {
+ "content": "<|reserved_special_token_113|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128122": {
+ "content": "<|reserved_special_token_114|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128123": {
+ "content": "<|reserved_special_token_115|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128124": {
+ "content": "<|reserved_special_token_116|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128125": {
+ "content": "<|reserved_special_token_117|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128126": {
+ "content": "<|reserved_special_token_118|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128127": {
+ "content": "<|reserved_special_token_119|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128128": {
+ "content": "<|reserved_special_token_120|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128129": {
+ "content": "<|reserved_special_token_121|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128130": {
+ "content": "<|reserved_special_token_122|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128131": {
+ "content": "<|reserved_special_token_123|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128132": {
+ "content": "<|reserved_special_token_124|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128133": {
+ "content": "<|reserved_special_token_125|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128134": {
+ "content": "<|reserved_special_token_126|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128135": {
+ "content": "<|reserved_special_token_127|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128136": {
+ "content": "<|reserved_special_token_128|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128137": {
+ "content": "<|reserved_special_token_129|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128138": {
+ "content": "<|reserved_special_token_130|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128139": {
+ "content": "<|reserved_special_token_131|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128140": {
+ "content": "<|reserved_special_token_132|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128141": {
+ "content": "<|reserved_special_token_133|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128142": {
+ "content": "<|reserved_special_token_134|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128143": {
+ "content": "<|reserved_special_token_135|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128144": {
+ "content": "<|reserved_special_token_136|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128145": {
+ "content": "<|reserved_special_token_137|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128146": {
+ "content": "<|reserved_special_token_138|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128147": {
+ "content": "<|reserved_special_token_139|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128148": {
+ "content": "<|reserved_special_token_140|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128149": {
+ "content": "<|reserved_special_token_141|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128150": {
+ "content": "<|reserved_special_token_142|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128151": {
+ "content": "<|reserved_special_token_143|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128152": {
+ "content": "<|reserved_special_token_144|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128153": {
+ "content": "<|reserved_special_token_145|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128154": {
+ "content": "<|reserved_special_token_146|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128155": {
+ "content": "<|reserved_special_token_147|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128156": {
+ "content": "<|reserved_special_token_148|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128157": {
+ "content": "<|reserved_special_token_149|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128158": {
+ "content": "<|reserved_special_token_150|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128159": {
+ "content": "<|reserved_special_token_151|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128160": {
+ "content": "<|reserved_special_token_152|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128161": {
+ "content": "<|reserved_special_token_153|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128162": {
+ "content": "<|reserved_special_token_154|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128163": {
+ "content": "<|reserved_special_token_155|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128164": {
+ "content": "<|reserved_special_token_156|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128165": {
+ "content": "<|reserved_special_token_157|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128166": {
+ "content": "<|reserved_special_token_158|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128167": {
+ "content": "<|reserved_special_token_159|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128168": {
+ "content": "<|reserved_special_token_160|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128169": {
+ "content": "<|reserved_special_token_161|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128170": {
+ "content": "<|reserved_special_token_162|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128171": {
+ "content": "<|reserved_special_token_163|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128172": {
+ "content": "<|reserved_special_token_164|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128173": {
+ "content": "<|reserved_special_token_165|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128174": {
+ "content": "<|reserved_special_token_166|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128175": {
+ "content": "<|reserved_special_token_167|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128176": {
+ "content": "<|reserved_special_token_168|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128177": {
+ "content": "<|reserved_special_token_169|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128178": {
+ "content": "<|reserved_special_token_170|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128179": {
+ "content": "<|reserved_special_token_171|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128180": {
+ "content": "<|reserved_special_token_172|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128181": {
+ "content": "<|reserved_special_token_173|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128182": {
+ "content": "<|reserved_special_token_174|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128183": {
+ "content": "<|reserved_special_token_175|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128184": {
+ "content": "<|reserved_special_token_176|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128185": {
+ "content": "<|reserved_special_token_177|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128186": {
+ "content": "<|reserved_special_token_178|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128187": {
+ "content": "<|reserved_special_token_179|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128188": {
+ "content": "<|reserved_special_token_180|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128189": {
+ "content": "<|reserved_special_token_181|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128190": {
+ "content": "<|reserved_special_token_182|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128191": {
+ "content": "<|reserved_special_token_183|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128192": {
+ "content": "<|reserved_special_token_184|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128193": {
+ "content": "<|reserved_special_token_185|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128194": {
+ "content": "<|reserved_special_token_186|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128195": {
+ "content": "<|reserved_special_token_187|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128196": {
+ "content": "<|reserved_special_token_188|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128197": {
+ "content": "<|reserved_special_token_189|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128198": {
+ "content": "<|reserved_special_token_190|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128199": {
+ "content": "<|reserved_special_token_191|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128200": {
+ "content": "<|reserved_special_token_192|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128201": {
+ "content": "<|reserved_special_token_193|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128202": {
+ "content": "<|reserved_special_token_194|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128203": {
+ "content": "<|reserved_special_token_195|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128204": {
+ "content": "<|reserved_special_token_196|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128205": {
+ "content": "<|reserved_special_token_197|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128206": {
+ "content": "<|reserved_special_token_198|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128207": {
+ "content": "<|reserved_special_token_199|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128208": {
+ "content": "<|reserved_special_token_200|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128209": {
+ "content": "<|reserved_special_token_201|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128210": {
+ "content": "<|reserved_special_token_202|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128211": {
+ "content": "<|reserved_special_token_203|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128212": {
+ "content": "<|reserved_special_token_204|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128213": {
+ "content": "<|reserved_special_token_205|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128214": {
+ "content": "<|reserved_special_token_206|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128215": {
+ "content": "<|reserved_special_token_207|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128216": {
+ "content": "<|reserved_special_token_208|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128217": {
+ "content": "<|reserved_special_token_209|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128218": {
+ "content": "<|reserved_special_token_210|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128219": {
+ "content": "<|reserved_special_token_211|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128220": {
+ "content": "<|reserved_special_token_212|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128221": {
+ "content": "<|reserved_special_token_213|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128222": {
+ "content": "<|reserved_special_token_214|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128223": {
+ "content": "<|reserved_special_token_215|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128224": {
+ "content": "<|reserved_special_token_216|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128225": {
+ "content": "<|reserved_special_token_217|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128226": {
+ "content": "<|reserved_special_token_218|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128227": {
+ "content": "<|reserved_special_token_219|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128228": {
+ "content": "<|reserved_special_token_220|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128229": {
+ "content": "<|reserved_special_token_221|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128230": {
+ "content": "<|reserved_special_token_222|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128231": {
+ "content": "<|reserved_special_token_223|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128232": {
+ "content": "<|reserved_special_token_224|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128233": {
+ "content": "<|reserved_special_token_225|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128234": {
+ "content": "<|reserved_special_token_226|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128235": {
+ "content": "<|reserved_special_token_227|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128236": {
+ "content": "<|reserved_special_token_228|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128237": {
+ "content": "<|reserved_special_token_229|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128238": {
+ "content": "<|reserved_special_token_230|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128239": {
+ "content": "<|reserved_special_token_231|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128240": {
+ "content": "<|reserved_special_token_232|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128241": {
+ "content": "<|reserved_special_token_233|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128242": {
+ "content": "<|reserved_special_token_234|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128243": {
+ "content": "<|reserved_special_token_235|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128244": {
+ "content": "<|reserved_special_token_236|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128245": {
+ "content": "<|reserved_special_token_237|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128246": {
+ "content": "<|reserved_special_token_238|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128247": {
+ "content": "<|reserved_special_token_239|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128248": {
+ "content": "<|reserved_special_token_240|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128249": {
+ "content": "<|reserved_special_token_241|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128250": {
+ "content": "<|reserved_special_token_242|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128251": {
+ "content": "<|reserved_special_token_243|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128252": {
+ "content": "<|reserved_special_token_244|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128253": {
+ "content": "<|reserved_special_token_245|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128254": {
+ "content": "<|reserved_special_token_246|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128255": {
+ "content": "<|reserved_special_token_247|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ }
+ },
+ "bos_token": "<|begin_of_text|>",
+  "chat_template": "{{- bos_token }}{%- if messages[0]['role'] == 'system' %}{%- set system_message = messages[0]['content']|trim %}{%- set messages = messages[1:] %}{%- else %}{%- set system_message = \"\" %}{%- endif %}{{- \"<|start_header_id|>system<|end_header_id|>\\n\\n\" }}{{- system_message }}{{- \"<|eot_id|>\" }}{%- for message in messages %}{%- if message['role'] == 'assistant' and '</think>' in message['content'] %}{%- set content = message['content'].split('</think>')[-1].lstrip() %}{%- else %}{%- set content = message['content'] %}{%- endif %}{{- '<|start_header_id|>' + message['role'] + '<|end_header_id|>\\n\\n' + content | trim + '<|eot_id|>' }}{%- endfor %}{%- if add_generation_prompt %}{{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' }}{%- endif %}",
+ "clean_up_tokenization_spaces": true,
+ "eos_token": "<|eot_id|>",
+ "extra_special_tokens": {},
+ "model_input_names": [
+ "input_ids",
+ "attention_mask"
+ ],
+ "model_max_length": 131072,
+ "pad_token": "<|end_of_text|>",
+ "tokenizer_class": "PreTrainedTokenizer"
+}
diff --git a/checkpoint-638/trainer_state.json b/checkpoint-638/trainer_state.json
new file mode 100644
index 0000000000000000000000000000000000000000..572435ec3e53693dcf53e3286d185caaee1b1c44
--- /dev/null
+++ b/checkpoint-638/trainer_state.json
@@ -0,0 +1,4499 @@
+{
+ "best_metric": null,
+ "best_model_checkpoint": null,
+ "epoch": 1.9958246346555324,
+ "eval_steps": 500,
+ "global_step": 638,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ {
+ "epoch": 0.003131524008350731,
+ "grad_norm": 13.917898178100586,
+ "learning_rate": 5.0000000000000004e-08,
+ "loss": 4.1051,
+ "step": 1
+ },
+ {
+ "epoch": 0.006263048016701462,
+ "grad_norm": 17.327869415283203,
+ "learning_rate": 1.0000000000000001e-07,
+ "loss": 4.1048,
+ "step": 2
+ },
+ {
+ "epoch": 0.009394572025052192,
+ "grad_norm": 14.063946723937988,
+ "learning_rate": 1.5000000000000002e-07,
+ "loss": 4.0741,
+ "step": 3
+ },
+ {
+ "epoch": 0.012526096033402923,
+ "grad_norm": 16.817699432373047,
+ "learning_rate": 2.0000000000000002e-07,
+ "loss": 4.2002,
+ "step": 4
+ },
+ {
+ "epoch": 0.015657620041753653,
+ "grad_norm": 14.47036361694336,
+ "learning_rate": 2.5000000000000004e-07,
+ "loss": 4.2652,
+ "step": 5
+ },
+ {
+ "epoch": 0.018789144050104383,
+ "grad_norm": 14.474193572998047,
+ "learning_rate": 3.0000000000000004e-07,
+ "loss": 4.0888,
+ "step": 6
+ },
+ {
+ "epoch": 0.021920668058455117,
+ "grad_norm": 14.865458488464355,
+ "learning_rate": 3.5000000000000004e-07,
+ "loss": 4.0014,
+ "step": 7
+ },
+ {
+ "epoch": 0.025052192066805846,
+ "grad_norm": 15.338888168334961,
+ "learning_rate": 4.0000000000000003e-07,
+ "loss": 4.13,
+ "step": 8
+ },
+ {
+ "epoch": 0.028183716075156576,
+ "grad_norm": 15.154336929321289,
+ "learning_rate": 4.5000000000000003e-07,
+ "loss": 4.2493,
+ "step": 9
+ },
+ {
+ "epoch": 0.031315240083507306,
+ "grad_norm": 15.919597625732422,
+ "learning_rate": 5.000000000000001e-07,
+ "loss": 4.0535,
+ "step": 10
+ },
+ {
+ "epoch": 0.03444676409185804,
+ "grad_norm": 14.981926918029785,
+ "learning_rate": 5.5e-07,
+ "loss": 3.9064,
+ "step": 11
+ },
+ {
+ "epoch": 0.037578288100208766,
+ "grad_norm": 13.36101245880127,
+ "learning_rate": 6.000000000000001e-07,
+ "loss": 4.1939,
+ "step": 12
+ },
+ {
+ "epoch": 0.0407098121085595,
+ "grad_norm": 15.58773422241211,
+ "learning_rate": 6.5e-07,
+ "loss": 4.18,
+ "step": 13
+ },
+ {
+ "epoch": 0.04384133611691023,
+ "grad_norm": 13.560139656066895,
+ "learning_rate": 7.000000000000001e-07,
+ "loss": 3.9414,
+ "step": 14
+ },
+ {
+ "epoch": 0.04697286012526096,
+ "grad_norm": 12.307971954345703,
+ "learning_rate": 7.5e-07,
+ "loss": 3.8836,
+ "step": 15
+ },
+ {
+ "epoch": 0.05010438413361169,
+ "grad_norm": 14.533182144165039,
+ "learning_rate": 8.000000000000001e-07,
+ "loss": 4.1551,
+ "step": 16
+ },
+ {
+ "epoch": 0.05323590814196242,
+ "grad_norm": 13.453729629516602,
+ "learning_rate": 8.500000000000001e-07,
+ "loss": 4.0048,
+ "step": 17
+ },
+ {
+ "epoch": 0.05636743215031315,
+ "grad_norm": 13.45992374420166,
+ "learning_rate": 9.000000000000001e-07,
+ "loss": 4.0745,
+ "step": 18
+ },
+ {
+ "epoch": 0.059498956158663886,
+ "grad_norm": 11.857145309448242,
+ "learning_rate": 9.500000000000001e-07,
+ "loss": 3.9871,
+ "step": 19
+ },
+ {
+ "epoch": 0.06263048016701461,
+ "grad_norm": 11.872294425964355,
+ "learning_rate": 1.0000000000000002e-06,
+ "loss": 3.8959,
+ "step": 20
+ },
+ {
+ "epoch": 0.06576200417536535,
+ "grad_norm": 12.969825744628906,
+ "learning_rate": 1.0500000000000001e-06,
+ "loss": 4.0308,
+ "step": 21
+ },
+ {
+ "epoch": 0.06889352818371608,
+ "grad_norm": 12.33769416809082,
+ "learning_rate": 1.1e-06,
+ "loss": 3.9341,
+ "step": 22
+ },
+ {
+ "epoch": 0.0720250521920668,
+ "grad_norm": 12.669405937194824,
+ "learning_rate": 1.1500000000000002e-06,
+ "loss": 3.8511,
+ "step": 23
+ },
+ {
+ "epoch": 0.07515657620041753,
+ "grad_norm": 10.677213668823242,
+ "learning_rate": 1.2000000000000002e-06,
+ "loss": 3.7764,
+ "step": 24
+ },
+ {
+ "epoch": 0.07828810020876827,
+ "grad_norm": 10.366402626037598,
+ "learning_rate": 1.25e-06,
+ "loss": 3.5291,
+ "step": 25
+ },
+ {
+ "epoch": 0.081419624217119,
+ "grad_norm": 11.211421012878418,
+ "learning_rate": 1.3e-06,
+ "loss": 3.5765,
+ "step": 26
+ },
+ {
+ "epoch": 0.08455114822546973,
+ "grad_norm": 11.313716888427734,
+ "learning_rate": 1.3500000000000002e-06,
+ "loss": 3.4849,
+ "step": 27
+ },
+ {
+ "epoch": 0.08768267223382047,
+ "grad_norm": 10.41294002532959,
+ "learning_rate": 1.4000000000000001e-06,
+ "loss": 3.2653,
+ "step": 28
+ },
+ {
+ "epoch": 0.09081419624217119,
+ "grad_norm": 10.40064525604248,
+ "learning_rate": 1.45e-06,
+ "loss": 3.3384,
+ "step": 29
+ },
+ {
+ "epoch": 0.09394572025052192,
+ "grad_norm": 10.05427074432373,
+ "learning_rate": 1.5e-06,
+ "loss": 3.2257,
+ "step": 30
+ },
+ {
+ "epoch": 0.09707724425887265,
+ "grad_norm": 9.583163261413574,
+ "learning_rate": 1.5500000000000002e-06,
+ "loss": 3.1371,
+ "step": 31
+ },
+ {
+ "epoch": 0.10020876826722339,
+ "grad_norm": 10.09977912902832,
+ "learning_rate": 1.6000000000000001e-06,
+ "loss": 3.0658,
+ "step": 32
+ },
+ {
+ "epoch": 0.10334029227557412,
+ "grad_norm": 9.271486282348633,
+ "learning_rate": 1.6500000000000003e-06,
+ "loss": 2.9693,
+ "step": 33
+ },
+ {
+ "epoch": 0.10647181628392484,
+ "grad_norm": 10.687992095947266,
+ "learning_rate": 1.7000000000000002e-06,
+ "loss": 2.95,
+ "step": 34
+ },
+ {
+ "epoch": 0.10960334029227557,
+ "grad_norm": 8.762290000915527,
+ "learning_rate": 1.75e-06,
+ "loss": 2.8286,
+ "step": 35
+ },
+ {
+ "epoch": 0.1127348643006263,
+ "grad_norm": 10.13785171508789,
+ "learning_rate": 1.8000000000000001e-06,
+ "loss": 2.3664,
+ "step": 36
+ },
+ {
+ "epoch": 0.11586638830897704,
+ "grad_norm": 18.301353454589844,
+ "learning_rate": 1.85e-06,
+ "loss": 2.5533,
+ "step": 37
+ },
+ {
+ "epoch": 0.11899791231732777,
+ "grad_norm": 11.490377426147461,
+ "learning_rate": 1.9000000000000002e-06,
+ "loss": 2.6133,
+ "step": 38
+ },
+ {
+ "epoch": 0.12212943632567849,
+ "grad_norm": 15.614163398742676,
+ "learning_rate": 1.9500000000000004e-06,
+ "loss": 2.3596,
+ "step": 39
+ },
+ {
+ "epoch": 0.12526096033402923,
+ "grad_norm": 17.757442474365234,
+ "learning_rate": 2.0000000000000003e-06,
+ "loss": 2.3491,
+ "step": 40
+ },
+ {
+ "epoch": 0.12839248434237996,
+ "grad_norm": 17.18431854248047,
+ "learning_rate": 2.05e-06,
+ "loss": 2.2361,
+ "step": 41
+ },
+ {
+ "epoch": 0.1315240083507307,
+ "grad_norm": 16.149789810180664,
+ "learning_rate": 2.1000000000000002e-06,
+ "loss": 2.1457,
+ "step": 42
+ },
+ {
+ "epoch": 0.13465553235908143,
+ "grad_norm": 15.256914138793945,
+ "learning_rate": 2.15e-06,
+ "loss": 2.12,
+ "step": 43
+ },
+ {
+ "epoch": 0.13778705636743216,
+ "grad_norm": 15.537406921386719,
+ "learning_rate": 2.2e-06,
+ "loss": 2.1877,
+ "step": 44
+ },
+ {
+ "epoch": 0.1409185803757829,
+ "grad_norm": 7.947713851928711,
+ "learning_rate": 2.25e-06,
+ "loss": 2.1648,
+ "step": 45
+ },
+ {
+ "epoch": 0.1440501043841336,
+ "grad_norm": 8.818676948547363,
+ "learning_rate": 2.3000000000000004e-06,
+ "loss": 2.134,
+ "step": 46
+ },
+ {
+ "epoch": 0.14718162839248433,
+ "grad_norm": 5.175768852233887,
+ "learning_rate": 2.35e-06,
+ "loss": 2.0796,
+ "step": 47
+ },
+ {
+ "epoch": 0.15031315240083507,
+ "grad_norm": 6.750611305236816,
+ "learning_rate": 2.4000000000000003e-06,
+ "loss": 1.9174,
+ "step": 48
+ },
+ {
+ "epoch": 0.1534446764091858,
+ "grad_norm": 6.2147979736328125,
+ "learning_rate": 2.4500000000000003e-06,
+ "loss": 1.8065,
+ "step": 49
+ },
+ {
+ "epoch": 0.15657620041753653,
+ "grad_norm": 13.291611671447754,
+ "learning_rate": 2.5e-06,
+ "loss": 1.7061,
+ "step": 50
+ },
+ {
+ "epoch": 0.15970772442588727,
+ "grad_norm": 7.251201629638672,
+ "learning_rate": 2.55e-06,
+ "loss": 1.7924,
+ "step": 51
+ },
+ {
+ "epoch": 0.162839248434238,
+ "grad_norm": 5.2126054763793945,
+ "learning_rate": 2.6e-06,
+ "loss": 1.6735,
+ "step": 52
+ },
+ {
+ "epoch": 0.16597077244258873,
+ "grad_norm": 5.435528755187988,
+ "learning_rate": 2.6500000000000005e-06,
+ "loss": 1.6265,
+ "step": 53
+ },
+ {
+ "epoch": 0.16910229645093947,
+ "grad_norm": 4.505807399749756,
+ "learning_rate": 2.7000000000000004e-06,
+ "loss": 1.4851,
+ "step": 54
+ },
+ {
+ "epoch": 0.1722338204592902,
+ "grad_norm": 5.128388404846191,
+ "learning_rate": 2.7500000000000004e-06,
+ "loss": 1.5832,
+ "step": 55
+ },
+ {
+ "epoch": 0.17536534446764093,
+ "grad_norm": 16.935827255249023,
+ "learning_rate": 2.8000000000000003e-06,
+ "loss": 1.6553,
+ "step": 56
+ },
+ {
+ "epoch": 0.17849686847599164,
+ "grad_norm": 3.664458990097046,
+ "learning_rate": 2.85e-06,
+ "loss": 1.5,
+ "step": 57
+ },
+ {
+ "epoch": 0.18162839248434237,
+ "grad_norm": 7.763802528381348,
+ "learning_rate": 2.9e-06,
+ "loss": 1.367,
+ "step": 58
+ },
+ {
+ "epoch": 0.1847599164926931,
+ "grad_norm": 3.2216155529022217,
+ "learning_rate": 2.95e-06,
+ "loss": 1.3863,
+ "step": 59
+ },
+ {
+ "epoch": 0.18789144050104384,
+ "grad_norm": 4.384445667266846,
+ "learning_rate": 3e-06,
+ "loss": 1.4247,
+ "step": 60
+ },
+ {
+ "epoch": 0.19102296450939457,
+ "grad_norm": 4.8080878257751465,
+ "learning_rate": 3.05e-06,
+ "loss": 1.3257,
+ "step": 61
+ },
+ {
+ "epoch": 0.1941544885177453,
+ "grad_norm": 4.154761791229248,
+ "learning_rate": 3.1000000000000004e-06,
+ "loss": 1.321,
+ "step": 62
+ },
+ {
+ "epoch": 0.19728601252609604,
+ "grad_norm": 6.4742112159729,
+ "learning_rate": 3.1500000000000003e-06,
+ "loss": 1.2823,
+ "step": 63
+ },
+ {
+ "epoch": 0.20041753653444677,
+ "grad_norm": 2.583422899246216,
+ "learning_rate": 3.2000000000000003e-06,
+ "loss": 1.2136,
+ "step": 64
+ },
+ {
+ "epoch": 0.2035490605427975,
+ "grad_norm": 4.1933488845825195,
+ "learning_rate": 3.2500000000000002e-06,
+ "loss": 1.1855,
+ "step": 65
+ },
+ {
+ "epoch": 0.20668058455114824,
+ "grad_norm": 4.11049747467041,
+ "learning_rate": 3.3000000000000006e-06,
+ "loss": 1.2389,
+ "step": 66
+ },
+ {
+ "epoch": 0.20981210855949894,
+ "grad_norm": 2.264458417892456,
+ "learning_rate": 3.3500000000000005e-06,
+ "loss": 1.0651,
+ "step": 67
+ },
+ {
+ "epoch": 0.21294363256784968,
+ "grad_norm": 2.5408174991607666,
+ "learning_rate": 3.4000000000000005e-06,
+ "loss": 1.1389,
+ "step": 68
+ },
+ {
+ "epoch": 0.2160751565762004,
+ "grad_norm": 7.82421350479126,
+ "learning_rate": 3.45e-06,
+ "loss": 1.0956,
+ "step": 69
+ },
+ {
+ "epoch": 0.21920668058455114,
+ "grad_norm": 3.070939064025879,
+ "learning_rate": 3.5e-06,
+ "loss": 1.0451,
+ "step": 70
+ },
+ {
+ "epoch": 0.22233820459290188,
+ "grad_norm": 2.6310527324676514,
+ "learning_rate": 3.5500000000000003e-06,
+ "loss": 1.0538,
+ "step": 71
+ },
+ {
+ "epoch": 0.2254697286012526,
+ "grad_norm": 7.630155563354492,
+ "learning_rate": 3.6000000000000003e-06,
+ "loss": 1.0052,
+ "step": 72
+ },
+ {
+ "epoch": 0.22860125260960334,
+ "grad_norm": 6.950636863708496,
+ "learning_rate": 3.65e-06,
+ "loss": 1.0473,
+ "step": 73
+ },
+ {
+ "epoch": 0.23173277661795408,
+ "grad_norm": 2.2703945636749268,
+ "learning_rate": 3.7e-06,
+ "loss": 1.0576,
+ "step": 74
+ },
+ {
+ "epoch": 0.2348643006263048,
+ "grad_norm": 3.3817710876464844,
+ "learning_rate": 3.7500000000000005e-06,
+ "loss": 1.0177,
+ "step": 75
+ },
+ {
+ "epoch": 0.23799582463465555,
+ "grad_norm": 7.266414642333984,
+ "learning_rate": 3.8000000000000005e-06,
+ "loss": 1.0645,
+ "step": 76
+ },
+ {
+ "epoch": 0.24112734864300625,
+ "grad_norm": 5.782608509063721,
+ "learning_rate": 3.85e-06,
+ "loss": 1.0162,
+ "step": 77
+ },
+ {
+ "epoch": 0.24425887265135698,
+ "grad_norm": 2.7938575744628906,
+ "learning_rate": 3.900000000000001e-06,
+ "loss": 0.9664,
+ "step": 78
+ },
+ {
+ "epoch": 0.24739039665970772,
+ "grad_norm": 6.681935787200928,
+ "learning_rate": 3.95e-06,
+ "loss": 0.953,
+ "step": 79
+ },
+ {
+ "epoch": 0.25052192066805845,
+ "grad_norm": 2.253279209136963,
+ "learning_rate": 4.000000000000001e-06,
+ "loss": 0.9568,
+ "step": 80
+ },
+ {
+ "epoch": 0.2536534446764092,
+ "grad_norm": 1.4875826835632324,
+ "learning_rate": 4.05e-06,
+ "loss": 0.9448,
+ "step": 81
+ },
+ {
+ "epoch": 0.2567849686847599,
+ "grad_norm": 2.4987940788269043,
+ "learning_rate": 4.1e-06,
+ "loss": 0.9393,
+ "step": 82
+ },
+ {
+ "epoch": 0.2599164926931106,
+ "grad_norm": 4.712948322296143,
+ "learning_rate": 4.15e-06,
+ "loss": 0.9532,
+ "step": 83
+ },
+ {
+ "epoch": 0.2630480167014614,
+ "grad_norm": 6.9030632972717285,
+ "learning_rate": 4.2000000000000004e-06,
+ "loss": 0.96,
+ "step": 84
+ },
+ {
+ "epoch": 0.2661795407098121,
+ "grad_norm": 3.4780967235565186,
+ "learning_rate": 4.25e-06,
+ "loss": 0.8993,
+ "step": 85
+ },
+ {
+ "epoch": 0.26931106471816285,
+ "grad_norm": 1.526064395904541,
+ "learning_rate": 4.3e-06,
+ "loss": 0.9021,
+ "step": 86
+ },
+ {
+ "epoch": 0.27244258872651356,
+ "grad_norm": 10.727686882019043,
+ "learning_rate": 4.350000000000001e-06,
+ "loss": 0.856,
+ "step": 87
+ },
+ {
+ "epoch": 0.2755741127348643,
+ "grad_norm": 12.483160972595215,
+ "learning_rate": 4.4e-06,
+ "loss": 0.9357,
+ "step": 88
+ },
+ {
+ "epoch": 0.278705636743215,
+ "grad_norm": 6.544492244720459,
+ "learning_rate": 4.450000000000001e-06,
+ "loss": 0.9168,
+ "step": 89
+ },
+ {
+ "epoch": 0.2818371607515658,
+ "grad_norm": 1.178139567375183,
+ "learning_rate": 4.5e-06,
+ "loss": 0.8748,
+ "step": 90
+ },
+ {
+ "epoch": 0.2849686847599165,
+ "grad_norm": 1.711506962776184,
+ "learning_rate": 4.5500000000000005e-06,
+ "loss": 0.8425,
+ "step": 91
+ },
+ {
+ "epoch": 0.2881002087682672,
+ "grad_norm": 3.281747341156006,
+ "learning_rate": 4.600000000000001e-06,
+ "loss": 0.8491,
+ "step": 92
+ },
+ {
+ "epoch": 0.29123173277661796,
+ "grad_norm": 2.2964377403259277,
+ "learning_rate": 4.65e-06,
+ "loss": 0.8038,
+ "step": 93
+ },
+ {
+ "epoch": 0.29436325678496866,
+ "grad_norm": 1.959700345993042,
+ "learning_rate": 4.7e-06,
+ "loss": 0.8439,
+ "step": 94
+ },
+ {
+ "epoch": 0.2974947807933194,
+ "grad_norm": 3.979384183883667,
+ "learning_rate": 4.75e-06,
+ "loss": 0.8839,
+ "step": 95
+ },
+ {
+ "epoch": 0.30062630480167013,
+ "grad_norm": 1.4721262454986572,
+ "learning_rate": 4.800000000000001e-06,
+ "loss": 0.845,
+ "step": 96
+ },
+ {
+ "epoch": 0.3037578288100209,
+ "grad_norm": 2.862248659133911,
+ "learning_rate": 4.85e-06,
+ "loss": 0.7748,
+ "step": 97
+ },
+ {
+ "epoch": 0.3068893528183716,
+ "grad_norm": 3.7439088821411133,
+ "learning_rate": 4.9000000000000005e-06,
+ "loss": 0.8145,
+ "step": 98
+ },
+ {
+ "epoch": 0.31002087682672236,
+ "grad_norm": 1.6654618978500366,
+ "learning_rate": 4.95e-06,
+ "loss": 0.8326,
+ "step": 99
+ },
+ {
+ "epoch": 0.31315240083507306,
+ "grad_norm": 7.8437581062316895,
+ "learning_rate": 5e-06,
+ "loss": 0.8666,
+ "step": 100
+ },
+ {
+ "epoch": 0.3162839248434238,
+ "grad_norm": 6.429738521575928,
+ "learning_rate": 4.999996250830422e-06,
+ "loss": 0.836,
+ "step": 101
+ },
+ {
+ "epoch": 0.31941544885177453,
+ "grad_norm": 2.6017794609069824,
+ "learning_rate": 4.9999850033329326e-06,
+ "loss": 0.7785,
+ "step": 102
+ },
+ {
+ "epoch": 0.32254697286012524,
+ "grad_norm": 1.0575449466705322,
+ "learning_rate": 4.999966257541265e-06,
+ "loss": 0.7639,
+ "step": 103
+ },
+ {
+ "epoch": 0.325678496868476,
+ "grad_norm": 2.6932010650634766,
+ "learning_rate": 4.999940013511647e-06,
+ "loss": 0.8214,
+ "step": 104
+ },
+ {
+ "epoch": 0.3288100208768267,
+ "grad_norm": 2.925288438796997,
+ "learning_rate": 4.999906271322792e-06,
+ "loss": 0.8797,
+ "step": 105
+ },
+ {
+ "epoch": 0.33194154488517746,
+ "grad_norm": 1.3570607900619507,
+ "learning_rate": 4.9998650310759035e-06,
+ "loss": 0.792,
+ "step": 106
+ },
+ {
+ "epoch": 0.33507306889352817,
+ "grad_norm": 5.126713752746582,
+ "learning_rate": 4.999816292894676e-06,
+ "loss": 0.8352,
+ "step": 107
+ },
+ {
+ "epoch": 0.33820459290187893,
+ "grad_norm": 1.8966432809829712,
+ "learning_rate": 4.99976005692529e-06,
+ "loss": 0.7663,
+ "step": 108
+ },
+ {
+ "epoch": 0.34133611691022964,
+ "grad_norm": 1.3100829124450684,
+ "learning_rate": 4.999696323336418e-06,
+ "loss": 0.771,
+ "step": 109
+ },
+ {
+ "epoch": 0.3444676409185804,
+ "grad_norm": 2.4025354385375977,
+ "learning_rate": 4.999625092319218e-06,
+ "loss": 0.7618,
+ "step": 110
+ },
+ {
+ "epoch": 0.3475991649269311,
+ "grad_norm": 1.130232810974121,
+ "learning_rate": 4.999546364087334e-06,
+ "loss": 0.7705,
+ "step": 111
+ },
+ {
+ "epoch": 0.35073068893528186,
+ "grad_norm": 3.430262327194214,
+ "learning_rate": 4.999460138876901e-06,
+ "loss": 0.77,
+ "step": 112
+ },
+ {
+ "epoch": 0.35386221294363257,
+ "grad_norm": 1.1272103786468506,
+ "learning_rate": 4.999366416946536e-06,
+ "loss": 0.7133,
+ "step": 113
+ },
+ {
+ "epoch": 0.3569937369519833,
+ "grad_norm": 1.1740471124649048,
+ "learning_rate": 4.999265198577342e-06,
+ "loss": 0.7684,
+ "step": 114
+ },
+ {
+ "epoch": 0.36012526096033404,
+ "grad_norm": 1.3138248920440674,
+ "learning_rate": 4.999156484072907e-06,
+ "loss": 0.7888,
+ "step": 115
+ },
+ {
+ "epoch": 0.36325678496868474,
+ "grad_norm": 1.061711311340332,
+ "learning_rate": 4.999040273759304e-06,
+ "loss": 0.7484,
+ "step": 116
+ },
+ {
+ "epoch": 0.3663883089770355,
+ "grad_norm": 1.4682390689849854,
+ "learning_rate": 4.998916567985083e-06,
+ "loss": 0.7296,
+ "step": 117
+ },
+ {
+ "epoch": 0.3695198329853862,
+ "grad_norm": 2.884068250656128,
+ "learning_rate": 4.998785367121284e-06,
+ "loss": 0.7662,
+ "step": 118
+ },
+ {
+ "epoch": 0.37265135699373697,
+ "grad_norm": 0.9812761545181274,
+ "learning_rate": 4.9986466715614205e-06,
+ "loss": 0.7307,
+ "step": 119
+ },
+ {
+ "epoch": 0.3757828810020877,
+ "grad_norm": 2.2237496376037598,
+ "learning_rate": 4.998500481721484e-06,
+ "loss": 0.6761,
+ "step": 120
+ },
+ {
+ "epoch": 0.37891440501043844,
+ "grad_norm": 1.4004178047180176,
+ "learning_rate": 4.998346798039952e-06,
+ "loss": 0.7505,
+ "step": 121
+ },
+ {
+ "epoch": 0.38204592901878914,
+ "grad_norm": 5.54975700378418,
+ "learning_rate": 4.99818562097777e-06,
+ "loss": 0.7615,
+ "step": 122
+ },
+ {
+ "epoch": 0.38517745302713985,
+ "grad_norm": 6.17140531539917,
+ "learning_rate": 4.9980169510183624e-06,
+ "loss": 0.7002,
+ "step": 123
+ },
+ {
+ "epoch": 0.3883089770354906,
+ "grad_norm": 4.974380016326904,
+ "learning_rate": 4.997840788667628e-06,
+ "loss": 0.7449,
+ "step": 124
+ },
+ {
+ "epoch": 0.3914405010438413,
+ "grad_norm": 1.4133399724960327,
+ "learning_rate": 4.997657134453937e-06,
+ "loss": 0.7442,
+ "step": 125
+ },
+ {
+ "epoch": 0.3945720250521921,
+ "grad_norm": 1.868915319442749,
+ "learning_rate": 4.9974659889281295e-06,
+ "loss": 0.7104,
+ "step": 126
+ },
+ {
+ "epoch": 0.3977035490605428,
+ "grad_norm": 1.2599350214004517,
+ "learning_rate": 4.997267352663514e-06,
+ "loss": 0.7385,
+ "step": 127
+ },
+ {
+ "epoch": 0.40083507306889354,
+ "grad_norm": 1.4353641271591187,
+ "learning_rate": 4.997061226255869e-06,
+ "loss": 0.7081,
+ "step": 128
+ },
+ {
+ "epoch": 0.40396659707724425,
+ "grad_norm": 3.2492141723632812,
+ "learning_rate": 4.996847610323437e-06,
+ "loss": 0.7859,
+ "step": 129
+ },
+ {
+ "epoch": 0.407098121085595,
+ "grad_norm": 9.599719047546387,
+ "learning_rate": 4.996626505506923e-06,
+ "loss": 0.7241,
+ "step": 130
+ },
+ {
+ "epoch": 0.4102296450939457,
+ "grad_norm": 10.053650856018066,
+ "learning_rate": 4.996397912469494e-06,
+ "loss": 0.6841,
+ "step": 131
+ },
+ {
+ "epoch": 0.4133611691022965,
+ "grad_norm": 1.323876976966858,
+ "learning_rate": 4.996161831896777e-06,
+ "loss": 0.7317,
+ "step": 132
+ },
+ {
+ "epoch": 0.4164926931106472,
+ "grad_norm": 1.4180598258972168,
+ "learning_rate": 4.9959182644968594e-06,
+ "loss": 0.692,
+ "step": 133
+ },
+ {
+ "epoch": 0.4196242171189979,
+ "grad_norm": 1.2194396257400513,
+ "learning_rate": 4.99566721100028e-06,
+ "loss": 0.7068,
+ "step": 134
+ },
+ {
+ "epoch": 0.42275574112734865,
+ "grad_norm": 1.0984960794448853,
+ "learning_rate": 4.995408672160031e-06,
+ "loss": 0.6946,
+ "step": 135
+ },
+ {
+ "epoch": 0.42588726513569936,
+ "grad_norm": 1.9341071844100952,
+ "learning_rate": 4.995142648751561e-06,
+ "loss": 0.7467,
+ "step": 136
+ },
+ {
+ "epoch": 0.4290187891440501,
+ "grad_norm": 1.9960932731628418,
+ "learning_rate": 4.9948691415727594e-06,
+ "loss": 0.7379,
+ "step": 137
+ },
+ {
+ "epoch": 0.4321503131524008,
+ "grad_norm": 0.8743917942047119,
+ "learning_rate": 4.994588151443968e-06,
+ "loss": 0.66,
+ "step": 138
+ },
+ {
+ "epoch": 0.4352818371607516,
+ "grad_norm": 0.8655261993408203,
+ "learning_rate": 4.99429967920797e-06,
+ "loss": 0.6646,
+ "step": 139
+ },
+ {
+ "epoch": 0.4384133611691023,
+ "grad_norm": 5.462070941925049,
+ "learning_rate": 4.994003725729992e-06,
+ "loss": 0.643,
+ "step": 140
+ },
+ {
+ "epoch": 0.44154488517745305,
+ "grad_norm": 2.1401469707489014,
+ "learning_rate": 4.993700291897695e-06,
+ "loss": 0.6639,
+ "step": 141
+ },
+ {
+ "epoch": 0.44467640918580376,
+ "grad_norm": 1.8219833374023438,
+ "learning_rate": 4.9933893786211815e-06,
+ "loss": 0.6673,
+ "step": 142
+ },
+ {
+ "epoch": 0.44780793319415446,
+ "grad_norm": 1.641079306602478,
+ "learning_rate": 4.993070986832984e-06,
+ "loss": 0.658,
+ "step": 143
+ },
+ {
+ "epoch": 0.4509394572025052,
+ "grad_norm": 1.1739819049835205,
+ "learning_rate": 4.992745117488066e-06,
+ "loss": 0.6826,
+ "step": 144
+ },
+ {
+ "epoch": 0.45407098121085593,
+ "grad_norm": 2.309185743331909,
+ "learning_rate": 4.9924117715638185e-06,
+ "loss": 0.6536,
+ "step": 145
+ },
+ {
+ "epoch": 0.4572025052192067,
+ "grad_norm": 1.09304940700531,
+ "learning_rate": 4.99207095006006e-06,
+ "loss": 0.721,
+ "step": 146
+ },
+ {
+ "epoch": 0.4603340292275574,
+ "grad_norm": 0.9056984186172485,
+ "learning_rate": 4.991722653999025e-06,
+ "loss": 0.7019,
+ "step": 147
+ },
+ {
+ "epoch": 0.46346555323590816,
+ "grad_norm": 1.8440625667572021,
+ "learning_rate": 4.991366884425374e-06,
+ "loss": 0.707,
+ "step": 148
+ },
+ {
+ "epoch": 0.46659707724425886,
+ "grad_norm": 1.2244676351547241,
+ "learning_rate": 4.991003642406177e-06,
+ "loss": 0.6407,
+ "step": 149
+ },
+ {
+ "epoch": 0.4697286012526096,
+ "grad_norm": 0.9258589744567871,
+ "learning_rate": 4.99063292903092e-06,
+ "loss": 0.6954,
+ "step": 150
+ },
+ {
+ "epoch": 0.47286012526096033,
+ "grad_norm": 4.176390647888184,
+ "learning_rate": 4.990254745411496e-06,
+ "loss": 0.6812,
+ "step": 151
+ },
+ {
+ "epoch": 0.4759916492693111,
+ "grad_norm": 1.4322530031204224,
+ "learning_rate": 4.989869092682205e-06,
+ "loss": 0.6808,
+ "step": 152
+ },
+ {
+ "epoch": 0.4791231732776618,
+ "grad_norm": 0.8017717003822327,
+ "learning_rate": 4.989475971999748e-06,
+ "loss": 0.687,
+ "step": 153
+ },
+ {
+ "epoch": 0.4822546972860125,
+ "grad_norm": 1.5641374588012695,
+ "learning_rate": 4.989075384543228e-06,
+ "loss": 0.6599,
+ "step": 154
+ },
+ {
+ "epoch": 0.48538622129436326,
+ "grad_norm": 1.1522141695022583,
+ "learning_rate": 4.98866733151414e-06,
+ "loss": 0.6546,
+ "step": 155
+ },
+ {
+ "epoch": 0.48851774530271397,
+ "grad_norm": 0.8593171238899231,
+ "learning_rate": 4.988251814136372e-06,
+ "loss": 0.6857,
+ "step": 156
+ },
+ {
+ "epoch": 0.49164926931106473,
+ "grad_norm": 2.668159246444702,
+ "learning_rate": 4.9878288336562e-06,
+ "loss": 0.661,
+ "step": 157
+ },
+ {
+ "epoch": 0.49478079331941544,
+ "grad_norm": 0.9953671097755432,
+ "learning_rate": 4.987398391342285e-06,
+ "loss": 0.6512,
+ "step": 158
+ },
+ {
+ "epoch": 0.4979123173277662,
+ "grad_norm": 1.042872667312622,
+ "learning_rate": 4.986960488485667e-06,
+ "loss": 0.6311,
+ "step": 159
+ },
+ {
+ "epoch": 0.5010438413361169,
+ "grad_norm": 0.9070663452148438,
+ "learning_rate": 4.9865151263997645e-06,
+ "loss": 0.675,
+ "step": 160
+ },
+ {
+ "epoch": 0.5041753653444676,
+ "grad_norm": 0.8460433483123779,
+ "learning_rate": 4.986062306420367e-06,
+ "loss": 0.6635,
+ "step": 161
+ },
+ {
+ "epoch": 0.5073068893528184,
+ "grad_norm": 1.2639834880828857,
+ "learning_rate": 4.985602029905635e-06,
+ "loss": 0.6327,
+ "step": 162
+ },
+ {
+ "epoch": 0.5104384133611691,
+ "grad_norm": 0.8775074481964111,
+ "learning_rate": 4.985134298236091e-06,
+ "loss": 0.644,
+ "step": 163
+ },
+ {
+ "epoch": 0.5135699373695198,
+ "grad_norm": 1.2031961679458618,
+ "learning_rate": 4.98465911281462e-06,
+ "loss": 0.6254,
+ "step": 164
+ },
+ {
+ "epoch": 0.5167014613778705,
+ "grad_norm": 0.892494797706604,
+ "learning_rate": 4.984176475066463e-06,
+ "loss": 0.7122,
+ "step": 165
+ },
+ {
+ "epoch": 0.5198329853862212,
+ "grad_norm": 2.7122485637664795,
+ "learning_rate": 4.983686386439212e-06,
+ "loss": 0.6679,
+ "step": 166
+ },
+ {
+ "epoch": 0.5229645093945721,
+ "grad_norm": 0.9344426989555359,
+ "learning_rate": 4.983188848402806e-06,
+ "loss": 0.6319,
+ "step": 167
+ },
+ {
+ "epoch": 0.5260960334029228,
+ "grad_norm": 1.4093577861785889,
+ "learning_rate": 4.982683862449531e-06,
+ "loss": 0.6425,
+ "step": 168
+ },
+ {
+ "epoch": 0.5292275574112735,
+ "grad_norm": 1.1285009384155273,
+ "learning_rate": 4.982171430094007e-06,
+ "loss": 0.6298,
+ "step": 169
+ },
+ {
+ "epoch": 0.5323590814196242,
+ "grad_norm": 1.952778935432434,
+ "learning_rate": 4.981651552873193e-06,
+ "loss": 0.7066,
+ "step": 170
+ },
+ {
+ "epoch": 0.535490605427975,
+ "grad_norm": 5.133765697479248,
+ "learning_rate": 4.981124232346374e-06,
+ "loss": 0.6634,
+ "step": 171
+ },
+ {
+ "epoch": 0.5386221294363257,
+ "grad_norm": 0.9770542979240417,
+ "learning_rate": 4.980589470095161e-06,
+ "loss": 0.7121,
+ "step": 172
+ },
+ {
+ "epoch": 0.5417536534446764,
+ "grad_norm": 0.8414323925971985,
+ "learning_rate": 4.980047267723487e-06,
+ "loss": 0.6397,
+ "step": 173
+ },
+ {
+ "epoch": 0.5448851774530271,
+ "grad_norm": 1.9173879623413086,
+ "learning_rate": 4.979497626857596e-06,
+ "loss": 0.6228,
+ "step": 174
+ },
+ {
+ "epoch": 0.5480167014613778,
+ "grad_norm": 1.0823363065719604,
+ "learning_rate": 4.978940549146048e-06,
+ "loss": 0.6475,
+ "step": 175
+ },
+ {
+ "epoch": 0.5511482254697286,
+ "grad_norm": 3.715353488922119,
+ "learning_rate": 4.978376036259706e-06,
+ "loss": 0.7127,
+ "step": 176
+ },
+ {
+ "epoch": 0.5542797494780793,
+ "grad_norm": 0.981584370136261,
+ "learning_rate": 4.9778040898917325e-06,
+ "loss": 0.6468,
+ "step": 177
+ },
+ {
+ "epoch": 0.55741127348643,
+ "grad_norm": 1.70566987991333,
+ "learning_rate": 4.977224711757587e-06,
+ "loss": 0.6476,
+ "step": 178
+ },
+ {
+ "epoch": 0.5605427974947808,
+ "grad_norm": 0.9217923283576965,
+ "learning_rate": 4.976637903595019e-06,
+ "loss": 0.6731,
+ "step": 179
+ },
+ {
+ "epoch": 0.5636743215031316,
+ "grad_norm": 0.8994677662849426,
+ "learning_rate": 4.976043667164063e-06,
+ "loss": 0.6562,
+ "step": 180
+ },
+ {
+ "epoch": 0.5668058455114823,
+ "grad_norm": 1.1613017320632935,
+ "learning_rate": 4.975442004247034e-06,
+ "loss": 0.6417,
+ "step": 181
+ },
+ {
+ "epoch": 0.569937369519833,
+ "grad_norm": 1.6041977405548096,
+ "learning_rate": 4.974832916648521e-06,
+ "loss": 0.6029,
+ "step": 182
+ },
+ {
+ "epoch": 0.5730688935281837,
+ "grad_norm": 1.7978405952453613,
+ "learning_rate": 4.974216406195383e-06,
+ "loss": 0.6269,
+ "step": 183
+ },
+ {
+ "epoch": 0.5762004175365344,
+ "grad_norm": 1.6021920442581177,
+ "learning_rate": 4.973592474736739e-06,
+ "loss": 0.6149,
+ "step": 184
+ },
+ {
+ "epoch": 0.5793319415448852,
+ "grad_norm": 0.8973568677902222,
+ "learning_rate": 4.972961124143971e-06,
+ "loss": 0.6648,
+ "step": 185
+ },
+ {
+ "epoch": 0.5824634655532359,
+ "grad_norm": 1.9432591199874878,
+ "learning_rate": 4.972322356310711e-06,
+ "loss": 0.6299,
+ "step": 186
+ },
+ {
+ "epoch": 0.5855949895615866,
+ "grad_norm": 4.457028388977051,
+ "learning_rate": 4.971676173152839e-06,
+ "loss": 0.656,
+ "step": 187
+ },
+ {
+ "epoch": 0.5887265135699373,
+ "grad_norm": 2.0989716053009033,
+ "learning_rate": 4.971022576608473e-06,
+ "loss": 0.6539,
+ "step": 188
+ },
+ {
+ "epoch": 0.5918580375782881,
+ "grad_norm": 1.0646967887878418,
+ "learning_rate": 4.97036156863797e-06,
+ "loss": 0.6727,
+ "step": 189
+ },
+ {
+ "epoch": 0.5949895615866388,
+ "grad_norm": 1.6522265672683716,
+ "learning_rate": 4.969693151223914e-06,
+ "loss": 0.6643,
+ "step": 190
+ },
+ {
+ "epoch": 0.5981210855949896,
+ "grad_norm": 1.7503505945205688,
+ "learning_rate": 4.969017326371115e-06,
+ "loss": 0.6402,
+ "step": 191
+ },
+ {
+ "epoch": 0.6012526096033403,
+ "grad_norm": 1.2341989278793335,
+ "learning_rate": 4.968334096106597e-06,
+ "loss": 0.6413,
+ "step": 192
+ },
+ {
+ "epoch": 0.6043841336116911,
+ "grad_norm": 3.089054584503174,
+ "learning_rate": 4.967643462479597e-06,
+ "loss": 0.6825,
+ "step": 193
+ },
+ {
+ "epoch": 0.6075156576200418,
+ "grad_norm": 2.711623430252075,
+ "learning_rate": 4.966945427561557e-06,
+ "loss": 0.65,
+ "step": 194
+ },
+ {
+ "epoch": 0.6106471816283925,
+ "grad_norm": 4.641184329986572,
+ "learning_rate": 4.966239993446118e-06,
+ "loss": 0.6229,
+ "step": 195
+ },
+ {
+ "epoch": 0.6137787056367432,
+ "grad_norm": 1.7984074354171753,
+ "learning_rate": 4.965527162249114e-06,
+ "loss": 0.6473,
+ "step": 196
+ },
+ {
+ "epoch": 0.6169102296450939,
+ "grad_norm": 1.1643115282058716,
+ "learning_rate": 4.964806936108566e-06,
+ "loss": 0.6404,
+ "step": 197
+ },
+ {
+ "epoch": 0.6200417536534447,
+ "grad_norm": 2.1877920627593994,
+ "learning_rate": 4.9640793171846725e-06,
+ "loss": 0.6185,
+ "step": 198
+ },
+ {
+ "epoch": 0.6231732776617954,
+ "grad_norm": 1.7970566749572754,
+ "learning_rate": 4.963344307659807e-06,
+ "loss": 0.634,
+ "step": 199
+ },
+ {
+ "epoch": 0.6263048016701461,
+ "grad_norm": 1.6014361381530762,
+ "learning_rate": 4.96260190973851e-06,
+ "loss": 0.6562,
+ "step": 200
+ },
+ {
+ "epoch": 0.6294363256784968,
+ "grad_norm": 0.8743320107460022,
+ "learning_rate": 4.961852125647482e-06,
+ "loss": 0.6133,
+ "step": 201
+ },
+ {
+ "epoch": 0.6325678496868476,
+ "grad_norm": 1.9526551961898804,
+ "learning_rate": 4.961094957635578e-06,
+ "loss": 0.6451,
+ "step": 202
+ },
+ {
+ "epoch": 0.6356993736951984,
+ "grad_norm": 3.6597347259521484,
+ "learning_rate": 4.960330407973798e-06,
+ "loss": 0.6386,
+ "step": 203
+ },
+ {
+ "epoch": 0.6388308977035491,
+ "grad_norm": 1.7180207967758179,
+ "learning_rate": 4.959558478955283e-06,
+ "loss": 0.6688,
+ "step": 204
+ },
+ {
+ "epoch": 0.6419624217118998,
+ "grad_norm": 0.9058470129966736,
+ "learning_rate": 4.958779172895308e-06,
+ "loss": 0.6161,
+ "step": 205
+ },
+ {
+ "epoch": 0.6450939457202505,
+ "grad_norm": 1.0031033754348755,
+ "learning_rate": 4.957992492131274e-06,
+ "loss": 0.6437,
+ "step": 206
+ },
+ {
+ "epoch": 0.6482254697286013,
+ "grad_norm": 1.5846725702285767,
+ "learning_rate": 4.9571984390226985e-06,
+ "loss": 0.6332,
+ "step": 207
+ },
+ {
+ "epoch": 0.651356993736952,
+ "grad_norm": 1.9951609373092651,
+ "learning_rate": 4.956397015951215e-06,
+ "loss": 0.636,
+ "step": 208
+ },
+ {
+ "epoch": 0.6544885177453027,
+ "grad_norm": 1.4122583866119385,
+ "learning_rate": 4.95558822532056e-06,
+ "loss": 0.6586,
+ "step": 209
+ },
+ {
+ "epoch": 0.6576200417536534,
+ "grad_norm": 1.2243481874465942,
+ "learning_rate": 4.954772069556568e-06,
+ "loss": 0.6313,
+ "step": 210
+ },
+ {
+ "epoch": 0.6607515657620042,
+ "grad_norm": 0.8756356835365295,
+ "learning_rate": 4.953948551107164e-06,
+ "loss": 0.6406,
+ "step": 211
+ },
+ {
+ "epoch": 0.6638830897703549,
+ "grad_norm": 2.9979734420776367,
+ "learning_rate": 4.953117672442356e-06,
+ "loss": 0.5803,
+ "step": 212
+ },
+ {
+ "epoch": 0.6670146137787056,
+ "grad_norm": 2.1859359741210938,
+ "learning_rate": 4.952279436054229e-06,
+ "loss": 0.6607,
+ "step": 213
+ },
+ {
+ "epoch": 0.6701461377870563,
+ "grad_norm": 0.6929755806922913,
+ "learning_rate": 4.9514338444569346e-06,
+ "loss": 0.5989,
+ "step": 214
+ },
+ {
+ "epoch": 0.673277661795407,
+ "grad_norm": 1.0361783504486084,
+ "learning_rate": 4.950580900186685e-06,
+ "loss": 0.6654,
+ "step": 215
+ },
+ {
+ "epoch": 0.6764091858037579,
+ "grad_norm": 1.210898518562317,
+ "learning_rate": 4.9497206058017475e-06,
+ "loss": 0.6213,
+ "step": 216
+ },
+ {
+ "epoch": 0.6795407098121086,
+ "grad_norm": 1.200990080833435,
+ "learning_rate": 4.948852963882434e-06,
+ "loss": 0.6654,
+ "step": 217
+ },
+ {
+ "epoch": 0.6826722338204593,
+ "grad_norm": 1.481831669807434,
+ "learning_rate": 4.947977977031093e-06,
+ "loss": 0.6474,
+ "step": 218
+ },
+ {
+ "epoch": 0.68580375782881,
+ "grad_norm": 0.9883334636688232,
+ "learning_rate": 4.947095647872103e-06,
+ "loss": 0.6735,
+ "step": 219
+ },
+ {
+ "epoch": 0.6889352818371608,
+ "grad_norm": 0.7436536550521851,
+ "learning_rate": 4.946205979051868e-06,
+ "loss": 0.6456,
+ "step": 220
+ },
+ {
+ "epoch": 0.6920668058455115,
+ "grad_norm": 0.9057570099830627,
+ "learning_rate": 4.945308973238802e-06,
+ "loss": 0.6228,
+ "step": 221
+ },
+ {
+ "epoch": 0.6951983298538622,
+ "grad_norm": 1.341081142425537,
+ "learning_rate": 4.944404633123324e-06,
+ "loss": 0.6417,
+ "step": 222
+ },
+ {
+ "epoch": 0.6983298538622129,
+ "grad_norm": 0.7958157062530518,
+ "learning_rate": 4.943492961417859e-06,
+ "loss": 0.6494,
+ "step": 223
+ },
+ {
+ "epoch": 0.7014613778705637,
+ "grad_norm": 1.216025471687317,
+ "learning_rate": 4.9425739608568106e-06,
+ "loss": 0.6566,
+ "step": 224
+ },
+ {
+ "epoch": 0.7045929018789144,
+ "grad_norm": 0.9774854779243469,
+ "learning_rate": 4.9416476341965735e-06,
+ "loss": 0.6171,
+ "step": 225
+ },
+ {
+ "epoch": 0.7077244258872651,
+ "grad_norm": 2.1562681198120117,
+ "learning_rate": 4.940713984215512e-06,
+ "loss": 0.629,
+ "step": 226
+ },
+ {
+ "epoch": 0.7108559498956158,
+ "grad_norm": 1.9521286487579346,
+ "learning_rate": 4.9397730137139556e-06,
+ "loss": 0.6475,
+ "step": 227
+ },
+ {
+ "epoch": 0.7139874739039666,
+ "grad_norm": 1.5749104022979736,
+ "learning_rate": 4.9388247255141895e-06,
+ "loss": 0.6053,
+ "step": 228
+ },
+ {
+ "epoch": 0.7171189979123174,
+ "grad_norm": 1.2008254528045654,
+ "learning_rate": 4.937869122460449e-06,
+ "loss": 0.6052,
+ "step": 229
+ },
+ {
+ "epoch": 0.7202505219206681,
+ "grad_norm": 1.0774102210998535,
+ "learning_rate": 4.93690620741891e-06,
+ "loss": 0.6099,
+ "step": 230
+ },
+ {
+ "epoch": 0.7233820459290188,
+ "grad_norm": 1.0929996967315674,
+ "learning_rate": 4.935935983277675e-06,
+ "loss": 0.6363,
+ "step": 231
+ },
+ {
+ "epoch": 0.7265135699373695,
+ "grad_norm": 0.8830653429031372,
+ "learning_rate": 4.934958452946774e-06,
+ "loss": 0.6136,
+ "step": 232
+ },
+ {
+ "epoch": 0.7296450939457203,
+ "grad_norm": 3.591218948364258,
+ "learning_rate": 4.933973619358147e-06,
+ "loss": 0.5962,
+ "step": 233
+ },
+ {
+ "epoch": 0.732776617954071,
+ "grad_norm": 2.5797672271728516,
+ "learning_rate": 4.932981485465643e-06,
+ "loss": 0.6405,
+ "step": 234
+ },
+ {
+ "epoch": 0.7359081419624217,
+ "grad_norm": 1.0467664003372192,
+ "learning_rate": 4.9319820542450025e-06,
+ "loss": 0.6155,
+ "step": 235
+ },
+ {
+ "epoch": 0.7390396659707724,
+ "grad_norm": 0.8099795579910278,
+ "learning_rate": 4.930975328693856e-06,
+ "loss": 0.5615,
+ "step": 236
+ },
+ {
+ "epoch": 0.7421711899791231,
+ "grad_norm": 0.8906702995300293,
+ "learning_rate": 4.92996131183171e-06,
+ "loss": 0.6501,
+ "step": 237
+ },
+ {
+ "epoch": 0.7453027139874739,
+ "grad_norm": 1.0871416330337524,
+ "learning_rate": 4.928940006699944e-06,
+ "loss": 0.6282,
+ "step": 238
+ },
+ {
+ "epoch": 0.7484342379958246,
+ "grad_norm": 1.3209614753723145,
+ "learning_rate": 4.927911416361792e-06,
+ "loss": 0.598,
+ "step": 239
+ },
+ {
+ "epoch": 0.7515657620041754,
+ "grad_norm": 1.2252682447433472,
+ "learning_rate": 4.926875543902344e-06,
+ "loss": 0.6433,
+ "step": 240
+ },
+ {
+ "epoch": 0.7546972860125261,
+ "grad_norm": 1.0569007396697998,
+ "learning_rate": 4.9258323924285285e-06,
+ "loss": 0.5927,
+ "step": 241
+ },
+ {
+ "epoch": 0.7578288100208769,
+ "grad_norm": 0.9309014081954956,
+ "learning_rate": 4.924781965069106e-06,
+ "loss": 0.5927,
+ "step": 242
+ },
+ {
+ "epoch": 0.7609603340292276,
+ "grad_norm": 1.0200378894805908,
+ "learning_rate": 4.923724264974662e-06,
+ "loss": 0.6064,
+ "step": 243
+ },
+ {
+ "epoch": 0.7640918580375783,
+ "grad_norm": 1.0533075332641602,
+ "learning_rate": 4.922659295317593e-06,
+ "loss": 0.6373,
+ "step": 244
+ },
+ {
+ "epoch": 0.767223382045929,
+ "grad_norm": 0.7889382839202881,
+ "learning_rate": 4.921587059292102e-06,
+ "loss": 0.5887,
+ "step": 245
+ },
+ {
+ "epoch": 0.7703549060542797,
+ "grad_norm": 0.7943588495254517,
+ "learning_rate": 4.920507560114183e-06,
+ "loss": 0.593,
+ "step": 246
+ },
+ {
+ "epoch": 0.7734864300626305,
+ "grad_norm": 0.8247205018997192,
+ "learning_rate": 4.919420801021617e-06,
+ "loss": 0.6151,
+ "step": 247
+ },
+ {
+ "epoch": 0.7766179540709812,
+ "grad_norm": 0.9979158043861389,
+ "learning_rate": 4.91832678527396e-06,
+ "loss": 0.6019,
+ "step": 248
+ },
+ {
+ "epoch": 0.7797494780793319,
+ "grad_norm": 0.9346868991851807,
+ "learning_rate": 4.917225516152532e-06,
+ "loss": 0.6098,
+ "step": 249
+ },
+ {
+ "epoch": 0.7828810020876826,
+ "grad_norm": 0.7487881183624268,
+ "learning_rate": 4.916116996960408e-06,
+ "loss": 0.5965,
+ "step": 250
+ },
+ {
+ "epoch": 0.7860125260960334,
+ "grad_norm": 0.821576714515686,
+ "learning_rate": 4.915001231022411e-06,
+ "loss": 0.6483,
+ "step": 251
+ },
+ {
+ "epoch": 0.7891440501043842,
+ "grad_norm": 1.0413196086883545,
+ "learning_rate": 4.913878221685096e-06,
+ "loss": 0.6108,
+ "step": 252
+ },
+ {
+ "epoch": 0.7922755741127349,
+ "grad_norm": 0.9560331702232361,
+ "learning_rate": 4.912747972316745e-06,
+ "loss": 0.5758,
+ "step": 253
+ },
+ {
+ "epoch": 0.7954070981210856,
+ "grad_norm": 0.8964638113975525,
+ "learning_rate": 4.911610486307356e-06,
+ "loss": 0.6432,
+ "step": 254
+ },
+ {
+ "epoch": 0.7985386221294363,
+ "grad_norm": 0.8418346047401428,
+ "learning_rate": 4.910465767068631e-06,
+ "loss": 0.6027,
+ "step": 255
+ },
+ {
+ "epoch": 0.8016701461377871,
+ "grad_norm": 1.792371153831482,
+ "learning_rate": 4.909313818033966e-06,
+ "loss": 0.6198,
+ "step": 256
+ },
+ {
+ "epoch": 0.8048016701461378,
+ "grad_norm": 1.036665439605713,
+ "learning_rate": 4.908154642658446e-06,
+ "loss": 0.6255,
+ "step": 257
+ },
+ {
+ "epoch": 0.8079331941544885,
+ "grad_norm": 0.7592151165008545,
+ "learning_rate": 4.906988244418823e-06,
+ "loss": 0.6035,
+ "step": 258
+ },
+ {
+ "epoch": 0.8110647181628392,
+ "grad_norm": 0.8843073844909668,
+ "learning_rate": 4.90581462681352e-06,
+ "loss": 0.6299,
+ "step": 259
+ },
+ {
+ "epoch": 0.81419624217119,
+ "grad_norm": 0.9489964246749878,
+ "learning_rate": 4.9046337933626086e-06,
+ "loss": 0.5869,
+ "step": 260
+ },
+ {
+ "epoch": 0.8173277661795407,
+ "grad_norm": 0.851691722869873,
+ "learning_rate": 4.903445747607806e-06,
+ "loss": 0.603,
+ "step": 261
+ },
+ {
+ "epoch": 0.8204592901878914,
+ "grad_norm": 1.3722106218338013,
+ "learning_rate": 4.902250493112458e-06,
+ "loss": 0.5939,
+ "step": 262
+ },
+ {
+ "epoch": 0.8235908141962421,
+ "grad_norm": 1.1002827882766724,
+ "learning_rate": 4.901048033461537e-06,
+ "loss": 0.6452,
+ "step": 263
+ },
+ {
+ "epoch": 0.826722338204593,
+ "grad_norm": 0.8428632020950317,
+ "learning_rate": 4.89983837226162e-06,
+ "loss": 0.5956,
+ "step": 264
+ },
+ {
+ "epoch": 0.8298538622129437,
+ "grad_norm": 0.7666584849357605,
+ "learning_rate": 4.898621513140889e-06,
+ "loss": 0.6067,
+ "step": 265
+ },
+ {
+ "epoch": 0.8329853862212944,
+ "grad_norm": 0.8413611054420471,
+ "learning_rate": 4.897397459749113e-06,
+ "loss": 0.5985,
+ "step": 266
+ },
+ {
+ "epoch": 0.8361169102296451,
+ "grad_norm": 2.3374335765838623,
+ "learning_rate": 4.896166215757638e-06,
+ "loss": 0.5885,
+ "step": 267
+ },
+ {
+ "epoch": 0.8392484342379958,
+ "grad_norm": 2.236640214920044,
+ "learning_rate": 4.894927784859377e-06,
+ "loss": 0.6408,
+ "step": 268
+ },
+ {
+ "epoch": 0.8423799582463466,
+ "grad_norm": 0.9715856313705444,
+ "learning_rate": 4.893682170768802e-06,
+ "loss": 0.5954,
+ "step": 269
+ },
+ {
+ "epoch": 0.8455114822546973,
+ "grad_norm": 1.0249912738800049,
+ "learning_rate": 4.892429377221928e-06,
+ "loss": 0.6186,
+ "step": 270
+ },
+ {
+ "epoch": 0.848643006263048,
+ "grad_norm": 1.255426049232483,
+ "learning_rate": 4.891169407976302e-06,
+ "loss": 0.6351,
+ "step": 271
+ },
+ {
+ "epoch": 0.8517745302713987,
+ "grad_norm": 0.9339559674263,
+ "learning_rate": 4.889902266810995e-06,
+ "loss": 0.5944,
+ "step": 272
+ },
+ {
+ "epoch": 0.8549060542797495,
+ "grad_norm": 1.2473429441452026,
+ "learning_rate": 4.888627957526589e-06,
+ "loss": 0.544,
+ "step": 273
+ },
+ {
+ "epoch": 0.8580375782881002,
+ "grad_norm": 1.0589442253112793,
+ "learning_rate": 4.887346483945166e-06,
+ "loss": 0.5543,
+ "step": 274
+ },
+ {
+ "epoch": 0.8611691022964509,
+ "grad_norm": 0.9844024777412415,
+ "learning_rate": 4.886057849910294e-06,
+ "loss": 0.5941,
+ "step": 275
+ },
+ {
+ "epoch": 0.8643006263048016,
+ "grad_norm": 2.88578200340271,
+ "learning_rate": 4.8847620592870196e-06,
+ "loss": 0.6124,
+ "step": 276
+ },
+ {
+ "epoch": 0.8674321503131524,
+ "grad_norm": 0.7496054172515869,
+ "learning_rate": 4.8834591159618524e-06,
+ "loss": 0.6006,
+ "step": 277
+ },
+ {
+ "epoch": 0.8705636743215032,
+ "grad_norm": 0.7403052449226379,
+ "learning_rate": 4.88214902384276e-06,
+ "loss": 0.5911,
+ "step": 278
+ },
+ {
+ "epoch": 0.8736951983298539,
+ "grad_norm": 0.9003771543502808,
+ "learning_rate": 4.880831786859146e-06,
+ "loss": 0.6347,
+ "step": 279
+ },
+ {
+ "epoch": 0.8768267223382046,
+ "grad_norm": 1.0345501899719238,
+ "learning_rate": 4.879507408961847e-06,
+ "loss": 0.6111,
+ "step": 280
+ },
+ {
+ "epoch": 0.8799582463465553,
+ "grad_norm": 1.4385879039764404,
+ "learning_rate": 4.878175894123116e-06,
+ "loss": 0.6454,
+ "step": 281
+ },
+ {
+ "epoch": 0.8830897703549061,
+ "grad_norm": 0.8469482064247131,
+ "learning_rate": 4.8768372463366145e-06,
+ "loss": 0.6163,
+ "step": 282
+ },
+ {
+ "epoch": 0.8862212943632568,
+ "grad_norm": 0.8859589695930481,
+ "learning_rate": 4.875491469617395e-06,
+ "loss": 0.6144,
+ "step": 283
+ },
+ {
+ "epoch": 0.8893528183716075,
+ "grad_norm": 1.8436834812164307,
+ "learning_rate": 4.874138568001895e-06,
+ "loss": 0.6275,
+ "step": 284
+ },
+ {
+ "epoch": 0.8924843423799582,
+ "grad_norm": 0.6646101474761963,
+ "learning_rate": 4.87277854554792e-06,
+ "loss": 0.615,
+ "step": 285
+ },
+ {
+ "epoch": 0.8956158663883089,
+ "grad_norm": 1.0070925951004028,
+ "learning_rate": 4.871411406334633e-06,
+ "loss": 0.5898,
+ "step": 286
+ },
+ {
+ "epoch": 0.8987473903966597,
+ "grad_norm": 0.9785194993019104,
+ "learning_rate": 4.870037154462545e-06,
+ "loss": 0.5992,
+ "step": 287
+ },
+ {
+ "epoch": 0.9018789144050104,
+ "grad_norm": 0.7244889736175537,
+ "learning_rate": 4.868655794053497e-06,
+ "loss": 0.6078,
+ "step": 288
+ },
+ {
+ "epoch": 0.9050104384133612,
+ "grad_norm": 1.4496444463729858,
+ "learning_rate": 4.8672673292506535e-06,
+ "loss": 0.5855,
+ "step": 289
+ },
+ {
+ "epoch": 0.9081419624217119,
+ "grad_norm": 1.8514957427978516,
+ "learning_rate": 4.865871764218486e-06,
+ "loss": 0.5707,
+ "step": 290
+ },
+ {
+ "epoch": 0.9112734864300627,
+ "grad_norm": 0.8439773321151733,
+ "learning_rate": 4.864469103142763e-06,
+ "loss": 0.5562,
+ "step": 291
+ },
+ {
+ "epoch": 0.9144050104384134,
+ "grad_norm": 0.8146086931228638,
+ "learning_rate": 4.8630593502305355e-06,
+ "loss": 0.6161,
+ "step": 292
+ },
+ {
+ "epoch": 0.9175365344467641,
+ "grad_norm": 0.8920315504074097,
+ "learning_rate": 4.861642509710126e-06,
+ "loss": 0.6139,
+ "step": 293
+ },
+ {
+ "epoch": 0.9206680584551148,
+ "grad_norm": 1.4980088472366333,
+ "learning_rate": 4.860218585831116e-06,
+ "loss": 0.6187,
+ "step": 294
+ },
+ {
+ "epoch": 0.9237995824634656,
+ "grad_norm": 0.9910127520561218,
+ "learning_rate": 4.8587875828643285e-06,
+ "loss": 0.5852,
+ "step": 295
+ },
+ {
+ "epoch": 0.9269311064718163,
+ "grad_norm": 0.819600522518158,
+ "learning_rate": 4.857349505101823e-06,
+ "loss": 0.6172,
+ "step": 296
+ },
+ {
+ "epoch": 0.930062630480167,
+ "grad_norm": 1.1059772968292236,
+ "learning_rate": 4.855904356856878e-06,
+ "loss": 0.5868,
+ "step": 297
+ },
+ {
+ "epoch": 0.9331941544885177,
+ "grad_norm": 1.2362196445465088,
+ "learning_rate": 4.854452142463977e-06,
+ "loss": 0.625,
+ "step": 298
+ },
+ {
+ "epoch": 0.9363256784968684,
+ "grad_norm": 0.9956470727920532,
+ "learning_rate": 4.852992866278799e-06,
+ "loss": 0.5923,
+ "step": 299
+ },
+ {
+ "epoch": 0.9394572025052192,
+ "grad_norm": 0.864109218120575,
+ "learning_rate": 4.851526532678203e-06,
+ "loss": 0.6315,
+ "step": 300
+ },
+ {
+ "epoch": 0.94258872651357,
+ "grad_norm": 0.8900614380836487,
+ "learning_rate": 4.850053146060217e-06,
+ "loss": 0.6128,
+ "step": 301
+ },
+ {
+ "epoch": 0.9457202505219207,
+ "grad_norm": 0.927254855632782,
+ "learning_rate": 4.84857271084402e-06,
+ "loss": 0.5955,
+ "step": 302
+ },
+ {
+ "epoch": 0.9488517745302714,
+ "grad_norm": 1.0046517848968506,
+ "learning_rate": 4.847085231469935e-06,
+ "loss": 0.6134,
+ "step": 303
+ },
+ {
+ "epoch": 0.9519832985386222,
+ "grad_norm": 0.734597384929657,
+ "learning_rate": 4.8455907123994125e-06,
+ "loss": 0.5927,
+ "step": 304
+ },
+ {
+ "epoch": 0.9551148225469729,
+ "grad_norm": 0.7338348031044006,
+ "learning_rate": 4.844089158115016e-06,
+ "loss": 0.5897,
+ "step": 305
+ },
+ {
+ "epoch": 0.9582463465553236,
+ "grad_norm": 0.9163988828659058,
+ "learning_rate": 4.8425805731204106e-06,
+ "loss": 0.6051,
+ "step": 306
+ },
+ {
+ "epoch": 0.9613778705636743,
+ "grad_norm": 1.050246238708496,
+ "learning_rate": 4.84106496194035e-06,
+ "loss": 0.5751,
+ "step": 307
+ },
+ {
+ "epoch": 0.964509394572025,
+ "grad_norm": 0.7637603878974915,
+ "learning_rate": 4.83954232912066e-06,
+ "loss": 0.5677,
+ "step": 308
+ },
+ {
+ "epoch": 0.9676409185803758,
+ "grad_norm": 0.7110525965690613,
+ "learning_rate": 4.838012679228229e-06,
+ "loss": 0.6051,
+ "step": 309
+ },
+ {
+ "epoch": 0.9707724425887265,
+ "grad_norm": 0.7662068605422974,
+ "learning_rate": 4.836476016850988e-06,
+ "loss": 0.59,
+ "step": 310
+ },
+ {
+ "epoch": 0.9739039665970772,
+ "grad_norm": 0.8907375335693359,
+ "learning_rate": 4.834932346597906e-06,
+ "loss": 0.5792,
+ "step": 311
+ },
+ {
+ "epoch": 0.9770354906054279,
+ "grad_norm": 0.8939849138259888,
+ "learning_rate": 4.833381673098966e-06,
+ "loss": 0.6062,
+ "step": 312
+ },
+ {
+ "epoch": 0.9801670146137788,
+ "grad_norm": 0.8878788948059082,
+ "learning_rate": 4.8318240010051595e-06,
+ "loss": 0.5694,
+ "step": 313
+ },
+ {
+ "epoch": 0.9832985386221295,
+ "grad_norm": 1.2523870468139648,
+ "learning_rate": 4.830259334988468e-06,
+ "loss": 0.5809,
+ "step": 314
+ },
+ {
+ "epoch": 0.9864300626304802,
+ "grad_norm": 1.0836797952651978,
+ "learning_rate": 4.82868767974185e-06,
+ "loss": 0.5949,
+ "step": 315
+ },
+ {
+ "epoch": 0.9895615866388309,
+ "grad_norm": 0.7985473871231079,
+ "learning_rate": 4.827109039979226e-06,
+ "loss": 0.6057,
+ "step": 316
+ },
+ {
+ "epoch": 0.9926931106471816,
+ "grad_norm": 1.042951226234436,
+ "learning_rate": 4.825523420435469e-06,
+ "loss": 0.6004,
+ "step": 317
+ },
+ {
+ "epoch": 0.9958246346555324,
+ "grad_norm": 0.7845115661621094,
+ "learning_rate": 4.823930825866381e-06,
+ "loss": 0.6161,
+ "step": 318
+ },
+ {
+ "epoch": 0.9989561586638831,
+ "grad_norm": 0.931854784488678,
+ "learning_rate": 4.82233126104869e-06,
+ "loss": 0.5912,
+ "step": 319
+ },
+ {
+ "epoch": 1.0,
+ "grad_norm": 0.931854784488678,
+ "learning_rate": 4.8207247307800275e-06,
+ "loss": 0.1914,
+ "step": 320
+ },
+ {
+ "epoch": 1.0031315240083507,
+ "grad_norm": 0.751028835773468,
+ "learning_rate": 4.819111239878916e-06,
+ "loss": 0.5802,
+ "step": 321
+ },
+ {
+ "epoch": 1.0062630480167014,
+ "grad_norm": 1.4943569898605347,
+ "learning_rate": 4.817490793184758e-06,
+ "loss": 0.613,
+ "step": 322
+ },
+ {
+ "epoch": 1.0093945720250521,
+ "grad_norm": 2.296318531036377,
+ "learning_rate": 4.815863395557816e-06,
+ "loss": 0.5453,
+ "step": 323
+ },
+ {
+ "epoch": 1.0125260960334028,
+ "grad_norm": 0.760101318359375,
+ "learning_rate": 4.814229051879202e-06,
+ "loss": 0.5302,
+ "step": 324
+ },
+ {
+ "epoch": 1.0156576200417538,
+ "grad_norm": 0.8145846128463745,
+ "learning_rate": 4.812587767050861e-06,
+ "loss": 0.5831,
+ "step": 325
+ },
+ {
+ "epoch": 1.0187891440501045,
+ "grad_norm": 0.9169796109199524,
+ "learning_rate": 4.8109395459955565e-06,
+ "loss": 0.5756,
+ "step": 326
+ },
+ {
+ "epoch": 1.0219206680584552,
+ "grad_norm": 0.8791524171829224,
+ "learning_rate": 4.809284393656858e-06,
+ "loss": 0.5988,
+ "step": 327
+ },
+ {
+ "epoch": 1.0250521920668059,
+ "grad_norm": 1.0184170007705688,
+ "learning_rate": 4.807622314999122e-06,
+ "loss": 0.5476,
+ "step": 328
+ },
+ {
+ "epoch": 1.0281837160751566,
+ "grad_norm": 0.8095184564590454,
+ "learning_rate": 4.8059533150074805e-06,
+ "loss": 0.5723,
+ "step": 329
+ },
+ {
+ "epoch": 1.0313152400835073,
+ "grad_norm": 0.7621930241584778,
+ "learning_rate": 4.804277398687826e-06,
+ "loss": 0.5841,
+ "step": 330
+ },
+ {
+ "epoch": 1.034446764091858,
+ "grad_norm": 3.729628324508667,
+ "learning_rate": 4.802594571066791e-06,
+ "loss": 0.5639,
+ "step": 331
+ },
+ {
+ "epoch": 1.0375782881002087,
+ "grad_norm": 1.6502974033355713,
+ "learning_rate": 4.800904837191743e-06,
+ "loss": 0.6024,
+ "step": 332
+ },
+ {
+ "epoch": 1.0407098121085594,
+ "grad_norm": 0.8031198978424072,
+ "learning_rate": 4.799208202130762e-06,
+ "loss": 0.5305,
+ "step": 333
+ },
+ {
+ "epoch": 1.0438413361169103,
+ "grad_norm": 0.939644992351532,
+ "learning_rate": 4.797504670972623e-06,
+ "loss": 0.5446,
+ "step": 334
+ },
+ {
+ "epoch": 1.046972860125261,
+ "grad_norm": 1.0589954853057861,
+ "learning_rate": 4.795794248826789e-06,
+ "loss": 0.5366,
+ "step": 335
+ },
+ {
+ "epoch": 1.0501043841336117,
+ "grad_norm": 0.9089614748954773,
+ "learning_rate": 4.794076940823391e-06,
+ "loss": 0.5795,
+ "step": 336
+ },
+ {
+ "epoch": 1.0532359081419624,
+ "grad_norm": 0.7732561230659485,
+ "learning_rate": 4.792352752113212e-06,
+ "loss": 0.5765,
+ "step": 337
+ },
+ {
+ "epoch": 1.0563674321503131,
+ "grad_norm": 1.811553955078125,
+ "learning_rate": 4.790621687867672e-06,
+ "loss": 0.561,
+ "step": 338
+ },
+ {
+ "epoch": 1.0594989561586639,
+ "grad_norm": 1.1930758953094482,
+ "learning_rate": 4.788883753278813e-06,
+ "loss": 0.5,
+ "step": 339
+ },
+ {
+ "epoch": 1.0626304801670146,
+ "grad_norm": 0.9551813006401062,
+ "learning_rate": 4.787138953559285e-06,
+ "loss": 0.5228,
+ "step": 340
+ },
+ {
+ "epoch": 1.0657620041753653,
+ "grad_norm": 0.9609586596488953,
+ "learning_rate": 4.785387293942329e-06,
+ "loss": 0.5827,
+ "step": 341
+ },
+ {
+ "epoch": 1.068893528183716,
+ "grad_norm": 0.8403449654579163,
+ "learning_rate": 4.783628779681759e-06,
+ "loss": 0.5585,
+ "step": 342
+ },
+ {
+ "epoch": 1.072025052192067,
+ "grad_norm": 0.9108251929283142,
+ "learning_rate": 4.7818634160519496e-06,
+ "loss": 0.6077,
+ "step": 343
+ },
+ {
+ "epoch": 1.0751565762004176,
+ "grad_norm": 0.9476898908615112,
+ "learning_rate": 4.780091208347819e-06,
+ "loss": 0.5493,
+ "step": 344
+ },
+ {
+ "epoch": 1.0782881002087683,
+ "grad_norm": 1.1943707466125488,
+ "learning_rate": 4.778312161884813e-06,
+ "loss": 0.5736,
+ "step": 345
+ },
+ {
+ "epoch": 1.081419624217119,
+ "grad_norm": 3.1342639923095703,
+ "learning_rate": 4.77652628199889e-06,
+ "loss": 0.5765,
+ "step": 346
+ },
+ {
+ "epoch": 1.0845511482254697,
+ "grad_norm": 2.7982125282287598,
+ "learning_rate": 4.7747335740465015e-06,
+ "loss": 0.6003,
+ "step": 347
+ },
+ {
+ "epoch": 1.0876826722338204,
+ "grad_norm": 1.5068914890289307,
+ "learning_rate": 4.7729340434045815e-06,
+ "loss": 0.5033,
+ "step": 348
+ },
+ {
+ "epoch": 1.0908141962421711,
+ "grad_norm": 0.8273429274559021,
+ "learning_rate": 4.771127695470527e-06,
+ "loss": 0.5309,
+ "step": 349
+ },
+ {
+ "epoch": 1.0939457202505218,
+ "grad_norm": 1.104974389076233,
+ "learning_rate": 4.76931453566218e-06,
+ "loss": 0.5244,
+ "step": 350
+ },
+ {
+ "epoch": 1.0970772442588728,
+ "grad_norm": 1.096509337425232,
+ "learning_rate": 4.7674945694178166e-06,
+ "loss": 0.5585,
+ "step": 351
+ },
+ {
+ "epoch": 1.1002087682672235,
+ "grad_norm": 1.0238200426101685,
+ "learning_rate": 4.765667802196127e-06,
+ "loss": 0.5589,
+ "step": 352
+ },
+ {
+ "epoch": 1.1033402922755742,
+ "grad_norm": 0.7515526413917542,
+ "learning_rate": 4.763834239476197e-06,
+ "loss": 0.5304,
+ "step": 353
+ },
+ {
+ "epoch": 1.1064718162839249,
+ "grad_norm": 1.0282566547393799,
+ "learning_rate": 4.761993886757499e-06,
+ "loss": 0.5476,
+ "step": 354
+ },
+ {
+ "epoch": 1.1096033402922756,
+ "grad_norm": 0.9962708950042725,
+ "learning_rate": 4.760146749559868e-06,
+ "loss": 0.5117,
+ "step": 355
+ },
+ {
+ "epoch": 1.1127348643006263,
+ "grad_norm": 0.7851671576499939,
+ "learning_rate": 4.758292833423488e-06,
+ "loss": 0.5542,
+ "step": 356
+ },
+ {
+ "epoch": 1.115866388308977,
+ "grad_norm": 0.8857759237289429,
+ "learning_rate": 4.756432143908876e-06,
+ "loss": 0.544,
+ "step": 357
+ },
+ {
+ "epoch": 1.1189979123173277,
+ "grad_norm": 0.9402740597724915,
+ "learning_rate": 4.7545646865968645e-06,
+ "loss": 0.5656,
+ "step": 358
+ },
+ {
+ "epoch": 1.1221294363256784,
+ "grad_norm": 0.8210407495498657,
+ "learning_rate": 4.752690467088584e-06,
+ "loss": 0.5733,
+ "step": 359
+ },
+ {
+ "epoch": 1.1252609603340291,
+ "grad_norm": 0.795684278011322,
+ "learning_rate": 4.750809491005449e-06,
+ "loss": 0.5678,
+ "step": 360
+ },
+ {
+ "epoch": 1.12839248434238,
+ "grad_norm": 0.8712463974952698,
+ "learning_rate": 4.748921763989139e-06,
+ "loss": 0.5777,
+ "step": 361
+ },
+ {
+ "epoch": 1.1315240083507307,
+ "grad_norm": 0.9810119867324829,
+ "learning_rate": 4.747027291701578e-06,
+ "loss": 0.5511,
+ "step": 362
+ },
+ {
+ "epoch": 1.1346555323590815,
+ "grad_norm": 0.81117844581604,
+ "learning_rate": 4.745126079824926e-06,
+ "loss": 0.5038,
+ "step": 363
+ },
+ {
+ "epoch": 1.1377870563674322,
+ "grad_norm": 0.7631494402885437,
+ "learning_rate": 4.743218134061556e-06,
+ "loss": 0.6272,
+ "step": 364
+ },
+ {
+ "epoch": 1.1409185803757829,
+ "grad_norm": 0.7601696252822876,
+ "learning_rate": 4.741303460134038e-06,
+ "loss": 0.571,
+ "step": 365
+ },
+ {
+ "epoch": 1.1440501043841336,
+ "grad_norm": 1.7977744340896606,
+ "learning_rate": 4.7393820637851205e-06,
+ "loss": 0.538,
+ "step": 366
+ },
+ {
+ "epoch": 1.1471816283924843,
+ "grad_norm": 2.022578001022339,
+ "learning_rate": 4.737453950777718e-06,
+ "loss": 0.5822,
+ "step": 367
+ },
+ {
+ "epoch": 1.150313152400835,
+ "grad_norm": 0.7586764693260193,
+ "learning_rate": 4.735519126894885e-06,
+ "loss": 0.5986,
+ "step": 368
+ },
+ {
+ "epoch": 1.153444676409186,
+ "grad_norm": 0.8970286846160889,
+ "learning_rate": 4.733577597939812e-06,
+ "loss": 0.542,
+ "step": 369
+ },
+ {
+ "epoch": 1.1565762004175366,
+ "grad_norm": 0.8546352982521057,
+ "learning_rate": 4.731629369735793e-06,
+ "loss": 0.5832,
+ "step": 370
+ },
+ {
+ "epoch": 1.1597077244258873,
+ "grad_norm": 0.9266164898872375,
+ "learning_rate": 4.72967444812622e-06,
+ "loss": 0.551,
+ "step": 371
+ },
+ {
+ "epoch": 1.162839248434238,
+ "grad_norm": 1.0413658618927002,
+ "learning_rate": 4.7277128389745595e-06,
+ "loss": 0.5866,
+ "step": 372
+ },
+ {
+ "epoch": 1.1659707724425887,
+ "grad_norm": 0.9312199950218201,
+ "learning_rate": 4.7257445481643334e-06,
+ "loss": 0.5723,
+ "step": 373
+ },
+ {
+ "epoch": 1.1691022964509394,
+ "grad_norm": 0.7389806509017944,
+ "learning_rate": 4.723769581599109e-06,
+ "loss": 0.5209,
+ "step": 374
+ },
+ {
+ "epoch": 1.1722338204592901,
+ "grad_norm": 3.053169012069702,
+ "learning_rate": 4.721787945202472e-06,
+ "loss": 0.6094,
+ "step": 375
+ },
+ {
+ "epoch": 1.1753653444676408,
+ "grad_norm": 1.288589596748352,
+ "learning_rate": 4.719799644918017e-06,
+ "loss": 0.5616,
+ "step": 376
+ },
+ {
+ "epoch": 1.1784968684759916,
+ "grad_norm": 0.7675042152404785,
+ "learning_rate": 4.717804686709323e-06,
+ "loss": 0.4963,
+ "step": 377
+ },
+ {
+ "epoch": 1.1816283924843423,
+ "grad_norm": 0.7246491312980652,
+ "learning_rate": 4.715803076559938e-06,
+ "loss": 0.5273,
+ "step": 378
+ },
+ {
+ "epoch": 1.1847599164926932,
+ "grad_norm": 0.8193361759185791,
+ "learning_rate": 4.713794820473366e-06,
+ "loss": 0.6107,
+ "step": 379
+ },
+ {
+ "epoch": 1.187891440501044,
+ "grad_norm": 0.9498510360717773,
+ "learning_rate": 4.711779924473037e-06,
+ "loss": 0.5421,
+ "step": 380
+ },
+ {
+ "epoch": 1.1910229645093946,
+ "grad_norm": 1.0479756593704224,
+ "learning_rate": 4.709758394602305e-06,
+ "loss": 0.5257,
+ "step": 381
+ },
+ {
+ "epoch": 1.1941544885177453,
+ "grad_norm": 0.907866895198822,
+ "learning_rate": 4.707730236924413e-06,
+ "loss": 0.5289,
+ "step": 382
+ },
+ {
+ "epoch": 1.197286012526096,
+ "grad_norm": 0.8861165642738342,
+ "learning_rate": 4.705695457522488e-06,
+ "loss": 0.5727,
+ "step": 383
+ },
+ {
+ "epoch": 1.2004175365344467,
+ "grad_norm": 0.7467761039733887,
+ "learning_rate": 4.703654062499516e-06,
+ "loss": 0.5602,
+ "step": 384
+ },
+ {
+ "epoch": 1.2035490605427974,
+ "grad_norm": 0.7456198334693909,
+ "learning_rate": 4.701606057978325e-06,
+ "loss": 0.5345,
+ "step": 385
+ },
+ {
+ "epoch": 1.2066805845511483,
+ "grad_norm": 1.9976060390472412,
+ "learning_rate": 4.699551450101571e-06,
+ "loss": 0.5504,
+ "step": 386
+ },
+ {
+ "epoch": 1.209812108559499,
+ "grad_norm": 1.5253807306289673,
+ "learning_rate": 4.697490245031709e-06,
+ "loss": 0.5568,
+ "step": 387
+ },
+ {
+ "epoch": 1.2129436325678498,
+ "grad_norm": 1.0786075592041016,
+ "learning_rate": 4.6954224489509885e-06,
+ "loss": 0.5564,
+ "step": 388
+ },
+ {
+ "epoch": 1.2160751565762005,
+ "grad_norm": 0.8385995030403137,
+ "learning_rate": 4.693348068061422e-06,
+ "loss": 0.5341,
+ "step": 389
+ },
+ {
+ "epoch": 1.2192066805845512,
+ "grad_norm": 0.8184949159622192,
+ "learning_rate": 4.691267108584774e-06,
+ "loss": 0.5614,
+ "step": 390
+ },
+ {
+ "epoch": 1.2223382045929019,
+ "grad_norm": 0.9964898824691772,
+ "learning_rate": 4.68917957676254e-06,
+ "loss": 0.5589,
+ "step": 391
+ },
+ {
+ "epoch": 1.2254697286012526,
+ "grad_norm": 1.0168914794921875,
+ "learning_rate": 4.687085478855931e-06,
+ "loss": 0.5892,
+ "step": 392
+ },
+ {
+ "epoch": 1.2286012526096033,
+ "grad_norm": 0.8841140866279602,
+ "learning_rate": 4.684984821145846e-06,
+ "loss": 0.5327,
+ "step": 393
+ },
+ {
+ "epoch": 1.231732776617954,
+ "grad_norm": 0.834431529045105,
+ "learning_rate": 4.682877609932866e-06,
+ "loss": 0.5594,
+ "step": 394
+ },
+ {
+ "epoch": 1.2348643006263047,
+ "grad_norm": 0.7256641983985901,
+ "learning_rate": 4.6807638515372234e-06,
+ "loss": 0.5443,
+ "step": 395
+ },
+ {
+ "epoch": 1.2379958246346556,
+ "grad_norm": 0.765096127986908,
+ "learning_rate": 4.678643552298788e-06,
+ "loss": 0.5439,
+ "step": 396
+ },
+ {
+ "epoch": 1.2411273486430063,
+ "grad_norm": 0.8760455846786499,
+ "learning_rate": 4.676516718577051e-06,
+ "loss": 0.5485,
+ "step": 397
+ },
+ {
+ "epoch": 1.244258872651357,
+ "grad_norm": 2.7111501693725586,
+ "learning_rate": 4.674383356751099e-06,
+ "loss": 0.5696,
+ "step": 398
+ },
+ {
+ "epoch": 1.2473903966597077,
+ "grad_norm": 1.0521738529205322,
+ "learning_rate": 4.672243473219601e-06,
+ "loss": 0.5503,
+ "step": 399
+ },
+ {
+ "epoch": 1.2505219206680585,
+ "grad_norm": 0.8909669518470764,
+ "learning_rate": 4.670097074400785e-06,
+ "loss": 0.5183,
+ "step": 400
+ },
+ {
+ "epoch": 1.2536534446764092,
+ "grad_norm": 0.7483847737312317,
+ "learning_rate": 4.667944166732424e-06,
+ "loss": 0.5669,
+ "step": 401
+ },
+ {
+ "epoch": 1.2567849686847599,
+ "grad_norm": 1.146997094154358,
+ "learning_rate": 4.665784756671808e-06,
+ "loss": 0.5464,
+ "step": 402
+ },
+ {
+ "epoch": 1.2599164926931106,
+ "grad_norm": 0.8998096585273743,
+ "learning_rate": 4.663618850695733e-06,
+ "loss": 0.5502,
+ "step": 403
+ },
+ {
+ "epoch": 1.2630480167014615,
+ "grad_norm": 0.8882688283920288,
+ "learning_rate": 4.6614464553004795e-06,
+ "loss": 0.5507,
+ "step": 404
+ },
+ {
+ "epoch": 1.2661795407098122,
+ "grad_norm": 0.8310684561729431,
+ "learning_rate": 4.659267577001789e-06,
+ "loss": 0.5164,
+ "step": 405
+ },
+ {
+ "epoch": 1.269311064718163,
+ "grad_norm": 0.9286114573478699,
+ "learning_rate": 4.657082222334851e-06,
+ "loss": 0.4813,
+ "step": 406
+ },
+ {
+ "epoch": 1.2724425887265136,
+ "grad_norm": 1.2394906282424927,
+ "learning_rate": 4.654890397854275e-06,
+ "loss": 0.5837,
+ "step": 407
+ },
+ {
+ "epoch": 1.2755741127348643,
+ "grad_norm": 4.00585412979126,
+ "learning_rate": 4.652692110134079e-06,
+ "loss": 0.5453,
+ "step": 408
+ },
+ {
+ "epoch": 1.278705636743215,
+ "grad_norm": 1.1667803525924683,
+ "learning_rate": 4.650487365767667e-06,
+ "loss": 0.5652,
+ "step": 409
+ },
+ {
+ "epoch": 1.2818371607515657,
+ "grad_norm": 0.9351289868354797,
+ "learning_rate": 4.648276171367807e-06,
+ "loss": 0.5576,
+ "step": 410
+ },
+ {
+ "epoch": 1.2849686847599164,
+ "grad_norm": 0.8107728958129883,
+ "learning_rate": 4.646058533566614e-06,
+ "loss": 0.5821,
+ "step": 411
+ },
+ {
+ "epoch": 1.2881002087682671,
+ "grad_norm": 0.7293011546134949,
+ "learning_rate": 4.643834459015525e-06,
+ "loss": 0.5363,
+ "step": 412
+ },
+ {
+ "epoch": 1.2912317327766178,
+ "grad_norm": 0.7550690770149231,
+ "learning_rate": 4.641603954385289e-06,
+ "loss": 0.53,
+ "step": 413
+ },
+ {
+ "epoch": 1.2943632567849686,
+ "grad_norm": 0.7626177072525024,
+ "learning_rate": 4.639367026365938e-06,
+ "loss": 0.5307,
+ "step": 414
+ },
+ {
+ "epoch": 1.2974947807933195,
+ "grad_norm": 1.0841096639633179,
+ "learning_rate": 4.637123681666769e-06,
+ "loss": 0.5162,
+ "step": 415
+ },
+ {
+ "epoch": 1.3006263048016702,
+ "grad_norm": 0.8814271092414856,
+ "learning_rate": 4.634873927016326e-06,
+ "loss": 0.5369,
+ "step": 416
+ },
+ {
+ "epoch": 1.303757828810021,
+ "grad_norm": 0.7402971982955933,
+ "learning_rate": 4.632617769162378e-06,
+ "loss": 0.5846,
+ "step": 417
+ },
+ {
+ "epoch": 1.3068893528183716,
+ "grad_norm": 0.8106061220169067,
+ "learning_rate": 4.6303552148719e-06,
+ "loss": 0.5289,
+ "step": 418
+ },
+ {
+ "epoch": 1.3100208768267223,
+ "grad_norm": 0.9241361618041992,
+ "learning_rate": 4.628086270931053e-06,
+ "loss": 0.5714,
+ "step": 419
+ },
+ {
+ "epoch": 1.313152400835073,
+ "grad_norm": 0.950332522392273,
+ "learning_rate": 4.625810944145159e-06,
+ "loss": 0.5817,
+ "step": 420
+ },
+ {
+ "epoch": 1.316283924843424,
+ "grad_norm": 0.9037718772888184,
+ "learning_rate": 4.623529241338689e-06,
+ "loss": 0.5902,
+ "step": 421
+ },
+ {
+ "epoch": 1.3194154488517746,
+ "grad_norm": 1.2110658884048462,
+ "learning_rate": 4.621241169355234e-06,
+ "loss": 0.561,
+ "step": 422
+ },
+ {
+ "epoch": 1.3225469728601253,
+ "grad_norm": 0.8582742214202881,
+ "learning_rate": 4.618946735057491e-06,
+ "loss": 0.5003,
+ "step": 423
+ },
+ {
+ "epoch": 1.325678496868476,
+ "grad_norm": 0.9203405976295471,
+ "learning_rate": 4.6166459453272386e-06,
+ "loss": 0.5639,
+ "step": 424
+ },
+ {
+ "epoch": 1.3288100208768268,
+ "grad_norm": 0.933721125125885,
+ "learning_rate": 4.614338807065317e-06,
+ "loss": 0.5766,
+ "step": 425
+ },
+ {
+ "epoch": 1.3319415448851775,
+ "grad_norm": 0.8435131311416626,
+ "learning_rate": 4.612025327191608e-06,
+ "loss": 0.5656,
+ "step": 426
+ },
+ {
+ "epoch": 1.3350730688935282,
+ "grad_norm": 0.795796811580658,
+ "learning_rate": 4.609705512645015e-06,
+ "loss": 0.4996,
+ "step": 427
+ },
+ {
+ "epoch": 1.3382045929018789,
+ "grad_norm": 0.8168228268623352,
+ "learning_rate": 4.6073793703834404e-06,
+ "loss": 0.5465,
+ "step": 428
+ },
+ {
+ "epoch": 1.3413361169102296,
+ "grad_norm": 0.8795569539070129,
+ "learning_rate": 4.605046907383765e-06,
+ "loss": 0.5407,
+ "step": 429
+ },
+ {
+ "epoch": 1.3444676409185803,
+ "grad_norm": 0.8504094481468201,
+ "learning_rate": 4.6027081306418295e-06,
+ "loss": 0.5589,
+ "step": 430
+ },
+ {
+ "epoch": 1.347599164926931,
+ "grad_norm": 1.485202431678772,
+ "learning_rate": 4.600363047172409e-06,
+ "loss": 0.5515,
+ "step": 431
+ },
+ {
+ "epoch": 1.350730688935282,
+ "grad_norm": 1.1156851053237915,
+ "learning_rate": 4.598011664009197e-06,
+ "loss": 0.5681,
+ "step": 432
+ },
+ {
+ "epoch": 1.3538622129436326,
+ "grad_norm": 0.8666876554489136,
+ "learning_rate": 4.595653988204779e-06,
+ "loss": 0.5451,
+ "step": 433
+ },
+ {
+ "epoch": 1.3569937369519833,
+ "grad_norm": 0.8192381858825684,
+ "learning_rate": 4.593290026830619e-06,
+ "loss": 0.5632,
+ "step": 434
+ },
+ {
+ "epoch": 1.360125260960334,
+ "grad_norm": 0.7994804978370667,
+ "learning_rate": 4.590919786977029e-06,
+ "loss": 0.5181,
+ "step": 435
+ },
+ {
+ "epoch": 1.3632567849686847,
+ "grad_norm": 0.8038607835769653,
+ "learning_rate": 4.5885432757531535e-06,
+ "loss": 0.5385,
+ "step": 436
+ },
+ {
+ "epoch": 1.3663883089770354,
+ "grad_norm": 0.7677503824234009,
+ "learning_rate": 4.586160500286948e-06,
+ "loss": 0.5455,
+ "step": 437
+ },
+ {
+ "epoch": 1.3695198329853862,
+ "grad_norm": 0.8293285369873047,
+ "learning_rate": 4.583771467725157e-06,
+ "loss": 0.5401,
+ "step": 438
+ },
+ {
+ "epoch": 1.372651356993737,
+ "grad_norm": 0.8607680797576904,
+ "learning_rate": 4.581376185233289e-06,
+ "loss": 0.5782,
+ "step": 439
+ },
+ {
+ "epoch": 1.3757828810020878,
+ "grad_norm": 0.8847081065177917,
+ "learning_rate": 4.578974659995601e-06,
+ "loss": 0.572,
+ "step": 440
+ },
+ {
+ "epoch": 1.3789144050104385,
+ "grad_norm": 0.7669641971588135,
+ "learning_rate": 4.576566899215075e-06,
+ "loss": 0.5655,
+ "step": 441
+ },
+ {
+ "epoch": 1.3820459290187892,
+ "grad_norm": 0.8514629006385803,
+ "learning_rate": 4.5741529101133904e-06,
+ "loss": 0.5218,
+ "step": 442
+ },
+ {
+ "epoch": 1.38517745302714,
+ "grad_norm": 0.8719842433929443,
+ "learning_rate": 4.5717326999309145e-06,
+ "loss": 0.5579,
+ "step": 443
+ },
+ {
+ "epoch": 1.3883089770354906,
+ "grad_norm": 1.1142809391021729,
+ "learning_rate": 4.569306275926667e-06,
+ "loss": 0.5535,
+ "step": 444
+ },
+ {
+ "epoch": 1.3914405010438413,
+ "grad_norm": 0.7392387986183167,
+ "learning_rate": 4.566873645378309e-06,
+ "loss": 0.5335,
+ "step": 445
+ },
+ {
+ "epoch": 1.394572025052192,
+ "grad_norm": 0.9066658616065979,
+ "learning_rate": 4.564434815582117e-06,
+ "loss": 0.5286,
+ "step": 446
+ },
+ {
+ "epoch": 1.3977035490605427,
+ "grad_norm": 0.8648932576179504,
+ "learning_rate": 4.561989793852959e-06,
+ "loss": 0.5008,
+ "step": 447
+ },
+ {
+ "epoch": 1.4008350730688934,
+ "grad_norm": 0.7768712043762207,
+ "learning_rate": 4.559538587524276e-06,
+ "loss": 0.5727,
+ "step": 448
+ },
+ {
+ "epoch": 1.4039665970772441,
+ "grad_norm": 0.7851182222366333,
+ "learning_rate": 4.557081203948059e-06,
+ "loss": 0.5731,
+ "step": 449
+ },
+ {
+ "epoch": 1.407098121085595,
+ "grad_norm": 0.8959861397743225,
+ "learning_rate": 4.5546176504948255e-06,
+ "loss": 0.5587,
+ "step": 450
+ },
+ {
+ "epoch": 1.4102296450939458,
+ "grad_norm": 1.0538026094436646,
+ "learning_rate": 4.552147934553601e-06,
+ "loss": 0.5808,
+ "step": 451
+ },
+ {
+ "epoch": 1.4133611691022965,
+ "grad_norm": 0.9887629151344299,
+ "learning_rate": 4.54967206353189e-06,
+ "loss": 0.5658,
+ "step": 452
+ },
+ {
+ "epoch": 1.4164926931106472,
+ "grad_norm": 0.9579302072525024,
+ "learning_rate": 4.547190044855663e-06,
+ "loss": 0.5092,
+ "step": 453
+ },
+ {
+ "epoch": 1.4196242171189979,
+ "grad_norm": 0.6993522047996521,
+ "learning_rate": 4.544701885969326e-06,
+ "loss": 0.5233,
+ "step": 454
+ },
+ {
+ "epoch": 1.4227557411273486,
+ "grad_norm": 0.8197568655014038,
+ "learning_rate": 4.542207594335703e-06,
+ "loss": 0.553,
+ "step": 455
+ },
+ {
+ "epoch": 1.4258872651356993,
+ "grad_norm": 2.921947717666626,
+ "learning_rate": 4.53970717743601e-06,
+ "loss": 0.4857,
+ "step": 456
+ },
+ {
+ "epoch": 1.4290187891440502,
+ "grad_norm": 1.3547242879867554,
+ "learning_rate": 4.53720064276984e-06,
+ "loss": 0.5676,
+ "step": 457
+ },
+ {
+ "epoch": 1.432150313152401,
+ "grad_norm": 1.4175567626953125,
+ "learning_rate": 4.534687997855131e-06,
+ "loss": 0.5164,
+ "step": 458
+ },
+ {
+ "epoch": 1.4352818371607516,
+ "grad_norm": 1.378146767616272,
+ "learning_rate": 4.532169250228145e-06,
+ "loss": 0.5429,
+ "step": 459
+ },
+ {
+ "epoch": 1.4384133611691023,
+ "grad_norm": 0.7811698317527771,
+ "learning_rate": 4.529644407443456e-06,
+ "loss": 0.524,
+ "step": 460
+ },
+ {
+ "epoch": 1.441544885177453,
+ "grad_norm": 1.1481678485870361,
+ "learning_rate": 4.527113477073914e-06,
+ "loss": 0.5513,
+ "step": 461
+ },
+ {
+ "epoch": 1.4446764091858038,
+ "grad_norm": 0.8450161218643188,
+ "learning_rate": 4.5245764667106266e-06,
+ "loss": 0.5632,
+ "step": 462
+ },
+ {
+ "epoch": 1.4478079331941545,
+ "grad_norm": 1.1582145690917969,
+ "learning_rate": 4.522033383962941e-06,
+ "loss": 0.5834,
+ "step": 463
+ },
+ {
+ "epoch": 1.4509394572025052,
+ "grad_norm": 1.0403447151184082,
+ "learning_rate": 4.519484236458416e-06,
+ "loss": 0.506,
+ "step": 464
+ },
+ {
+ "epoch": 1.4540709812108559,
+ "grad_norm": 0.7894920706748962,
+ "learning_rate": 4.516929031842799e-06,
+ "loss": 0.5526,
+ "step": 465
+ },
+ {
+ "epoch": 1.4572025052192066,
+ "grad_norm": 0.8092262744903564,
+ "learning_rate": 4.51436777778001e-06,
+ "loss": 0.5619,
+ "step": 466
+ },
+ {
+ "epoch": 1.4603340292275573,
+ "grad_norm": 0.9773806929588318,
+ "learning_rate": 4.511800481952106e-06,
+ "loss": 0.5179,
+ "step": 467
+ },
+ {
+ "epoch": 1.4634655532359082,
+ "grad_norm": 1.018676519393921,
+ "learning_rate": 4.509227152059271e-06,
+ "loss": 0.5415,
+ "step": 468
+ },
+ {
+ "epoch": 1.466597077244259,
+ "grad_norm": 0.7457838654518127,
+ "learning_rate": 4.506647795819784e-06,
+ "loss": 0.5473,
+ "step": 469
+ },
+ {
+ "epoch": 1.4697286012526096,
+ "grad_norm": 0.7826436161994934,
+ "learning_rate": 4.50406242097e-06,
+ "loss": 0.5526,
+ "step": 470
+ },
+ {
+ "epoch": 1.4728601252609603,
+ "grad_norm": 0.9492483139038086,
+ "learning_rate": 4.501471035264328e-06,
+ "loss": 0.5179,
+ "step": 471
+ },
+ {
+ "epoch": 1.475991649269311,
+ "grad_norm": 0.93398517370224,
+ "learning_rate": 4.4988736464752005e-06,
+ "loss": 0.5195,
+ "step": 472
+ },
+ {
+ "epoch": 1.4791231732776617,
+ "grad_norm": 0.8396487832069397,
+ "learning_rate": 4.496270262393061e-06,
+ "loss": 0.5447,
+ "step": 473
+ },
+ {
+ "epoch": 1.4822546972860124,
+ "grad_norm": 0.7450584173202515,
+ "learning_rate": 4.4936608908263315e-06,
+ "loss": 0.5207,
+ "step": 474
+ },
+ {
+ "epoch": 1.4853862212943634,
+ "grad_norm": 0.7887717485427856,
+ "learning_rate": 4.491045539601392e-06,
+ "loss": 0.523,
+ "step": 475
+ },
+ {
+ "epoch": 1.488517745302714,
+ "grad_norm": 1.2051388025283813,
+ "learning_rate": 4.48842421656256e-06,
+ "loss": 0.5402,
+ "step": 476
+ },
+ {
+ "epoch": 1.4916492693110648,
+ "grad_norm": 2.3103389739990234,
+ "learning_rate": 4.485796929572063e-06,
+ "loss": 0.5588,
+ "step": 477
+ },
+ {
+ "epoch": 1.4947807933194155,
+ "grad_norm": 0.7473112344741821,
+ "learning_rate": 4.483163686510016e-06,
+ "loss": 0.5731,
+ "step": 478
+ },
+ {
+ "epoch": 1.4979123173277662,
+ "grad_norm": 0.7545126676559448,
+ "learning_rate": 4.480524495274399e-06,
+ "loss": 0.5536,
+ "step": 479
+ },
+ {
+ "epoch": 1.501043841336117,
+ "grad_norm": 0.7801297903060913,
+ "learning_rate": 4.477879363781033e-06,
+ "loss": 0.5696,
+ "step": 480
+ },
+ {
+ "epoch": 1.5041753653444676,
+ "grad_norm": 0.7740563750267029,
+ "learning_rate": 4.475228299963554e-06,
+ "loss": 0.5526,
+ "step": 481
+ },
+ {
+ "epoch": 1.5073068893528183,
+ "grad_norm": 0.8600060343742371,
+ "learning_rate": 4.4725713117733936e-06,
+ "loss": 0.5051,
+ "step": 482
+ },
+ {
+ "epoch": 1.510438413361169,
+ "grad_norm": 0.6934283971786499,
+ "learning_rate": 4.46990840717975e-06,
+ "loss": 0.5564,
+ "step": 483
+ },
+ {
+ "epoch": 1.5135699373695197,
+ "grad_norm": 0.8927920460700989,
+ "learning_rate": 4.46723959416957e-06,
+ "loss": 0.5529,
+ "step": 484
+ },
+ {
+ "epoch": 1.5167014613778704,
+ "grad_norm": 0.9570988416671753,
+ "learning_rate": 4.464564880747517e-06,
+ "loss": 0.5661,
+ "step": 485
+ },
+ {
+ "epoch": 1.5198329853862211,
+ "grad_norm": 0.7229202389717102,
+ "learning_rate": 4.461884274935956e-06,
+ "loss": 0.5964,
+ "step": 486
+ },
+ {
+ "epoch": 1.522964509394572,
+ "grad_norm": 0.7367239594459534,
+ "learning_rate": 4.4591977847749225e-06,
+ "loss": 0.5455,
+ "step": 487
+ },
+ {
+ "epoch": 1.5260960334029228,
+ "grad_norm": 0.8062120676040649,
+ "learning_rate": 4.456505418322103e-06,
+ "loss": 0.5735,
+ "step": 488
+ },
+ {
+ "epoch": 1.5292275574112735,
+ "grad_norm": 0.8854482769966125,
+ "learning_rate": 4.453807183652808e-06,
+ "loss": 0.5421,
+ "step": 489
+ },
+ {
+ "epoch": 1.5323590814196242,
+ "grad_norm": 0.7518959045410156,
+ "learning_rate": 4.451103088859951e-06,
+ "loss": 0.5083,
+ "step": 490
+ },
+ {
+ "epoch": 1.535490605427975,
+ "grad_norm": 0.8621206879615784,
+ "learning_rate": 4.448393142054016e-06,
+ "loss": 0.4712,
+ "step": 491
+ },
+ {
+ "epoch": 1.5386221294363258,
+ "grad_norm": 1.0618741512298584,
+ "learning_rate": 4.445677351363046e-06,
+ "loss": 0.5808,
+ "step": 492
+ },
+ {
+ "epoch": 1.5417536534446765,
+ "grad_norm": 0.8261345028877258,
+ "learning_rate": 4.442955724932607e-06,
+ "loss": 0.5625,
+ "step": 493
+ },
+ {
+ "epoch": 1.5448851774530272,
+ "grad_norm": 0.7067139744758606,
+ "learning_rate": 4.440228270925772e-06,
+ "loss": 0.5661,
+ "step": 494
+ },
+ {
+ "epoch": 1.548016701461378,
+ "grad_norm": 0.9234416484832764,
+ "learning_rate": 4.437494997523091e-06,
+ "loss": 0.5428,
+ "step": 495
+ },
+ {
+ "epoch": 1.5511482254697286,
+ "grad_norm": 0.9273470044136047,
+ "learning_rate": 4.434755912922567e-06,
+ "loss": 0.5388,
+ "step": 496
+ },
+ {
+ "epoch": 1.5542797494780793,
+ "grad_norm": 1.0163263082504272,
+ "learning_rate": 4.4320110253396345e-06,
+ "loss": 0.5409,
+ "step": 497
+ },
+ {
+ "epoch": 1.55741127348643,
+ "grad_norm": 0.9542096853256226,
+ "learning_rate": 4.429260343007133e-06,
+ "loss": 0.5329,
+ "step": 498
+ },
+ {
+ "epoch": 1.5605427974947808,
+ "grad_norm": 0.8076801896095276,
+ "learning_rate": 4.426503874175283e-06,
+ "loss": 0.5616,
+ "step": 499
+ },
+ {
+ "epoch": 1.5636743215031315,
+ "grad_norm": 1.0063767433166504,
+ "learning_rate": 4.423741627111658e-06,
+ "loss": 0.5369,
+ "step": 500
+ },
+ {
+ "epoch": 1.5668058455114822,
+ "grad_norm": 1.040286898612976,
+ "learning_rate": 4.420973610101166e-06,
+ "loss": 0.5474,
+ "step": 501
+ },
+ {
+ "epoch": 1.5699373695198329,
+ "grad_norm": 0.7832860946655273,
+ "learning_rate": 4.4181998314460164e-06,
+ "loss": 0.5486,
+ "step": 502
+ },
+ {
+ "epoch": 1.5730688935281836,
+ "grad_norm": 0.8162257075309753,
+ "learning_rate": 4.415420299465706e-06,
+ "loss": 0.5054,
+ "step": 503
+ },
+ {
+ "epoch": 1.5762004175365343,
+ "grad_norm": 0.9108433127403259,
+ "learning_rate": 4.4126350224969814e-06,
+ "loss": 0.5399,
+ "step": 504
+ },
+ {
+ "epoch": 1.5793319415448852,
+ "grad_norm": 0.8002520799636841,
+ "learning_rate": 4.409844008893824e-06,
+ "loss": 0.5485,
+ "step": 505
+ },
+ {
+ "epoch": 1.582463465553236,
+ "grad_norm": 0.8543248772621155,
+ "learning_rate": 4.407047267027423e-06,
+ "loss": 0.4984,
+ "step": 506
+ },
+ {
+ "epoch": 1.5855949895615866,
+ "grad_norm": 0.7154155373573303,
+ "learning_rate": 4.404244805286141e-06,
+ "loss": 0.5392,
+ "step": 507
+ },
+ {
+ "epoch": 1.5887265135699373,
+ "grad_norm": 0.818553626537323,
+ "learning_rate": 4.401436632075504e-06,
+ "loss": 0.5178,
+ "step": 508
+ },
+ {
+ "epoch": 1.5918580375782883,
+ "grad_norm": 0.7535017728805542,
+ "learning_rate": 4.398622755818167e-06,
+ "loss": 0.5446,
+ "step": 509
+ },
+ {
+ "epoch": 1.594989561586639,
+ "grad_norm": 0.9328975677490234,
+ "learning_rate": 4.395803184953889e-06,
+ "loss": 0.5546,
+ "step": 510
+ },
+ {
+ "epoch": 1.5981210855949897,
+ "grad_norm": 0.7960026860237122,
+ "learning_rate": 4.392977927939508e-06,
+ "loss": 0.5451,
+ "step": 511
+ },
+ {
+ "epoch": 1.6012526096033404,
+ "grad_norm": 0.9686267971992493,
+ "learning_rate": 4.3901469932489195e-06,
+ "loss": 0.5198,
+ "step": 512
+ },
+ {
+ "epoch": 1.604384133611691,
+ "grad_norm": 0.903137743473053,
+ "learning_rate": 4.387310389373047e-06,
+ "loss": 0.5395,
+ "step": 513
+ },
+ {
+ "epoch": 1.6075156576200418,
+ "grad_norm": 1.0728516578674316,
+ "learning_rate": 4.384468124819816e-06,
+ "loss": 0.5843,
+ "step": 514
+ },
+ {
+ "epoch": 1.6106471816283925,
+ "grad_norm": 1.0245436429977417,
+ "learning_rate": 4.3816202081141345e-06,
+ "loss": 0.5672,
+ "step": 515
+ },
+ {
+ "epoch": 1.6137787056367432,
+ "grad_norm": 0.9672732353210449,
+ "learning_rate": 4.378766647797858e-06,
+ "loss": 0.5369,
+ "step": 516
+ },
+ {
+ "epoch": 1.616910229645094,
+ "grad_norm": 0.9149513840675354,
+ "learning_rate": 4.375907452429774e-06,
+ "loss": 0.4628,
+ "step": 517
+ },
+ {
+ "epoch": 1.6200417536534446,
+ "grad_norm": 0.7543843984603882,
+ "learning_rate": 4.373042630585567e-06,
+ "loss": 0.5344,
+ "step": 518
+ },
+ {
+ "epoch": 1.6231732776617953,
+ "grad_norm": 0.7589017152786255,
+ "learning_rate": 4.370172190857801e-06,
+ "loss": 0.5672,
+ "step": 519
+ },
+ {
+ "epoch": 1.626304801670146,
+ "grad_norm": 0.803040623664856,
+ "learning_rate": 4.367296141855887e-06,
+ "loss": 0.5313,
+ "step": 520
+ },
+ {
+ "epoch": 1.6294363256784967,
+ "grad_norm": 0.8305794596672058,
+ "learning_rate": 4.3644144922060625e-06,
+ "loss": 0.5754,
+ "step": 521
+ },
+ {
+ "epoch": 1.6325678496868476,
+ "grad_norm": 1.0086486339569092,
+ "learning_rate": 4.361527250551361e-06,
+ "loss": 0.5433,
+ "step": 522
+ },
+ {
+ "epoch": 1.6356993736951984,
+ "grad_norm": 0.7217550277709961,
+ "learning_rate": 4.35863442555159e-06,
+ "loss": 0.524,
+ "step": 523
+ },
+ {
+ "epoch": 1.638830897703549,
+ "grad_norm": 0.7788524627685547,
+ "learning_rate": 4.355736025883303e-06,
+ "loss": 0.536,
+ "step": 524
+ },
+ {
+ "epoch": 1.6419624217118998,
+ "grad_norm": 0.8460550904273987,
+ "learning_rate": 4.352832060239774e-06,
+ "loss": 0.5381,
+ "step": 525
+ },
+ {
+ "epoch": 1.6450939457202505,
+ "grad_norm": 0.7571215033531189,
+ "learning_rate": 4.3499225373309675e-06,
+ "loss": 0.541,
+ "step": 526
+ },
+ {
+ "epoch": 1.6482254697286014,
+ "grad_norm": 0.7343226671218872,
+ "learning_rate": 4.347007465883523e-06,
+ "loss": 0.5147,
+ "step": 527
+ },
+ {
+ "epoch": 1.651356993736952,
+ "grad_norm": 0.7271892428398132,
+ "learning_rate": 4.3440868546407165e-06,
+ "loss": 0.5311,
+ "step": 528
+ },
+ {
+ "epoch": 1.6544885177453028,
+ "grad_norm": 0.8166136741638184,
+ "learning_rate": 4.341160712362442e-06,
+ "loss": 0.5379,
+ "step": 529
+ },
+ {
+ "epoch": 1.6576200417536535,
+ "grad_norm": 1.5985233783721924,
+ "learning_rate": 4.338229047825182e-06,
+ "loss": 0.5782,
+ "step": 530
+ },
+ {
+ "epoch": 1.6607515657620042,
+ "grad_norm": 0.7835702896118164,
+ "learning_rate": 4.3352918698219835e-06,
+ "loss": 0.525,
+ "step": 531
+ },
+ {
+ "epoch": 1.663883089770355,
+ "grad_norm": 0.7278687953948975,
+ "learning_rate": 4.332349187162428e-06,
+ "loss": 0.5266,
+ "step": 532
+ },
+ {
+ "epoch": 1.6670146137787056,
+ "grad_norm": 0.8240190148353577,
+ "learning_rate": 4.329401008672608e-06,
+ "loss": 0.5515,
+ "step": 533
+ },
+ {
+ "epoch": 1.6701461377870563,
+ "grad_norm": 0.9447080492973328,
+ "learning_rate": 4.326447343195102e-06,
+ "loss": 0.5596,
+ "step": 534
+ },
+ {
+ "epoch": 1.673277661795407,
+ "grad_norm": 0.7827372550964355,
+ "learning_rate": 4.323488199588944e-06,
+ "loss": 0.5466,
+ "step": 535
+ },
+ {
+ "epoch": 1.6764091858037578,
+ "grad_norm": 0.9252517223358154,
+ "learning_rate": 4.320523586729599e-06,
+ "loss": 0.5433,
+ "step": 536
+ },
+ {
+ "epoch": 1.6795407098121085,
+ "grad_norm": 0.9437504410743713,
+ "learning_rate": 4.317553513508934e-06,
+ "loss": 0.5552,
+ "step": 537
+ },
+ {
+ "epoch": 1.6826722338204592,
+ "grad_norm": 0.8972746133804321,
+ "learning_rate": 4.3145779888351986e-06,
+ "loss": 0.5259,
+ "step": 538
+ },
+ {
+ "epoch": 1.6858037578288099,
+ "grad_norm": 0.8017446994781494,
+ "learning_rate": 4.311597021632988e-06,
+ "loss": 0.5263,
+ "step": 539
+ },
+ {
+ "epoch": 1.6889352818371608,
+ "grad_norm": 0.7875497341156006,
+ "learning_rate": 4.3086106208432235e-06,
+ "loss": 0.5316,
+ "step": 540
+ },
+ {
+ "epoch": 1.6920668058455115,
+ "grad_norm": 0.8204905986785889,
+ "learning_rate": 4.305618795423125e-06,
+ "loss": 0.5506,
+ "step": 541
+ },
+ {
+ "epoch": 1.6951983298538622,
+ "grad_norm": 0.888359785079956,
+ "learning_rate": 4.30262155434618e-06,
+ "loss": 0.4825,
+ "step": 542
+ },
+ {
+ "epoch": 1.698329853862213,
+ "grad_norm": 1.1026058197021484,
+ "learning_rate": 4.29961890660212e-06,
+ "loss": 0.5321,
+ "step": 543
+ },
+ {
+ "epoch": 1.7014613778705638,
+ "grad_norm": 0.7662535905838013,
+ "learning_rate": 4.2966108611968945e-06,
+ "loss": 0.5432,
+ "step": 544
+ },
+ {
+ "epoch": 1.7045929018789145,
+ "grad_norm": 1.1951749324798584,
+ "learning_rate": 4.293597427152641e-06,
+ "loss": 0.5123,
+ "step": 545
+ },
+ {
+ "epoch": 1.7077244258872653,
+ "grad_norm": 1.303183913230896,
+ "learning_rate": 4.290578613507661e-06,
+ "loss": 0.5346,
+ "step": 546
+ },
+ {
+ "epoch": 1.710855949895616,
+ "grad_norm": 0.7653357982635498,
+ "learning_rate": 4.287554429316387e-06,
+ "loss": 0.5397,
+ "step": 547
+ },
+ {
+ "epoch": 1.7139874739039667,
+ "grad_norm": 0.796215295791626,
+ "learning_rate": 4.284524883649366e-06,
+ "loss": 0.5421,
+ "step": 548
+ },
+ {
+ "epoch": 1.7171189979123174,
+ "grad_norm": 0.7599332332611084,
+ "learning_rate": 4.281489985593219e-06,
+ "loss": 0.5289,
+ "step": 549
+ },
+ {
+ "epoch": 1.720250521920668,
+ "grad_norm": 0.8029115796089172,
+ "learning_rate": 4.2784497442506265e-06,
+ "loss": 0.5409,
+ "step": 550
+ },
+ {
+ "epoch": 1.7233820459290188,
+ "grad_norm": 0.7194099426269531,
+ "learning_rate": 4.275404168740291e-06,
+ "loss": 0.5327,
+ "step": 551
+ },
+ {
+ "epoch": 1.7265135699373695,
+ "grad_norm": 0.7960740923881531,
+ "learning_rate": 4.272353268196917e-06,
+ "loss": 0.4896,
+ "step": 552
+ },
+ {
+ "epoch": 1.7296450939457202,
+ "grad_norm": 0.9572116732597351,
+ "learning_rate": 4.269297051771178e-06,
+ "loss": 0.5402,
+ "step": 553
+ },
+ {
+ "epoch": 1.732776617954071,
+ "grad_norm": 1.3604938983917236,
+ "learning_rate": 4.266235528629695e-06,
+ "loss": 0.5792,
+ "step": 554
+ },
+ {
+ "epoch": 1.7359081419624216,
+ "grad_norm": 2.067286729812622,
+ "learning_rate": 4.263168707955002e-06,
+ "loss": 0.5033,
+ "step": 555
+ },
+ {
+ "epoch": 1.7390396659707723,
+ "grad_norm": 0.8031097054481506,
+ "learning_rate": 4.260096598945523e-06,
+ "loss": 0.5117,
+ "step": 556
+ },
+ {
+ "epoch": 1.742171189979123,
+ "grad_norm": 1.0241729021072388,
+ "learning_rate": 4.257019210815546e-06,
+ "loss": 0.5359,
+ "step": 557
+ },
+ {
+ "epoch": 1.745302713987474,
+ "grad_norm": 0.7625218629837036,
+ "learning_rate": 4.25393655279519e-06,
+ "loss": 0.5625,
+ "step": 558
+ },
+ {
+ "epoch": 1.7484342379958246,
+ "grad_norm": 0.8603503704071045,
+ "learning_rate": 4.250848634130381e-06,
+ "loss": 0.5043,
+ "step": 559
+ },
+ {
+ "epoch": 1.7515657620041754,
+ "grad_norm": 0.9543750286102295,
+ "learning_rate": 4.247755464082824e-06,
+ "loss": 0.5364,
+ "step": 560
+ },
+ {
+ "epoch": 1.754697286012526,
+ "grad_norm": 0.9707463979721069,
+ "learning_rate": 4.244657051929973e-06,
+ "loss": 0.5184,
+ "step": 561
+ },
+ {
+ "epoch": 1.757828810020877,
+ "grad_norm": 0.7491432428359985,
+ "learning_rate": 4.241553406965008e-06,
+ "loss": 0.559,
+ "step": 562
+ },
+ {
+ "epoch": 1.7609603340292277,
+ "grad_norm": 0.7444972991943359,
+ "learning_rate": 4.238444538496801e-06,
+ "loss": 0.5327,
+ "step": 563
+ },
+ {
+ "epoch": 1.7640918580375784,
+ "grad_norm": 2.7108678817749023,
+ "learning_rate": 4.235330455849892e-06,
+ "loss": 0.55,
+ "step": 564
+ },
+ {
+ "epoch": 1.767223382045929,
+ "grad_norm": 1.6716049909591675,
+ "learning_rate": 4.232211168364459e-06,
+ "loss": 0.5093,
+ "step": 565
+ },
+ {
+ "epoch": 1.7703549060542798,
+ "grad_norm": 0.7023475170135498,
+ "learning_rate": 4.229086685396295e-06,
+ "loss": 0.569,
+ "step": 566
+ },
+ {
+ "epoch": 1.7734864300626305,
+ "grad_norm": 0.8596265316009521,
+ "learning_rate": 4.225957016316771e-06,
+ "loss": 0.5128,
+ "step": 567
+ },
+ {
+ "epoch": 1.7766179540709812,
+ "grad_norm": 0.8110849857330322,
+ "learning_rate": 4.222822170512816e-06,
+ "loss": 0.5142,
+ "step": 568
+ },
+ {
+ "epoch": 1.779749478079332,
+ "grad_norm": 0.7583725452423096,
+ "learning_rate": 4.219682157386884e-06,
+ "loss": 0.5584,
+ "step": 569
+ },
+ {
+ "epoch": 1.7828810020876826,
+ "grad_norm": 0.787811279296875,
+ "learning_rate": 4.21653698635693e-06,
+ "loss": 0.5068,
+ "step": 570
+ },
+ {
+ "epoch": 1.7860125260960333,
+ "grad_norm": 0.8298993110656738,
+ "learning_rate": 4.213386666856375e-06,
+ "loss": 0.5496,
+ "step": 571
+ },
+ {
+ "epoch": 1.789144050104384,
+ "grad_norm": 0.8999841213226318,
+ "learning_rate": 4.210231208334087e-06,
+ "loss": 0.5454,
+ "step": 572
+ },
+ {
+ "epoch": 1.7922755741127347,
+ "grad_norm": 4.264521598815918,
+ "learning_rate": 4.207070620254345e-06,
+ "loss": 0.5486,
+ "step": 573
+ },
+ {
+ "epoch": 1.7954070981210855,
+ "grad_norm": 0.8517448306083679,
+ "learning_rate": 4.203904912096812e-06,
+ "loss": 0.5566,
+ "step": 574
+ },
+ {
+ "epoch": 1.7985386221294362,
+ "grad_norm": 0.9230182766914368,
+ "learning_rate": 4.200734093356511e-06,
+ "loss": 0.4964,
+ "step": 575
+ },
+ {
+ "epoch": 1.801670146137787,
+ "grad_norm": 1.224039912223816,
+ "learning_rate": 4.197558173543791e-06,
+ "loss": 0.5356,
+ "step": 576
+ },
+ {
+ "epoch": 1.8048016701461378,
+ "grad_norm": 0.9998573660850525,
+ "learning_rate": 4.194377162184301e-06,
+ "loss": 0.5334,
+ "step": 577
+ },
+ {
+ "epoch": 1.8079331941544885,
+ "grad_norm": 0.865521252155304,
+ "learning_rate": 4.191191068818963e-06,
+ "loss": 0.5036,
+ "step": 578
+ },
+ {
+ "epoch": 1.8110647181628392,
+ "grad_norm": 0.8048138618469238,
+ "learning_rate": 4.18799990300394e-06,
+ "loss": 0.4979,
+ "step": 579
+ },
+ {
+ "epoch": 1.8141962421711901,
+ "grad_norm": 0.717815637588501,
+ "learning_rate": 4.184803674310609e-06,
+ "loss": 0.5623,
+ "step": 580
+ },
+ {
+ "epoch": 1.8173277661795408,
+ "grad_norm": 0.8403327465057373,
+ "learning_rate": 4.1816023923255335e-06,
+ "loss": 0.5055,
+ "step": 581
+ },
+ {
+ "epoch": 1.8204592901878915,
+ "grad_norm": 0.7298995852470398,
+ "learning_rate": 4.178396066650432e-06,
+ "loss": 0.5641,
+ "step": 582
+ },
+ {
+ "epoch": 1.8235908141962422,
+ "grad_norm": 0.9469727873802185,
+ "learning_rate": 4.1751847069021516e-06,
+ "loss": 0.5557,
+ "step": 583
+ },
+ {
+ "epoch": 1.826722338204593,
+ "grad_norm": 0.8641784191131592,
+ "learning_rate": 4.1719683227126386e-06,
+ "loss": 0.5153,
+ "step": 584
+ },
+ {
+ "epoch": 1.8298538622129437,
+ "grad_norm": 0.7316668629646301,
+ "learning_rate": 4.168746923728908e-06,
+ "loss": 0.4988,
+ "step": 585
+ },
+ {
+ "epoch": 1.8329853862212944,
+ "grad_norm": 0.8795468807220459,
+ "learning_rate": 4.165520519613017e-06,
+ "loss": 0.5483,
+ "step": 586
+ },
+ {
+ "epoch": 1.836116910229645,
+ "grad_norm": 0.7323560118675232,
+ "learning_rate": 4.162289120042034e-06,
+ "loss": 0.5194,
+ "step": 587
+ },
+ {
+ "epoch": 1.8392484342379958,
+ "grad_norm": 0.8217021822929382,
+ "learning_rate": 4.159052734708013e-06,
+ "loss": 0.532,
+ "step": 588
+ },
+ {
+ "epoch": 1.8423799582463465,
+ "grad_norm": 0.7669674754142761,
+ "learning_rate": 4.155811373317958e-06,
+ "loss": 0.541,
+ "step": 589
+ },
+ {
+ "epoch": 1.8455114822546972,
+ "grad_norm": 0.8312156200408936,
+ "learning_rate": 4.152565045593801e-06,
+ "loss": 0.5298,
+ "step": 590
+ },
+ {
+ "epoch": 1.848643006263048,
+ "grad_norm": 0.8967565298080444,
+ "learning_rate": 4.1493137612723665e-06,
+ "loss": 0.51,
+ "step": 591
+ },
+ {
+ "epoch": 1.8517745302713986,
+ "grad_norm": 0.8706664443016052,
+ "learning_rate": 4.14605753010535e-06,
+ "loss": 0.4941,
+ "step": 592
+ },
+ {
+ "epoch": 1.8549060542797495,
+ "grad_norm": 0.7585753798484802,
+ "learning_rate": 4.14279636185928e-06,
+ "loss": 0.5161,
+ "step": 593
+ },
+ {
+ "epoch": 1.8580375782881002,
+ "grad_norm": 0.7495241165161133,
+ "learning_rate": 4.1395302663154954e-06,
+ "loss": 0.5388,
+ "step": 594
+ },
+ {
+ "epoch": 1.861169102296451,
+ "grad_norm": 1.0746862888336182,
+ "learning_rate": 4.136259253270114e-06,
+ "loss": 0.4976,
+ "step": 595
+ },
+ {
+ "epoch": 1.8643006263048016,
+ "grad_norm": 0.872309684753418,
+ "learning_rate": 4.132983332534e-06,
+ "loss": 0.559,
+ "step": 596
+ },
+ {
+ "epoch": 1.8674321503131524,
+ "grad_norm": 0.8759891986846924,
+ "learning_rate": 4.1297025139327405e-06,
+ "loss": 0.5436,
+ "step": 597
+ },
+ {
+ "epoch": 1.8705636743215033,
+ "grad_norm": 1.1044493913650513,
+ "learning_rate": 4.126416807306611e-06,
+ "loss": 0.5476,
+ "step": 598
+ },
+ {
+ "epoch": 1.873695198329854,
+ "grad_norm": 0.8340442180633545,
+ "learning_rate": 4.123126222510549e-06,
+ "loss": 0.4592,
+ "step": 599
+ },
+ {
+ "epoch": 1.8768267223382047,
+ "grad_norm": 0.8331449031829834,
+ "learning_rate": 4.119830769414123e-06,
+ "loss": 0.5219,
+ "step": 600
+ },
+ {
+ "epoch": 1.8799582463465554,
+ "grad_norm": 1.0862973928451538,
+ "learning_rate": 4.116530457901503e-06,
+ "loss": 0.5159,
+ "step": 601
+ },
+ {
+ "epoch": 1.883089770354906,
+ "grad_norm": 0.8524414300918579,
+ "learning_rate": 4.113225297871431e-06,
+ "loss": 0.5502,
+ "step": 602
+ },
+ {
+ "epoch": 1.8862212943632568,
+ "grad_norm": 1.4945416450500488,
+ "learning_rate": 4.10991529923719e-06,
+ "loss": 0.5627,
+ "step": 603
+ },
+ {
+ "epoch": 1.8893528183716075,
+ "grad_norm": 1.5518157482147217,
+ "learning_rate": 4.10660047192658e-06,
+ "loss": 0.5517,
+ "step": 604
+ },
+ {
+ "epoch": 1.8924843423799582,
+ "grad_norm": 2.56638765335083,
+ "learning_rate": 4.103280825881878e-06,
+ "loss": 0.5422,
+ "step": 605
+ },
+ {
+ "epoch": 1.895615866388309,
+ "grad_norm": 0.867254912853241,
+ "learning_rate": 4.099956371059817e-06,
+ "loss": 0.4991,
+ "step": 606
+ },
+ {
+ "epoch": 1.8987473903966596,
+ "grad_norm": 0.9555892944335938,
+ "learning_rate": 4.096627117431554e-06,
+ "loss": 0.5339,
+ "step": 607
+ },
+ {
+ "epoch": 1.9018789144050103,
+ "grad_norm": 0.7905483245849609,
+ "learning_rate": 4.093293074982638e-06,
+ "loss": 0.5168,
+ "step": 608
+ },
+ {
+ "epoch": 1.905010438413361,
+ "grad_norm": 0.7500227093696594,
+ "learning_rate": 4.089954253712981e-06,
+ "loss": 0.5096,
+ "step": 609
+ },
+ {
+ "epoch": 1.9081419624217117,
+ "grad_norm": 0.8458324074745178,
+ "learning_rate": 4.086610663636828e-06,
+ "loss": 0.5296,
+ "step": 610
+ },
+ {
+ "epoch": 1.9112734864300627,
+ "grad_norm": 0.7392706871032715,
+ "learning_rate": 4.08326231478273e-06,
+ "loss": 0.5305,
+ "step": 611
+ },
+ {
+ "epoch": 1.9144050104384134,
+ "grad_norm": 0.8113343715667725,
+ "learning_rate": 4.079909217193508e-06,
+ "loss": 0.5044,
+ "step": 612
+ },
+ {
+ "epoch": 1.917536534446764,
+ "grad_norm": 0.7637801766395569,
+ "learning_rate": 4.076551380926226e-06,
+ "loss": 0.5298,
+ "step": 613
+ },
+ {
+ "epoch": 1.9206680584551148,
+ "grad_norm": 1.0523375272750854,
+ "learning_rate": 4.073188816052164e-06,
+ "loss": 0.5111,
+ "step": 614
+ },
+ {
+ "epoch": 1.9237995824634657,
+ "grad_norm": 0.8224868774414062,
+ "learning_rate": 4.069821532656781e-06,
+ "loss": 0.5178,
+ "step": 615
+ },
+ {
+ "epoch": 1.9269311064718164,
+ "grad_norm": 0.7270777821540833,
+ "learning_rate": 4.066449540839693e-06,
+ "loss": 0.5307,
+ "step": 616
+ },
+ {
+ "epoch": 1.9300626304801671,
+ "grad_norm": 0.7214602828025818,
+ "learning_rate": 4.063072850714631e-06,
+ "loss": 0.5171,
+ "step": 617
+ },
+ {
+ "epoch": 1.9331941544885178,
+ "grad_norm": 0.7333671450614929,
+ "learning_rate": 4.059691472409426e-06,
+ "loss": 0.56,
+ "step": 618
+ },
+ {
+ "epoch": 1.9363256784968685,
+ "grad_norm": 0.9166824221611023,
+ "learning_rate": 4.056305416065964e-06,
+ "loss": 0.5388,
+ "step": 619
+ },
+ {
+ "epoch": 1.9394572025052192,
+ "grad_norm": 0.7743303775787354,
+ "learning_rate": 4.052914691840167e-06,
+ "loss": 0.5134,
+ "step": 620
+ },
+ {
+ "epoch": 1.94258872651357,
+ "grad_norm": 0.704097330570221,
+ "learning_rate": 4.0495193099019524e-06,
+ "loss": 0.4926,
+ "step": 621
+ },
+ {
+ "epoch": 1.9457202505219207,
+ "grad_norm": 0.8508503437042236,
+ "learning_rate": 4.046119280435212e-06,
+ "loss": 0.5008,
+ "step": 622
+ },
+ {
+ "epoch": 1.9488517745302714,
+ "grad_norm": 0.725933313369751,
+ "learning_rate": 4.042714613637775e-06,
+ "loss": 0.5549,
+ "step": 623
+ },
+ {
+ "epoch": 1.951983298538622,
+ "grad_norm": 0.8919175863265991,
+ "learning_rate": 4.039305319721381e-06,
+ "loss": 0.5183,
+ "step": 624
+ },
+ {
+ "epoch": 1.9551148225469728,
+ "grad_norm": 0.827919065952301,
+ "learning_rate": 4.035891408911644e-06,
+ "loss": 0.5624,
+ "step": 625
+ },
+ {
+ "epoch": 1.9582463465553235,
+ "grad_norm": 0.7415187358856201,
+ "learning_rate": 4.032472891448032e-06,
+ "loss": 0.5454,
+ "step": 626
+ },
+ {
+ "epoch": 1.9613778705636742,
+ "grad_norm": 0.7675788998603821,
+ "learning_rate": 4.029049777583824e-06,
+ "loss": 0.5361,
+ "step": 627
+ },
+ {
+ "epoch": 1.964509394572025,
+ "grad_norm": 0.8464030623435974,
+ "learning_rate": 4.025622077586088e-06,
+ "loss": 0.5295,
+ "step": 628
+ },
+ {
+ "epoch": 1.9676409185803758,
+ "grad_norm": 0.7641633749008179,
+ "learning_rate": 4.022189801735646e-06,
+ "loss": 0.55,
+ "step": 629
+ },
+ {
+ "epoch": 1.9707724425887265,
+ "grad_norm": 0.7813227772712708,
+ "learning_rate": 4.018752960327048e-06,
+ "loss": 0.5587,
+ "step": 630
+ },
+ {
+ "epoch": 1.9739039665970772,
+ "grad_norm": 0.7576701641082764,
+ "learning_rate": 4.015311563668533e-06,
+ "loss": 0.5413,
+ "step": 631
+ },
+ {
+ "epoch": 1.977035490605428,
+ "grad_norm": 0.6949650049209595,
+ "learning_rate": 4.011865622082004e-06,
+ "loss": 0.5344,
+ "step": 632
+ },
+ {
+ "epoch": 1.9801670146137789,
+ "grad_norm": 0.9009145498275757,
+ "learning_rate": 4.008415145902997e-06,
+ "loss": 0.5233,
+ "step": 633
+ },
+ {
+ "epoch": 1.9832985386221296,
+ "grad_norm": 0.7635822892189026,
+ "learning_rate": 4.004960145480651e-06,
+ "loss": 0.4981,
+ "step": 634
+ },
+ {
+ "epoch": 1.9864300626304803,
+ "grad_norm": 0.8916334509849548,
+ "learning_rate": 4.0015006311776685e-06,
+ "loss": 0.5311,
+ "step": 635
+ },
+ {
+ "epoch": 1.989561586638831,
+ "grad_norm": 0.7197673320770264,
+ "learning_rate": 3.998036613370295e-06,
+ "loss": 0.5361,
+ "step": 636
+ },
+ {
+ "epoch": 1.9926931106471817,
+ "grad_norm": 0.8391228914260864,
+ "learning_rate": 3.994568102448284e-06,
+ "loss": 0.5473,
+ "step": 637
+ },
+ {
+ "epoch": 1.9958246346555324,
+ "grad_norm": 0.9371750950813293,
+ "learning_rate": 3.991095108814862e-06,
+ "loss": 0.5303,
+ "step": 638
+ }
+ ],
+ "logging_steps": 1,
+ "max_steps": 1914,
+ "num_input_tokens_seen": 0,
+ "num_train_epochs": 6,
+ "save_steps": 319,
+ "stateful_callbacks": {
+ "TrainerControl": {
+ "args": {
+ "should_epoch_stop": false,
+ "should_evaluate": false,
+ "should_log": false,
+ "should_save": true,
+ "should_training_stop": false
+ },
+ "attributes": {}
+ }
+ },
+ "total_flos": 1.9343570238741414e+19,
+ "train_batch_size": 4,
+ "trial_name": null,
+ "trial_params": null
+}
diff --git a/checkpoint-638/training_args.bin b/checkpoint-638/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..8067ee9c1c0bc752bdfd00cfcaf1a6e717d2356b
--- /dev/null
+++ b/checkpoint-638/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c650156a192cae0a2070c4323ee8a93e9b52fb76041d59ae0633b98389585727
+size 7928
diff --git a/checkpoint-638/zero_to_fp32.py b/checkpoint-638/zero_to_fp32.py
new file mode 100644
index 0000000000000000000000000000000000000000..24cc342e78d1a006c782b3a4cd68d9ce786d8fd8
--- /dev/null
+++ b/checkpoint-638/zero_to_fp32.py
@@ -0,0 +1,604 @@
+#!/usr/bin/env python
+
+# Copyright (c) Microsoft Corporation.
+# SPDX-License-Identifier: Apache-2.0
+
+# DeepSpeed Team
+
+# This script extracts fp32 consolidated weights from a zero 1, 2 and 3 DeepSpeed checkpoints. It gets
+# copied into the top level checkpoint dir, so the user can easily do the conversion at any point in
+# the future. Once extracted, the weights don't require DeepSpeed and can be used in any
+# application.
+#
+# example: python zero_to_fp32.py . pytorch_model.bin
+
+import argparse
+import torch
+import glob
+import math
+import os
+import re
+from collections import OrderedDict
+from dataclasses import dataclass
+
+# while this script doesn't use deepspeed to recover data, since the checkpoints are pickled with
+# DeepSpeed data structures it has to be available in the current python environment.
+from deepspeed.utils import logger
+from deepspeed.checkpoint.constants import (DS_VERSION, OPTIMIZER_STATE_DICT, SINGLE_PARTITION_OF_FP32_GROUPS,
+ FP32_FLAT_GROUPS, ZERO_STAGE, PARTITION_COUNT, PARAM_SHAPES, BUFFER_NAMES,
+ FROZEN_PARAM_SHAPES, FROZEN_PARAM_FRAGMENTS)
+
+
+@dataclass
+class zero_model_state:
+ buffers: dict()
+ param_shapes: dict()
+ shared_params: list
+ ds_version: int
+ frozen_param_shapes: dict()
+ frozen_param_fragments: dict()
+
+
+debug = 0
+
+# load to cpu
+device = torch.device('cpu')
+
+
+def atoi(text):
+ return int(text) if text.isdigit() else text
+
+
+def natural_keys(text):
+ '''
+ alist.sort(key=natural_keys) sorts in human order
+ http://nedbatchelder.com/blog/200712/human_sorting.html
+ (See Toothy's implementation in the comments)
+ '''
+ return [atoi(c) for c in re.split(r'(\d+)', text)]
+
+
+def get_model_state_file(checkpoint_dir, zero_stage):
+ if not os.path.isdir(checkpoint_dir):
+ raise FileNotFoundError(f"Directory '{checkpoint_dir}' doesn't exist")
+
+ # there should be only one file
+ if zero_stage <= 2:
+ file = os.path.join(checkpoint_dir, "mp_rank_00_model_states.pt")
+ elif zero_stage == 3:
+ file = os.path.join(checkpoint_dir, "zero_pp_rank_0_mp_rank_00_model_states.pt")
+
+ if not os.path.exists(file):
+ raise FileNotFoundError(f"can't find model states file at '{file}'")
+
+ return file
+
+
+def get_checkpoint_files(checkpoint_dir, glob_pattern):
+ # XXX: need to test that this simple glob rule works for multi-node setup too
+ ckpt_files = sorted(glob.glob(os.path.join(checkpoint_dir, glob_pattern)), key=natural_keys)
+
+ if len(ckpt_files) == 0:
+ raise FileNotFoundError(f"can't find {glob_pattern} files in directory '{checkpoint_dir}'")
+
+ return ckpt_files
+
+
+def get_optim_files(checkpoint_dir):
+ return get_checkpoint_files(checkpoint_dir, "*_optim_states.pt")
+
+
+def get_model_state_files(checkpoint_dir):
+ return get_checkpoint_files(checkpoint_dir, "*_model_states.pt")
+
+
+def parse_model_states(files):
+ zero_model_states = []
+ for file in files:
+ state_dict = torch.load(file, map_location=device)
+
+ if BUFFER_NAMES not in state_dict:
+ raise ValueError(f"{file} is not a model state checkpoint")
+ buffer_names = state_dict[BUFFER_NAMES]
+ if debug:
+ print("Found buffers:", buffer_names)
+
+ # recover just the buffers while restoring them to fp32 if they were saved in fp16
+ buffers = {k: v.float() for k, v in state_dict["module"].items() if k in buffer_names}
+ param_shapes = state_dict[PARAM_SHAPES]
+
+ # collect parameters that are included in param_shapes
+ param_names = []
+ for s in param_shapes:
+ for name in s.keys():
+ param_names.append(name)
+
+ # update with frozen parameters
+ frozen_param_shapes = state_dict.get(FROZEN_PARAM_SHAPES, None)
+ if frozen_param_shapes is not None:
+ if debug:
+ print(f"Found frozen_param_shapes: {frozen_param_shapes}")
+ param_names += list(frozen_param_shapes.keys())
+
+ # handle shared params
+ shared_params = [[k, v] for k, v in state_dict["shared_params"].items()]
+
+ ds_version = state_dict.get(DS_VERSION, None)
+
+ frozen_param_fragments = state_dict.get(FROZEN_PARAM_FRAGMENTS, None)
+
+ z_model_state = zero_model_state(buffers=buffers,
+ param_shapes=param_shapes,
+ shared_params=shared_params,
+ ds_version=ds_version,
+ frozen_param_shapes=frozen_param_shapes,
+ frozen_param_fragments=frozen_param_fragments)
+ zero_model_states.append(z_model_state)
+
+ return zero_model_states
+
+
+def parse_optim_states(files, ds_checkpoint_dir):
+
+ total_files = len(files)
+ state_dicts = []
+ for f in files:
+ state_dict = torch.load(f, map_location=device)
+ # immediately discard the potentially huge 2 optimizer states as we only care for fp32 master weights
+ # and also handle the case where it was already removed by another helper script
+ state_dict["optimizer_state_dict"].pop("optimizer_state_dict", None)
+ state_dicts.append(state_dict)
+
+ if not ZERO_STAGE in state_dicts[0][OPTIMIZER_STATE_DICT]:
+ raise ValueError(f"{files[0]} is not a zero checkpoint")
+ zero_stage = state_dicts[0][OPTIMIZER_STATE_DICT][ZERO_STAGE]
+ world_size = state_dicts[0][OPTIMIZER_STATE_DICT][PARTITION_COUNT]
+
+ # For ZeRO-2 each param group can have different partition_count as data parallelism for expert
+ # parameters can be different from data parallelism for non-expert parameters. So we can just
+ # use the max of the partition_count to get the dp world_size.
+
+ if type(world_size) is list:
+ world_size = max(world_size)
+
+ if world_size != total_files:
+ raise ValueError(
+ f"Expected {world_size} of '*_optim_states.pt' under '{ds_checkpoint_dir}' but found {total_files} files. "
+ "Possibly due to an overwrite of an old checkpoint, or a checkpoint didn't get saved by one or more processes."
+ )
+
+ # the groups are named differently in each stage
+ if zero_stage <= 2:
+ fp32_groups_key = SINGLE_PARTITION_OF_FP32_GROUPS
+ elif zero_stage == 3:
+ fp32_groups_key = FP32_FLAT_GROUPS
+ else:
+ raise ValueError(f"unknown zero stage {zero_stage}")
+
+ if zero_stage <= 2:
+ fp32_flat_groups = [state_dicts[i][OPTIMIZER_STATE_DICT][fp32_groups_key] for i in range(len(state_dicts))]
+ elif zero_stage == 3:
+ # if there is more than one param group, there will be multiple flattened tensors - one
+ # flattened tensor per group - for simplicity merge them into a single tensor
+ #
+ # XXX: could make the script more memory efficient for when there are multiple groups - it
+ # will require matching the sub-lists of param_shapes for each param group flattened tensor
+
+ fp32_flat_groups = [
+ torch.cat(state_dicts[i][OPTIMIZER_STATE_DICT][fp32_groups_key], 0) for i in range(len(state_dicts))
+ ]
+
+ return zero_stage, world_size, fp32_flat_groups
+
+
+def _get_fp32_state_dict_from_zero_checkpoint(ds_checkpoint_dir, exclude_frozen_parameters):
+ """
+ Returns fp32 state_dict reconstructed from ds checkpoint
+
+ Args:
+ - ``ds_checkpoint_dir``: path to the deepspeed checkpoint folder (where the optimizer files are)
+
+ """
+ print(f"Processing zero checkpoint '{ds_checkpoint_dir}'")
+
+ optim_files = get_optim_files(ds_checkpoint_dir)
+ zero_stage, world_size, fp32_flat_groups = parse_optim_states(optim_files, ds_checkpoint_dir)
+ print(f"Detected checkpoint of type zero stage {zero_stage}, world_size: {world_size}")
+
+ model_files = get_model_state_files(ds_checkpoint_dir)
+
+ zero_model_states = parse_model_states(model_files)
+ print(f'Parsing checkpoint created by deepspeed=={zero_model_states[0].ds_version}')
+
+ if zero_stage <= 2:
+ return _get_fp32_state_dict_from_zero2_checkpoint(world_size, fp32_flat_groups, zero_model_states,
+ exclude_frozen_parameters)
+ elif zero_stage == 3:
+ return _get_fp32_state_dict_from_zero3_checkpoint(world_size, fp32_flat_groups, zero_model_states,
+ exclude_frozen_parameters)
+
+
+def _zero2_merge_frozen_params(state_dict, zero_model_states):
+ if zero_model_states[0].frozen_param_shapes is None or len(zero_model_states[0].frozen_param_shapes) == 0:
+ return
+
+ frozen_param_shapes = zero_model_states[0].frozen_param_shapes
+ frozen_param_fragments = zero_model_states[0].frozen_param_fragments
+
+ if debug:
+ num_elem = sum(s.numel() for s in frozen_param_shapes.values())
+ print(f'rank 0: {FROZEN_PARAM_SHAPES}.numel = {num_elem}')
+
+ wanted_params = len(frozen_param_shapes)
+ wanted_numel = sum(s.numel() for s in frozen_param_shapes.values())
+ avail_numel = sum([p.numel() for p in frozen_param_fragments.values()])
+ print(f'Frozen params: Have {avail_numel} numels to process.')
+ print(f'Frozen params: Need {wanted_numel} numels in {wanted_params} params')
+
+ total_params = 0
+ total_numel = 0
+ for name, shape in frozen_param_shapes.items():
+ total_params += 1
+ unpartitioned_numel = shape.numel()
+ total_numel += unpartitioned_numel
+
+ state_dict[name] = frozen_param_fragments[name]
+
+ if debug:
+ print(f"{name} full shape: {shape} unpartitioned numel {unpartitioned_numel} ")
+
+ print(f"Reconstructed Frozen fp32 state dict with {total_params} params {total_numel} elements")
+
+
+def _has_callable(obj, fn):
+ attr = getattr(obj, fn, None)
+ return callable(attr)
+
+
+def _zero2_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states):
+ param_shapes = zero_model_states[0].param_shapes
+
+ # Reconstruction protocol:
+ #
+ # XXX: document this
+
+ if debug:
+ for i in range(world_size):
+ for j in range(len(fp32_flat_groups[0])):
+ print(f"{FP32_FLAT_GROUPS}[{i}][{j}].shape={fp32_flat_groups[i][j].shape}")
+
+ # XXX: memory usage doubles here (zero2)
+ num_param_groups = len(fp32_flat_groups[0])
+ merged_single_partition_of_fp32_groups = []
+ for i in range(num_param_groups):
+ merged_partitions = [sd[i] for sd in fp32_flat_groups]
+ full_single_fp32_vector = torch.cat(merged_partitions, 0)
+ merged_single_partition_of_fp32_groups.append(full_single_fp32_vector)
+ avail_numel = sum(
+ [full_single_fp32_vector.numel() for full_single_fp32_vector in merged_single_partition_of_fp32_groups])
+
+ if debug:
+ wanted_params = sum([len(shapes) for shapes in param_shapes])
+ wanted_numel = sum([sum(shape.numel() for shape in shapes.values()) for shapes in param_shapes])
+ # not asserting if there is a mismatch due to possible padding
+ print(f"Have {avail_numel} numels to process.")
+ print(f"Need {wanted_numel} numels in {wanted_params} params.")
+
+ # params
+ # XXX: for huge models that can't fit into the host's RAM we will have to recode this to support
+ # out-of-core computing solution
+ total_numel = 0
+ total_params = 0
+ for shapes, full_single_fp32_vector in zip(param_shapes, merged_single_partition_of_fp32_groups):
+ offset = 0
+ avail_numel = full_single_fp32_vector.numel()
+ for name, shape in shapes.items():
+
+ unpartitioned_numel = shape.numel() if _has_callable(shape, 'numel') else math.prod(shape)
+ total_numel += unpartitioned_numel
+ total_params += 1
+
+ if debug:
+ print(f"{name} full shape: {shape} unpartitioned numel {unpartitioned_numel} ")
+ state_dict[name] = full_single_fp32_vector.narrow(0, offset, unpartitioned_numel).view(shape)
+ offset += unpartitioned_numel
+
+ # Z2 started to align to 2*world_size to improve nccl performance. Therefore both offset and
+ # avail_numel can differ by anywhere between 0..2*world_size. Due to two unrelated complex
+ # paddings performed in the code it's almost impossible to predict the exact numbers w/o the
+ # live optimizer object, so we are checking that the numbers are within the right range
+ align_to = 2 * world_size
+
+ def zero2_align(x):
+ return align_to * math.ceil(x / align_to)
+
+ if debug:
+ print(f"original offset={offset}, avail_numel={avail_numel}")
+
+ offset = zero2_align(offset)
+ avail_numel = zero2_align(avail_numel)
+
+ if debug:
+ print(f"aligned offset={offset}, avail_numel={avail_numel}")
+
+ # Sanity check
+ if offset != avail_numel:
+ raise ValueError(f"consumed {offset} numels out of {avail_numel} - something is wrong")
+
+ print(f"Reconstructed fp32 state dict with {total_params} params {total_numel} elements")
+
+
+def _get_fp32_state_dict_from_zero2_checkpoint(world_size, fp32_flat_groups, zero_model_states,
+ exclude_frozen_parameters):
+ state_dict = OrderedDict()
+
+ # buffers
+ buffers = zero_model_states[0].buffers
+ state_dict.update(buffers)
+ if debug:
+ print(f"added {len(buffers)} buffers")
+
+ if not exclude_frozen_parameters:
+ _zero2_merge_frozen_params(state_dict, zero_model_states)
+
+ _zero2_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states)
+
+ # recover shared parameters
+ for pair in zero_model_states[0].shared_params:
+ if pair[1] in state_dict:
+ state_dict[pair[0]] = state_dict[pair[1]]
+
+ return state_dict
+
+
+def zero3_partitioned_param_info(unpartitioned_numel, world_size):
+ remainder = unpartitioned_numel % world_size
+ padding_numel = (world_size - remainder) if remainder else 0
+ partitioned_numel = math.ceil(unpartitioned_numel / world_size)
+ return partitioned_numel, padding_numel
+
+
+def _zero3_merge_frozen_params(state_dict, world_size, zero_model_states):
+ if zero_model_states[0].frozen_param_shapes is None or len(zero_model_states[0].frozen_param_shapes) == 0:
+ return
+
+ if debug:
+ for i in range(world_size):
+ num_elem = sum(s.numel() for s in zero_model_states[i].frozen_param_fragments.values())
+ print(f'rank {i}: {FROZEN_PARAM_SHAPES}.numel = {num_elem}')
+
+ frozen_param_shapes = zero_model_states[0].frozen_param_shapes
+ wanted_params = len(frozen_param_shapes)
+ wanted_numel = sum(s.numel() for s in frozen_param_shapes.values())
+ avail_numel = sum([p.numel() for p in zero_model_states[0].frozen_param_fragments.values()]) * world_size
+ print(f'Frozen params: Have {avail_numel} numels to process.')
+ print(f'Frozen params: Need {wanted_numel} numels in {wanted_params} params')
+
+ total_params = 0
+ total_numel = 0
+ for name, shape in zero_model_states[0].frozen_param_shapes.items():
+ total_params += 1
+ unpartitioned_numel = shape.numel()
+ total_numel += unpartitioned_numel
+
+ param_frags = tuple(model_state.frozen_param_fragments[name] for model_state in zero_model_states)
+ state_dict[name] = torch.cat(param_frags, 0).narrow(0, 0, unpartitioned_numel).view(shape)
+
+ partitioned_numel, partitioned_padding_numel = zero3_partitioned_param_info(unpartitioned_numel, world_size)
+
+ if debug:
+ print(
+ f"Frozen params: {total_params} {name} full shape: {shape} partition0 numel={partitioned_numel} partitioned_padding_numel={partitioned_padding_numel}"
+ )
+
+ print(f"Reconstructed Frozen fp32 state dict with {total_params} params {total_numel} elements")
+
+
+def _zero3_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states):
+    # Reconstruct full fp32 *trainable* parameters from each rank's flat
+    # partition buffer and insert them into `state_dict` (mutated in place).
+    # Raises ValueError if the consumed element count does not match the
+    # available elements across all ranks.
+    param_shapes = zero_model_states[0].param_shapes
+    avail_numel = fp32_flat_groups[0].numel() * world_size
+    # Reconstruction protocol: For zero3 we need to zip the partitions together at boundary of each
+    # param, re-consolidating each param, while dealing with padding if any
+
+    # merge list of dicts, preserving order
+    param_shapes = {k: v for d in param_shapes for k, v in d.items()}
+
+    if debug:
+        for i in range(world_size):
+            print(f"{FP32_FLAT_GROUPS}[{i}].shape={fp32_flat_groups[i].shape}")
+
+    wanted_params = len(param_shapes)
+    wanted_numel = sum(shape.numel() for shape in param_shapes.values())
+    # not asserting if there is a mismatch due to possible padding
+    avail_numel = fp32_flat_groups[0].numel() * world_size
+    print(f"Trainable params: Have {avail_numel} numels to process.")
+    print(f"Trainable params: Need {wanted_numel} numels in {wanted_params} params.")
+
+    # params
+    # XXX: for huge models that can't fit into the host's RAM we will have to recode this to support
+    # out-of-core computing solution
+    offset = 0
+    total_numel = 0
+    total_params = 0
+    for name, shape in param_shapes.items():
+
+        unpartitioned_numel = shape.numel()
+        total_numel += unpartitioned_numel
+        total_params += 1
+
+        partitioned_numel, partitioned_padding_numel = zero3_partitioned_param_info(unpartitioned_numel, world_size)
+
+        if debug:
+            print(
+                f"Trainable params: {total_params} {name} full shape: {shape} partition0 numel={partitioned_numel} partitioned_padding_numel={partitioned_padding_numel}"
+            )
+
+        # XXX: memory usage doubles here
+        # Slice this param's segment out of every rank's flat buffer at the
+        # shared `offset`, concatenate, trim padding, and restore the shape.
+        state_dict[name] = torch.cat(
+            tuple(fp32_flat_groups[i].narrow(0, offset, partitioned_numel) for i in range(world_size)),
+            0).narrow(0, 0, unpartitioned_numel).view(shape)
+        offset += partitioned_numel
+
+    # `offset` counted per-rank elements; scale by world_size for the global total.
+    offset *= world_size
+
+    # Sanity check
+    if offset != avail_numel:
+        raise ValueError(f"consumed {offset} numels out of {avail_numel} - something is wrong")
+
+    print(f"Reconstructed Trainable fp32 state dict with {total_params} params {total_numel} elements")
+
+
+def _get_fp32_state_dict_from_zero3_checkpoint(world_size, fp32_flat_groups, zero_model_states,
+                                               exclude_frozen_parameters):
+    # Assemble a complete fp32 state_dict from ZeRO-3 shards, in order:
+    # buffers, frozen params (unless excluded), trainable params, and finally
+    # re-link parameters that were shared (tied) in the original model.
+    state_dict = OrderedDict()
+
+    # buffers
+    buffers = zero_model_states[0].buffers
+    state_dict.update(buffers)
+    if debug:
+        print(f"added {len(buffers)} buffers")
+
+    if not exclude_frozen_parameters:
+        _zero3_merge_frozen_params(state_dict, world_size, zero_model_states)
+
+    _zero3_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states)
+
+    # recover shared parameters
+    # Each `pair` is (alias_name, source_name); point the alias at the merged
+    # source tensor when the source was reconstructed above.
+    for pair in zero_model_states[0].shared_params:
+        if pair[1] in state_dict:
+            state_dict[pair[0]] = state_dict[pair[1]]
+
+    return state_dict
+
+
+def get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag=None, exclude_frozen_parameters=False):
+    """
+    Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated state_dict that can be loaded with
+    ``load_state_dict()`` and used for training without DeepSpeed or shared with others, for example
+    via a model hub.
+
+    Args:
+        - ``checkpoint_dir``: path to the desired checkpoint folder
+        - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in 'latest' file. e.g., ``global_step14``
+        - ``exclude_frozen_parameters``: exclude frozen parameters
+
+    Returns:
+        - pytorch ``state_dict``
+
+    Note: this approach may not work if your application doesn't have sufficient free CPU memory and
+    you may need to use the offline approach using the ``zero_to_fp32.py`` script that is saved with
+    the checkpoint.
+
+    A typical usage might be ::
+
+        from deepspeed.utils.zero_to_fp32 import get_fp32_state_dict_from_zero_checkpoint
+        # do the training and checkpoint saving
+        state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir) # already on cpu
+        model = model.cpu() # move to cpu
+        model.load_state_dict(state_dict)
+        # submit to model hub or save the model to share with others
+
+    In this example the ``model`` will no longer be usable in the deepspeed context of the same
+    application. i.e. you will need to re-initialize the deepspeed engine, since
+    ``model.load_state_dict(state_dict)`` will remove all the deepspeed magic from it.
+
+    If you want it all done for you, use ``load_state_dict_from_zero_checkpoint`` instead.
+
+    """
+    # Resolve the checkpoint tag: DeepSpeed writes the most recent tag name
+    # into a plain-text file called 'latest' inside the checkpoint folder.
+    if tag is None:
+        latest_path = os.path.join(checkpoint_dir, 'latest')
+        if os.path.isfile(latest_path):
+            with open(latest_path, 'r') as fd:
+                tag = fd.read().strip()
+        else:
+            raise ValueError(f"Unable to find 'latest' file at {latest_path}")
+
+    # The shard files live under <checkpoint_dir>/<tag>/ (e.g. .../global_step14/).
+    ds_checkpoint_dir = os.path.join(checkpoint_dir, tag)
+
+    if not os.path.isdir(ds_checkpoint_dir):
+        raise FileNotFoundError(f"Directory '{ds_checkpoint_dir}' doesn't exist")
+
+    return _get_fp32_state_dict_from_zero_checkpoint(ds_checkpoint_dir, exclude_frozen_parameters)
+
+
+def convert_zero_checkpoint_to_fp32_state_dict(checkpoint_dir, output_file, tag=None, exclude_frozen_parameters=False):
+    """
+    Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated ``state_dict`` file that can be
+    loaded with ``torch.load(file)`` + ``load_state_dict()`` and used for training without DeepSpeed.
+
+    Args:
+        - ``checkpoint_dir``: path to the desired checkpoint folder. (one that contains the tag-folder, like ``global_step14``)
+        - ``output_file``: path to the pytorch fp32 state_dict output file (e.g. path/pytorch_model.bin)
+        - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in the file named ``latest`` in the checkpoint folder, e.g., ``global_step14``
+        - ``exclude_frozen_parameters``: exclude frozen parameters
+    """
+
+    # Consolidate in memory first, then persist; the whole state_dict must fit
+    # in host RAM (see the note on get_fp32_state_dict_from_zero_checkpoint).
+    state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag, exclude_frozen_parameters)
+    print(f"Saving fp32 state dict to {output_file}")
+    torch.save(state_dict, output_file)
+
+
+def load_state_dict_from_zero_checkpoint(model, checkpoint_dir, tag=None):
+    """
+    1. Put the provided model to cpu
+    2. Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated ``state_dict``
+    3. Load it into the provided model
+
+    Args:
+        - ``model``: the model object to update
+        - ``checkpoint_dir``: path to the desired checkpoint folder. (one that contains the tag-folder, like ``global_step14``)
+        - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in the file named ``latest`` in the checkpoint folder, e.g., ``global_step14``
+
+    Returns:
+        - ``model``: modified model
+
+    Make sure you have plenty of CPU memory available before you call this function. If you don't
+    have enough use the ``zero_to_fp32.py`` utility to do the conversion. You will find it
+    conveniently placed for you in the checkpoint folder.
+
+    A typical usage might be ::
+
+        from deepspeed.utils.zero_to_fp32 import load_state_dict_from_zero_checkpoint
+        model = load_state_dict_from_zero_checkpoint(trainer.model, checkpoint_dir)
+        # submit to model hub or save the model to share with others
+
+    Note, that once this was run, the ``model`` will no longer be usable in the deepspeed context
+    of the same application. i.e. you will need to re-initialize the deepspeed engine, since
+    ``model.load_state_dict(state_dict)`` will remove all the deepspeed magic from it.
+
+    """
+    logger.info(f"Extracting fp32 weights")
+    state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag)
+
+    logger.info(f"Overwriting model with fp32 weights")
+    model = model.cpu()
+    # strict=False: the consolidated dict may omit keys (e.g. excluded frozen
+    # params or non-persistent buffers) that the live model still has.
+    model.load_state_dict(state_dict, strict=False)
+
+    return model
+
+
+if __name__ == "__main__":
+
+    # CLI entry point: convert a DeepSpeed ZeRO checkpoint folder into a single
+    # consolidated fp32 state_dict file on disk.
+    parser = argparse.ArgumentParser()
+    parser.add_argument("checkpoint_dir",
+                        type=str,
+                        help="path to the desired checkpoint folder, e.g., path/checkpoint-12")
+    parser.add_argument(
+        "output_file",
+        type=str,
+        help="path to the pytorch fp32 state_dict output file (e.g. path/checkpoint-12/pytorch_model.bin)")
+    parser.add_argument("-t",
+                        "--tag",
+                        type=str,
+                        default=None,
+                        help="checkpoint tag used as a unique identifier for checkpoint. e.g., global_step1")
+    parser.add_argument("--exclude_frozen_parameters", action='store_true', help="exclude frozen parameters")
+    parser.add_argument("-d", "--debug", action='store_true', help="enable debug")
+    args = parser.parse_args()
+
+    # Module-level `debug` flag gates the verbose prints in the merge helpers above.
+    debug = args.debug
+
+    convert_zero_checkpoint_to_fp32_state_dict(args.checkpoint_dir,
+                                               args.output_file,
+                                               tag=args.tag,
+                                               exclude_frozen_parameters=args.exclude_frozen_parameters)
diff --git a/checkpoint-957/README.md b/checkpoint-957/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..f4a3934800eeb082a0cb833d7b6af4f68eed3615
--- /dev/null
+++ b/checkpoint-957/README.md
@@ -0,0 +1,202 @@
+---
+base_model: nvidia/Llama-3_3-Nemotron-Super-49B-v1
+library_name: peft
+---
+
+# Model Card for Model ID
+
+
+
+
+
+## Model Details
+
+### Model Description
+
+
+
+
+
+- **Developed by:** [More Information Needed]
+- **Funded by [optional]:** [More Information Needed]
+- **Shared by [optional]:** [More Information Needed]
+- **Model type:** [More Information Needed]
+- **Language(s) (NLP):** [More Information Needed]
+- **License:** [More Information Needed]
+- **Finetuned from model [optional]:** [More Information Needed]
+
+### Model Sources [optional]
+
+
+
+- **Repository:** [More Information Needed]
+- **Paper [optional]:** [More Information Needed]
+- **Demo [optional]:** [More Information Needed]
+
+## Uses
+
+
+
+### Direct Use
+
+
+
+[More Information Needed]
+
+### Downstream Use [optional]
+
+
+
+[More Information Needed]
+
+### Out-of-Scope Use
+
+
+
+[More Information Needed]
+
+## Bias, Risks, and Limitations
+
+
+
+[More Information Needed]
+
+### Recommendations
+
+
+
+Users (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations.
+
+## How to Get Started with the Model
+
+Use the code below to get started with the model.
+
+[More Information Needed]
+
+## Training Details
+
+### Training Data
+
+
+
+[More Information Needed]
+
+### Training Procedure
+
+
+
+#### Preprocessing [optional]
+
+[More Information Needed]
+
+
+#### Training Hyperparameters
+
+- **Training regime:** [More Information Needed]
+
+#### Speeds, Sizes, Times [optional]
+
+
+
+[More Information Needed]
+
+## Evaluation
+
+
+
+### Testing Data, Factors & Metrics
+
+#### Testing Data
+
+
+
+[More Information Needed]
+
+#### Factors
+
+
+
+[More Information Needed]
+
+#### Metrics
+
+
+
+[More Information Needed]
+
+### Results
+
+[More Information Needed]
+
+#### Summary
+
+
+
+## Model Examination [optional]
+
+
+
+[More Information Needed]
+
+## Environmental Impact
+
+
+
+Carbon emissions can be estimated using the [Machine Learning Impact calculator](https://mlco2.github.io/impact#compute) presented in [Lacoste et al. (2019)](https://arxiv.org/abs/1910.09700).
+
+- **Hardware Type:** [More Information Needed]
+- **Hours used:** [More Information Needed]
+- **Cloud Provider:** [More Information Needed]
+- **Compute Region:** [More Information Needed]
+- **Carbon Emitted:** [More Information Needed]
+
+## Technical Specifications [optional]
+
+### Model Architecture and Objective
+
+[More Information Needed]
+
+### Compute Infrastructure
+
+[More Information Needed]
+
+#### Hardware
+
+[More Information Needed]
+
+#### Software
+
+[More Information Needed]
+
+## Citation [optional]
+
+
+
+**BibTeX:**
+
+[More Information Needed]
+
+**APA:**
+
+[More Information Needed]
+
+## Glossary [optional]
+
+
+
+[More Information Needed]
+
+## More Information [optional]
+
+[More Information Needed]
+
+## Model Card Authors [optional]
+
+[More Information Needed]
+
+## Model Card Contact
+
+[More Information Needed]
+### Framework versions
+
+- PEFT 0.15.0
\ No newline at end of file
diff --git a/checkpoint-957/adapter_config.json b/checkpoint-957/adapter_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..1edb33780e2306c6b19fd727be8e9b8b35f237c4
--- /dev/null
+++ b/checkpoint-957/adapter_config.json
@@ -0,0 +1,42 @@
+{
+ "alpha_pattern": {},
+ "auto_mapping": null,
+ "base_model_name_or_path": "nvidia/Llama-3_3-Nemotron-Super-49B-v1",
+ "bias": "none",
+ "corda_config": null,
+ "eva_config": null,
+ "exclude_modules": null,
+ "fan_in_fan_out": null,
+ "inference_mode": true,
+ "init_lora_weights": true,
+ "layer_replication": null,
+ "layers_pattern": null,
+ "layers_to_transform": null,
+ "loftq_config": {},
+ "lora_alpha": 512,
+ "lora_bias": false,
+ "lora_dropout": 0.05,
+ "megatron_config": null,
+ "megatron_core": "megatron.core",
+ "modules_to_save": [
+ "embed_tokens",
+ "lm_head"
+ ],
+ "peft_type": "LORA",
+ "r": 256,
+ "rank_pattern": {},
+ "revision": null,
+ "target_modules": [
+ "gate_proj",
+ "k_proj",
+ "down_proj",
+ "o_proj",
+ "v_proj",
+ "up_proj",
+ "q_proj"
+ ],
+ "task_type": "CAUSAL_LM",
+ "trainable_token_indices": null,
+ "use_dora": false,
+ "use_rslora": false
+}
\ No newline at end of file
diff --git a/checkpoint-957/adapter_model.safetensors b/checkpoint-957/adapter_model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..a8c9d6b843d8111d855bbc5eb986462e822d368b
--- /dev/null
+++ b/checkpoint-957/adapter_model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2c4fba160182b44f123fa5befe8bedc341afb7e8211a9cd9bc3fbca99ca305b5
+size 9016826528
diff --git a/checkpoint-957/global_step955/bf16_zero_pp_rank_0_mp_rank_00_optim_states.pt b/checkpoint-957/global_step955/bf16_zero_pp_rank_0_mp_rank_00_optim_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..a724faedd79f5382b05276d9a359dcc1ede25443
--- /dev/null
+++ b/checkpoint-957/global_step955/bf16_zero_pp_rank_0_mp_rank_00_optim_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:40076c054b3488882b5e837f16109e83c3315347b3ca4f298824bdcd2c9bacb4
+size 27050164444
diff --git a/checkpoint-957/global_step955/bf16_zero_pp_rank_1_mp_rank_00_optim_states.pt b/checkpoint-957/global_step955/bf16_zero_pp_rank_1_mp_rank_00_optim_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..874a416bdc64c3ad0c3c7031a834e25d15e4a672
--- /dev/null
+++ b/checkpoint-957/global_step955/bf16_zero_pp_rank_1_mp_rank_00_optim_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:eb9a32d60802d2a9b844ff380cf1876a06f7f0646db0c424c6579d3777bfdfdd
+size 27050169884
diff --git a/checkpoint-957/global_step955/mp_rank_00_model_states.pt b/checkpoint-957/global_step955/mp_rank_00_model_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..0644f96cb6aab15fa0e1df62cedea128c2ceeadb
--- /dev/null
+++ b/checkpoint-957/global_step955/mp_rank_00_model_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2ecf4f5c9ba44f29a9e7ec10c139aad4b1f18fe5ef5681fa50628d9856f911a6
+size 9776788601
diff --git a/checkpoint-957/latest b/checkpoint-957/latest
new file mode 100644
index 0000000000000000000000000000000000000000..a38f3310777331219ffe849f9792af39e25062aa
--- /dev/null
+++ b/checkpoint-957/latest
@@ -0,0 +1 @@
+global_step955
\ No newline at end of file
diff --git a/checkpoint-957/rng_state_0.pth b/checkpoint-957/rng_state_0.pth
new file mode 100644
index 0000000000000000000000000000000000000000..cd433dc6ccf2ddc46e1878a11dadf9e5c078b35e
--- /dev/null
+++ b/checkpoint-957/rng_state_0.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:dc4f5c99c78e3b43d8fbf8b1690558a76dbbc4bc1c01d9b02588cf55acbab3f5
+size 14512
diff --git a/checkpoint-957/rng_state_1.pth b/checkpoint-957/rng_state_1.pth
new file mode 100644
index 0000000000000000000000000000000000000000..1566f5c64389939de8b378c9754ada5a631dc736
--- /dev/null
+++ b/checkpoint-957/rng_state_1.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3ea50dbf438d2f5ead49adeca5dc89fe039ae8b6ae03733eebb2d3e7f23a212c
+size 14512
diff --git a/checkpoint-957/scheduler.pt b/checkpoint-957/scheduler.pt
new file mode 100644
index 0000000000000000000000000000000000000000..3ab89c03232ef93703e3ba22df18f9336b987146
--- /dev/null
+++ b/checkpoint-957/scheduler.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6d456ff9f9125960678c7ceb13f24f7866014562349aeaab4aaf28f8cd5da86e
+size 1064
diff --git a/checkpoint-957/special_tokens_map.json b/checkpoint-957/special_tokens_map.json
new file mode 100644
index 0000000000000000000000000000000000000000..278b7f0f84be865c4687700ee7b3c63d89a51e18
--- /dev/null
+++ b/checkpoint-957/special_tokens_map.json
@@ -0,0 +1,23 @@
+{
+ "bos_token": {
+ "content": "<|begin_of_text|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "<|eot_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": {
+ "content": "<|end_of_text|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+}
diff --git a/checkpoint-957/tokenizer.json b/checkpoint-957/tokenizer.json
new file mode 100644
index 0000000000000000000000000000000000000000..1c1d8d5c9024994f1d3b00f9662b8dd89ca13cf2
--- /dev/null
+++ b/checkpoint-957/tokenizer.json
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6b9e4e7fb171f92fd137b777cc2714bf87d11576700a1dcd7a399e7bbe39537b
+size 17209920
diff --git a/checkpoint-957/tokenizer_config.json b/checkpoint-957/tokenizer_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..edd01b980c1db496ea102a51c972ee8f5d1a2c74
--- /dev/null
+++ b/checkpoint-957/tokenizer_config.json
@@ -0,0 +1,2064 @@
+{
+ "added_tokens_decoder": {
+ "128000": {
+ "content": "<|begin_of_text|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128001": {
+ "content": "<|end_of_text|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128002": {
+ "content": "<|reserved_special_token_0|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128003": {
+ "content": "<|reserved_special_token_1|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128004": {
+ "content": "<|finetune_right_pad_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128005": {
+ "content": "<|reserved_special_token_2|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128006": {
+ "content": "<|start_header_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128007": {
+ "content": "<|end_header_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128008": {
+ "content": "<|eom_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128009": {
+ "content": "<|eot_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128010": {
+ "content": "<|python_tag|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128011": {
+ "content": "<|reserved_special_token_3|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128012": {
+ "content": "<|reserved_special_token_4|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128013": {
+ "content": "<|reserved_special_token_5|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128014": {
+ "content": "<|reserved_special_token_6|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128015": {
+ "content": "<|reserved_special_token_7|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128016": {
+ "content": "<|reserved_special_token_8|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128017": {
+ "content": "<|reserved_special_token_9|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128018": {
+ "content": "<|reserved_special_token_10|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128019": {
+ "content": "<|reserved_special_token_11|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128020": {
+ "content": "<|reserved_special_token_12|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128021": {
+ "content": "<|reserved_special_token_13|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128022": {
+ "content": "<|reserved_special_token_14|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128023": {
+ "content": "<|reserved_special_token_15|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128024": {
+ "content": "<|reserved_special_token_16|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128025": {
+ "content": "<|reserved_special_token_17|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128026": {
+ "content": "<|reserved_special_token_18|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128027": {
+ "content": "<|reserved_special_token_19|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128028": {
+ "content": "<|reserved_special_token_20|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128029": {
+ "content": "<|reserved_special_token_21|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128030": {
+ "content": "<|reserved_special_token_22|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128031": {
+ "content": "<|reserved_special_token_23|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128032": {
+ "content": "<|reserved_special_token_24|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128033": {
+ "content": "<|reserved_special_token_25|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128034": {
+ "content": "<|reserved_special_token_26|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128035": {
+ "content": "<|reserved_special_token_27|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128036": {
+ "content": "<|reserved_special_token_28|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128037": {
+ "content": "<|reserved_special_token_29|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128038": {
+ "content": "<|reserved_special_token_30|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128039": {
+ "content": "<|reserved_special_token_31|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128040": {
+ "content": "<|reserved_special_token_32|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128041": {
+ "content": "<|reserved_special_token_33|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128042": {
+ "content": "<|reserved_special_token_34|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128043": {
+ "content": "<|reserved_special_token_35|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128044": {
+ "content": "<|reserved_special_token_36|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128045": {
+ "content": "<|reserved_special_token_37|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128046": {
+ "content": "<|reserved_special_token_38|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128047": {
+ "content": "<|reserved_special_token_39|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128048": {
+ "content": "<|reserved_special_token_40|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128049": {
+ "content": "<|reserved_special_token_41|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128050": {
+ "content": "<|reserved_special_token_42|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128051": {
+ "content": "<|reserved_special_token_43|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128052": {
+ "content": "<|reserved_special_token_44|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128053": {
+ "content": "<|reserved_special_token_45|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128054": {
+ "content": "<|reserved_special_token_46|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128055": {
+ "content": "<|reserved_special_token_47|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128056": {
+ "content": "<|reserved_special_token_48|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128057": {
+ "content": "<|reserved_special_token_49|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128058": {
+ "content": "<|reserved_special_token_50|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128059": {
+ "content": "<|reserved_special_token_51|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128060": {
+ "content": "<|reserved_special_token_52|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128061": {
+ "content": "<|reserved_special_token_53|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128062": {
+ "content": "<|reserved_special_token_54|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128063": {
+ "content": "<|reserved_special_token_55|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128064": {
+ "content": "<|reserved_special_token_56|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128065": {
+ "content": "<|reserved_special_token_57|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128066": {
+ "content": "<|reserved_special_token_58|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128067": {
+ "content": "<|reserved_special_token_59|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128068": {
+ "content": "<|reserved_special_token_60|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128069": {
+ "content": "<|reserved_special_token_61|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128070": {
+ "content": "<|reserved_special_token_62|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128071": {
+ "content": "<|reserved_special_token_63|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128072": {
+ "content": "<|reserved_special_token_64|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128073": {
+ "content": "<|reserved_special_token_65|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128074": {
+ "content": "<|reserved_special_token_66|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128075": {
+ "content": "<|reserved_special_token_67|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128076": {
+ "content": "<|reserved_special_token_68|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128077": {
+ "content": "<|reserved_special_token_69|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128078": {
+ "content": "<|reserved_special_token_70|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128079": {
+ "content": "<|reserved_special_token_71|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128080": {
+ "content": "<|reserved_special_token_72|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128081": {
+ "content": "<|reserved_special_token_73|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128082": {
+ "content": "<|reserved_special_token_74|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128083": {
+ "content": "<|reserved_special_token_75|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128084": {
+ "content": "<|reserved_special_token_76|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128085": {
+ "content": "<|reserved_special_token_77|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128086": {
+ "content": "<|reserved_special_token_78|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128087": {
+ "content": "<|reserved_special_token_79|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128088": {
+ "content": "<|reserved_special_token_80|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128089": {
+ "content": "<|reserved_special_token_81|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128090": {
+ "content": "<|reserved_special_token_82|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128091": {
+ "content": "<|reserved_special_token_83|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128092": {
+ "content": "<|reserved_special_token_84|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128093": {
+ "content": "<|reserved_special_token_85|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128094": {
+ "content": "<|reserved_special_token_86|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128095": {
+ "content": "<|reserved_special_token_87|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128096": {
+ "content": "<|reserved_special_token_88|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128097": {
+ "content": "<|reserved_special_token_89|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128098": {
+ "content": "<|reserved_special_token_90|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128099": {
+ "content": "<|reserved_special_token_91|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128100": {
+ "content": "<|reserved_special_token_92|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128101": {
+ "content": "<|reserved_special_token_93|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128102": {
+ "content": "<|reserved_special_token_94|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128103": {
+ "content": "<|reserved_special_token_95|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128104": {
+ "content": "<|reserved_special_token_96|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128105": {
+ "content": "<|reserved_special_token_97|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128106": {
+ "content": "<|reserved_special_token_98|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128107": {
+ "content": "<|reserved_special_token_99|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128108": {
+ "content": "<|reserved_special_token_100|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128109": {
+ "content": "<|reserved_special_token_101|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128110": {
+ "content": "<|reserved_special_token_102|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128111": {
+ "content": "<|reserved_special_token_103|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128112": {
+ "content": "<|reserved_special_token_104|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128113": {
+ "content": "<|reserved_special_token_105|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128114": {
+ "content": "<|reserved_special_token_106|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128115": {
+ "content": "<|reserved_special_token_107|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128116": {
+ "content": "<|reserved_special_token_108|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128117": {
+ "content": "<|reserved_special_token_109|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128118": {
+ "content": "<|reserved_special_token_110|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128119": {
+ "content": "<|reserved_special_token_111|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128120": {
+ "content": "<|reserved_special_token_112|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128121": {
+ "content": "<|reserved_special_token_113|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128122": {
+ "content": "<|reserved_special_token_114|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128123": {
+ "content": "<|reserved_special_token_115|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128124": {
+ "content": "<|reserved_special_token_116|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128125": {
+ "content": "<|reserved_special_token_117|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128126": {
+ "content": "<|reserved_special_token_118|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128127": {
+ "content": "<|reserved_special_token_119|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128128": {
+ "content": "<|reserved_special_token_120|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128129": {
+ "content": "<|reserved_special_token_121|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128130": {
+ "content": "<|reserved_special_token_122|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128131": {
+ "content": "<|reserved_special_token_123|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128132": {
+ "content": "<|reserved_special_token_124|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128133": {
+ "content": "<|reserved_special_token_125|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128134": {
+ "content": "<|reserved_special_token_126|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128135": {
+ "content": "<|reserved_special_token_127|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128136": {
+ "content": "<|reserved_special_token_128|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128137": {
+ "content": "<|reserved_special_token_129|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128138": {
+ "content": "<|reserved_special_token_130|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128139": {
+ "content": "<|reserved_special_token_131|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128140": {
+ "content": "<|reserved_special_token_132|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128141": {
+ "content": "<|reserved_special_token_133|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128142": {
+ "content": "<|reserved_special_token_134|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128143": {
+ "content": "<|reserved_special_token_135|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128144": {
+ "content": "<|reserved_special_token_136|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128145": {
+ "content": "<|reserved_special_token_137|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128146": {
+ "content": "<|reserved_special_token_138|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128147": {
+ "content": "<|reserved_special_token_139|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128148": {
+ "content": "<|reserved_special_token_140|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128149": {
+ "content": "<|reserved_special_token_141|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128150": {
+ "content": "<|reserved_special_token_142|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128151": {
+ "content": "<|reserved_special_token_143|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128152": {
+ "content": "<|reserved_special_token_144|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128153": {
+ "content": "<|reserved_special_token_145|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128154": {
+ "content": "<|reserved_special_token_146|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128155": {
+ "content": "<|reserved_special_token_147|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128156": {
+ "content": "<|reserved_special_token_148|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128157": {
+ "content": "<|reserved_special_token_149|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128158": {
+ "content": "<|reserved_special_token_150|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128159": {
+ "content": "<|reserved_special_token_151|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128160": {
+ "content": "<|reserved_special_token_152|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128161": {
+ "content": "<|reserved_special_token_153|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128162": {
+ "content": "<|reserved_special_token_154|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128163": {
+ "content": "<|reserved_special_token_155|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128164": {
+ "content": "<|reserved_special_token_156|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128165": {
+ "content": "<|reserved_special_token_157|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128166": {
+ "content": "<|reserved_special_token_158|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128167": {
+ "content": "<|reserved_special_token_159|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128168": {
+ "content": "<|reserved_special_token_160|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128169": {
+ "content": "<|reserved_special_token_161|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128170": {
+ "content": "<|reserved_special_token_162|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128171": {
+ "content": "<|reserved_special_token_163|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128172": {
+ "content": "<|reserved_special_token_164|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128173": {
+ "content": "<|reserved_special_token_165|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128174": {
+ "content": "<|reserved_special_token_166|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128175": {
+ "content": "<|reserved_special_token_167|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128176": {
+ "content": "<|reserved_special_token_168|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128177": {
+ "content": "<|reserved_special_token_169|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128178": {
+ "content": "<|reserved_special_token_170|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128179": {
+ "content": "<|reserved_special_token_171|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128180": {
+ "content": "<|reserved_special_token_172|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128181": {
+ "content": "<|reserved_special_token_173|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128182": {
+ "content": "<|reserved_special_token_174|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128183": {
+ "content": "<|reserved_special_token_175|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128184": {
+ "content": "<|reserved_special_token_176|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128185": {
+ "content": "<|reserved_special_token_177|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128186": {
+ "content": "<|reserved_special_token_178|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128187": {
+ "content": "<|reserved_special_token_179|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128188": {
+ "content": "<|reserved_special_token_180|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128189": {
+ "content": "<|reserved_special_token_181|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128190": {
+ "content": "<|reserved_special_token_182|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128191": {
+ "content": "<|reserved_special_token_183|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128192": {
+ "content": "<|reserved_special_token_184|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128193": {
+ "content": "<|reserved_special_token_185|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128194": {
+ "content": "<|reserved_special_token_186|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128195": {
+ "content": "<|reserved_special_token_187|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128196": {
+ "content": "<|reserved_special_token_188|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128197": {
+ "content": "<|reserved_special_token_189|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128198": {
+ "content": "<|reserved_special_token_190|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128199": {
+ "content": "<|reserved_special_token_191|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128200": {
+ "content": "<|reserved_special_token_192|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128201": {
+ "content": "<|reserved_special_token_193|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128202": {
+ "content": "<|reserved_special_token_194|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128203": {
+ "content": "<|reserved_special_token_195|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128204": {
+ "content": "<|reserved_special_token_196|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128205": {
+ "content": "<|reserved_special_token_197|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128206": {
+ "content": "<|reserved_special_token_198|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128207": {
+ "content": "<|reserved_special_token_199|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128208": {
+ "content": "<|reserved_special_token_200|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128209": {
+ "content": "<|reserved_special_token_201|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128210": {
+ "content": "<|reserved_special_token_202|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128211": {
+ "content": "<|reserved_special_token_203|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128212": {
+ "content": "<|reserved_special_token_204|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128213": {
+ "content": "<|reserved_special_token_205|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128214": {
+ "content": "<|reserved_special_token_206|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128215": {
+ "content": "<|reserved_special_token_207|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128216": {
+ "content": "<|reserved_special_token_208|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128217": {
+ "content": "<|reserved_special_token_209|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128218": {
+ "content": "<|reserved_special_token_210|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128219": {
+ "content": "<|reserved_special_token_211|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128220": {
+ "content": "<|reserved_special_token_212|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128221": {
+ "content": "<|reserved_special_token_213|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128222": {
+ "content": "<|reserved_special_token_214|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128223": {
+ "content": "<|reserved_special_token_215|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128224": {
+ "content": "<|reserved_special_token_216|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128225": {
+ "content": "<|reserved_special_token_217|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128226": {
+ "content": "<|reserved_special_token_218|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128227": {
+ "content": "<|reserved_special_token_219|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128228": {
+ "content": "<|reserved_special_token_220|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128229": {
+ "content": "<|reserved_special_token_221|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128230": {
+ "content": "<|reserved_special_token_222|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128231": {
+ "content": "<|reserved_special_token_223|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128232": {
+ "content": "<|reserved_special_token_224|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128233": {
+ "content": "<|reserved_special_token_225|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128234": {
+ "content": "<|reserved_special_token_226|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128235": {
+ "content": "<|reserved_special_token_227|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128236": {
+ "content": "<|reserved_special_token_228|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128237": {
+ "content": "<|reserved_special_token_229|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128238": {
+ "content": "<|reserved_special_token_230|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128239": {
+ "content": "<|reserved_special_token_231|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128240": {
+ "content": "<|reserved_special_token_232|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128241": {
+ "content": "<|reserved_special_token_233|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128242": {
+ "content": "<|reserved_special_token_234|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128243": {
+ "content": "<|reserved_special_token_235|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128244": {
+ "content": "<|reserved_special_token_236|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128245": {
+ "content": "<|reserved_special_token_237|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128246": {
+ "content": "<|reserved_special_token_238|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128247": {
+ "content": "<|reserved_special_token_239|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128248": {
+ "content": "<|reserved_special_token_240|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128249": {
+ "content": "<|reserved_special_token_241|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128250": {
+ "content": "<|reserved_special_token_242|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128251": {
+ "content": "<|reserved_special_token_243|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128252": {
+ "content": "<|reserved_special_token_244|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128253": {
+ "content": "<|reserved_special_token_245|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128254": {
+ "content": "<|reserved_special_token_246|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128255": {
+ "content": "<|reserved_special_token_247|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ }
+ },
+ "bos_token": "<|begin_of_text|>",
+ "chat_template": "{{- bos_token }}{%- if messages[0]['role'] == 'system' %}{%- set system_message = messages[0]['content']|trim %}{%- set messages = messages[1:] %}{%- else %}{%- set system_message = \"\" %}{%- endif %}{{- \"<|start_header_id|>system<|end_header_id|>\\n\\n\" }}{{- system_message }}{{- \"<|eot_id|>\" }}{%- for message in messages %}{%- if message['role'] == 'assistant' and '</think>' in message['content'] %}{%- set content = message['content'].split('</think>')[-1].lstrip() %}{%- else %}{%- set content = message['content'] %}{%- endif %}{{- '<|start_header_id|>' + message['role'] + '<|end_header_id|>\\n\\n' + content | trim + '<|eot_id|>' }}{%- endfor %}{%- if add_generation_prompt %}{{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' }}{%- endif %}",
+ "clean_up_tokenization_spaces": true,
+ "eos_token": "<|eot_id|>",
+ "extra_special_tokens": {},
+ "model_input_names": [
+ "input_ids",
+ "attention_mask"
+ ],
+ "model_max_length": 131072,
+ "pad_token": "<|end_of_text|>",
+ "tokenizer_class": "PreTrainedTokenizer"
+}
diff --git a/checkpoint-957/trainer_state.json b/checkpoint-957/trainer_state.json
new file mode 100644
index 0000000000000000000000000000000000000000..5e71b3f0db3ed1ba12f9b2adb765e62899ac6c5f
--- /dev/null
+++ b/checkpoint-957/trainer_state.json
@@ -0,0 +1,6732 @@
+{
+ "best_metric": null,
+ "best_model_checkpoint": null,
+ "epoch": 2.9926931106471817,
+ "eval_steps": 500,
+ "global_step": 957,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ {
+ "epoch": 0.003131524008350731,
+ "grad_norm": 13.917898178100586,
+ "learning_rate": 5.0000000000000004e-08,
+ "loss": 4.1051,
+ "step": 1
+ },
+ {
+ "epoch": 0.006263048016701462,
+ "grad_norm": 17.327869415283203,
+ "learning_rate": 1.0000000000000001e-07,
+ "loss": 4.1048,
+ "step": 2
+ },
+ {
+ "epoch": 0.009394572025052192,
+ "grad_norm": 14.063946723937988,
+ "learning_rate": 1.5000000000000002e-07,
+ "loss": 4.0741,
+ "step": 3
+ },
+ {
+ "epoch": 0.012526096033402923,
+ "grad_norm": 16.817699432373047,
+ "learning_rate": 2.0000000000000002e-07,
+ "loss": 4.2002,
+ "step": 4
+ },
+ {
+ "epoch": 0.015657620041753653,
+ "grad_norm": 14.47036361694336,
+ "learning_rate": 2.5000000000000004e-07,
+ "loss": 4.2652,
+ "step": 5
+ },
+ {
+ "epoch": 0.018789144050104383,
+ "grad_norm": 14.474193572998047,
+ "learning_rate": 3.0000000000000004e-07,
+ "loss": 4.0888,
+ "step": 6
+ },
+ {
+ "epoch": 0.021920668058455117,
+ "grad_norm": 14.865458488464355,
+ "learning_rate": 3.5000000000000004e-07,
+ "loss": 4.0014,
+ "step": 7
+ },
+ {
+ "epoch": 0.025052192066805846,
+ "grad_norm": 15.338888168334961,
+ "learning_rate": 4.0000000000000003e-07,
+ "loss": 4.13,
+ "step": 8
+ },
+ {
+ "epoch": 0.028183716075156576,
+ "grad_norm": 15.154336929321289,
+ "learning_rate": 4.5000000000000003e-07,
+ "loss": 4.2493,
+ "step": 9
+ },
+ {
+ "epoch": 0.031315240083507306,
+ "grad_norm": 15.919597625732422,
+ "learning_rate": 5.000000000000001e-07,
+ "loss": 4.0535,
+ "step": 10
+ },
+ {
+ "epoch": 0.03444676409185804,
+ "grad_norm": 14.981926918029785,
+ "learning_rate": 5.5e-07,
+ "loss": 3.9064,
+ "step": 11
+ },
+ {
+ "epoch": 0.037578288100208766,
+ "grad_norm": 13.36101245880127,
+ "learning_rate": 6.000000000000001e-07,
+ "loss": 4.1939,
+ "step": 12
+ },
+ {
+ "epoch": 0.0407098121085595,
+ "grad_norm": 15.58773422241211,
+ "learning_rate": 6.5e-07,
+ "loss": 4.18,
+ "step": 13
+ },
+ {
+ "epoch": 0.04384133611691023,
+ "grad_norm": 13.560139656066895,
+ "learning_rate": 7.000000000000001e-07,
+ "loss": 3.9414,
+ "step": 14
+ },
+ {
+ "epoch": 0.04697286012526096,
+ "grad_norm": 12.307971954345703,
+ "learning_rate": 7.5e-07,
+ "loss": 3.8836,
+ "step": 15
+ },
+ {
+ "epoch": 0.05010438413361169,
+ "grad_norm": 14.533182144165039,
+ "learning_rate": 8.000000000000001e-07,
+ "loss": 4.1551,
+ "step": 16
+ },
+ {
+ "epoch": 0.05323590814196242,
+ "grad_norm": 13.453729629516602,
+ "learning_rate": 8.500000000000001e-07,
+ "loss": 4.0048,
+ "step": 17
+ },
+ {
+ "epoch": 0.05636743215031315,
+ "grad_norm": 13.45992374420166,
+ "learning_rate": 9.000000000000001e-07,
+ "loss": 4.0745,
+ "step": 18
+ },
+ {
+ "epoch": 0.059498956158663886,
+ "grad_norm": 11.857145309448242,
+ "learning_rate": 9.500000000000001e-07,
+ "loss": 3.9871,
+ "step": 19
+ },
+ {
+ "epoch": 0.06263048016701461,
+ "grad_norm": 11.872294425964355,
+ "learning_rate": 1.0000000000000002e-06,
+ "loss": 3.8959,
+ "step": 20
+ },
+ {
+ "epoch": 0.06576200417536535,
+ "grad_norm": 12.969825744628906,
+ "learning_rate": 1.0500000000000001e-06,
+ "loss": 4.0308,
+ "step": 21
+ },
+ {
+ "epoch": 0.06889352818371608,
+ "grad_norm": 12.33769416809082,
+ "learning_rate": 1.1e-06,
+ "loss": 3.9341,
+ "step": 22
+ },
+ {
+ "epoch": 0.0720250521920668,
+ "grad_norm": 12.669405937194824,
+ "learning_rate": 1.1500000000000002e-06,
+ "loss": 3.8511,
+ "step": 23
+ },
+ {
+ "epoch": 0.07515657620041753,
+ "grad_norm": 10.677213668823242,
+ "learning_rate": 1.2000000000000002e-06,
+ "loss": 3.7764,
+ "step": 24
+ },
+ {
+ "epoch": 0.07828810020876827,
+ "grad_norm": 10.366402626037598,
+ "learning_rate": 1.25e-06,
+ "loss": 3.5291,
+ "step": 25
+ },
+ {
+ "epoch": 0.081419624217119,
+ "grad_norm": 11.211421012878418,
+ "learning_rate": 1.3e-06,
+ "loss": 3.5765,
+ "step": 26
+ },
+ {
+ "epoch": 0.08455114822546973,
+ "grad_norm": 11.313716888427734,
+ "learning_rate": 1.3500000000000002e-06,
+ "loss": 3.4849,
+ "step": 27
+ },
+ {
+ "epoch": 0.08768267223382047,
+ "grad_norm": 10.41294002532959,
+ "learning_rate": 1.4000000000000001e-06,
+ "loss": 3.2653,
+ "step": 28
+ },
+ {
+ "epoch": 0.09081419624217119,
+ "grad_norm": 10.40064525604248,
+ "learning_rate": 1.45e-06,
+ "loss": 3.3384,
+ "step": 29
+ },
+ {
+ "epoch": 0.09394572025052192,
+ "grad_norm": 10.05427074432373,
+ "learning_rate": 1.5e-06,
+ "loss": 3.2257,
+ "step": 30
+ },
+ {
+ "epoch": 0.09707724425887265,
+ "grad_norm": 9.583163261413574,
+ "learning_rate": 1.5500000000000002e-06,
+ "loss": 3.1371,
+ "step": 31
+ },
+ {
+ "epoch": 0.10020876826722339,
+ "grad_norm": 10.09977912902832,
+ "learning_rate": 1.6000000000000001e-06,
+ "loss": 3.0658,
+ "step": 32
+ },
+ {
+ "epoch": 0.10334029227557412,
+ "grad_norm": 9.271486282348633,
+ "learning_rate": 1.6500000000000003e-06,
+ "loss": 2.9693,
+ "step": 33
+ },
+ {
+ "epoch": 0.10647181628392484,
+ "grad_norm": 10.687992095947266,
+ "learning_rate": 1.7000000000000002e-06,
+ "loss": 2.95,
+ "step": 34
+ },
+ {
+ "epoch": 0.10960334029227557,
+ "grad_norm": 8.762290000915527,
+ "learning_rate": 1.75e-06,
+ "loss": 2.8286,
+ "step": 35
+ },
+ {
+ "epoch": 0.1127348643006263,
+ "grad_norm": 10.13785171508789,
+ "learning_rate": 1.8000000000000001e-06,
+ "loss": 2.3664,
+ "step": 36
+ },
+ {
+ "epoch": 0.11586638830897704,
+ "grad_norm": 18.301353454589844,
+ "learning_rate": 1.85e-06,
+ "loss": 2.5533,
+ "step": 37
+ },
+ {
+ "epoch": 0.11899791231732777,
+ "grad_norm": 11.490377426147461,
+ "learning_rate": 1.9000000000000002e-06,
+ "loss": 2.6133,
+ "step": 38
+ },
+ {
+ "epoch": 0.12212943632567849,
+ "grad_norm": 15.614163398742676,
+ "learning_rate": 1.9500000000000004e-06,
+ "loss": 2.3596,
+ "step": 39
+ },
+ {
+ "epoch": 0.12526096033402923,
+ "grad_norm": 17.757442474365234,
+ "learning_rate": 2.0000000000000003e-06,
+ "loss": 2.3491,
+ "step": 40
+ },
+ {
+ "epoch": 0.12839248434237996,
+ "grad_norm": 17.18431854248047,
+ "learning_rate": 2.05e-06,
+ "loss": 2.2361,
+ "step": 41
+ },
+ {
+ "epoch": 0.1315240083507307,
+ "grad_norm": 16.149789810180664,
+ "learning_rate": 2.1000000000000002e-06,
+ "loss": 2.1457,
+ "step": 42
+ },
+ {
+ "epoch": 0.13465553235908143,
+ "grad_norm": 15.256914138793945,
+ "learning_rate": 2.15e-06,
+ "loss": 2.12,
+ "step": 43
+ },
+ {
+ "epoch": 0.13778705636743216,
+ "grad_norm": 15.537406921386719,
+ "learning_rate": 2.2e-06,
+ "loss": 2.1877,
+ "step": 44
+ },
+ {
+ "epoch": 0.1409185803757829,
+ "grad_norm": 7.947713851928711,
+ "learning_rate": 2.25e-06,
+ "loss": 2.1648,
+ "step": 45
+ },
+ {
+ "epoch": 0.1440501043841336,
+ "grad_norm": 8.818676948547363,
+ "learning_rate": 2.3000000000000004e-06,
+ "loss": 2.134,
+ "step": 46
+ },
+ {
+ "epoch": 0.14718162839248433,
+ "grad_norm": 5.175768852233887,
+ "learning_rate": 2.35e-06,
+ "loss": 2.0796,
+ "step": 47
+ },
+ {
+ "epoch": 0.15031315240083507,
+ "grad_norm": 6.750611305236816,
+ "learning_rate": 2.4000000000000003e-06,
+ "loss": 1.9174,
+ "step": 48
+ },
+ {
+ "epoch": 0.1534446764091858,
+ "grad_norm": 6.2147979736328125,
+ "learning_rate": 2.4500000000000003e-06,
+ "loss": 1.8065,
+ "step": 49
+ },
+ {
+ "epoch": 0.15657620041753653,
+ "grad_norm": 13.291611671447754,
+ "learning_rate": 2.5e-06,
+ "loss": 1.7061,
+ "step": 50
+ },
+ {
+ "epoch": 0.15970772442588727,
+ "grad_norm": 7.251201629638672,
+ "learning_rate": 2.55e-06,
+ "loss": 1.7924,
+ "step": 51
+ },
+ {
+ "epoch": 0.162839248434238,
+ "grad_norm": 5.2126054763793945,
+ "learning_rate": 2.6e-06,
+ "loss": 1.6735,
+ "step": 52
+ },
+ {
+ "epoch": 0.16597077244258873,
+ "grad_norm": 5.435528755187988,
+ "learning_rate": 2.6500000000000005e-06,
+ "loss": 1.6265,
+ "step": 53
+ },
+ {
+ "epoch": 0.16910229645093947,
+ "grad_norm": 4.505807399749756,
+ "learning_rate": 2.7000000000000004e-06,
+ "loss": 1.4851,
+ "step": 54
+ },
+ {
+ "epoch": 0.1722338204592902,
+ "grad_norm": 5.128388404846191,
+ "learning_rate": 2.7500000000000004e-06,
+ "loss": 1.5832,
+ "step": 55
+ },
+ {
+ "epoch": 0.17536534446764093,
+ "grad_norm": 16.935827255249023,
+ "learning_rate": 2.8000000000000003e-06,
+ "loss": 1.6553,
+ "step": 56
+ },
+ {
+ "epoch": 0.17849686847599164,
+ "grad_norm": 3.664458990097046,
+ "learning_rate": 2.85e-06,
+ "loss": 1.5,
+ "step": 57
+ },
+ {
+ "epoch": 0.18162839248434237,
+ "grad_norm": 7.763802528381348,
+ "learning_rate": 2.9e-06,
+ "loss": 1.367,
+ "step": 58
+ },
+ {
+ "epoch": 0.1847599164926931,
+ "grad_norm": 3.2216155529022217,
+ "learning_rate": 2.95e-06,
+ "loss": 1.3863,
+ "step": 59
+ },
+ {
+ "epoch": 0.18789144050104384,
+ "grad_norm": 4.384445667266846,
+ "learning_rate": 3e-06,
+ "loss": 1.4247,
+ "step": 60
+ },
+ {
+ "epoch": 0.19102296450939457,
+ "grad_norm": 4.8080878257751465,
+ "learning_rate": 3.05e-06,
+ "loss": 1.3257,
+ "step": 61
+ },
+ {
+ "epoch": 0.1941544885177453,
+ "grad_norm": 4.154761791229248,
+ "learning_rate": 3.1000000000000004e-06,
+ "loss": 1.321,
+ "step": 62
+ },
+ {
+ "epoch": 0.19728601252609604,
+ "grad_norm": 6.4742112159729,
+ "learning_rate": 3.1500000000000003e-06,
+ "loss": 1.2823,
+ "step": 63
+ },
+ {
+ "epoch": 0.20041753653444677,
+ "grad_norm": 2.583422899246216,
+ "learning_rate": 3.2000000000000003e-06,
+ "loss": 1.2136,
+ "step": 64
+ },
+ {
+ "epoch": 0.2035490605427975,
+ "grad_norm": 4.1933488845825195,
+ "learning_rate": 3.2500000000000002e-06,
+ "loss": 1.1855,
+ "step": 65
+ },
+ {
+ "epoch": 0.20668058455114824,
+ "grad_norm": 4.11049747467041,
+ "learning_rate": 3.3000000000000006e-06,
+ "loss": 1.2389,
+ "step": 66
+ },
+ {
+ "epoch": 0.20981210855949894,
+ "grad_norm": 2.264458417892456,
+ "learning_rate": 3.3500000000000005e-06,
+ "loss": 1.0651,
+ "step": 67
+ },
+ {
+ "epoch": 0.21294363256784968,
+ "grad_norm": 2.5408174991607666,
+ "learning_rate": 3.4000000000000005e-06,
+ "loss": 1.1389,
+ "step": 68
+ },
+ {
+ "epoch": 0.2160751565762004,
+ "grad_norm": 7.82421350479126,
+ "learning_rate": 3.45e-06,
+ "loss": 1.0956,
+ "step": 69
+ },
+ {
+ "epoch": 0.21920668058455114,
+ "grad_norm": 3.070939064025879,
+ "learning_rate": 3.5e-06,
+ "loss": 1.0451,
+ "step": 70
+ },
+ {
+ "epoch": 0.22233820459290188,
+ "grad_norm": 2.6310527324676514,
+ "learning_rate": 3.5500000000000003e-06,
+ "loss": 1.0538,
+ "step": 71
+ },
+ {
+ "epoch": 0.2254697286012526,
+ "grad_norm": 7.630155563354492,
+ "learning_rate": 3.6000000000000003e-06,
+ "loss": 1.0052,
+ "step": 72
+ },
+ {
+ "epoch": 0.22860125260960334,
+ "grad_norm": 6.950636863708496,
+ "learning_rate": 3.65e-06,
+ "loss": 1.0473,
+ "step": 73
+ },
+ {
+ "epoch": 0.23173277661795408,
+ "grad_norm": 2.2703945636749268,
+ "learning_rate": 3.7e-06,
+ "loss": 1.0576,
+ "step": 74
+ },
+ {
+ "epoch": 0.2348643006263048,
+ "grad_norm": 3.3817710876464844,
+ "learning_rate": 3.7500000000000005e-06,
+ "loss": 1.0177,
+ "step": 75
+ },
+ {
+ "epoch": 0.23799582463465555,
+ "grad_norm": 7.266414642333984,
+ "learning_rate": 3.8000000000000005e-06,
+ "loss": 1.0645,
+ "step": 76
+ },
+ {
+ "epoch": 0.24112734864300625,
+ "grad_norm": 5.782608509063721,
+ "learning_rate": 3.85e-06,
+ "loss": 1.0162,
+ "step": 77
+ },
+ {
+ "epoch": 0.24425887265135698,
+ "grad_norm": 2.7938575744628906,
+ "learning_rate": 3.900000000000001e-06,
+ "loss": 0.9664,
+ "step": 78
+ },
+ {
+ "epoch": 0.24739039665970772,
+ "grad_norm": 6.681935787200928,
+ "learning_rate": 3.95e-06,
+ "loss": 0.953,
+ "step": 79
+ },
+ {
+ "epoch": 0.25052192066805845,
+ "grad_norm": 2.253279209136963,
+ "learning_rate": 4.000000000000001e-06,
+ "loss": 0.9568,
+ "step": 80
+ },
+ {
+ "epoch": 0.2536534446764092,
+ "grad_norm": 1.4875826835632324,
+ "learning_rate": 4.05e-06,
+ "loss": 0.9448,
+ "step": 81
+ },
+ {
+ "epoch": 0.2567849686847599,
+ "grad_norm": 2.4987940788269043,
+ "learning_rate": 4.1e-06,
+ "loss": 0.9393,
+ "step": 82
+ },
+ {
+ "epoch": 0.2599164926931106,
+ "grad_norm": 4.712948322296143,
+ "learning_rate": 4.15e-06,
+ "loss": 0.9532,
+ "step": 83
+ },
+ {
+ "epoch": 0.2630480167014614,
+ "grad_norm": 6.9030632972717285,
+ "learning_rate": 4.2000000000000004e-06,
+ "loss": 0.96,
+ "step": 84
+ },
+ {
+ "epoch": 0.2661795407098121,
+ "grad_norm": 3.4780967235565186,
+ "learning_rate": 4.25e-06,
+ "loss": 0.8993,
+ "step": 85
+ },
+ {
+ "epoch": 0.26931106471816285,
+ "grad_norm": 1.526064395904541,
+ "learning_rate": 4.3e-06,
+ "loss": 0.9021,
+ "step": 86
+ },
+ {
+ "epoch": 0.27244258872651356,
+ "grad_norm": 10.727686882019043,
+ "learning_rate": 4.350000000000001e-06,
+ "loss": 0.856,
+ "step": 87
+ },
+ {
+ "epoch": 0.2755741127348643,
+ "grad_norm": 12.483160972595215,
+ "learning_rate": 4.4e-06,
+ "loss": 0.9357,
+ "step": 88
+ },
+ {
+ "epoch": 0.278705636743215,
+ "grad_norm": 6.544492244720459,
+ "learning_rate": 4.450000000000001e-06,
+ "loss": 0.9168,
+ "step": 89
+ },
+ {
+ "epoch": 0.2818371607515658,
+ "grad_norm": 1.178139567375183,
+ "learning_rate": 4.5e-06,
+ "loss": 0.8748,
+ "step": 90
+ },
+ {
+ "epoch": 0.2849686847599165,
+ "grad_norm": 1.711506962776184,
+ "learning_rate": 4.5500000000000005e-06,
+ "loss": 0.8425,
+ "step": 91
+ },
+ {
+ "epoch": 0.2881002087682672,
+ "grad_norm": 3.281747341156006,
+ "learning_rate": 4.600000000000001e-06,
+ "loss": 0.8491,
+ "step": 92
+ },
+ {
+ "epoch": 0.29123173277661796,
+ "grad_norm": 2.2964377403259277,
+ "learning_rate": 4.65e-06,
+ "loss": 0.8038,
+ "step": 93
+ },
+ {
+ "epoch": 0.29436325678496866,
+ "grad_norm": 1.959700345993042,
+ "learning_rate": 4.7e-06,
+ "loss": 0.8439,
+ "step": 94
+ },
+ {
+ "epoch": 0.2974947807933194,
+ "grad_norm": 3.979384183883667,
+ "learning_rate": 4.75e-06,
+ "loss": 0.8839,
+ "step": 95
+ },
+ {
+ "epoch": 0.30062630480167013,
+ "grad_norm": 1.4721262454986572,
+ "learning_rate": 4.800000000000001e-06,
+ "loss": 0.845,
+ "step": 96
+ },
+ {
+ "epoch": 0.3037578288100209,
+ "grad_norm": 2.862248659133911,
+ "learning_rate": 4.85e-06,
+ "loss": 0.7748,
+ "step": 97
+ },
+ {
+ "epoch": 0.3068893528183716,
+ "grad_norm": 3.7439088821411133,
+ "learning_rate": 4.9000000000000005e-06,
+ "loss": 0.8145,
+ "step": 98
+ },
+ {
+ "epoch": 0.31002087682672236,
+ "grad_norm": 1.6654618978500366,
+ "learning_rate": 4.95e-06,
+ "loss": 0.8326,
+ "step": 99
+ },
+ {
+ "epoch": 0.31315240083507306,
+ "grad_norm": 7.8437581062316895,
+ "learning_rate": 5e-06,
+ "loss": 0.8666,
+ "step": 100
+ },
+ {
+ "epoch": 0.3162839248434238,
+ "grad_norm": 6.429738521575928,
+ "learning_rate": 4.999996250830422e-06,
+ "loss": 0.836,
+ "step": 101
+ },
+ {
+ "epoch": 0.31941544885177453,
+ "grad_norm": 2.6017794609069824,
+ "learning_rate": 4.9999850033329326e-06,
+ "loss": 0.7785,
+ "step": 102
+ },
+ {
+ "epoch": 0.32254697286012524,
+ "grad_norm": 1.0575449466705322,
+ "learning_rate": 4.999966257541265e-06,
+ "loss": 0.7639,
+ "step": 103
+ },
+ {
+ "epoch": 0.325678496868476,
+ "grad_norm": 2.6932010650634766,
+ "learning_rate": 4.999940013511647e-06,
+ "loss": 0.8214,
+ "step": 104
+ },
+ {
+ "epoch": 0.3288100208768267,
+ "grad_norm": 2.925288438796997,
+ "learning_rate": 4.999906271322792e-06,
+ "loss": 0.8797,
+ "step": 105
+ },
+ {
+ "epoch": 0.33194154488517746,
+ "grad_norm": 1.3570607900619507,
+ "learning_rate": 4.9998650310759035e-06,
+ "loss": 0.792,
+ "step": 106
+ },
+ {
+ "epoch": 0.33507306889352817,
+ "grad_norm": 5.126713752746582,
+ "learning_rate": 4.999816292894676e-06,
+ "loss": 0.8352,
+ "step": 107
+ },
+ {
+ "epoch": 0.33820459290187893,
+ "grad_norm": 1.8966432809829712,
+ "learning_rate": 4.99976005692529e-06,
+ "loss": 0.7663,
+ "step": 108
+ },
+ {
+ "epoch": 0.34133611691022964,
+ "grad_norm": 1.3100829124450684,
+ "learning_rate": 4.999696323336418e-06,
+ "loss": 0.771,
+ "step": 109
+ },
+ {
+ "epoch": 0.3444676409185804,
+ "grad_norm": 2.4025354385375977,
+ "learning_rate": 4.999625092319218e-06,
+ "loss": 0.7618,
+ "step": 110
+ },
+ {
+ "epoch": 0.3475991649269311,
+ "grad_norm": 1.130232810974121,
+ "learning_rate": 4.999546364087334e-06,
+ "loss": 0.7705,
+ "step": 111
+ },
+ {
+ "epoch": 0.35073068893528186,
+ "grad_norm": 3.430262327194214,
+ "learning_rate": 4.999460138876901e-06,
+ "loss": 0.77,
+ "step": 112
+ },
+ {
+ "epoch": 0.35386221294363257,
+ "grad_norm": 1.1272103786468506,
+ "learning_rate": 4.999366416946536e-06,
+ "loss": 0.7133,
+ "step": 113
+ },
+ {
+ "epoch": 0.3569937369519833,
+ "grad_norm": 1.1740471124649048,
+ "learning_rate": 4.999265198577342e-06,
+ "loss": 0.7684,
+ "step": 114
+ },
+ {
+ "epoch": 0.36012526096033404,
+ "grad_norm": 1.3138248920440674,
+ "learning_rate": 4.999156484072907e-06,
+ "loss": 0.7888,
+ "step": 115
+ },
+ {
+ "epoch": 0.36325678496868474,
+ "grad_norm": 1.061711311340332,
+ "learning_rate": 4.999040273759304e-06,
+ "loss": 0.7484,
+ "step": 116
+ },
+ {
+ "epoch": 0.3663883089770355,
+ "grad_norm": 1.4682390689849854,
+ "learning_rate": 4.998916567985083e-06,
+ "loss": 0.7296,
+ "step": 117
+ },
+ {
+ "epoch": 0.3695198329853862,
+ "grad_norm": 2.884068250656128,
+ "learning_rate": 4.998785367121284e-06,
+ "loss": 0.7662,
+ "step": 118
+ },
+ {
+ "epoch": 0.37265135699373697,
+ "grad_norm": 0.9812761545181274,
+ "learning_rate": 4.9986466715614205e-06,
+ "loss": 0.7307,
+ "step": 119
+ },
+ {
+ "epoch": 0.3757828810020877,
+ "grad_norm": 2.2237496376037598,
+ "learning_rate": 4.998500481721484e-06,
+ "loss": 0.6761,
+ "step": 120
+ },
+ {
+ "epoch": 0.37891440501043844,
+ "grad_norm": 1.4004178047180176,
+ "learning_rate": 4.998346798039952e-06,
+ "loss": 0.7505,
+ "step": 121
+ },
+ {
+ "epoch": 0.38204592901878914,
+ "grad_norm": 5.54975700378418,
+ "learning_rate": 4.99818562097777e-06,
+ "loss": 0.7615,
+ "step": 122
+ },
+ {
+ "epoch": 0.38517745302713985,
+ "grad_norm": 6.17140531539917,
+ "learning_rate": 4.9980169510183624e-06,
+ "loss": 0.7002,
+ "step": 123
+ },
+ {
+ "epoch": 0.3883089770354906,
+ "grad_norm": 4.974380016326904,
+ "learning_rate": 4.997840788667628e-06,
+ "loss": 0.7449,
+ "step": 124
+ },
+ {
+ "epoch": 0.3914405010438413,
+ "grad_norm": 1.4133399724960327,
+ "learning_rate": 4.997657134453937e-06,
+ "loss": 0.7442,
+ "step": 125
+ },
+ {
+ "epoch": 0.3945720250521921,
+ "grad_norm": 1.868915319442749,
+ "learning_rate": 4.9974659889281295e-06,
+ "loss": 0.7104,
+ "step": 126
+ },
+ {
+ "epoch": 0.3977035490605428,
+ "grad_norm": 1.2599350214004517,
+ "learning_rate": 4.997267352663514e-06,
+ "loss": 0.7385,
+ "step": 127
+ },
+ {
+ "epoch": 0.40083507306889354,
+ "grad_norm": 1.4353641271591187,
+ "learning_rate": 4.997061226255869e-06,
+ "loss": 0.7081,
+ "step": 128
+ },
+ {
+ "epoch": 0.40396659707724425,
+ "grad_norm": 3.2492141723632812,
+ "learning_rate": 4.996847610323437e-06,
+ "loss": 0.7859,
+ "step": 129
+ },
+ {
+ "epoch": 0.407098121085595,
+ "grad_norm": 9.599719047546387,
+ "learning_rate": 4.996626505506923e-06,
+ "loss": 0.7241,
+ "step": 130
+ },
+ {
+ "epoch": 0.4102296450939457,
+ "grad_norm": 10.053650856018066,
+ "learning_rate": 4.996397912469494e-06,
+ "loss": 0.6841,
+ "step": 131
+ },
+ {
+ "epoch": 0.4133611691022965,
+ "grad_norm": 1.323876976966858,
+ "learning_rate": 4.996161831896777e-06,
+ "loss": 0.7317,
+ "step": 132
+ },
+ {
+ "epoch": 0.4164926931106472,
+ "grad_norm": 1.4180598258972168,
+ "learning_rate": 4.9959182644968594e-06,
+ "loss": 0.692,
+ "step": 133
+ },
+ {
+ "epoch": 0.4196242171189979,
+ "grad_norm": 1.2194396257400513,
+ "learning_rate": 4.99566721100028e-06,
+ "loss": 0.7068,
+ "step": 134
+ },
+ {
+ "epoch": 0.42275574112734865,
+ "grad_norm": 1.0984960794448853,
+ "learning_rate": 4.995408672160031e-06,
+ "loss": 0.6946,
+ "step": 135
+ },
+ {
+ "epoch": 0.42588726513569936,
+ "grad_norm": 1.9341071844100952,
+ "learning_rate": 4.995142648751561e-06,
+ "loss": 0.7467,
+ "step": 136
+ },
+ {
+ "epoch": 0.4290187891440501,
+ "grad_norm": 1.9960932731628418,
+ "learning_rate": 4.9948691415727594e-06,
+ "loss": 0.7379,
+ "step": 137
+ },
+ {
+ "epoch": 0.4321503131524008,
+ "grad_norm": 0.8743917942047119,
+ "learning_rate": 4.994588151443968e-06,
+ "loss": 0.66,
+ "step": 138
+ },
+ {
+ "epoch": 0.4352818371607516,
+ "grad_norm": 0.8655261993408203,
+ "learning_rate": 4.99429967920797e-06,
+ "loss": 0.6646,
+ "step": 139
+ },
+ {
+ "epoch": 0.4384133611691023,
+ "grad_norm": 5.462070941925049,
+ "learning_rate": 4.994003725729992e-06,
+ "loss": 0.643,
+ "step": 140
+ },
+ {
+ "epoch": 0.44154488517745305,
+ "grad_norm": 2.1401469707489014,
+ "learning_rate": 4.993700291897695e-06,
+ "loss": 0.6639,
+ "step": 141
+ },
+ {
+ "epoch": 0.44467640918580376,
+ "grad_norm": 1.8219833374023438,
+ "learning_rate": 4.9933893786211815e-06,
+ "loss": 0.6673,
+ "step": 142
+ },
+ {
+ "epoch": 0.44780793319415446,
+ "grad_norm": 1.641079306602478,
+ "learning_rate": 4.993070986832984e-06,
+ "loss": 0.658,
+ "step": 143
+ },
+ {
+ "epoch": 0.4509394572025052,
+ "grad_norm": 1.1739819049835205,
+ "learning_rate": 4.992745117488066e-06,
+ "loss": 0.6826,
+ "step": 144
+ },
+ {
+ "epoch": 0.45407098121085593,
+ "grad_norm": 2.309185743331909,
+ "learning_rate": 4.9924117715638185e-06,
+ "loss": 0.6536,
+ "step": 145
+ },
+ {
+ "epoch": 0.4572025052192067,
+ "grad_norm": 1.09304940700531,
+ "learning_rate": 4.99207095006006e-06,
+ "loss": 0.721,
+ "step": 146
+ },
+ {
+ "epoch": 0.4603340292275574,
+ "grad_norm": 0.9056984186172485,
+ "learning_rate": 4.991722653999025e-06,
+ "loss": 0.7019,
+ "step": 147
+ },
+ {
+ "epoch": 0.46346555323590816,
+ "grad_norm": 1.8440625667572021,
+ "learning_rate": 4.991366884425374e-06,
+ "loss": 0.707,
+ "step": 148
+ },
+ {
+ "epoch": 0.46659707724425886,
+ "grad_norm": 1.2244676351547241,
+ "learning_rate": 4.991003642406177e-06,
+ "loss": 0.6407,
+ "step": 149
+ },
+ {
+ "epoch": 0.4697286012526096,
+ "grad_norm": 0.9258589744567871,
+ "learning_rate": 4.99063292903092e-06,
+ "loss": 0.6954,
+ "step": 150
+ },
+ {
+ "epoch": 0.47286012526096033,
+ "grad_norm": 4.176390647888184,
+ "learning_rate": 4.990254745411496e-06,
+ "loss": 0.6812,
+ "step": 151
+ },
+ {
+ "epoch": 0.4759916492693111,
+ "grad_norm": 1.4322530031204224,
+ "learning_rate": 4.989869092682205e-06,
+ "loss": 0.6808,
+ "step": 152
+ },
+ {
+ "epoch": 0.4791231732776618,
+ "grad_norm": 0.8017717003822327,
+ "learning_rate": 4.989475971999748e-06,
+ "loss": 0.687,
+ "step": 153
+ },
+ {
+ "epoch": 0.4822546972860125,
+ "grad_norm": 1.5641374588012695,
+ "learning_rate": 4.989075384543228e-06,
+ "loss": 0.6599,
+ "step": 154
+ },
+ {
+ "epoch": 0.48538622129436326,
+ "grad_norm": 1.1522141695022583,
+ "learning_rate": 4.98866733151414e-06,
+ "loss": 0.6546,
+ "step": 155
+ },
+ {
+ "epoch": 0.48851774530271397,
+ "grad_norm": 0.8593171238899231,
+ "learning_rate": 4.988251814136372e-06,
+ "loss": 0.6857,
+ "step": 156
+ },
+ {
+ "epoch": 0.49164926931106473,
+ "grad_norm": 2.668159246444702,
+ "learning_rate": 4.9878288336562e-06,
+ "loss": 0.661,
+ "step": 157
+ },
+ {
+ "epoch": 0.49478079331941544,
+ "grad_norm": 0.9953671097755432,
+ "learning_rate": 4.987398391342285e-06,
+ "loss": 0.6512,
+ "step": 158
+ },
+ {
+ "epoch": 0.4979123173277662,
+ "grad_norm": 1.042872667312622,
+ "learning_rate": 4.986960488485667e-06,
+ "loss": 0.6311,
+ "step": 159
+ },
+ {
+ "epoch": 0.5010438413361169,
+ "grad_norm": 0.9070663452148438,
+ "learning_rate": 4.9865151263997645e-06,
+ "loss": 0.675,
+ "step": 160
+ },
+ {
+ "epoch": 0.5041753653444676,
+ "grad_norm": 0.8460433483123779,
+ "learning_rate": 4.986062306420367e-06,
+ "loss": 0.6635,
+ "step": 161
+ },
+ {
+ "epoch": 0.5073068893528184,
+ "grad_norm": 1.2639834880828857,
+ "learning_rate": 4.985602029905635e-06,
+ "loss": 0.6327,
+ "step": 162
+ },
+ {
+ "epoch": 0.5104384133611691,
+ "grad_norm": 0.8775074481964111,
+ "learning_rate": 4.985134298236091e-06,
+ "loss": 0.644,
+ "step": 163
+ },
+ {
+ "epoch": 0.5135699373695198,
+ "grad_norm": 1.2031961679458618,
+ "learning_rate": 4.98465911281462e-06,
+ "loss": 0.6254,
+ "step": 164
+ },
+ {
+ "epoch": 0.5167014613778705,
+ "grad_norm": 0.892494797706604,
+ "learning_rate": 4.984176475066463e-06,
+ "loss": 0.7122,
+ "step": 165
+ },
+ {
+ "epoch": 0.5198329853862212,
+ "grad_norm": 2.7122485637664795,
+ "learning_rate": 4.983686386439212e-06,
+ "loss": 0.6679,
+ "step": 166
+ },
+ {
+ "epoch": 0.5229645093945721,
+ "grad_norm": 0.9344426989555359,
+ "learning_rate": 4.983188848402806e-06,
+ "loss": 0.6319,
+ "step": 167
+ },
+ {
+ "epoch": 0.5260960334029228,
+ "grad_norm": 1.4093577861785889,
+ "learning_rate": 4.982683862449531e-06,
+ "loss": 0.6425,
+ "step": 168
+ },
+ {
+ "epoch": 0.5292275574112735,
+ "grad_norm": 1.1285009384155273,
+ "learning_rate": 4.982171430094007e-06,
+ "loss": 0.6298,
+ "step": 169
+ },
+ {
+ "epoch": 0.5323590814196242,
+ "grad_norm": 1.952778935432434,
+ "learning_rate": 4.981651552873193e-06,
+ "loss": 0.7066,
+ "step": 170
+ },
+ {
+ "epoch": 0.535490605427975,
+ "grad_norm": 5.133765697479248,
+ "learning_rate": 4.981124232346374e-06,
+ "loss": 0.6634,
+ "step": 171
+ },
+ {
+ "epoch": 0.5386221294363257,
+ "grad_norm": 0.9770542979240417,
+ "learning_rate": 4.980589470095161e-06,
+ "loss": 0.7121,
+ "step": 172
+ },
+ {
+ "epoch": 0.5417536534446764,
+ "grad_norm": 0.8414323925971985,
+ "learning_rate": 4.980047267723487e-06,
+ "loss": 0.6397,
+ "step": 173
+ },
+ {
+ "epoch": 0.5448851774530271,
+ "grad_norm": 1.9173879623413086,
+ "learning_rate": 4.979497626857596e-06,
+ "loss": 0.6228,
+ "step": 174
+ },
+ {
+ "epoch": 0.5480167014613778,
+ "grad_norm": 1.0823363065719604,
+ "learning_rate": 4.978940549146048e-06,
+ "loss": 0.6475,
+ "step": 175
+ },
+ {
+ "epoch": 0.5511482254697286,
+ "grad_norm": 3.715353488922119,
+ "learning_rate": 4.978376036259706e-06,
+ "loss": 0.7127,
+ "step": 176
+ },
+ {
+ "epoch": 0.5542797494780793,
+ "grad_norm": 0.981584370136261,
+ "learning_rate": 4.9778040898917325e-06,
+ "loss": 0.6468,
+ "step": 177
+ },
+ {
+ "epoch": 0.55741127348643,
+ "grad_norm": 1.70566987991333,
+ "learning_rate": 4.977224711757587e-06,
+ "loss": 0.6476,
+ "step": 178
+ },
+ {
+ "epoch": 0.5605427974947808,
+ "grad_norm": 0.9217923283576965,
+ "learning_rate": 4.976637903595019e-06,
+ "loss": 0.6731,
+ "step": 179
+ },
+ {
+ "epoch": 0.5636743215031316,
+ "grad_norm": 0.8994677662849426,
+ "learning_rate": 4.976043667164063e-06,
+ "loss": 0.6562,
+ "step": 180
+ },
+ {
+ "epoch": 0.5668058455114823,
+ "grad_norm": 1.1613017320632935,
+ "learning_rate": 4.975442004247034e-06,
+ "loss": 0.6417,
+ "step": 181
+ },
+ {
+ "epoch": 0.569937369519833,
+ "grad_norm": 1.6041977405548096,
+ "learning_rate": 4.974832916648521e-06,
+ "loss": 0.6029,
+ "step": 182
+ },
+ {
+ "epoch": 0.5730688935281837,
+ "grad_norm": 1.7978405952453613,
+ "learning_rate": 4.974216406195383e-06,
+ "loss": 0.6269,
+ "step": 183
+ },
+ {
+ "epoch": 0.5762004175365344,
+ "grad_norm": 1.6021920442581177,
+ "learning_rate": 4.973592474736739e-06,
+ "loss": 0.6149,
+ "step": 184
+ },
+ {
+ "epoch": 0.5793319415448852,
+ "grad_norm": 0.8973568677902222,
+ "learning_rate": 4.972961124143971e-06,
+ "loss": 0.6648,
+ "step": 185
+ },
+ {
+ "epoch": 0.5824634655532359,
+ "grad_norm": 1.9432591199874878,
+ "learning_rate": 4.972322356310711e-06,
+ "loss": 0.6299,
+ "step": 186
+ },
+ {
+ "epoch": 0.5855949895615866,
+ "grad_norm": 4.457028388977051,
+ "learning_rate": 4.971676173152839e-06,
+ "loss": 0.656,
+ "step": 187
+ },
+ {
+ "epoch": 0.5887265135699373,
+ "grad_norm": 2.0989716053009033,
+ "learning_rate": 4.971022576608473e-06,
+ "loss": 0.6539,
+ "step": 188
+ },
+ {
+ "epoch": 0.5918580375782881,
+ "grad_norm": 1.0646967887878418,
+ "learning_rate": 4.97036156863797e-06,
+ "loss": 0.6727,
+ "step": 189
+ },
+ {
+ "epoch": 0.5949895615866388,
+ "grad_norm": 1.6522265672683716,
+ "learning_rate": 4.969693151223914e-06,
+ "loss": 0.6643,
+ "step": 190
+ },
+ {
+ "epoch": 0.5981210855949896,
+ "grad_norm": 1.7503505945205688,
+ "learning_rate": 4.969017326371115e-06,
+ "loss": 0.6402,
+ "step": 191
+ },
+ {
+ "epoch": 0.6012526096033403,
+ "grad_norm": 1.2341989278793335,
+ "learning_rate": 4.968334096106597e-06,
+ "loss": 0.6413,
+ "step": 192
+ },
+ {
+ "epoch": 0.6043841336116911,
+ "grad_norm": 3.089054584503174,
+ "learning_rate": 4.967643462479597e-06,
+ "loss": 0.6825,
+ "step": 193
+ },
+ {
+ "epoch": 0.6075156576200418,
+ "grad_norm": 2.711623430252075,
+ "learning_rate": 4.966945427561557e-06,
+ "loss": 0.65,
+ "step": 194
+ },
+ {
+ "epoch": 0.6106471816283925,
+ "grad_norm": 4.641184329986572,
+ "learning_rate": 4.966239993446118e-06,
+ "loss": 0.6229,
+ "step": 195
+ },
+ {
+ "epoch": 0.6137787056367432,
+ "grad_norm": 1.7984074354171753,
+ "learning_rate": 4.965527162249114e-06,
+ "loss": 0.6473,
+ "step": 196
+ },
+ {
+ "epoch": 0.6169102296450939,
+ "grad_norm": 1.1643115282058716,
+ "learning_rate": 4.964806936108566e-06,
+ "loss": 0.6404,
+ "step": 197
+ },
+ {
+ "epoch": 0.6200417536534447,
+ "grad_norm": 2.1877920627593994,
+ "learning_rate": 4.9640793171846725e-06,
+ "loss": 0.6185,
+ "step": 198
+ },
+ {
+ "epoch": 0.6231732776617954,
+ "grad_norm": 1.7970566749572754,
+ "learning_rate": 4.963344307659807e-06,
+ "loss": 0.634,
+ "step": 199
+ },
+ {
+ "epoch": 0.6263048016701461,
+ "grad_norm": 1.6014361381530762,
+ "learning_rate": 4.96260190973851e-06,
+ "loss": 0.6562,
+ "step": 200
+ },
+ {
+ "epoch": 0.6294363256784968,
+ "grad_norm": 0.8743320107460022,
+ "learning_rate": 4.961852125647482e-06,
+ "loss": 0.6133,
+ "step": 201
+ },
+ {
+ "epoch": 0.6325678496868476,
+ "grad_norm": 1.9526551961898804,
+ "learning_rate": 4.961094957635578e-06,
+ "loss": 0.6451,
+ "step": 202
+ },
+ {
+ "epoch": 0.6356993736951984,
+ "grad_norm": 3.6597347259521484,
+ "learning_rate": 4.960330407973798e-06,
+ "loss": 0.6386,
+ "step": 203
+ },
+ {
+ "epoch": 0.6388308977035491,
+ "grad_norm": 1.7180207967758179,
+ "learning_rate": 4.959558478955283e-06,
+ "loss": 0.6688,
+ "step": 204
+ },
+ {
+ "epoch": 0.6419624217118998,
+ "grad_norm": 0.9058470129966736,
+ "learning_rate": 4.958779172895308e-06,
+ "loss": 0.6161,
+ "step": 205
+ },
+ {
+ "epoch": 0.6450939457202505,
+ "grad_norm": 1.0031033754348755,
+ "learning_rate": 4.957992492131274e-06,
+ "loss": 0.6437,
+ "step": 206
+ },
+ {
+ "epoch": 0.6482254697286013,
+ "grad_norm": 1.5846725702285767,
+ "learning_rate": 4.9571984390226985e-06,
+ "loss": 0.6332,
+ "step": 207
+ },
+ {
+ "epoch": 0.651356993736952,
+ "grad_norm": 1.9951609373092651,
+ "learning_rate": 4.956397015951215e-06,
+ "loss": 0.636,
+ "step": 208
+ },
+ {
+ "epoch": 0.6544885177453027,
+ "grad_norm": 1.4122583866119385,
+ "learning_rate": 4.95558822532056e-06,
+ "loss": 0.6586,
+ "step": 209
+ },
+ {
+ "epoch": 0.6576200417536534,
+ "grad_norm": 1.2243481874465942,
+ "learning_rate": 4.954772069556568e-06,
+ "loss": 0.6313,
+ "step": 210
+ },
+ {
+ "epoch": 0.6607515657620042,
+ "grad_norm": 0.8756356835365295,
+ "learning_rate": 4.953948551107164e-06,
+ "loss": 0.6406,
+ "step": 211
+ },
+ {
+ "epoch": 0.6638830897703549,
+ "grad_norm": 2.9979734420776367,
+ "learning_rate": 4.953117672442356e-06,
+ "loss": 0.5803,
+ "step": 212
+ },
+ {
+ "epoch": 0.6670146137787056,
+ "grad_norm": 2.1859359741210938,
+ "learning_rate": 4.952279436054229e-06,
+ "loss": 0.6607,
+ "step": 213
+ },
+ {
+ "epoch": 0.6701461377870563,
+ "grad_norm": 0.6929755806922913,
+ "learning_rate": 4.9514338444569346e-06,
+ "loss": 0.5989,
+ "step": 214
+ },
+ {
+ "epoch": 0.673277661795407,
+ "grad_norm": 1.0361783504486084,
+ "learning_rate": 4.950580900186685e-06,
+ "loss": 0.6654,
+ "step": 215
+ },
+ {
+ "epoch": 0.6764091858037579,
+ "grad_norm": 1.210898518562317,
+ "learning_rate": 4.9497206058017475e-06,
+ "loss": 0.6213,
+ "step": 216
+ },
+ {
+ "epoch": 0.6795407098121086,
+ "grad_norm": 1.200990080833435,
+ "learning_rate": 4.948852963882434e-06,
+ "loss": 0.6654,
+ "step": 217
+ },
+ {
+ "epoch": 0.6826722338204593,
+ "grad_norm": 1.481831669807434,
+ "learning_rate": 4.947977977031093e-06,
+ "loss": 0.6474,
+ "step": 218
+ },
+ {
+ "epoch": 0.68580375782881,
+ "grad_norm": 0.9883334636688232,
+ "learning_rate": 4.947095647872103e-06,
+ "loss": 0.6735,
+ "step": 219
+ },
+ {
+ "epoch": 0.6889352818371608,
+ "grad_norm": 0.7436536550521851,
+ "learning_rate": 4.946205979051868e-06,
+ "loss": 0.6456,
+ "step": 220
+ },
+ {
+ "epoch": 0.6920668058455115,
+ "grad_norm": 0.9057570099830627,
+ "learning_rate": 4.945308973238802e-06,
+ "loss": 0.6228,
+ "step": 221
+ },
+ {
+ "epoch": 0.6951983298538622,
+ "grad_norm": 1.341081142425537,
+ "learning_rate": 4.944404633123324e-06,
+ "loss": 0.6417,
+ "step": 222
+ },
+ {
+ "epoch": 0.6983298538622129,
+ "grad_norm": 0.7958157062530518,
+ "learning_rate": 4.943492961417859e-06,
+ "loss": 0.6494,
+ "step": 223
+ },
+ {
+ "epoch": 0.7014613778705637,
+ "grad_norm": 1.216025471687317,
+ "learning_rate": 4.9425739608568106e-06,
+ "loss": 0.6566,
+ "step": 224
+ },
+ {
+ "epoch": 0.7045929018789144,
+ "grad_norm": 0.9774854779243469,
+ "learning_rate": 4.9416476341965735e-06,
+ "loss": 0.6171,
+ "step": 225
+ },
+ {
+ "epoch": 0.7077244258872651,
+ "grad_norm": 2.1562681198120117,
+ "learning_rate": 4.940713984215512e-06,
+ "loss": 0.629,
+ "step": 226
+ },
+ {
+ "epoch": 0.7108559498956158,
+ "grad_norm": 1.9521286487579346,
+ "learning_rate": 4.9397730137139556e-06,
+ "loss": 0.6475,
+ "step": 227
+ },
+ {
+ "epoch": 0.7139874739039666,
+ "grad_norm": 1.5749104022979736,
+ "learning_rate": 4.9388247255141895e-06,
+ "loss": 0.6053,
+ "step": 228
+ },
+ {
+ "epoch": 0.7171189979123174,
+ "grad_norm": 1.2008254528045654,
+ "learning_rate": 4.937869122460449e-06,
+ "loss": 0.6052,
+ "step": 229
+ },
+ {
+ "epoch": 0.7202505219206681,
+ "grad_norm": 1.0774102210998535,
+ "learning_rate": 4.93690620741891e-06,
+ "loss": 0.6099,
+ "step": 230
+ },
+ {
+ "epoch": 0.7233820459290188,
+ "grad_norm": 1.0929996967315674,
+ "learning_rate": 4.935935983277675e-06,
+ "loss": 0.6363,
+ "step": 231
+ },
+ {
+ "epoch": 0.7265135699373695,
+ "grad_norm": 0.8830653429031372,
+ "learning_rate": 4.934958452946774e-06,
+ "loss": 0.6136,
+ "step": 232
+ },
+ {
+ "epoch": 0.7296450939457203,
+ "grad_norm": 3.591218948364258,
+ "learning_rate": 4.933973619358147e-06,
+ "loss": 0.5962,
+ "step": 233
+ },
+ {
+ "epoch": 0.732776617954071,
+ "grad_norm": 2.5797672271728516,
+ "learning_rate": 4.932981485465643e-06,
+ "loss": 0.6405,
+ "step": 234
+ },
+ {
+ "epoch": 0.7359081419624217,
+ "grad_norm": 1.0467664003372192,
+ "learning_rate": 4.9319820542450025e-06,
+ "loss": 0.6155,
+ "step": 235
+ },
+ {
+ "epoch": 0.7390396659707724,
+ "grad_norm": 0.8099795579910278,
+ "learning_rate": 4.930975328693856e-06,
+ "loss": 0.5615,
+ "step": 236
+ },
+ {
+ "epoch": 0.7421711899791231,
+ "grad_norm": 0.8906702995300293,
+ "learning_rate": 4.92996131183171e-06,
+ "loss": 0.6501,
+ "step": 237
+ },
+ {
+ "epoch": 0.7453027139874739,
+ "grad_norm": 1.0871416330337524,
+ "learning_rate": 4.928940006699944e-06,
+ "loss": 0.6282,
+ "step": 238
+ },
+ {
+ "epoch": 0.7484342379958246,
+ "grad_norm": 1.3209614753723145,
+ "learning_rate": 4.927911416361792e-06,
+ "loss": 0.598,
+ "step": 239
+ },
+ {
+ "epoch": 0.7515657620041754,
+ "grad_norm": 1.2252682447433472,
+ "learning_rate": 4.926875543902344e-06,
+ "loss": 0.6433,
+ "step": 240
+ },
+ {
+ "epoch": 0.7546972860125261,
+ "grad_norm": 1.0569007396697998,
+ "learning_rate": 4.9258323924285285e-06,
+ "loss": 0.5927,
+ "step": 241
+ },
+ {
+ "epoch": 0.7578288100208769,
+ "grad_norm": 0.9309014081954956,
+ "learning_rate": 4.924781965069106e-06,
+ "loss": 0.5927,
+ "step": 242
+ },
+ {
+ "epoch": 0.7609603340292276,
+ "grad_norm": 1.0200378894805908,
+ "learning_rate": 4.923724264974662e-06,
+ "loss": 0.6064,
+ "step": 243
+ },
+ {
+ "epoch": 0.7640918580375783,
+ "grad_norm": 1.0533075332641602,
+ "learning_rate": 4.922659295317593e-06,
+ "loss": 0.6373,
+ "step": 244
+ },
+ {
+ "epoch": 0.767223382045929,
+ "grad_norm": 0.7889382839202881,
+ "learning_rate": 4.921587059292102e-06,
+ "loss": 0.5887,
+ "step": 245
+ },
+ {
+ "epoch": 0.7703549060542797,
+ "grad_norm": 0.7943588495254517,
+ "learning_rate": 4.920507560114183e-06,
+ "loss": 0.593,
+ "step": 246
+ },
+ {
+ "epoch": 0.7734864300626305,
+ "grad_norm": 0.8247205018997192,
+ "learning_rate": 4.919420801021617e-06,
+ "loss": 0.6151,
+ "step": 247
+ },
+ {
+ "epoch": 0.7766179540709812,
+ "grad_norm": 0.9979158043861389,
+ "learning_rate": 4.91832678527396e-06,
+ "loss": 0.6019,
+ "step": 248
+ },
+ {
+ "epoch": 0.7797494780793319,
+ "grad_norm": 0.9346868991851807,
+ "learning_rate": 4.917225516152532e-06,
+ "loss": 0.6098,
+ "step": 249
+ },
+ {
+ "epoch": 0.7828810020876826,
+ "grad_norm": 0.7487881183624268,
+ "learning_rate": 4.916116996960408e-06,
+ "loss": 0.5965,
+ "step": 250
+ },
+ {
+ "epoch": 0.7860125260960334,
+ "grad_norm": 0.821576714515686,
+ "learning_rate": 4.915001231022411e-06,
+ "loss": 0.6483,
+ "step": 251
+ },
+ {
+ "epoch": 0.7891440501043842,
+ "grad_norm": 1.0413196086883545,
+ "learning_rate": 4.913878221685096e-06,
+ "loss": 0.6108,
+ "step": 252
+ },
+ {
+ "epoch": 0.7922755741127349,
+ "grad_norm": 0.9560331702232361,
+ "learning_rate": 4.912747972316745e-06,
+ "loss": 0.5758,
+ "step": 253
+ },
+ {
+ "epoch": 0.7954070981210856,
+ "grad_norm": 0.8964638113975525,
+ "learning_rate": 4.911610486307356e-06,
+ "loss": 0.6432,
+ "step": 254
+ },
+ {
+ "epoch": 0.7985386221294363,
+ "grad_norm": 0.8418346047401428,
+ "learning_rate": 4.910465767068631e-06,
+ "loss": 0.6027,
+ "step": 255
+ },
+ {
+ "epoch": 0.8016701461377871,
+ "grad_norm": 1.792371153831482,
+ "learning_rate": 4.909313818033966e-06,
+ "loss": 0.6198,
+ "step": 256
+ },
+ {
+ "epoch": 0.8048016701461378,
+ "grad_norm": 1.036665439605713,
+ "learning_rate": 4.908154642658446e-06,
+ "loss": 0.6255,
+ "step": 257
+ },
+ {
+ "epoch": 0.8079331941544885,
+ "grad_norm": 0.7592151165008545,
+ "learning_rate": 4.906988244418823e-06,
+ "loss": 0.6035,
+ "step": 258
+ },
+ {
+ "epoch": 0.8110647181628392,
+ "grad_norm": 0.8843073844909668,
+ "learning_rate": 4.90581462681352e-06,
+ "loss": 0.6299,
+ "step": 259
+ },
+ {
+ "epoch": 0.81419624217119,
+ "grad_norm": 0.9489964246749878,
+ "learning_rate": 4.9046337933626086e-06,
+ "loss": 0.5869,
+ "step": 260
+ },
+ {
+ "epoch": 0.8173277661795407,
+ "grad_norm": 0.851691722869873,
+ "learning_rate": 4.903445747607806e-06,
+ "loss": 0.603,
+ "step": 261
+ },
+ {
+ "epoch": 0.8204592901878914,
+ "grad_norm": 1.3722106218338013,
+ "learning_rate": 4.902250493112458e-06,
+ "loss": 0.5939,
+ "step": 262
+ },
+ {
+ "epoch": 0.8235908141962421,
+ "grad_norm": 1.1002827882766724,
+ "learning_rate": 4.901048033461537e-06,
+ "loss": 0.6452,
+ "step": 263
+ },
+ {
+ "epoch": 0.826722338204593,
+ "grad_norm": 0.8428632020950317,
+ "learning_rate": 4.89983837226162e-06,
+ "loss": 0.5956,
+ "step": 264
+ },
+ {
+ "epoch": 0.8298538622129437,
+ "grad_norm": 0.7666584849357605,
+ "learning_rate": 4.898621513140889e-06,
+ "loss": 0.6067,
+ "step": 265
+ },
+ {
+ "epoch": 0.8329853862212944,
+ "grad_norm": 0.8413611054420471,
+ "learning_rate": 4.897397459749113e-06,
+ "loss": 0.5985,
+ "step": 266
+ },
+ {
+ "epoch": 0.8361169102296451,
+ "grad_norm": 2.3374335765838623,
+ "learning_rate": 4.896166215757638e-06,
+ "loss": 0.5885,
+ "step": 267
+ },
+ {
+ "epoch": 0.8392484342379958,
+ "grad_norm": 2.236640214920044,
+ "learning_rate": 4.894927784859377e-06,
+ "loss": 0.6408,
+ "step": 268
+ },
+ {
+ "epoch": 0.8423799582463466,
+ "grad_norm": 0.9715856313705444,
+ "learning_rate": 4.893682170768802e-06,
+ "loss": 0.5954,
+ "step": 269
+ },
+ {
+ "epoch": 0.8455114822546973,
+ "grad_norm": 1.0249912738800049,
+ "learning_rate": 4.892429377221928e-06,
+ "loss": 0.6186,
+ "step": 270
+ },
+ {
+ "epoch": 0.848643006263048,
+ "grad_norm": 1.255426049232483,
+ "learning_rate": 4.891169407976302e-06,
+ "loss": 0.6351,
+ "step": 271
+ },
+ {
+ "epoch": 0.8517745302713987,
+ "grad_norm": 0.9339559674263,
+ "learning_rate": 4.889902266810995e-06,
+ "loss": 0.5944,
+ "step": 272
+ },
+ {
+ "epoch": 0.8549060542797495,
+ "grad_norm": 1.2473429441452026,
+ "learning_rate": 4.888627957526589e-06,
+ "loss": 0.544,
+ "step": 273
+ },
+ {
+ "epoch": 0.8580375782881002,
+ "grad_norm": 1.0589442253112793,
+ "learning_rate": 4.887346483945166e-06,
+ "loss": 0.5543,
+ "step": 274
+ },
+ {
+ "epoch": 0.8611691022964509,
+ "grad_norm": 0.9844024777412415,
+ "learning_rate": 4.886057849910294e-06,
+ "loss": 0.5941,
+ "step": 275
+ },
+ {
+ "epoch": 0.8643006263048016,
+ "grad_norm": 2.88578200340271,
+ "learning_rate": 4.8847620592870196e-06,
+ "loss": 0.6124,
+ "step": 276
+ },
+ {
+ "epoch": 0.8674321503131524,
+ "grad_norm": 0.7496054172515869,
+ "learning_rate": 4.8834591159618524e-06,
+ "loss": 0.6006,
+ "step": 277
+ },
+ {
+ "epoch": 0.8705636743215032,
+ "grad_norm": 0.7403052449226379,
+ "learning_rate": 4.88214902384276e-06,
+ "loss": 0.5911,
+ "step": 278
+ },
+ {
+ "epoch": 0.8736951983298539,
+ "grad_norm": 0.9003771543502808,
+ "learning_rate": 4.880831786859146e-06,
+ "loss": 0.6347,
+ "step": 279
+ },
+ {
+ "epoch": 0.8768267223382046,
+ "grad_norm": 1.0345501899719238,
+ "learning_rate": 4.879507408961847e-06,
+ "loss": 0.6111,
+ "step": 280
+ },
+ {
+ "epoch": 0.8799582463465553,
+ "grad_norm": 1.4385879039764404,
+ "learning_rate": 4.878175894123116e-06,
+ "loss": 0.6454,
+ "step": 281
+ },
+ {
+ "epoch": 0.8830897703549061,
+ "grad_norm": 0.8469482064247131,
+ "learning_rate": 4.8768372463366145e-06,
+ "loss": 0.6163,
+ "step": 282
+ },
+ {
+ "epoch": 0.8862212943632568,
+ "grad_norm": 0.8859589695930481,
+ "learning_rate": 4.875491469617395e-06,
+ "loss": 0.6144,
+ "step": 283
+ },
+ {
+ "epoch": 0.8893528183716075,
+ "grad_norm": 1.8436834812164307,
+ "learning_rate": 4.874138568001895e-06,
+ "loss": 0.6275,
+ "step": 284
+ },
+ {
+ "epoch": 0.8924843423799582,
+ "grad_norm": 0.6646101474761963,
+ "learning_rate": 4.87277854554792e-06,
+ "loss": 0.615,
+ "step": 285
+ },
+ {
+ "epoch": 0.8956158663883089,
+ "grad_norm": 1.0070925951004028,
+ "learning_rate": 4.871411406334633e-06,
+ "loss": 0.5898,
+ "step": 286
+ },
+ {
+ "epoch": 0.8987473903966597,
+ "grad_norm": 0.9785194993019104,
+ "learning_rate": 4.870037154462545e-06,
+ "loss": 0.5992,
+ "step": 287
+ },
+ {
+ "epoch": 0.9018789144050104,
+ "grad_norm": 0.7244889736175537,
+ "learning_rate": 4.868655794053497e-06,
+ "loss": 0.6078,
+ "step": 288
+ },
+ {
+ "epoch": 0.9050104384133612,
+ "grad_norm": 1.4496444463729858,
+ "learning_rate": 4.8672673292506535e-06,
+ "loss": 0.5855,
+ "step": 289
+ },
+ {
+ "epoch": 0.9081419624217119,
+ "grad_norm": 1.8514957427978516,
+ "learning_rate": 4.865871764218486e-06,
+ "loss": 0.5707,
+ "step": 290
+ },
+ {
+ "epoch": 0.9112734864300627,
+ "grad_norm": 0.8439773321151733,
+ "learning_rate": 4.864469103142763e-06,
+ "loss": 0.5562,
+ "step": 291
+ },
+ {
+ "epoch": 0.9144050104384134,
+ "grad_norm": 0.8146086931228638,
+ "learning_rate": 4.8630593502305355e-06,
+ "loss": 0.6161,
+ "step": 292
+ },
+ {
+ "epoch": 0.9175365344467641,
+ "grad_norm": 0.8920315504074097,
+ "learning_rate": 4.861642509710126e-06,
+ "loss": 0.6139,
+ "step": 293
+ },
+ {
+ "epoch": 0.9206680584551148,
+ "grad_norm": 1.4980088472366333,
+ "learning_rate": 4.860218585831116e-06,
+ "loss": 0.6187,
+ "step": 294
+ },
+ {
+ "epoch": 0.9237995824634656,
+ "grad_norm": 0.9910127520561218,
+ "learning_rate": 4.8587875828643285e-06,
+ "loss": 0.5852,
+ "step": 295
+ },
+ {
+ "epoch": 0.9269311064718163,
+ "grad_norm": 0.819600522518158,
+ "learning_rate": 4.857349505101823e-06,
+ "loss": 0.6172,
+ "step": 296
+ },
+ {
+ "epoch": 0.930062630480167,
+ "grad_norm": 1.1059772968292236,
+ "learning_rate": 4.855904356856878e-06,
+ "loss": 0.5868,
+ "step": 297
+ },
+ {
+ "epoch": 0.9331941544885177,
+ "grad_norm": 1.2362196445465088,
+ "learning_rate": 4.854452142463977e-06,
+ "loss": 0.625,
+ "step": 298
+ },
+ {
+ "epoch": 0.9363256784968684,
+ "grad_norm": 0.9956470727920532,
+ "learning_rate": 4.852992866278799e-06,
+ "loss": 0.5923,
+ "step": 299
+ },
+ {
+ "epoch": 0.9394572025052192,
+ "grad_norm": 0.864109218120575,
+ "learning_rate": 4.851526532678203e-06,
+ "loss": 0.6315,
+ "step": 300
+ },
+ {
+ "epoch": 0.94258872651357,
+ "grad_norm": 0.8900614380836487,
+ "learning_rate": 4.850053146060217e-06,
+ "loss": 0.6128,
+ "step": 301
+ },
+ {
+ "epoch": 0.9457202505219207,
+ "grad_norm": 0.927254855632782,
+ "learning_rate": 4.84857271084402e-06,
+ "loss": 0.5955,
+ "step": 302
+ },
+ {
+ "epoch": 0.9488517745302714,
+ "grad_norm": 1.0046517848968506,
+ "learning_rate": 4.847085231469935e-06,
+ "loss": 0.6134,
+ "step": 303
+ },
+ {
+ "epoch": 0.9519832985386222,
+ "grad_norm": 0.734597384929657,
+ "learning_rate": 4.8455907123994125e-06,
+ "loss": 0.5927,
+ "step": 304
+ },
+ {
+ "epoch": 0.9551148225469729,
+ "grad_norm": 0.7338348031044006,
+ "learning_rate": 4.844089158115016e-06,
+ "loss": 0.5897,
+ "step": 305
+ },
+ {
+ "epoch": 0.9582463465553236,
+ "grad_norm": 0.9163988828659058,
+ "learning_rate": 4.8425805731204106e-06,
+ "loss": 0.6051,
+ "step": 306
+ },
+ {
+ "epoch": 0.9613778705636743,
+ "grad_norm": 1.050246238708496,
+ "learning_rate": 4.84106496194035e-06,
+ "loss": 0.5751,
+ "step": 307
+ },
+ {
+ "epoch": 0.964509394572025,
+ "grad_norm": 0.7637603878974915,
+ "learning_rate": 4.83954232912066e-06,
+ "loss": 0.5677,
+ "step": 308
+ },
+ {
+ "epoch": 0.9676409185803758,
+ "grad_norm": 0.7110525965690613,
+ "learning_rate": 4.838012679228229e-06,
+ "loss": 0.6051,
+ "step": 309
+ },
+ {
+ "epoch": 0.9707724425887265,
+ "grad_norm": 0.7662068605422974,
+ "learning_rate": 4.836476016850988e-06,
+ "loss": 0.59,
+ "step": 310
+ },
+ {
+ "epoch": 0.9739039665970772,
+ "grad_norm": 0.8907375335693359,
+ "learning_rate": 4.834932346597906e-06,
+ "loss": 0.5792,
+ "step": 311
+ },
+ {
+ "epoch": 0.9770354906054279,
+ "grad_norm": 0.8939849138259888,
+ "learning_rate": 4.833381673098966e-06,
+ "loss": 0.6062,
+ "step": 312
+ },
+ {
+ "epoch": 0.9801670146137788,
+ "grad_norm": 0.8878788948059082,
+ "learning_rate": 4.8318240010051595e-06,
+ "loss": 0.5694,
+ "step": 313
+ },
+ {
+ "epoch": 0.9832985386221295,
+ "grad_norm": 1.2523870468139648,
+ "learning_rate": 4.830259334988468e-06,
+ "loss": 0.5809,
+ "step": 314
+ },
+ {
+ "epoch": 0.9864300626304802,
+ "grad_norm": 1.0836797952651978,
+ "learning_rate": 4.82868767974185e-06,
+ "loss": 0.5949,
+ "step": 315
+ },
+ {
+ "epoch": 0.9895615866388309,
+ "grad_norm": 0.7985473871231079,
+ "learning_rate": 4.827109039979226e-06,
+ "loss": 0.6057,
+ "step": 316
+ },
+ {
+ "epoch": 0.9926931106471816,
+ "grad_norm": 1.042951226234436,
+ "learning_rate": 4.825523420435469e-06,
+ "loss": 0.6004,
+ "step": 317
+ },
+ {
+ "epoch": 0.9958246346555324,
+ "grad_norm": 0.7845115661621094,
+ "learning_rate": 4.823930825866381e-06,
+ "loss": 0.6161,
+ "step": 318
+ },
+ {
+ "epoch": 0.9989561586638831,
+ "grad_norm": 0.931854784488678,
+ "learning_rate": 4.82233126104869e-06,
+ "loss": 0.5912,
+ "step": 319
+ },
+ {
+ "epoch": 1.0,
+ "grad_norm": 0.931854784488678,
+ "learning_rate": 4.8207247307800275e-06,
+ "loss": 0.1914,
+ "step": 320
+ },
+ {
+ "epoch": 1.0031315240083507,
+ "grad_norm": 0.751028835773468,
+ "learning_rate": 4.819111239878916e-06,
+ "loss": 0.5802,
+ "step": 321
+ },
+ {
+ "epoch": 1.0062630480167014,
+ "grad_norm": 1.4943569898605347,
+ "learning_rate": 4.817490793184758e-06,
+ "loss": 0.613,
+ "step": 322
+ },
+ {
+ "epoch": 1.0093945720250521,
+ "grad_norm": 2.296318531036377,
+ "learning_rate": 4.815863395557816e-06,
+ "loss": 0.5453,
+ "step": 323
+ },
+ {
+ "epoch": 1.0125260960334028,
+ "grad_norm": 0.760101318359375,
+ "learning_rate": 4.814229051879202e-06,
+ "loss": 0.5302,
+ "step": 324
+ },
+ {
+ "epoch": 1.0156576200417538,
+ "grad_norm": 0.8145846128463745,
+ "learning_rate": 4.812587767050861e-06,
+ "loss": 0.5831,
+ "step": 325
+ },
+ {
+ "epoch": 1.0187891440501045,
+ "grad_norm": 0.9169796109199524,
+ "learning_rate": 4.8109395459955565e-06,
+ "loss": 0.5756,
+ "step": 326
+ },
+ {
+ "epoch": 1.0219206680584552,
+ "grad_norm": 0.8791524171829224,
+ "learning_rate": 4.809284393656858e-06,
+ "loss": 0.5988,
+ "step": 327
+ },
+ {
+ "epoch": 1.0250521920668059,
+ "grad_norm": 1.0184170007705688,
+ "learning_rate": 4.807622314999122e-06,
+ "loss": 0.5476,
+ "step": 328
+ },
+ {
+ "epoch": 1.0281837160751566,
+ "grad_norm": 0.8095184564590454,
+ "learning_rate": 4.8059533150074805e-06,
+ "loss": 0.5723,
+ "step": 329
+ },
+ {
+ "epoch": 1.0313152400835073,
+ "grad_norm": 0.7621930241584778,
+ "learning_rate": 4.804277398687826e-06,
+ "loss": 0.5841,
+ "step": 330
+ },
+ {
+ "epoch": 1.034446764091858,
+ "grad_norm": 3.729628324508667,
+ "learning_rate": 4.802594571066791e-06,
+ "loss": 0.5639,
+ "step": 331
+ },
+ {
+ "epoch": 1.0375782881002087,
+ "grad_norm": 1.6502974033355713,
+ "learning_rate": 4.800904837191743e-06,
+ "loss": 0.6024,
+ "step": 332
+ },
+ {
+ "epoch": 1.0407098121085594,
+ "grad_norm": 0.8031198978424072,
+ "learning_rate": 4.799208202130762e-06,
+ "loss": 0.5305,
+ "step": 333
+ },
+ {
+ "epoch": 1.0438413361169103,
+ "grad_norm": 0.939644992351532,
+ "learning_rate": 4.797504670972623e-06,
+ "loss": 0.5446,
+ "step": 334
+ },
+ {
+ "epoch": 1.046972860125261,
+ "grad_norm": 1.0589954853057861,
+ "learning_rate": 4.795794248826789e-06,
+ "loss": 0.5366,
+ "step": 335
+ },
+ {
+ "epoch": 1.0501043841336117,
+ "grad_norm": 0.9089614748954773,
+ "learning_rate": 4.794076940823391e-06,
+ "loss": 0.5795,
+ "step": 336
+ },
+ {
+ "epoch": 1.0532359081419624,
+ "grad_norm": 0.7732561230659485,
+ "learning_rate": 4.792352752113212e-06,
+ "loss": 0.5765,
+ "step": 337
+ },
+ {
+ "epoch": 1.0563674321503131,
+ "grad_norm": 1.811553955078125,
+ "learning_rate": 4.790621687867672e-06,
+ "loss": 0.561,
+ "step": 338
+ },
+ {
+ "epoch": 1.0594989561586639,
+ "grad_norm": 1.1930758953094482,
+ "learning_rate": 4.788883753278813e-06,
+ "loss": 0.5,
+ "step": 339
+ },
+ {
+ "epoch": 1.0626304801670146,
+ "grad_norm": 0.9551813006401062,
+ "learning_rate": 4.787138953559285e-06,
+ "loss": 0.5228,
+ "step": 340
+ },
+ {
+ "epoch": 1.0657620041753653,
+ "grad_norm": 0.9609586596488953,
+ "learning_rate": 4.785387293942329e-06,
+ "loss": 0.5827,
+ "step": 341
+ },
+ {
+ "epoch": 1.068893528183716,
+ "grad_norm": 0.8403449654579163,
+ "learning_rate": 4.783628779681759e-06,
+ "loss": 0.5585,
+ "step": 342
+ },
+ {
+ "epoch": 1.072025052192067,
+ "grad_norm": 0.9108251929283142,
+ "learning_rate": 4.7818634160519496e-06,
+ "loss": 0.6077,
+ "step": 343
+ },
+ {
+ "epoch": 1.0751565762004176,
+ "grad_norm": 0.9476898908615112,
+ "learning_rate": 4.780091208347819e-06,
+ "loss": 0.5493,
+ "step": 344
+ },
+ {
+ "epoch": 1.0782881002087683,
+ "grad_norm": 1.1943707466125488,
+ "learning_rate": 4.778312161884813e-06,
+ "loss": 0.5736,
+ "step": 345
+ },
+ {
+ "epoch": 1.081419624217119,
+ "grad_norm": 3.1342639923095703,
+ "learning_rate": 4.77652628199889e-06,
+ "loss": 0.5765,
+ "step": 346
+ },
+ {
+ "epoch": 1.0845511482254697,
+ "grad_norm": 2.7982125282287598,
+ "learning_rate": 4.7747335740465015e-06,
+ "loss": 0.6003,
+ "step": 347
+ },
+ {
+ "epoch": 1.0876826722338204,
+ "grad_norm": 1.5068914890289307,
+ "learning_rate": 4.7729340434045815e-06,
+ "loss": 0.5033,
+ "step": 348
+ },
+ {
+ "epoch": 1.0908141962421711,
+ "grad_norm": 0.8273429274559021,
+ "learning_rate": 4.771127695470527e-06,
+ "loss": 0.5309,
+ "step": 349
+ },
+ {
+ "epoch": 1.0939457202505218,
+ "grad_norm": 1.104974389076233,
+ "learning_rate": 4.76931453566218e-06,
+ "loss": 0.5244,
+ "step": 350
+ },
+ {
+ "epoch": 1.0970772442588728,
+ "grad_norm": 1.096509337425232,
+ "learning_rate": 4.7674945694178166e-06,
+ "loss": 0.5585,
+ "step": 351
+ },
+ {
+ "epoch": 1.1002087682672235,
+ "grad_norm": 1.0238200426101685,
+ "learning_rate": 4.765667802196127e-06,
+ "loss": 0.5589,
+ "step": 352
+ },
+ {
+ "epoch": 1.1033402922755742,
+ "grad_norm": 0.7515526413917542,
+ "learning_rate": 4.763834239476197e-06,
+ "loss": 0.5304,
+ "step": 353
+ },
+ {
+ "epoch": 1.1064718162839249,
+ "grad_norm": 1.0282566547393799,
+ "learning_rate": 4.761993886757499e-06,
+ "loss": 0.5476,
+ "step": 354
+ },
+ {
+ "epoch": 1.1096033402922756,
+ "grad_norm": 0.9962708950042725,
+ "learning_rate": 4.760146749559868e-06,
+ "loss": 0.5117,
+ "step": 355
+ },
+ {
+ "epoch": 1.1127348643006263,
+ "grad_norm": 0.7851671576499939,
+ "learning_rate": 4.758292833423488e-06,
+ "loss": 0.5542,
+ "step": 356
+ },
+ {
+ "epoch": 1.115866388308977,
+ "grad_norm": 0.8857759237289429,
+ "learning_rate": 4.756432143908876e-06,
+ "loss": 0.544,
+ "step": 357
+ },
+ {
+ "epoch": 1.1189979123173277,
+ "grad_norm": 0.9402740597724915,
+ "learning_rate": 4.7545646865968645e-06,
+ "loss": 0.5656,
+ "step": 358
+ },
+ {
+ "epoch": 1.1221294363256784,
+ "grad_norm": 0.8210407495498657,
+ "learning_rate": 4.752690467088584e-06,
+ "loss": 0.5733,
+ "step": 359
+ },
+ {
+ "epoch": 1.1252609603340291,
+ "grad_norm": 0.795684278011322,
+ "learning_rate": 4.750809491005449e-06,
+ "loss": 0.5678,
+ "step": 360
+ },
+ {
+ "epoch": 1.12839248434238,
+ "grad_norm": 0.8712463974952698,
+ "learning_rate": 4.748921763989139e-06,
+ "loss": 0.5777,
+ "step": 361
+ },
+ {
+ "epoch": 1.1315240083507307,
+ "grad_norm": 0.9810119867324829,
+ "learning_rate": 4.747027291701578e-06,
+ "loss": 0.5511,
+ "step": 362
+ },
+ {
+ "epoch": 1.1346555323590815,
+ "grad_norm": 0.81117844581604,
+ "learning_rate": 4.745126079824926e-06,
+ "loss": 0.5038,
+ "step": 363
+ },
+ {
+ "epoch": 1.1377870563674322,
+ "grad_norm": 0.7631494402885437,
+ "learning_rate": 4.743218134061556e-06,
+ "loss": 0.6272,
+ "step": 364
+ },
+ {
+ "epoch": 1.1409185803757829,
+ "grad_norm": 0.7601696252822876,
+ "learning_rate": 4.741303460134038e-06,
+ "loss": 0.571,
+ "step": 365
+ },
+ {
+ "epoch": 1.1440501043841336,
+ "grad_norm": 1.7977744340896606,
+ "learning_rate": 4.7393820637851205e-06,
+ "loss": 0.538,
+ "step": 366
+ },
+ {
+ "epoch": 1.1471816283924843,
+ "grad_norm": 2.022578001022339,
+ "learning_rate": 4.737453950777718e-06,
+ "loss": 0.5822,
+ "step": 367
+ },
+ {
+ "epoch": 1.150313152400835,
+ "grad_norm": 0.7586764693260193,
+ "learning_rate": 4.735519126894885e-06,
+ "loss": 0.5986,
+ "step": 368
+ },
+ {
+ "epoch": 1.153444676409186,
+ "grad_norm": 0.8970286846160889,
+ "learning_rate": 4.733577597939812e-06,
+ "loss": 0.542,
+ "step": 369
+ },
+ {
+ "epoch": 1.1565762004175366,
+ "grad_norm": 0.8546352982521057,
+ "learning_rate": 4.731629369735793e-06,
+ "loss": 0.5832,
+ "step": 370
+ },
+ {
+ "epoch": 1.1597077244258873,
+ "grad_norm": 0.9266164898872375,
+ "learning_rate": 4.72967444812622e-06,
+ "loss": 0.551,
+ "step": 371
+ },
+ {
+ "epoch": 1.162839248434238,
+ "grad_norm": 1.0413658618927002,
+ "learning_rate": 4.7277128389745595e-06,
+ "loss": 0.5866,
+ "step": 372
+ },
+ {
+ "epoch": 1.1659707724425887,
+ "grad_norm": 0.9312199950218201,
+ "learning_rate": 4.7257445481643334e-06,
+ "loss": 0.5723,
+ "step": 373
+ },
+ {
+ "epoch": 1.1691022964509394,
+ "grad_norm": 0.7389806509017944,
+ "learning_rate": 4.723769581599109e-06,
+ "loss": 0.5209,
+ "step": 374
+ },
+ {
+ "epoch": 1.1722338204592901,
+ "grad_norm": 3.053169012069702,
+ "learning_rate": 4.721787945202472e-06,
+ "loss": 0.6094,
+ "step": 375
+ },
+ {
+ "epoch": 1.1753653444676408,
+ "grad_norm": 1.288589596748352,
+ "learning_rate": 4.719799644918017e-06,
+ "loss": 0.5616,
+ "step": 376
+ },
+ {
+ "epoch": 1.1784968684759916,
+ "grad_norm": 0.7675042152404785,
+ "learning_rate": 4.717804686709323e-06,
+ "loss": 0.4963,
+ "step": 377
+ },
+ {
+ "epoch": 1.1816283924843423,
+ "grad_norm": 0.7246491312980652,
+ "learning_rate": 4.715803076559938e-06,
+ "loss": 0.5273,
+ "step": 378
+ },
+ {
+ "epoch": 1.1847599164926932,
+ "grad_norm": 0.8193361759185791,
+ "learning_rate": 4.713794820473366e-06,
+ "loss": 0.6107,
+ "step": 379
+ },
+ {
+ "epoch": 1.187891440501044,
+ "grad_norm": 0.9498510360717773,
+ "learning_rate": 4.711779924473037e-06,
+ "loss": 0.5421,
+ "step": 380
+ },
+ {
+ "epoch": 1.1910229645093946,
+ "grad_norm": 1.0479756593704224,
+ "learning_rate": 4.709758394602305e-06,
+ "loss": 0.5257,
+ "step": 381
+ },
+ {
+ "epoch": 1.1941544885177453,
+ "grad_norm": 0.907866895198822,
+ "learning_rate": 4.707730236924413e-06,
+ "loss": 0.5289,
+ "step": 382
+ },
+ {
+ "epoch": 1.197286012526096,
+ "grad_norm": 0.8861165642738342,
+ "learning_rate": 4.705695457522488e-06,
+ "loss": 0.5727,
+ "step": 383
+ },
+ {
+ "epoch": 1.2004175365344467,
+ "grad_norm": 0.7467761039733887,
+ "learning_rate": 4.703654062499516e-06,
+ "loss": 0.5602,
+ "step": 384
+ },
+ {
+ "epoch": 1.2035490605427974,
+ "grad_norm": 0.7456198334693909,
+ "learning_rate": 4.701606057978325e-06,
+ "loss": 0.5345,
+ "step": 385
+ },
+ {
+ "epoch": 1.2066805845511483,
+ "grad_norm": 1.9976060390472412,
+ "learning_rate": 4.699551450101571e-06,
+ "loss": 0.5504,
+ "step": 386
+ },
+ {
+ "epoch": 1.209812108559499,
+ "grad_norm": 1.5253807306289673,
+ "learning_rate": 4.697490245031709e-06,
+ "loss": 0.5568,
+ "step": 387
+ },
+ {
+ "epoch": 1.2129436325678498,
+ "grad_norm": 1.0786075592041016,
+ "learning_rate": 4.6954224489509885e-06,
+ "loss": 0.5564,
+ "step": 388
+ },
+ {
+ "epoch": 1.2160751565762005,
+ "grad_norm": 0.8385995030403137,
+ "learning_rate": 4.693348068061422e-06,
+ "loss": 0.5341,
+ "step": 389
+ },
+ {
+ "epoch": 1.2192066805845512,
+ "grad_norm": 0.8184949159622192,
+ "learning_rate": 4.691267108584774e-06,
+ "loss": 0.5614,
+ "step": 390
+ },
+ {
+ "epoch": 1.2223382045929019,
+ "grad_norm": 0.9964898824691772,
+ "learning_rate": 4.68917957676254e-06,
+ "loss": 0.5589,
+ "step": 391
+ },
+ {
+ "epoch": 1.2254697286012526,
+ "grad_norm": 1.0168914794921875,
+ "learning_rate": 4.687085478855931e-06,
+ "loss": 0.5892,
+ "step": 392
+ },
+ {
+ "epoch": 1.2286012526096033,
+ "grad_norm": 0.8841140866279602,
+ "learning_rate": 4.684984821145846e-06,
+ "loss": 0.5327,
+ "step": 393
+ },
+ {
+ "epoch": 1.231732776617954,
+ "grad_norm": 0.834431529045105,
+ "learning_rate": 4.682877609932866e-06,
+ "loss": 0.5594,
+ "step": 394
+ },
+ {
+ "epoch": 1.2348643006263047,
+ "grad_norm": 0.7256641983985901,
+ "learning_rate": 4.6807638515372234e-06,
+ "loss": 0.5443,
+ "step": 395
+ },
+ {
+ "epoch": 1.2379958246346556,
+ "grad_norm": 0.765096127986908,
+ "learning_rate": 4.678643552298788e-06,
+ "loss": 0.5439,
+ "step": 396
+ },
+ {
+ "epoch": 1.2411273486430063,
+ "grad_norm": 0.8760455846786499,
+ "learning_rate": 4.676516718577051e-06,
+ "loss": 0.5485,
+ "step": 397
+ },
+ {
+ "epoch": 1.244258872651357,
+ "grad_norm": 2.7111501693725586,
+ "learning_rate": 4.674383356751099e-06,
+ "loss": 0.5696,
+ "step": 398
+ },
+ {
+ "epoch": 1.2473903966597077,
+ "grad_norm": 1.0521738529205322,
+ "learning_rate": 4.672243473219601e-06,
+ "loss": 0.5503,
+ "step": 399
+ },
+ {
+ "epoch": 1.2505219206680585,
+ "grad_norm": 0.8909669518470764,
+ "learning_rate": 4.670097074400785e-06,
+ "loss": 0.5183,
+ "step": 400
+ },
+ {
+ "epoch": 1.2536534446764092,
+ "grad_norm": 0.7483847737312317,
+ "learning_rate": 4.667944166732424e-06,
+ "loss": 0.5669,
+ "step": 401
+ },
+ {
+ "epoch": 1.2567849686847599,
+ "grad_norm": 1.146997094154358,
+ "learning_rate": 4.665784756671808e-06,
+ "loss": 0.5464,
+ "step": 402
+ },
+ {
+ "epoch": 1.2599164926931106,
+ "grad_norm": 0.8998096585273743,
+ "learning_rate": 4.663618850695733e-06,
+ "loss": 0.5502,
+ "step": 403
+ },
+ {
+ "epoch": 1.2630480167014615,
+ "grad_norm": 0.8882688283920288,
+ "learning_rate": 4.6614464553004795e-06,
+ "loss": 0.5507,
+ "step": 404
+ },
+ {
+ "epoch": 1.2661795407098122,
+ "grad_norm": 0.8310684561729431,
+ "learning_rate": 4.659267577001789e-06,
+ "loss": 0.5164,
+ "step": 405
+ },
+ {
+ "epoch": 1.269311064718163,
+ "grad_norm": 0.9286114573478699,
+ "learning_rate": 4.657082222334851e-06,
+ "loss": 0.4813,
+ "step": 406
+ },
+ {
+ "epoch": 1.2724425887265136,
+ "grad_norm": 1.2394906282424927,
+ "learning_rate": 4.654890397854275e-06,
+ "loss": 0.5837,
+ "step": 407
+ },
+ {
+ "epoch": 1.2755741127348643,
+ "grad_norm": 4.00585412979126,
+ "learning_rate": 4.652692110134079e-06,
+ "loss": 0.5453,
+ "step": 408
+ },
+ {
+ "epoch": 1.278705636743215,
+ "grad_norm": 1.1667803525924683,
+ "learning_rate": 4.650487365767667e-06,
+ "loss": 0.5652,
+ "step": 409
+ },
+ {
+ "epoch": 1.2818371607515657,
+ "grad_norm": 0.9351289868354797,
+ "learning_rate": 4.648276171367807e-06,
+ "loss": 0.5576,
+ "step": 410
+ },
+ {
+ "epoch": 1.2849686847599164,
+ "grad_norm": 0.8107728958129883,
+ "learning_rate": 4.646058533566614e-06,
+ "loss": 0.5821,
+ "step": 411
+ },
+ {
+ "epoch": 1.2881002087682671,
+ "grad_norm": 0.7293011546134949,
+ "learning_rate": 4.643834459015525e-06,
+ "loss": 0.5363,
+ "step": 412
+ },
+ {
+ "epoch": 1.2912317327766178,
+ "grad_norm": 0.7550690770149231,
+ "learning_rate": 4.641603954385289e-06,
+ "loss": 0.53,
+ "step": 413
+ },
+ {
+ "epoch": 1.2943632567849686,
+ "grad_norm": 0.7626177072525024,
+ "learning_rate": 4.639367026365938e-06,
+ "loss": 0.5307,
+ "step": 414
+ },
+ {
+ "epoch": 1.2974947807933195,
+ "grad_norm": 1.0841096639633179,
+ "learning_rate": 4.637123681666769e-06,
+ "loss": 0.5162,
+ "step": 415
+ },
+ {
+ "epoch": 1.3006263048016702,
+ "grad_norm": 0.8814271092414856,
+ "learning_rate": 4.634873927016326e-06,
+ "loss": 0.5369,
+ "step": 416
+ },
+ {
+ "epoch": 1.303757828810021,
+ "grad_norm": 0.7402971982955933,
+ "learning_rate": 4.632617769162378e-06,
+ "loss": 0.5846,
+ "step": 417
+ },
+ {
+ "epoch": 1.3068893528183716,
+ "grad_norm": 0.8106061220169067,
+ "learning_rate": 4.6303552148719e-06,
+ "loss": 0.5289,
+ "step": 418
+ },
+ {
+ "epoch": 1.3100208768267223,
+ "grad_norm": 0.9241361618041992,
+ "learning_rate": 4.628086270931053e-06,
+ "loss": 0.5714,
+ "step": 419
+ },
+ {
+ "epoch": 1.313152400835073,
+ "grad_norm": 0.950332522392273,
+ "learning_rate": 4.625810944145159e-06,
+ "loss": 0.5817,
+ "step": 420
+ },
+ {
+ "epoch": 1.316283924843424,
+ "grad_norm": 0.9037718772888184,
+ "learning_rate": 4.623529241338689e-06,
+ "loss": 0.5902,
+ "step": 421
+ },
+ {
+ "epoch": 1.3194154488517746,
+ "grad_norm": 1.2110658884048462,
+ "learning_rate": 4.621241169355234e-06,
+ "loss": 0.561,
+ "step": 422
+ },
+ {
+ "epoch": 1.3225469728601253,
+ "grad_norm": 0.8582742214202881,
+ "learning_rate": 4.618946735057491e-06,
+ "loss": 0.5003,
+ "step": 423
+ },
+ {
+ "epoch": 1.325678496868476,
+ "grad_norm": 0.9203405976295471,
+ "learning_rate": 4.6166459453272386e-06,
+ "loss": 0.5639,
+ "step": 424
+ },
+ {
+ "epoch": 1.3288100208768268,
+ "grad_norm": 0.933721125125885,
+ "learning_rate": 4.614338807065317e-06,
+ "loss": 0.5766,
+ "step": 425
+ },
+ {
+ "epoch": 1.3319415448851775,
+ "grad_norm": 0.8435131311416626,
+ "learning_rate": 4.612025327191608e-06,
+ "loss": 0.5656,
+ "step": 426
+ },
+ {
+ "epoch": 1.3350730688935282,
+ "grad_norm": 0.795796811580658,
+ "learning_rate": 4.609705512645015e-06,
+ "loss": 0.4996,
+ "step": 427
+ },
+ {
+ "epoch": 1.3382045929018789,
+ "grad_norm": 0.8168228268623352,
+ "learning_rate": 4.6073793703834404e-06,
+ "loss": 0.5465,
+ "step": 428
+ },
+ {
+ "epoch": 1.3413361169102296,
+ "grad_norm": 0.8795569539070129,
+ "learning_rate": 4.605046907383765e-06,
+ "loss": 0.5407,
+ "step": 429
+ },
+ {
+ "epoch": 1.3444676409185803,
+ "grad_norm": 0.8504094481468201,
+ "learning_rate": 4.6027081306418295e-06,
+ "loss": 0.5589,
+ "step": 430
+ },
+ {
+ "epoch": 1.347599164926931,
+ "grad_norm": 1.485202431678772,
+ "learning_rate": 4.600363047172409e-06,
+ "loss": 0.5515,
+ "step": 431
+ },
+ {
+ "epoch": 1.350730688935282,
+ "grad_norm": 1.1156851053237915,
+ "learning_rate": 4.598011664009197e-06,
+ "loss": 0.5681,
+ "step": 432
+ },
+ {
+ "epoch": 1.3538622129436326,
+ "grad_norm": 0.8666876554489136,
+ "learning_rate": 4.595653988204779e-06,
+ "loss": 0.5451,
+ "step": 433
+ },
+ {
+ "epoch": 1.3569937369519833,
+ "grad_norm": 0.8192381858825684,
+ "learning_rate": 4.593290026830619e-06,
+ "loss": 0.5632,
+ "step": 434
+ },
+ {
+ "epoch": 1.360125260960334,
+ "grad_norm": 0.7994804978370667,
+ "learning_rate": 4.590919786977029e-06,
+ "loss": 0.5181,
+ "step": 435
+ },
+ {
+ "epoch": 1.3632567849686847,
+ "grad_norm": 0.8038607835769653,
+ "learning_rate": 4.5885432757531535e-06,
+ "loss": 0.5385,
+ "step": 436
+ },
+ {
+ "epoch": 1.3663883089770354,
+ "grad_norm": 0.7677503824234009,
+ "learning_rate": 4.586160500286948e-06,
+ "loss": 0.5455,
+ "step": 437
+ },
+ {
+ "epoch": 1.3695198329853862,
+ "grad_norm": 0.8293285369873047,
+ "learning_rate": 4.583771467725157e-06,
+ "loss": 0.5401,
+ "step": 438
+ },
+ {
+ "epoch": 1.372651356993737,
+ "grad_norm": 0.8607680797576904,
+ "learning_rate": 4.581376185233289e-06,
+ "loss": 0.5782,
+ "step": 439
+ },
+ {
+ "epoch": 1.3757828810020878,
+ "grad_norm": 0.8847081065177917,
+ "learning_rate": 4.578974659995601e-06,
+ "loss": 0.572,
+ "step": 440
+ },
+ {
+ "epoch": 1.3789144050104385,
+ "grad_norm": 0.7669641971588135,
+ "learning_rate": 4.576566899215075e-06,
+ "loss": 0.5655,
+ "step": 441
+ },
+ {
+ "epoch": 1.3820459290187892,
+ "grad_norm": 0.8514629006385803,
+ "learning_rate": 4.5741529101133904e-06,
+ "loss": 0.5218,
+ "step": 442
+ },
+ {
+ "epoch": 1.38517745302714,
+ "grad_norm": 0.8719842433929443,
+ "learning_rate": 4.5717326999309145e-06,
+ "loss": 0.5579,
+ "step": 443
+ },
+ {
+ "epoch": 1.3883089770354906,
+ "grad_norm": 1.1142809391021729,
+ "learning_rate": 4.569306275926667e-06,
+ "loss": 0.5535,
+ "step": 444
+ },
+ {
+ "epoch": 1.3914405010438413,
+ "grad_norm": 0.7392387986183167,
+ "learning_rate": 4.566873645378309e-06,
+ "loss": 0.5335,
+ "step": 445
+ },
+ {
+ "epoch": 1.394572025052192,
+ "grad_norm": 0.9066658616065979,
+ "learning_rate": 4.564434815582117e-06,
+ "loss": 0.5286,
+ "step": 446
+ },
+ {
+ "epoch": 1.3977035490605427,
+ "grad_norm": 0.8648932576179504,
+ "learning_rate": 4.561989793852959e-06,
+ "loss": 0.5008,
+ "step": 447
+ },
+ {
+ "epoch": 1.4008350730688934,
+ "grad_norm": 0.7768712043762207,
+ "learning_rate": 4.559538587524276e-06,
+ "loss": 0.5727,
+ "step": 448
+ },
+ {
+ "epoch": 1.4039665970772441,
+ "grad_norm": 0.7851182222366333,
+ "learning_rate": 4.557081203948059e-06,
+ "loss": 0.5731,
+ "step": 449
+ },
+ {
+ "epoch": 1.407098121085595,
+ "grad_norm": 0.8959861397743225,
+ "learning_rate": 4.5546176504948255e-06,
+ "loss": 0.5587,
+ "step": 450
+ },
+ {
+ "epoch": 1.4102296450939458,
+ "grad_norm": 1.0538026094436646,
+ "learning_rate": 4.552147934553601e-06,
+ "loss": 0.5808,
+ "step": 451
+ },
+ {
+ "epoch": 1.4133611691022965,
+ "grad_norm": 0.9887629151344299,
+ "learning_rate": 4.54967206353189e-06,
+ "loss": 0.5658,
+ "step": 452
+ },
+ {
+ "epoch": 1.4164926931106472,
+ "grad_norm": 0.9579302072525024,
+ "learning_rate": 4.547190044855663e-06,
+ "loss": 0.5092,
+ "step": 453
+ },
+ {
+ "epoch": 1.4196242171189979,
+ "grad_norm": 0.6993522047996521,
+ "learning_rate": 4.544701885969326e-06,
+ "loss": 0.5233,
+ "step": 454
+ },
+ {
+ "epoch": 1.4227557411273486,
+ "grad_norm": 0.8197568655014038,
+ "learning_rate": 4.542207594335703e-06,
+ "loss": 0.553,
+ "step": 455
+ },
+ {
+ "epoch": 1.4258872651356993,
+ "grad_norm": 2.921947717666626,
+ "learning_rate": 4.53970717743601e-06,
+ "loss": 0.4857,
+ "step": 456
+ },
+ {
+ "epoch": 1.4290187891440502,
+ "grad_norm": 1.3547242879867554,
+ "learning_rate": 4.53720064276984e-06,
+ "loss": 0.5676,
+ "step": 457
+ },
+ {
+ "epoch": 1.432150313152401,
+ "grad_norm": 1.4175567626953125,
+ "learning_rate": 4.534687997855131e-06,
+ "loss": 0.5164,
+ "step": 458
+ },
+ {
+ "epoch": 1.4352818371607516,
+ "grad_norm": 1.378146767616272,
+ "learning_rate": 4.532169250228145e-06,
+ "loss": 0.5429,
+ "step": 459
+ },
+ {
+ "epoch": 1.4384133611691023,
+ "grad_norm": 0.7811698317527771,
+ "learning_rate": 4.529644407443456e-06,
+ "loss": 0.524,
+ "step": 460
+ },
+ {
+ "epoch": 1.441544885177453,
+ "grad_norm": 1.1481678485870361,
+ "learning_rate": 4.527113477073914e-06,
+ "loss": 0.5513,
+ "step": 461
+ },
+ {
+ "epoch": 1.4446764091858038,
+ "grad_norm": 0.8450161218643188,
+ "learning_rate": 4.5245764667106266e-06,
+ "loss": 0.5632,
+ "step": 462
+ },
+ {
+ "epoch": 1.4478079331941545,
+ "grad_norm": 1.1582145690917969,
+ "learning_rate": 4.522033383962941e-06,
+ "loss": 0.5834,
+ "step": 463
+ },
+ {
+ "epoch": 1.4509394572025052,
+ "grad_norm": 1.0403447151184082,
+ "learning_rate": 4.519484236458416e-06,
+ "loss": 0.506,
+ "step": 464
+ },
+ {
+ "epoch": 1.4540709812108559,
+ "grad_norm": 0.7894920706748962,
+ "learning_rate": 4.516929031842799e-06,
+ "loss": 0.5526,
+ "step": 465
+ },
+ {
+ "epoch": 1.4572025052192066,
+ "grad_norm": 0.8092262744903564,
+ "learning_rate": 4.51436777778001e-06,
+ "loss": 0.5619,
+ "step": 466
+ },
+ {
+ "epoch": 1.4603340292275573,
+ "grad_norm": 0.9773806929588318,
+ "learning_rate": 4.511800481952106e-06,
+ "loss": 0.5179,
+ "step": 467
+ },
+ {
+ "epoch": 1.4634655532359082,
+ "grad_norm": 1.018676519393921,
+ "learning_rate": 4.509227152059271e-06,
+ "loss": 0.5415,
+ "step": 468
+ },
+ {
+ "epoch": 1.466597077244259,
+ "grad_norm": 0.7457838654518127,
+ "learning_rate": 4.506647795819784e-06,
+ "loss": 0.5473,
+ "step": 469
+ },
+ {
+ "epoch": 1.4697286012526096,
+ "grad_norm": 0.7826436161994934,
+ "learning_rate": 4.50406242097e-06,
+ "loss": 0.5526,
+ "step": 470
+ },
+ {
+ "epoch": 1.4728601252609603,
+ "grad_norm": 0.9492483139038086,
+ "learning_rate": 4.501471035264328e-06,
+ "loss": 0.5179,
+ "step": 471
+ },
+ {
+ "epoch": 1.475991649269311,
+ "grad_norm": 0.93398517370224,
+ "learning_rate": 4.4988736464752005e-06,
+ "loss": 0.5195,
+ "step": 472
+ },
+ {
+ "epoch": 1.4791231732776617,
+ "grad_norm": 0.8396487832069397,
+ "learning_rate": 4.496270262393061e-06,
+ "loss": 0.5447,
+ "step": 473
+ },
+ {
+ "epoch": 1.4822546972860124,
+ "grad_norm": 0.7450584173202515,
+ "learning_rate": 4.4936608908263315e-06,
+ "loss": 0.5207,
+ "step": 474
+ },
+ {
+ "epoch": 1.4853862212943634,
+ "grad_norm": 0.7887717485427856,
+ "learning_rate": 4.491045539601392e-06,
+ "loss": 0.523,
+ "step": 475
+ },
+ {
+ "epoch": 1.488517745302714,
+ "grad_norm": 1.2051388025283813,
+ "learning_rate": 4.48842421656256e-06,
+ "loss": 0.5402,
+ "step": 476
+ },
+ {
+ "epoch": 1.4916492693110648,
+ "grad_norm": 2.3103389739990234,
+ "learning_rate": 4.485796929572063e-06,
+ "loss": 0.5588,
+ "step": 477
+ },
+ {
+ "epoch": 1.4947807933194155,
+ "grad_norm": 0.7473112344741821,
+ "learning_rate": 4.483163686510016e-06,
+ "loss": 0.5731,
+ "step": 478
+ },
+ {
+ "epoch": 1.4979123173277662,
+ "grad_norm": 0.7545126676559448,
+ "learning_rate": 4.480524495274399e-06,
+ "loss": 0.5536,
+ "step": 479
+ },
+ {
+ "epoch": 1.501043841336117,
+ "grad_norm": 0.7801297903060913,
+ "learning_rate": 4.477879363781033e-06,
+ "loss": 0.5696,
+ "step": 480
+ },
+ {
+ "epoch": 1.5041753653444676,
+ "grad_norm": 0.7740563750267029,
+ "learning_rate": 4.475228299963554e-06,
+ "loss": 0.5526,
+ "step": 481
+ },
+ {
+ "epoch": 1.5073068893528183,
+ "grad_norm": 0.8600060343742371,
+ "learning_rate": 4.4725713117733936e-06,
+ "loss": 0.5051,
+ "step": 482
+ },
+ {
+ "epoch": 1.510438413361169,
+ "grad_norm": 0.6934283971786499,
+ "learning_rate": 4.46990840717975e-06,
+ "loss": 0.5564,
+ "step": 483
+ },
+ {
+ "epoch": 1.5135699373695197,
+ "grad_norm": 0.8927920460700989,
+ "learning_rate": 4.46723959416957e-06,
+ "loss": 0.5529,
+ "step": 484
+ },
+ {
+ "epoch": 1.5167014613778704,
+ "grad_norm": 0.9570988416671753,
+ "learning_rate": 4.464564880747517e-06,
+ "loss": 0.5661,
+ "step": 485
+ },
+ {
+ "epoch": 1.5198329853862211,
+ "grad_norm": 0.7229202389717102,
+ "learning_rate": 4.461884274935956e-06,
+ "loss": 0.5964,
+ "step": 486
+ },
+ {
+ "epoch": 1.522964509394572,
+ "grad_norm": 0.7367239594459534,
+ "learning_rate": 4.4591977847749225e-06,
+ "loss": 0.5455,
+ "step": 487
+ },
+ {
+ "epoch": 1.5260960334029228,
+ "grad_norm": 0.8062120676040649,
+ "learning_rate": 4.456505418322103e-06,
+ "loss": 0.5735,
+ "step": 488
+ },
+ {
+ "epoch": 1.5292275574112735,
+ "grad_norm": 0.8854482769966125,
+ "learning_rate": 4.453807183652808e-06,
+ "loss": 0.5421,
+ "step": 489
+ },
+ {
+ "epoch": 1.5323590814196242,
+ "grad_norm": 0.7518959045410156,
+ "learning_rate": 4.451103088859951e-06,
+ "loss": 0.5083,
+ "step": 490
+ },
+ {
+ "epoch": 1.535490605427975,
+ "grad_norm": 0.8621206879615784,
+ "learning_rate": 4.448393142054016e-06,
+ "loss": 0.4712,
+ "step": 491
+ },
+ {
+ "epoch": 1.5386221294363258,
+ "grad_norm": 1.0618741512298584,
+ "learning_rate": 4.445677351363046e-06,
+ "loss": 0.5808,
+ "step": 492
+ },
+ {
+ "epoch": 1.5417536534446765,
+ "grad_norm": 0.8261345028877258,
+ "learning_rate": 4.442955724932607e-06,
+ "loss": 0.5625,
+ "step": 493
+ },
+ {
+ "epoch": 1.5448851774530272,
+ "grad_norm": 0.7067139744758606,
+ "learning_rate": 4.440228270925772e-06,
+ "loss": 0.5661,
+ "step": 494
+ },
+ {
+ "epoch": 1.548016701461378,
+ "grad_norm": 0.9234416484832764,
+ "learning_rate": 4.437494997523091e-06,
+ "loss": 0.5428,
+ "step": 495
+ },
+ {
+ "epoch": 1.5511482254697286,
+ "grad_norm": 0.9273470044136047,
+ "learning_rate": 4.434755912922567e-06,
+ "loss": 0.5388,
+ "step": 496
+ },
+ {
+ "epoch": 1.5542797494780793,
+ "grad_norm": 1.0163263082504272,
+ "learning_rate": 4.4320110253396345e-06,
+ "loss": 0.5409,
+ "step": 497
+ },
+ {
+ "epoch": 1.55741127348643,
+ "grad_norm": 0.9542096853256226,
+ "learning_rate": 4.429260343007133e-06,
+ "loss": 0.5329,
+ "step": 498
+ },
+ {
+ "epoch": 1.5605427974947808,
+ "grad_norm": 0.8076801896095276,
+ "learning_rate": 4.426503874175283e-06,
+ "loss": 0.5616,
+ "step": 499
+ },
+ {
+ "epoch": 1.5636743215031315,
+ "grad_norm": 1.0063767433166504,
+ "learning_rate": 4.423741627111658e-06,
+ "loss": 0.5369,
+ "step": 500
+ },
+ {
+ "epoch": 1.5668058455114822,
+ "grad_norm": 1.040286898612976,
+ "learning_rate": 4.420973610101166e-06,
+ "loss": 0.5474,
+ "step": 501
+ },
+ {
+ "epoch": 1.5699373695198329,
+ "grad_norm": 0.7832860946655273,
+ "learning_rate": 4.4181998314460164e-06,
+ "loss": 0.5486,
+ "step": 502
+ },
+ {
+ "epoch": 1.5730688935281836,
+ "grad_norm": 0.8162257075309753,
+ "learning_rate": 4.415420299465706e-06,
+ "loss": 0.5054,
+ "step": 503
+ },
+ {
+ "epoch": 1.5762004175365343,
+ "grad_norm": 0.9108433127403259,
+ "learning_rate": 4.4126350224969814e-06,
+ "loss": 0.5399,
+ "step": 504
+ },
+ {
+ "epoch": 1.5793319415448852,
+ "grad_norm": 0.8002520799636841,
+ "learning_rate": 4.409844008893824e-06,
+ "loss": 0.5485,
+ "step": 505
+ },
+ {
+ "epoch": 1.582463465553236,
+ "grad_norm": 0.8543248772621155,
+ "learning_rate": 4.407047267027423e-06,
+ "loss": 0.4984,
+ "step": 506
+ },
+ {
+ "epoch": 1.5855949895615866,
+ "grad_norm": 0.7154155373573303,
+ "learning_rate": 4.404244805286141e-06,
+ "loss": 0.5392,
+ "step": 507
+ },
+ {
+ "epoch": 1.5887265135699373,
+ "grad_norm": 0.818553626537323,
+ "learning_rate": 4.401436632075504e-06,
+ "loss": 0.5178,
+ "step": 508
+ },
+ {
+ "epoch": 1.5918580375782883,
+ "grad_norm": 0.7535017728805542,
+ "learning_rate": 4.398622755818167e-06,
+ "loss": 0.5446,
+ "step": 509
+ },
+ {
+ "epoch": 1.594989561586639,
+ "grad_norm": 0.9328975677490234,
+ "learning_rate": 4.395803184953889e-06,
+ "loss": 0.5546,
+ "step": 510
+ },
+ {
+ "epoch": 1.5981210855949897,
+ "grad_norm": 0.7960026860237122,
+ "learning_rate": 4.392977927939508e-06,
+ "loss": 0.5451,
+ "step": 511
+ },
+ {
+ "epoch": 1.6012526096033404,
+ "grad_norm": 0.9686267971992493,
+ "learning_rate": 4.3901469932489195e-06,
+ "loss": 0.5198,
+ "step": 512
+ },
+ {
+ "epoch": 1.604384133611691,
+ "grad_norm": 0.903137743473053,
+ "learning_rate": 4.387310389373047e-06,
+ "loss": 0.5395,
+ "step": 513
+ },
+ {
+ "epoch": 1.6075156576200418,
+ "grad_norm": 1.0728516578674316,
+ "learning_rate": 4.384468124819816e-06,
+ "loss": 0.5843,
+ "step": 514
+ },
+ {
+ "epoch": 1.6106471816283925,
+ "grad_norm": 1.0245436429977417,
+ "learning_rate": 4.3816202081141345e-06,
+ "loss": 0.5672,
+ "step": 515
+ },
+ {
+ "epoch": 1.6137787056367432,
+ "grad_norm": 0.9672732353210449,
+ "learning_rate": 4.378766647797858e-06,
+ "loss": 0.5369,
+ "step": 516
+ },
+ {
+ "epoch": 1.616910229645094,
+ "grad_norm": 0.9149513840675354,
+ "learning_rate": 4.375907452429774e-06,
+ "loss": 0.4628,
+ "step": 517
+ },
+ {
+ "epoch": 1.6200417536534446,
+ "grad_norm": 0.7543843984603882,
+ "learning_rate": 4.373042630585567e-06,
+ "loss": 0.5344,
+ "step": 518
+ },
+ {
+ "epoch": 1.6231732776617953,
+ "grad_norm": 0.7589017152786255,
+ "learning_rate": 4.370172190857801e-06,
+ "loss": 0.5672,
+ "step": 519
+ },
+ {
+ "epoch": 1.626304801670146,
+ "grad_norm": 0.803040623664856,
+ "learning_rate": 4.367296141855887e-06,
+ "loss": 0.5313,
+ "step": 520
+ },
+ {
+ "epoch": 1.6294363256784967,
+ "grad_norm": 0.8305794596672058,
+ "learning_rate": 4.3644144922060625e-06,
+ "loss": 0.5754,
+ "step": 521
+ },
+ {
+ "epoch": 1.6325678496868476,
+ "grad_norm": 1.0086486339569092,
+ "learning_rate": 4.361527250551361e-06,
+ "loss": 0.5433,
+ "step": 522
+ },
+ {
+ "epoch": 1.6356993736951984,
+ "grad_norm": 0.7217550277709961,
+ "learning_rate": 4.35863442555159e-06,
+ "loss": 0.524,
+ "step": 523
+ },
+ {
+ "epoch": 1.638830897703549,
+ "grad_norm": 0.7788524627685547,
+ "learning_rate": 4.355736025883303e-06,
+ "loss": 0.536,
+ "step": 524
+ },
+ {
+ "epoch": 1.6419624217118998,
+ "grad_norm": 0.8460550904273987,
+ "learning_rate": 4.352832060239774e-06,
+ "loss": 0.5381,
+ "step": 525
+ },
+ {
+ "epoch": 1.6450939457202505,
+ "grad_norm": 0.7571215033531189,
+ "learning_rate": 4.3499225373309675e-06,
+ "loss": 0.541,
+ "step": 526
+ },
+ {
+ "epoch": 1.6482254697286014,
+ "grad_norm": 0.7343226671218872,
+ "learning_rate": 4.347007465883523e-06,
+ "loss": 0.5147,
+ "step": 527
+ },
+ {
+ "epoch": 1.651356993736952,
+ "grad_norm": 0.7271892428398132,
+ "learning_rate": 4.3440868546407165e-06,
+ "loss": 0.5311,
+ "step": 528
+ },
+ {
+ "epoch": 1.6544885177453028,
+ "grad_norm": 0.8166136741638184,
+ "learning_rate": 4.341160712362442e-06,
+ "loss": 0.5379,
+ "step": 529
+ },
+ {
+ "epoch": 1.6576200417536535,
+ "grad_norm": 1.5985233783721924,
+ "learning_rate": 4.338229047825182e-06,
+ "loss": 0.5782,
+ "step": 530
+ },
+ {
+ "epoch": 1.6607515657620042,
+ "grad_norm": 0.7835702896118164,
+ "learning_rate": 4.3352918698219835e-06,
+ "loss": 0.525,
+ "step": 531
+ },
+ {
+ "epoch": 1.663883089770355,
+ "grad_norm": 0.7278687953948975,
+ "learning_rate": 4.332349187162428e-06,
+ "loss": 0.5266,
+ "step": 532
+ },
+ {
+ "epoch": 1.6670146137787056,
+ "grad_norm": 0.8240190148353577,
+ "learning_rate": 4.329401008672608e-06,
+ "loss": 0.5515,
+ "step": 533
+ },
+ {
+ "epoch": 1.6701461377870563,
+ "grad_norm": 0.9447080492973328,
+ "learning_rate": 4.326447343195102e-06,
+ "loss": 0.5596,
+ "step": 534
+ },
+ {
+ "epoch": 1.673277661795407,
+ "grad_norm": 0.7827372550964355,
+ "learning_rate": 4.323488199588944e-06,
+ "loss": 0.5466,
+ "step": 535
+ },
+ {
+ "epoch": 1.6764091858037578,
+ "grad_norm": 0.9252517223358154,
+ "learning_rate": 4.320523586729599e-06,
+ "loss": 0.5433,
+ "step": 536
+ },
+ {
+ "epoch": 1.6795407098121085,
+ "grad_norm": 0.9437504410743713,
+ "learning_rate": 4.317553513508934e-06,
+ "loss": 0.5552,
+ "step": 537
+ },
+ {
+ "epoch": 1.6826722338204592,
+ "grad_norm": 0.8972746133804321,
+ "learning_rate": 4.3145779888351986e-06,
+ "loss": 0.5259,
+ "step": 538
+ },
+ {
+ "epoch": 1.6858037578288099,
+ "grad_norm": 0.8017446994781494,
+ "learning_rate": 4.311597021632988e-06,
+ "loss": 0.5263,
+ "step": 539
+ },
+ {
+ "epoch": 1.6889352818371608,
+ "grad_norm": 0.7875497341156006,
+ "learning_rate": 4.3086106208432235e-06,
+ "loss": 0.5316,
+ "step": 540
+ },
+ {
+ "epoch": 1.6920668058455115,
+ "grad_norm": 0.8204905986785889,
+ "learning_rate": 4.305618795423125e-06,
+ "loss": 0.5506,
+ "step": 541
+ },
+ {
+ "epoch": 1.6951983298538622,
+ "grad_norm": 0.888359785079956,
+ "learning_rate": 4.30262155434618e-06,
+ "loss": 0.4825,
+ "step": 542
+ },
+ {
+ "epoch": 1.698329853862213,
+ "grad_norm": 1.1026058197021484,
+ "learning_rate": 4.29961890660212e-06,
+ "loss": 0.5321,
+ "step": 543
+ },
+ {
+ "epoch": 1.7014613778705638,
+ "grad_norm": 0.7662535905838013,
+ "learning_rate": 4.2966108611968945e-06,
+ "loss": 0.5432,
+ "step": 544
+ },
+ {
+ "epoch": 1.7045929018789145,
+ "grad_norm": 1.1951749324798584,
+ "learning_rate": 4.293597427152641e-06,
+ "loss": 0.5123,
+ "step": 545
+ },
+ {
+ "epoch": 1.7077244258872653,
+ "grad_norm": 1.303183913230896,
+ "learning_rate": 4.290578613507661e-06,
+ "loss": 0.5346,
+ "step": 546
+ },
+ {
+ "epoch": 1.710855949895616,
+ "grad_norm": 0.7653357982635498,
+ "learning_rate": 4.287554429316387e-06,
+ "loss": 0.5397,
+ "step": 547
+ },
+ {
+ "epoch": 1.7139874739039667,
+ "grad_norm": 0.796215295791626,
+ "learning_rate": 4.284524883649366e-06,
+ "loss": 0.5421,
+ "step": 548
+ },
+ {
+ "epoch": 1.7171189979123174,
+ "grad_norm": 0.7599332332611084,
+ "learning_rate": 4.281489985593219e-06,
+ "loss": 0.5289,
+ "step": 549
+ },
+ {
+ "epoch": 1.720250521920668,
+ "grad_norm": 0.8029115796089172,
+ "learning_rate": 4.2784497442506265e-06,
+ "loss": 0.5409,
+ "step": 550
+ },
+ {
+ "epoch": 1.7233820459290188,
+ "grad_norm": 0.7194099426269531,
+ "learning_rate": 4.275404168740291e-06,
+ "loss": 0.5327,
+ "step": 551
+ },
+ {
+ "epoch": 1.7265135699373695,
+ "grad_norm": 0.7960740923881531,
+ "learning_rate": 4.272353268196917e-06,
+ "loss": 0.4896,
+ "step": 552
+ },
+ {
+ "epoch": 1.7296450939457202,
+ "grad_norm": 0.9572116732597351,
+ "learning_rate": 4.269297051771178e-06,
+ "loss": 0.5402,
+ "step": 553
+ },
+ {
+ "epoch": 1.732776617954071,
+ "grad_norm": 1.3604938983917236,
+ "learning_rate": 4.266235528629695e-06,
+ "loss": 0.5792,
+ "step": 554
+ },
+ {
+ "epoch": 1.7359081419624216,
+ "grad_norm": 2.067286729812622,
+ "learning_rate": 4.263168707955002e-06,
+ "loss": 0.5033,
+ "step": 555
+ },
+ {
+ "epoch": 1.7390396659707723,
+ "grad_norm": 0.8031097054481506,
+ "learning_rate": 4.260096598945523e-06,
+ "loss": 0.5117,
+ "step": 556
+ },
+ {
+ "epoch": 1.742171189979123,
+ "grad_norm": 1.0241729021072388,
+ "learning_rate": 4.257019210815546e-06,
+ "loss": 0.5359,
+ "step": 557
+ },
+ {
+ "epoch": 1.745302713987474,
+ "grad_norm": 0.7625218629837036,
+ "learning_rate": 4.25393655279519e-06,
+ "loss": 0.5625,
+ "step": 558
+ },
+ {
+ "epoch": 1.7484342379958246,
+ "grad_norm": 0.8603503704071045,
+ "learning_rate": 4.250848634130381e-06,
+ "loss": 0.5043,
+ "step": 559
+ },
+ {
+ "epoch": 1.7515657620041754,
+ "grad_norm": 0.9543750286102295,
+ "learning_rate": 4.247755464082824e-06,
+ "loss": 0.5364,
+ "step": 560
+ },
+ {
+ "epoch": 1.754697286012526,
+ "grad_norm": 0.9707463979721069,
+ "learning_rate": 4.244657051929973e-06,
+ "loss": 0.5184,
+ "step": 561
+ },
+ {
+ "epoch": 1.757828810020877,
+ "grad_norm": 0.7491432428359985,
+ "learning_rate": 4.241553406965008e-06,
+ "loss": 0.559,
+ "step": 562
+ },
+ {
+ "epoch": 1.7609603340292277,
+ "grad_norm": 0.7444972991943359,
+ "learning_rate": 4.238444538496801e-06,
+ "loss": 0.5327,
+ "step": 563
+ },
+ {
+ "epoch": 1.7640918580375784,
+ "grad_norm": 2.7108678817749023,
+ "learning_rate": 4.235330455849892e-06,
+ "loss": 0.55,
+ "step": 564
+ },
+ {
+ "epoch": 1.767223382045929,
+ "grad_norm": 1.6716049909591675,
+ "learning_rate": 4.232211168364459e-06,
+ "loss": 0.5093,
+ "step": 565
+ },
+ {
+ "epoch": 1.7703549060542798,
+ "grad_norm": 0.7023475170135498,
+ "learning_rate": 4.229086685396295e-06,
+ "loss": 0.569,
+ "step": 566
+ },
+ {
+ "epoch": 1.7734864300626305,
+ "grad_norm": 0.8596265316009521,
+ "learning_rate": 4.225957016316771e-06,
+ "loss": 0.5128,
+ "step": 567
+ },
+ {
+ "epoch": 1.7766179540709812,
+ "grad_norm": 0.8110849857330322,
+ "learning_rate": 4.222822170512816e-06,
+ "loss": 0.5142,
+ "step": 568
+ },
+ {
+ "epoch": 1.779749478079332,
+ "grad_norm": 0.7583725452423096,
+ "learning_rate": 4.219682157386884e-06,
+ "loss": 0.5584,
+ "step": 569
+ },
+ {
+ "epoch": 1.7828810020876826,
+ "grad_norm": 0.787811279296875,
+ "learning_rate": 4.21653698635693e-06,
+ "loss": 0.5068,
+ "step": 570
+ },
+ {
+ "epoch": 1.7860125260960333,
+ "grad_norm": 0.8298993110656738,
+ "learning_rate": 4.213386666856375e-06,
+ "loss": 0.5496,
+ "step": 571
+ },
+ {
+ "epoch": 1.789144050104384,
+ "grad_norm": 0.8999841213226318,
+ "learning_rate": 4.210231208334087e-06,
+ "loss": 0.5454,
+ "step": 572
+ },
+ {
+ "epoch": 1.7922755741127347,
+ "grad_norm": 4.264521598815918,
+ "learning_rate": 4.207070620254345e-06,
+ "loss": 0.5486,
+ "step": 573
+ },
+ {
+ "epoch": 1.7954070981210855,
+ "grad_norm": 0.8517448306083679,
+ "learning_rate": 4.203904912096812e-06,
+ "loss": 0.5566,
+ "step": 574
+ },
+ {
+ "epoch": 1.7985386221294362,
+ "grad_norm": 0.9230182766914368,
+ "learning_rate": 4.200734093356511e-06,
+ "loss": 0.4964,
+ "step": 575
+ },
+ {
+ "epoch": 1.801670146137787,
+ "grad_norm": 1.224039912223816,
+ "learning_rate": 4.197558173543791e-06,
+ "loss": 0.5356,
+ "step": 576
+ },
+ {
+ "epoch": 1.8048016701461378,
+ "grad_norm": 0.9998573660850525,
+ "learning_rate": 4.194377162184301e-06,
+ "loss": 0.5334,
+ "step": 577
+ },
+ {
+ "epoch": 1.8079331941544885,
+ "grad_norm": 0.865521252155304,
+ "learning_rate": 4.191191068818963e-06,
+ "loss": 0.5036,
+ "step": 578
+ },
+ {
+ "epoch": 1.8110647181628392,
+ "grad_norm": 0.8048138618469238,
+ "learning_rate": 4.18799990300394e-06,
+ "loss": 0.4979,
+ "step": 579
+ },
+ {
+ "epoch": 1.8141962421711901,
+ "grad_norm": 0.717815637588501,
+ "learning_rate": 4.184803674310609e-06,
+ "loss": 0.5623,
+ "step": 580
+ },
+ {
+ "epoch": 1.8173277661795408,
+ "grad_norm": 0.8403327465057373,
+ "learning_rate": 4.1816023923255335e-06,
+ "loss": 0.5055,
+ "step": 581
+ },
+ {
+ "epoch": 1.8204592901878915,
+ "grad_norm": 0.7298995852470398,
+ "learning_rate": 4.178396066650432e-06,
+ "loss": 0.5641,
+ "step": 582
+ },
+ {
+ "epoch": 1.8235908141962422,
+ "grad_norm": 0.9469727873802185,
+ "learning_rate": 4.1751847069021516e-06,
+ "loss": 0.5557,
+ "step": 583
+ },
+ {
+ "epoch": 1.826722338204593,
+ "grad_norm": 0.8641784191131592,
+ "learning_rate": 4.1719683227126386e-06,
+ "loss": 0.5153,
+ "step": 584
+ },
+ {
+ "epoch": 1.8298538622129437,
+ "grad_norm": 0.7316668629646301,
+ "learning_rate": 4.168746923728908e-06,
+ "loss": 0.4988,
+ "step": 585
+ },
+ {
+ "epoch": 1.8329853862212944,
+ "grad_norm": 0.8795468807220459,
+ "learning_rate": 4.165520519613017e-06,
+ "loss": 0.5483,
+ "step": 586
+ },
+ {
+ "epoch": 1.836116910229645,
+ "grad_norm": 0.7323560118675232,
+ "learning_rate": 4.162289120042034e-06,
+ "loss": 0.5194,
+ "step": 587
+ },
+ {
+ "epoch": 1.8392484342379958,
+ "grad_norm": 0.8217021822929382,
+ "learning_rate": 4.159052734708013e-06,
+ "loss": 0.532,
+ "step": 588
+ },
+ {
+ "epoch": 1.8423799582463465,
+ "grad_norm": 0.7669674754142761,
+ "learning_rate": 4.155811373317958e-06,
+ "loss": 0.541,
+ "step": 589
+ },
+ {
+ "epoch": 1.8455114822546972,
+ "grad_norm": 0.8312156200408936,
+ "learning_rate": 4.152565045593801e-06,
+ "loss": 0.5298,
+ "step": 590
+ },
+ {
+ "epoch": 1.848643006263048,
+ "grad_norm": 0.8967565298080444,
+ "learning_rate": 4.1493137612723665e-06,
+ "loss": 0.51,
+ "step": 591
+ },
+ {
+ "epoch": 1.8517745302713986,
+ "grad_norm": 0.8706664443016052,
+ "learning_rate": 4.14605753010535e-06,
+ "loss": 0.4941,
+ "step": 592
+ },
+ {
+ "epoch": 1.8549060542797495,
+ "grad_norm": 0.7585753798484802,
+ "learning_rate": 4.14279636185928e-06,
+ "loss": 0.5161,
+ "step": 593
+ },
+ {
+ "epoch": 1.8580375782881002,
+ "grad_norm": 0.7495241165161133,
+ "learning_rate": 4.1395302663154954e-06,
+ "loss": 0.5388,
+ "step": 594
+ },
+ {
+ "epoch": 1.861169102296451,
+ "grad_norm": 1.0746862888336182,
+ "learning_rate": 4.136259253270114e-06,
+ "loss": 0.4976,
+ "step": 595
+ },
+ {
+ "epoch": 1.8643006263048016,
+ "grad_norm": 0.872309684753418,
+ "learning_rate": 4.132983332534e-06,
+ "loss": 0.559,
+ "step": 596
+ },
+ {
+ "epoch": 1.8674321503131524,
+ "grad_norm": 0.8759891986846924,
+ "learning_rate": 4.1297025139327405e-06,
+ "loss": 0.5436,
+ "step": 597
+ },
+ {
+ "epoch": 1.8705636743215033,
+ "grad_norm": 1.1044493913650513,
+ "learning_rate": 4.126416807306611e-06,
+ "loss": 0.5476,
+ "step": 598
+ },
+ {
+ "epoch": 1.873695198329854,
+ "grad_norm": 0.8340442180633545,
+ "learning_rate": 4.123126222510549e-06,
+ "loss": 0.4592,
+ "step": 599
+ },
+ {
+ "epoch": 1.8768267223382047,
+ "grad_norm": 0.8331449031829834,
+ "learning_rate": 4.119830769414123e-06,
+ "loss": 0.5219,
+ "step": 600
+ },
+ {
+ "epoch": 1.8799582463465554,
+ "grad_norm": 1.0862973928451538,
+ "learning_rate": 4.116530457901503e-06,
+ "loss": 0.5159,
+ "step": 601
+ },
+ {
+ "epoch": 1.883089770354906,
+ "grad_norm": 0.8524414300918579,
+ "learning_rate": 4.113225297871431e-06,
+ "loss": 0.5502,
+ "step": 602
+ },
+ {
+ "epoch": 1.8862212943632568,
+ "grad_norm": 1.4945416450500488,
+ "learning_rate": 4.10991529923719e-06,
+ "loss": 0.5627,
+ "step": 603
+ },
+ {
+ "epoch": 1.8893528183716075,
+ "grad_norm": 1.5518157482147217,
+ "learning_rate": 4.10660047192658e-06,
+ "loss": 0.5517,
+ "step": 604
+ },
+ {
+ "epoch": 1.8924843423799582,
+ "grad_norm": 2.56638765335083,
+ "learning_rate": 4.103280825881878e-06,
+ "loss": 0.5422,
+ "step": 605
+ },
+ {
+ "epoch": 1.895615866388309,
+ "grad_norm": 0.867254912853241,
+ "learning_rate": 4.099956371059817e-06,
+ "loss": 0.4991,
+ "step": 606
+ },
+ {
+ "epoch": 1.8987473903966596,
+ "grad_norm": 0.9555892944335938,
+ "learning_rate": 4.096627117431554e-06,
+ "loss": 0.5339,
+ "step": 607
+ },
+ {
+ "epoch": 1.9018789144050103,
+ "grad_norm": 0.7905483245849609,
+ "learning_rate": 4.093293074982638e-06,
+ "loss": 0.5168,
+ "step": 608
+ },
+ {
+ "epoch": 1.905010438413361,
+ "grad_norm": 0.7500227093696594,
+ "learning_rate": 4.089954253712981e-06,
+ "loss": 0.5096,
+ "step": 609
+ },
+ {
+ "epoch": 1.9081419624217117,
+ "grad_norm": 0.8458324074745178,
+ "learning_rate": 4.086610663636828e-06,
+ "loss": 0.5296,
+ "step": 610
+ },
+ {
+ "epoch": 1.9112734864300627,
+ "grad_norm": 0.7392706871032715,
+ "learning_rate": 4.08326231478273e-06,
+ "loss": 0.5305,
+ "step": 611
+ },
+ {
+ "epoch": 1.9144050104384134,
+ "grad_norm": 0.8113343715667725,
+ "learning_rate": 4.079909217193508e-06,
+ "loss": 0.5044,
+ "step": 612
+ },
+ {
+ "epoch": 1.917536534446764,
+ "grad_norm": 0.7637801766395569,
+ "learning_rate": 4.076551380926226e-06,
+ "loss": 0.5298,
+ "step": 613
+ },
+ {
+ "epoch": 1.9206680584551148,
+ "grad_norm": 1.0523375272750854,
+ "learning_rate": 4.073188816052164e-06,
+ "loss": 0.5111,
+ "step": 614
+ },
+ {
+ "epoch": 1.9237995824634657,
+ "grad_norm": 0.8224868774414062,
+ "learning_rate": 4.069821532656781e-06,
+ "loss": 0.5178,
+ "step": 615
+ },
+ {
+ "epoch": 1.9269311064718164,
+ "grad_norm": 0.7270777821540833,
+ "learning_rate": 4.066449540839693e-06,
+ "loss": 0.5307,
+ "step": 616
+ },
+ {
+ "epoch": 1.9300626304801671,
+ "grad_norm": 0.7214602828025818,
+ "learning_rate": 4.063072850714631e-06,
+ "loss": 0.5171,
+ "step": 617
+ },
+ {
+ "epoch": 1.9331941544885178,
+ "grad_norm": 0.7333671450614929,
+ "learning_rate": 4.059691472409426e-06,
+ "loss": 0.56,
+ "step": 618
+ },
+ {
+ "epoch": 1.9363256784968685,
+ "grad_norm": 0.9166824221611023,
+ "learning_rate": 4.056305416065964e-06,
+ "loss": 0.5388,
+ "step": 619
+ },
+ {
+ "epoch": 1.9394572025052192,
+ "grad_norm": 0.7743303775787354,
+ "learning_rate": 4.052914691840167e-06,
+ "loss": 0.5134,
+ "step": 620
+ },
+ {
+ "epoch": 1.94258872651357,
+ "grad_norm": 0.704097330570221,
+ "learning_rate": 4.0495193099019524e-06,
+ "loss": 0.4926,
+ "step": 621
+ },
+ {
+ "epoch": 1.9457202505219207,
+ "grad_norm": 0.8508503437042236,
+ "learning_rate": 4.046119280435212e-06,
+ "loss": 0.5008,
+ "step": 622
+ },
+ {
+ "epoch": 1.9488517745302714,
+ "grad_norm": 0.725933313369751,
+ "learning_rate": 4.042714613637775e-06,
+ "loss": 0.5549,
+ "step": 623
+ },
+ {
+ "epoch": 1.951983298538622,
+ "grad_norm": 0.8919175863265991,
+ "learning_rate": 4.039305319721381e-06,
+ "loss": 0.5183,
+ "step": 624
+ },
+ {
+ "epoch": 1.9551148225469728,
+ "grad_norm": 0.827919065952301,
+ "learning_rate": 4.035891408911644e-06,
+ "loss": 0.5624,
+ "step": 625
+ },
+ {
+ "epoch": 1.9582463465553235,
+ "grad_norm": 0.7415187358856201,
+ "learning_rate": 4.032472891448032e-06,
+ "loss": 0.5454,
+ "step": 626
+ },
+ {
+ "epoch": 1.9613778705636742,
+ "grad_norm": 0.7675788998603821,
+ "learning_rate": 4.029049777583824e-06,
+ "loss": 0.5361,
+ "step": 627
+ },
+ {
+ "epoch": 1.964509394572025,
+ "grad_norm": 0.8464030623435974,
+ "learning_rate": 4.025622077586088e-06,
+ "loss": 0.5295,
+ "step": 628
+ },
+ {
+ "epoch": 1.9676409185803758,
+ "grad_norm": 0.7641633749008179,
+ "learning_rate": 4.022189801735646e-06,
+ "loss": 0.55,
+ "step": 629
+ },
+ {
+ "epoch": 1.9707724425887265,
+ "grad_norm": 0.7813227772712708,
+ "learning_rate": 4.018752960327048e-06,
+ "loss": 0.5587,
+ "step": 630
+ },
+ {
+ "epoch": 1.9739039665970772,
+ "grad_norm": 0.7576701641082764,
+ "learning_rate": 4.015311563668533e-06,
+ "loss": 0.5413,
+ "step": 631
+ },
+ {
+ "epoch": 1.977035490605428,
+ "grad_norm": 0.6949650049209595,
+ "learning_rate": 4.011865622082004e-06,
+ "loss": 0.5344,
+ "step": 632
+ },
+ {
+ "epoch": 1.9801670146137789,
+ "grad_norm": 0.9009145498275757,
+ "learning_rate": 4.008415145902997e-06,
+ "loss": 0.5233,
+ "step": 633
+ },
+ {
+ "epoch": 1.9832985386221296,
+ "grad_norm": 0.7635822892189026,
+ "learning_rate": 4.004960145480651e-06,
+ "loss": 0.4981,
+ "step": 634
+ },
+ {
+ "epoch": 1.9864300626304803,
+ "grad_norm": 0.8916334509849548,
+ "learning_rate": 4.0015006311776685e-06,
+ "loss": 0.5311,
+ "step": 635
+ },
+ {
+ "epoch": 1.989561586638831,
+ "grad_norm": 0.7197673320770264,
+ "learning_rate": 3.998036613370295e-06,
+ "loss": 0.5361,
+ "step": 636
+ },
+ {
+ "epoch": 1.9926931106471817,
+ "grad_norm": 0.8391228914260864,
+ "learning_rate": 3.994568102448284e-06,
+ "loss": 0.5473,
+ "step": 637
+ },
+ {
+ "epoch": 1.9958246346555324,
+ "grad_norm": 0.9371750950813293,
+ "learning_rate": 3.991095108814862e-06,
+ "loss": 0.5303,
+ "step": 638
+ },
+ {
+ "epoch": 1.998956158663883,
+ "grad_norm": 0.8929619789123535,
+ "learning_rate": 3.9876176428867046e-06,
+ "loss": 0.533,
+ "step": 639
+ },
+ {
+ "epoch": 2.0,
+ "grad_norm": 0.8929619789123535,
+ "learning_rate": 3.9841357150938984e-06,
+ "loss": 0.1831,
+ "step": 640
+ },
+ {
+ "epoch": 2.0031315240083507,
+ "grad_norm": 0.8802503347396851,
+ "learning_rate": 3.9806493358799135e-06,
+ "loss": 0.493,
+ "step": 641
+ },
+ {
+ "epoch": 2.0062630480167014,
+ "grad_norm": 0.802759051322937,
+ "learning_rate": 3.977158515701571e-06,
+ "loss": 0.498,
+ "step": 642
+ },
+ {
+ "epoch": 2.009394572025052,
+ "grad_norm": 1.0235401391983032,
+ "learning_rate": 3.973663265029013e-06,
+ "loss": 0.4887,
+ "step": 643
+ },
+ {
+ "epoch": 2.012526096033403,
+ "grad_norm": 0.7219089865684509,
+ "learning_rate": 3.97016359434567e-06,
+ "loss": 0.4628,
+ "step": 644
+ },
+ {
+ "epoch": 2.0156576200417535,
+ "grad_norm": 0.7887073755264282,
+ "learning_rate": 3.966659514148229e-06,
+ "loss": 0.525,
+ "step": 645
+ },
+ {
+ "epoch": 2.0187891440501042,
+ "grad_norm": 0.7960914969444275,
+ "learning_rate": 3.963151034946602e-06,
+ "loss": 0.4643,
+ "step": 646
+ },
+ {
+ "epoch": 2.021920668058455,
+ "grad_norm": 0.7902271151542664,
+ "learning_rate": 3.959638167263895e-06,
+ "loss": 0.4922,
+ "step": 647
+ },
+ {
+ "epoch": 2.0250521920668056,
+ "grad_norm": 0.9501478672027588,
+ "learning_rate": 3.956120921636379e-06,
+ "loss": 0.5285,
+ "step": 648
+ },
+ {
+ "epoch": 2.028183716075157,
+ "grad_norm": 0.9510527849197388,
+ "learning_rate": 3.952599308613454e-06,
+ "loss": 0.4909,
+ "step": 649
+ },
+ {
+ "epoch": 2.0313152400835075,
+ "grad_norm": 0.9408219456672668,
+ "learning_rate": 3.949073338757619e-06,
+ "loss": 0.4912,
+ "step": 650
+ },
+ {
+ "epoch": 2.034446764091858,
+ "grad_norm": 0.7148041725158691,
+ "learning_rate": 3.945543022644441e-06,
+ "loss": 0.4792,
+ "step": 651
+ },
+ {
+ "epoch": 2.037578288100209,
+ "grad_norm": 0.7737464904785156,
+ "learning_rate": 3.942008370862522e-06,
+ "loss": 0.4694,
+ "step": 652
+ },
+ {
+ "epoch": 2.0407098121085596,
+ "grad_norm": 0.8405889868736267,
+ "learning_rate": 3.938469394013472e-06,
+ "loss": 0.5048,
+ "step": 653
+ },
+ {
+ "epoch": 2.0438413361169103,
+ "grad_norm": 0.7896456718444824,
+ "learning_rate": 3.934926102711869e-06,
+ "loss": 0.4882,
+ "step": 654
+ },
+ {
+ "epoch": 2.046972860125261,
+ "grad_norm": 0.9290387034416199,
+ "learning_rate": 3.931378507585231e-06,
+ "loss": 0.503,
+ "step": 655
+ },
+ {
+ "epoch": 2.0501043841336117,
+ "grad_norm": 0.7386118769645691,
+ "learning_rate": 3.927826619273991e-06,
+ "loss": 0.4918,
+ "step": 656
+ },
+ {
+ "epoch": 2.0532359081419624,
+ "grad_norm": 0.9878676533699036,
+ "learning_rate": 3.92427044843145e-06,
+ "loss": 0.4958,
+ "step": 657
+ },
+ {
+ "epoch": 2.056367432150313,
+ "grad_norm": 1.0111151933670044,
+ "learning_rate": 3.92071000572376e-06,
+ "loss": 0.4886,
+ "step": 658
+ },
+ {
+ "epoch": 2.059498956158664,
+ "grad_norm": 0.8612061738967896,
+ "learning_rate": 3.917145301829884e-06,
+ "loss": 0.5216,
+ "step": 659
+ },
+ {
+ "epoch": 2.0626304801670146,
+ "grad_norm": 0.7458518743515015,
+ "learning_rate": 3.913576347441564e-06,
+ "loss": 0.4807,
+ "step": 660
+ },
+ {
+ "epoch": 2.0657620041753653,
+ "grad_norm": 0.7775886058807373,
+ "learning_rate": 3.910003153263294e-06,
+ "loss": 0.4837,
+ "step": 661
+ },
+ {
+ "epoch": 2.068893528183716,
+ "grad_norm": 0.7144196629524231,
+ "learning_rate": 3.906425730012282e-06,
+ "loss": 0.5081,
+ "step": 662
+ },
+ {
+ "epoch": 2.0720250521920667,
+ "grad_norm": 0.844971776008606,
+ "learning_rate": 3.9028440884184215e-06,
+ "loss": 0.474,
+ "step": 663
+ },
+ {
+ "epoch": 2.0751565762004174,
+ "grad_norm": 0.9709283113479614,
+ "learning_rate": 3.899258239224257e-06,
+ "loss": 0.503,
+ "step": 664
+ },
+ {
+ "epoch": 2.078288100208768,
+ "grad_norm": 1.1325515508651733,
+ "learning_rate": 3.895668193184954e-06,
+ "loss": 0.5058,
+ "step": 665
+ },
+ {
+ "epoch": 2.081419624217119,
+ "grad_norm": 0.7211254239082336,
+ "learning_rate": 3.892073961068266e-06,
+ "loss": 0.4982,
+ "step": 666
+ },
+ {
+ "epoch": 2.08455114822547,
+ "grad_norm": 0.8975517153739929,
+ "learning_rate": 3.888475553654502e-06,
+ "loss": 0.4699,
+ "step": 667
+ },
+ {
+ "epoch": 2.0876826722338206,
+ "grad_norm": 0.8270771503448486,
+ "learning_rate": 3.884872981736493e-06,
+ "loss": 0.4586,
+ "step": 668
+ },
+ {
+ "epoch": 2.0908141962421714,
+ "grad_norm": 0.8606625199317932,
+ "learning_rate": 3.881266256119561e-06,
+ "loss": 0.5299,
+ "step": 669
+ },
+ {
+ "epoch": 2.093945720250522,
+ "grad_norm": 0.9013976454734802,
+ "learning_rate": 3.877655387621488e-06,
+ "loss": 0.4887,
+ "step": 670
+ },
+ {
+ "epoch": 2.0970772442588728,
+ "grad_norm": 0.7603903412818909,
+ "learning_rate": 3.8740403870724795e-06,
+ "loss": 0.4992,
+ "step": 671
+ },
+ {
+ "epoch": 2.1002087682672235,
+ "grad_norm": 1.0432350635528564,
+ "learning_rate": 3.870421265315137e-06,
+ "loss": 0.5035,
+ "step": 672
+ },
+ {
+ "epoch": 2.103340292275574,
+ "grad_norm": 0.7727136611938477,
+ "learning_rate": 3.8667980332044195e-06,
+ "loss": 0.5006,
+ "step": 673
+ },
+ {
+ "epoch": 2.106471816283925,
+ "grad_norm": 0.9764307141304016,
+ "learning_rate": 3.863170701607618e-06,
+ "loss": 0.5061,
+ "step": 674
+ },
+ {
+ "epoch": 2.1096033402922756,
+ "grad_norm": 0.747818648815155,
+ "learning_rate": 3.859539281404317e-06,
+ "loss": 0.4761,
+ "step": 675
+ },
+ {
+ "epoch": 2.1127348643006263,
+ "grad_norm": 0.7254915237426758,
+ "learning_rate": 3.855903783486364e-06,
+ "loss": 0.5166,
+ "step": 676
+ },
+ {
+ "epoch": 2.115866388308977,
+ "grad_norm": 0.7678592801094055,
+ "learning_rate": 3.852264218757839e-06,
+ "loss": 0.5122,
+ "step": 677
+ },
+ {
+ "epoch": 2.1189979123173277,
+ "grad_norm": 0.8140144348144531,
+ "learning_rate": 3.8486205981350165e-06,
+ "loss": 0.4551,
+ "step": 678
+ },
+ {
+ "epoch": 2.1221294363256784,
+ "grad_norm": 0.9417359232902527,
+ "learning_rate": 3.844972932546338e-06,
+ "loss": 0.4748,
+ "step": 679
+ },
+ {
+ "epoch": 2.125260960334029,
+ "grad_norm": 0.8035290241241455,
+ "learning_rate": 3.841321232932378e-06,
+ "loss": 0.5079,
+ "step": 680
+ },
+ {
+ "epoch": 2.12839248434238,
+ "grad_norm": 0.8300641775131226,
+ "learning_rate": 3.837665510245809e-06,
+ "loss": 0.5018,
+ "step": 681
+ },
+ {
+ "epoch": 2.1315240083507305,
+ "grad_norm": 0.8293547034263611,
+ "learning_rate": 3.8340057754513715e-06,
+ "loss": 0.5042,
+ "step": 682
+ },
+ {
+ "epoch": 2.1346555323590812,
+ "grad_norm": 0.7780007719993591,
+ "learning_rate": 3.8303420395258365e-06,
+ "loss": 0.5048,
+ "step": 683
+ },
+ {
+ "epoch": 2.137787056367432,
+ "grad_norm": 0.7519420385360718,
+ "learning_rate": 3.8266743134579785e-06,
+ "loss": 0.5108,
+ "step": 684
+ },
+ {
+ "epoch": 2.140918580375783,
+ "grad_norm": 0.7872384190559387,
+ "learning_rate": 3.8230026082485404e-06,
+ "loss": 0.4924,
+ "step": 685
+ },
+ {
+ "epoch": 2.144050104384134,
+ "grad_norm": 0.7479491829872131,
+ "learning_rate": 3.819326934910197e-06,
+ "loss": 0.5184,
+ "step": 686
+ },
+ {
+ "epoch": 2.1471816283924845,
+ "grad_norm": 0.8438299298286438,
+ "learning_rate": 3.815647304467527e-06,
+ "loss": 0.4791,
+ "step": 687
+ },
+ {
+ "epoch": 2.150313152400835,
+ "grad_norm": 0.7923721671104431,
+ "learning_rate": 3.8119637279569773e-06,
+ "loss": 0.5305,
+ "step": 688
+ },
+ {
+ "epoch": 2.153444676409186,
+ "grad_norm": 0.7856534719467163,
+ "learning_rate": 3.80827621642683e-06,
+ "loss": 0.5063,
+ "step": 689
+ },
+ {
+ "epoch": 2.1565762004175366,
+ "grad_norm": 0.8544500470161438,
+ "learning_rate": 3.8045847809371706e-06,
+ "loss": 0.4989,
+ "step": 690
+ },
+ {
+ "epoch": 2.1597077244258873,
+ "grad_norm": 0.865390956401825,
+ "learning_rate": 3.800889432559852e-06,
+ "loss": 0.4931,
+ "step": 691
+ },
+ {
+ "epoch": 2.162839248434238,
+ "grad_norm": 0.9809399247169495,
+ "learning_rate": 3.797190182378466e-06,
+ "loss": 0.4785,
+ "step": 692
+ },
+ {
+ "epoch": 2.1659707724425887,
+ "grad_norm": 0.7954536080360413,
+ "learning_rate": 3.793487041488304e-06,
+ "loss": 0.4847,
+ "step": 693
+ },
+ {
+ "epoch": 2.1691022964509394,
+ "grad_norm": 0.754704475402832,
+ "learning_rate": 3.7897800209963298e-06,
+ "loss": 0.5125,
+ "step": 694
+ },
+ {
+ "epoch": 2.17223382045929,
+ "grad_norm": 0.7319822311401367,
+ "learning_rate": 3.7860691320211414e-06,
+ "loss": 0.477,
+ "step": 695
+ },
+ {
+ "epoch": 2.175365344467641,
+ "grad_norm": 0.8198635578155518,
+ "learning_rate": 3.7823543856929403e-06,
+ "loss": 0.4764,
+ "step": 696
+ },
+ {
+ "epoch": 2.1784968684759916,
+ "grad_norm": 0.708933413028717,
+ "learning_rate": 3.7786357931534987e-06,
+ "loss": 0.4948,
+ "step": 697
+ },
+ {
+ "epoch": 2.1816283924843423,
+ "grad_norm": 0.8493193984031677,
+ "learning_rate": 3.774913365556123e-06,
+ "loss": 0.5271,
+ "step": 698
+ },
+ {
+ "epoch": 2.184759916492693,
+ "grad_norm": 0.7999475002288818,
+ "learning_rate": 3.771187114065622e-06,
+ "loss": 0.4804,
+ "step": 699
+ },
+ {
+ "epoch": 2.1878914405010437,
+ "grad_norm": 0.8366796970367432,
+ "learning_rate": 3.7674570498582776e-06,
+ "loss": 0.457,
+ "step": 700
+ },
+ {
+ "epoch": 2.1910229645093944,
+ "grad_norm": 0.7935530543327332,
+ "learning_rate": 3.7637231841218015e-06,
+ "loss": 0.5001,
+ "step": 701
+ },
+ {
+ "epoch": 2.1941544885177455,
+ "grad_norm": 0.7700153589248657,
+ "learning_rate": 3.7599855280553125e-06,
+ "loss": 0.5091,
+ "step": 702
+ },
+ {
+ "epoch": 2.1972860125260962,
+ "grad_norm": 0.7991652488708496,
+ "learning_rate": 3.756244092869294e-06,
+ "loss": 0.4955,
+ "step": 703
+ },
+ {
+ "epoch": 2.200417536534447,
+ "grad_norm": 0.720051646232605,
+ "learning_rate": 3.752498889785567e-06,
+ "loss": 0.4902,
+ "step": 704
+ },
+ {
+ "epoch": 2.2035490605427976,
+ "grad_norm": 0.7312369346618652,
+ "learning_rate": 3.748749930037252e-06,
+ "loss": 0.4935,
+ "step": 705
+ },
+ {
+ "epoch": 2.2066805845511483,
+ "grad_norm": 0.8418563008308411,
+ "learning_rate": 3.744997224868739e-06,
+ "loss": 0.5186,
+ "step": 706
+ },
+ {
+ "epoch": 2.209812108559499,
+ "grad_norm": 0.8324081301689148,
+ "learning_rate": 3.741240785535649e-06,
+ "loss": 0.482,
+ "step": 707
+ },
+ {
+ "epoch": 2.2129436325678498,
+ "grad_norm": 0.8051855564117432,
+ "learning_rate": 3.737480623304805e-06,
+ "loss": 0.4663,
+ "step": 708
+ },
+ {
+ "epoch": 2.2160751565762005,
+ "grad_norm": 0.9464184641838074,
+ "learning_rate": 3.7337167494541948e-06,
+ "loss": 0.451,
+ "step": 709
+ },
+ {
+ "epoch": 2.219206680584551,
+ "grad_norm": 1.0227075815200806,
+ "learning_rate": 3.729949175272942e-06,
+ "loss": 0.4428,
+ "step": 710
+ },
+ {
+ "epoch": 2.222338204592902,
+ "grad_norm": 0.7930364012718201,
+ "learning_rate": 3.7261779120612633e-06,
+ "loss": 0.5132,
+ "step": 711
+ },
+ {
+ "epoch": 2.2254697286012526,
+ "grad_norm": 0.9033688306808472,
+ "learning_rate": 3.7224029711304444e-06,
+ "loss": 0.476,
+ "step": 712
+ },
+ {
+ "epoch": 2.2286012526096033,
+ "grad_norm": 0.8026887774467468,
+ "learning_rate": 3.7186243638028007e-06,
+ "loss": 0.4959,
+ "step": 713
+ },
+ {
+ "epoch": 2.231732776617954,
+ "grad_norm": 0.9391745328903198,
+ "learning_rate": 3.714842101411642e-06,
+ "loss": 0.4962,
+ "step": 714
+ },
+ {
+ "epoch": 2.2348643006263047,
+ "grad_norm": 0.7774361371994019,
+ "learning_rate": 3.711056195301245e-06,
+ "loss": 0.4748,
+ "step": 715
+ },
+ {
+ "epoch": 2.2379958246346554,
+ "grad_norm": 0.9278722405433655,
+ "learning_rate": 3.7072666568268115e-06,
+ "loss": 0.5074,
+ "step": 716
+ },
+ {
+ "epoch": 2.241127348643006,
+ "grad_norm": 0.771423876285553,
+ "learning_rate": 3.7034734973544406e-06,
+ "loss": 0.5072,
+ "step": 717
+ },
+ {
+ "epoch": 2.244258872651357,
+ "grad_norm": 0.8707448244094849,
+ "learning_rate": 3.6996767282610892e-06,
+ "loss": 0.4851,
+ "step": 718
+ },
+ {
+ "epoch": 2.2473903966597075,
+ "grad_norm": 0.7641019821166992,
+ "learning_rate": 3.695876360934543e-06,
+ "loss": 0.4941,
+ "step": 719
+ },
+ {
+ "epoch": 2.2505219206680582,
+ "grad_norm": 0.7647167444229126,
+ "learning_rate": 3.69207240677338e-06,
+ "loss": 0.5225,
+ "step": 720
+ },
+ {
+ "epoch": 2.2536534446764094,
+ "grad_norm": 0.9108865261077881,
+ "learning_rate": 3.6882648771869345e-06,
+ "loss": 0.454,
+ "step": 721
+ },
+ {
+ "epoch": 2.25678496868476,
+ "grad_norm": 0.86728835105896,
+ "learning_rate": 3.6844537835952666e-06,
+ "loss": 0.4461,
+ "step": 722
+ },
+ {
+ "epoch": 2.259916492693111,
+ "grad_norm": 1.1055282354354858,
+ "learning_rate": 3.6806391374291238e-06,
+ "loss": 0.4618,
+ "step": 723
+ },
+ {
+ "epoch": 2.2630480167014615,
+ "grad_norm": 0.7591858506202698,
+ "learning_rate": 3.6768209501299116e-06,
+ "loss": 0.4901,
+ "step": 724
+ },
+ {
+ "epoch": 2.266179540709812,
+ "grad_norm": 0.7966359257698059,
+ "learning_rate": 3.6729992331496554e-06,
+ "loss": 0.5171,
+ "step": 725
+ },
+ {
+ "epoch": 2.269311064718163,
+ "grad_norm": 0.983161211013794,
+ "learning_rate": 3.6691739979509672e-06,
+ "loss": 0.4949,
+ "step": 726
+ },
+ {
+ "epoch": 2.2724425887265136,
+ "grad_norm": 0.9200037121772766,
+ "learning_rate": 3.6653452560070106e-06,
+ "loss": 0.5234,
+ "step": 727
+ },
+ {
+ "epoch": 2.2755741127348643,
+ "grad_norm": 1.0288461446762085,
+ "learning_rate": 3.6615130188014685e-06,
+ "loss": 0.4713,
+ "step": 728
+ },
+ {
+ "epoch": 2.278705636743215,
+ "grad_norm": 0.7325463891029358,
+ "learning_rate": 3.6576772978285065e-06,
+ "loss": 0.527,
+ "step": 729
+ },
+ {
+ "epoch": 2.2818371607515657,
+ "grad_norm": 1.0045446157455444,
+ "learning_rate": 3.6538381045927395e-06,
+ "loss": 0.5139,
+ "step": 730
+ },
+ {
+ "epoch": 2.2849686847599164,
+ "grad_norm": 0.7391849756240845,
+ "learning_rate": 3.6499954506091963e-06,
+ "loss": 0.4829,
+ "step": 731
+ },
+ {
+ "epoch": 2.288100208768267,
+ "grad_norm": 0.7808229923248291,
+ "learning_rate": 3.646149347403286e-06,
+ "loss": 0.4831,
+ "step": 732
+ },
+ {
+ "epoch": 2.291231732776618,
+ "grad_norm": 0.7056961059570312,
+ "learning_rate": 3.6422998065107628e-06,
+ "loss": 0.5066,
+ "step": 733
+ },
+ {
+ "epoch": 2.2943632567849686,
+ "grad_norm": 0.7498443126678467,
+ "learning_rate": 3.6384468394776935e-06,
+ "loss": 0.4724,
+ "step": 734
+ },
+ {
+ "epoch": 2.2974947807933193,
+ "grad_norm": 0.8511576056480408,
+ "learning_rate": 3.634590457860418e-06,
+ "loss": 0.5286,
+ "step": 735
+ },
+ {
+ "epoch": 2.30062630480167,
+ "grad_norm": 0.873635470867157,
+ "learning_rate": 3.63073067322552e-06,
+ "loss": 0.4751,
+ "step": 736
+ },
+ {
+ "epoch": 2.3037578288100207,
+ "grad_norm": 0.7427377104759216,
+ "learning_rate": 3.626867497149788e-06,
+ "loss": 0.475,
+ "step": 737
+ },
+ {
+ "epoch": 2.306889352818372,
+ "grad_norm": 1.0591017007827759,
+ "learning_rate": 3.623000941220186e-06,
+ "loss": 0.4591,
+ "step": 738
+ },
+ {
+ "epoch": 2.3100208768267225,
+ "grad_norm": 0.8767879009246826,
+ "learning_rate": 3.6191310170338114e-06,
+ "loss": 0.4673,
+ "step": 739
+ },
+ {
+ "epoch": 2.3131524008350732,
+ "grad_norm": 0.9156234860420227,
+ "learning_rate": 3.615257736197866e-06,
+ "loss": 0.4622,
+ "step": 740
+ },
+ {
+ "epoch": 2.316283924843424,
+ "grad_norm": 0.6743756532669067,
+ "learning_rate": 3.611381110329619e-06,
+ "loss": 0.4723,
+ "step": 741
+ },
+ {
+ "epoch": 2.3194154488517746,
+ "grad_norm": 0.8655558228492737,
+ "learning_rate": 3.6075011510563732e-06,
+ "loss": 0.471,
+ "step": 742
+ },
+ {
+ "epoch": 2.3225469728601253,
+ "grad_norm": 0.7652033567428589,
+ "learning_rate": 3.603617870015429e-06,
+ "loss": 0.5155,
+ "step": 743
+ },
+ {
+ "epoch": 2.325678496868476,
+ "grad_norm": 0.7970699667930603,
+ "learning_rate": 3.599731278854049e-06,
+ "loss": 0.4507,
+ "step": 744
+ },
+ {
+ "epoch": 2.3288100208768268,
+ "grad_norm": 0.7538278698921204,
+ "learning_rate": 3.5958413892294253e-06,
+ "loss": 0.5093,
+ "step": 745
+ },
+ {
+ "epoch": 2.3319415448851775,
+ "grad_norm": 0.735996663570404,
+ "learning_rate": 3.5919482128086414e-06,
+ "loss": 0.5008,
+ "step": 746
+ },
+ {
+ "epoch": 2.335073068893528,
+ "grad_norm": 0.7643904685974121,
+ "learning_rate": 3.588051761268642e-06,
+ "loss": 0.5072,
+ "step": 747
+ },
+ {
+ "epoch": 2.338204592901879,
+ "grad_norm": 0.7646260857582092,
+ "learning_rate": 3.584152046296191e-06,
+ "loss": 0.4578,
+ "step": 748
+ },
+ {
+ "epoch": 2.3413361169102296,
+ "grad_norm": 0.7873825430870056,
+ "learning_rate": 3.5802490795878446e-06,
+ "loss": 0.5249,
+ "step": 749
+ },
+ {
+ "epoch": 2.3444676409185803,
+ "grad_norm": 1.095333218574524,
+ "learning_rate": 3.5763428728499095e-06,
+ "loss": 0.4913,
+ "step": 750
+ },
+ {
+ "epoch": 2.347599164926931,
+ "grad_norm": 1.3425395488739014,
+ "learning_rate": 3.5724334377984107e-06,
+ "loss": 0.5317,
+ "step": 751
+ },
+ {
+ "epoch": 2.3507306889352817,
+ "grad_norm": 0.7151113748550415,
+ "learning_rate": 3.568520786159055e-06,
+ "loss": 0.5135,
+ "step": 752
+ },
+ {
+ "epoch": 2.3538622129436324,
+ "grad_norm": 0.8072878122329712,
+ "learning_rate": 3.5646049296672004e-06,
+ "loss": 0.4863,
+ "step": 753
+ },
+ {
+ "epoch": 2.356993736951983,
+ "grad_norm": 0.8040189743041992,
+ "learning_rate": 3.5606858800678123e-06,
+ "loss": 0.4668,
+ "step": 754
+ },
+ {
+ "epoch": 2.3601252609603343,
+ "grad_norm": 0.7749765515327454,
+ "learning_rate": 3.5567636491154385e-06,
+ "loss": 0.4681,
+ "step": 755
+ },
+ {
+ "epoch": 2.3632567849686845,
+ "grad_norm": 0.773013710975647,
+ "learning_rate": 3.5528382485741638e-06,
+ "loss": 0.5012,
+ "step": 756
+ },
+ {
+ "epoch": 2.3663883089770357,
+ "grad_norm": 0.7017714381217957,
+ "learning_rate": 3.5489096902175835e-06,
+ "loss": 0.5019,
+ "step": 757
+ },
+ {
+ "epoch": 2.3695198329853864,
+ "grad_norm": 1.132458209991455,
+ "learning_rate": 3.5449779858287625e-06,
+ "loss": 0.5131,
+ "step": 758
+ },
+ {
+ "epoch": 2.372651356993737,
+ "grad_norm": 0.7624574899673462,
+ "learning_rate": 3.541043147200202e-06,
+ "loss": 0.4856,
+ "step": 759
+ },
+ {
+ "epoch": 2.375782881002088,
+ "grad_norm": 0.9078478217124939,
+ "learning_rate": 3.5371051861338036e-06,
+ "loss": 0.4337,
+ "step": 760
+ },
+ {
+ "epoch": 2.3789144050104385,
+ "grad_norm": 0.8608354330062866,
+ "learning_rate": 3.5331641144408344e-06,
+ "loss": 0.5053,
+ "step": 761
+ },
+ {
+ "epoch": 2.382045929018789,
+ "grad_norm": 0.775047779083252,
+ "learning_rate": 3.529219943941892e-06,
+ "loss": 0.4779,
+ "step": 762
+ },
+ {
+ "epoch": 2.38517745302714,
+ "grad_norm": 0.7775866389274597,
+ "learning_rate": 3.525272686466866e-06,
+ "loss": 0.4979,
+ "step": 763
+ },
+ {
+ "epoch": 2.3883089770354906,
+ "grad_norm": 0.9386464357376099,
+ "learning_rate": 3.521322353854908e-06,
+ "loss": 0.5222,
+ "step": 764
+ },
+ {
+ "epoch": 2.3914405010438413,
+ "grad_norm": 0.874109148979187,
+ "learning_rate": 3.517368957954391e-06,
+ "loss": 0.4681,
+ "step": 765
+ },
+ {
+ "epoch": 2.394572025052192,
+ "grad_norm": 0.824588418006897,
+ "learning_rate": 3.5134125106228766e-06,
+ "loss": 0.4955,
+ "step": 766
+ },
+ {
+ "epoch": 2.3977035490605427,
+ "grad_norm": 0.8790764808654785,
+ "learning_rate": 3.5094530237270774e-06,
+ "loss": 0.4722,
+ "step": 767
+ },
+ {
+ "epoch": 2.4008350730688934,
+ "grad_norm": 1.1399786472320557,
+ "learning_rate": 3.5054905091428253e-06,
+ "loss": 0.4771,
+ "step": 768
+ },
+ {
+ "epoch": 2.403966597077244,
+ "grad_norm": 1.2586532831192017,
+ "learning_rate": 3.50152497875503e-06,
+ "loss": 0.4849,
+ "step": 769
+ },
+ {
+ "epoch": 2.407098121085595,
+ "grad_norm": 0.7706464529037476,
+ "learning_rate": 3.4975564444576487e-06,
+ "loss": 0.477,
+ "step": 770
+ },
+ {
+ "epoch": 2.4102296450939455,
+ "grad_norm": 0.7695909142494202,
+ "learning_rate": 3.4935849181536484e-06,
+ "loss": 0.4695,
+ "step": 771
+ },
+ {
+ "epoch": 2.4133611691022967,
+ "grad_norm": 0.7744433283805847,
+ "learning_rate": 3.489610411754969e-06,
+ "loss": 0.499,
+ "step": 772
+ },
+ {
+ "epoch": 2.416492693110647,
+ "grad_norm": 0.9265744686126709,
+ "learning_rate": 3.48563293718249e-06,
+ "loss": 0.481,
+ "step": 773
+ },
+ {
+ "epoch": 2.419624217118998,
+ "grad_norm": 1.0680506229400635,
+ "learning_rate": 3.481652506365992e-06,
+ "loss": 0.4898,
+ "step": 774
+ },
+ {
+ "epoch": 2.422755741127349,
+ "grad_norm": 0.721493661403656,
+ "learning_rate": 3.477669131244122e-06,
+ "loss": 0.4813,
+ "step": 775
+ },
+ {
+ "epoch": 2.4258872651356995,
+ "grad_norm": 0.7993559837341309,
+ "learning_rate": 3.4736828237643616e-06,
+ "loss": 0.5179,
+ "step": 776
+ },
+ {
+ "epoch": 2.4290187891440502,
+ "grad_norm": 0.8148090839385986,
+ "learning_rate": 3.4696935958829837e-06,
+ "loss": 0.4753,
+ "step": 777
+ },
+ {
+ "epoch": 2.432150313152401,
+ "grad_norm": 0.8006406426429749,
+ "learning_rate": 3.465701459565022e-06,
+ "loss": 0.501,
+ "step": 778
+ },
+ {
+ "epoch": 2.4352818371607516,
+ "grad_norm": 0.9307970404624939,
+ "learning_rate": 3.4617064267842327e-06,
+ "loss": 0.487,
+ "step": 779
+ },
+ {
+ "epoch": 2.4384133611691023,
+ "grad_norm": 0.7192814946174622,
+ "learning_rate": 3.45770850952306e-06,
+ "loss": 0.4769,
+ "step": 780
+ },
+ {
+ "epoch": 2.441544885177453,
+ "grad_norm": 0.7386271953582764,
+ "learning_rate": 3.4537077197726023e-06,
+ "loss": 0.4726,
+ "step": 781
+ },
+ {
+ "epoch": 2.4446764091858038,
+ "grad_norm": 0.8006314039230347,
+ "learning_rate": 3.449704069532567e-06,
+ "loss": 0.494,
+ "step": 782
+ },
+ {
+ "epoch": 2.4478079331941545,
+ "grad_norm": 0.7466752529144287,
+ "learning_rate": 3.4456975708112477e-06,
+ "loss": 0.4778,
+ "step": 783
+ },
+ {
+ "epoch": 2.450939457202505,
+ "grad_norm": 0.8348856568336487,
+ "learning_rate": 3.4416882356254777e-06,
+ "loss": 0.4766,
+ "step": 784
+ },
+ {
+ "epoch": 2.454070981210856,
+ "grad_norm": 0.754851758480072,
+ "learning_rate": 3.4376760760005994e-06,
+ "loss": 0.4673,
+ "step": 785
+ },
+ {
+ "epoch": 2.4572025052192066,
+ "grad_norm": 0.7854018807411194,
+ "learning_rate": 3.433661103970427e-06,
+ "loss": 0.4954,
+ "step": 786
+ },
+ {
+ "epoch": 2.4603340292275573,
+ "grad_norm": 0.7238256931304932,
+ "learning_rate": 3.4296433315772084e-06,
+ "loss": 0.496,
+ "step": 787
+ },
+ {
+ "epoch": 2.463465553235908,
+ "grad_norm": 0.7007659673690796,
+ "learning_rate": 3.4256227708715915e-06,
+ "loss": 0.4793,
+ "step": 788
+ },
+ {
+ "epoch": 2.4665970772442587,
+ "grad_norm": 0.7234371900558472,
+ "learning_rate": 3.421599433912588e-06,
+ "loss": 0.4935,
+ "step": 789
+ },
+ {
+ "epoch": 2.4697286012526094,
+ "grad_norm": 0.7537544965744019,
+ "learning_rate": 3.4175733327675355e-06,
+ "loss": 0.5194,
+ "step": 790
+ },
+ {
+ "epoch": 2.4728601252609606,
+ "grad_norm": 0.7608047127723694,
+ "learning_rate": 3.4135444795120633e-06,
+ "loss": 0.4793,
+ "step": 791
+ },
+ {
+ "epoch": 2.4759916492693113,
+ "grad_norm": 0.7847898006439209,
+ "learning_rate": 3.4095128862300542e-06,
+ "loss": 0.4877,
+ "step": 792
+ },
+ {
+ "epoch": 2.479123173277662,
+ "grad_norm": 0.8002011179924011,
+ "learning_rate": 3.405478565013609e-06,
+ "loss": 0.4927,
+ "step": 793
+ },
+ {
+ "epoch": 2.4822546972860127,
+ "grad_norm": 0.8200219869613647,
+ "learning_rate": 3.401441527963013e-06,
+ "loss": 0.4997,
+ "step": 794
+ },
+ {
+ "epoch": 2.4853862212943634,
+ "grad_norm": 0.7220162749290466,
+ "learning_rate": 3.3974017871866938e-06,
+ "loss": 0.4668,
+ "step": 795
+ },
+ {
+ "epoch": 2.488517745302714,
+ "grad_norm": 0.8022251129150391,
+ "learning_rate": 3.3933593548011912e-06,
+ "loss": 0.5179,
+ "step": 796
+ },
+ {
+ "epoch": 2.491649269311065,
+ "grad_norm": 0.7914465069770813,
+ "learning_rate": 3.389314242931115e-06,
+ "loss": 0.4943,
+ "step": 797
+ },
+ {
+ "epoch": 2.4947807933194155,
+ "grad_norm": 1.1399403810501099,
+ "learning_rate": 3.385266463709116e-06,
+ "loss": 0.4896,
+ "step": 798
+ },
+ {
+ "epoch": 2.497912317327766,
+ "grad_norm": 0.8098909854888916,
+ "learning_rate": 3.38121602927584e-06,
+ "loss": 0.4904,
+ "step": 799
+ },
+ {
+ "epoch": 2.501043841336117,
+ "grad_norm": 0.7434052228927612,
+ "learning_rate": 3.377162951779902e-06,
+ "loss": 0.4864,
+ "step": 800
+ },
+ {
+ "epoch": 2.5041753653444676,
+ "grad_norm": 0.7397809624671936,
+ "learning_rate": 3.3731072433778407e-06,
+ "loss": 0.486,
+ "step": 801
+ },
+ {
+ "epoch": 2.5073068893528183,
+ "grad_norm": 0.99027019739151,
+ "learning_rate": 3.3690489162340867e-06,
+ "loss": 0.5011,
+ "step": 802
+ },
+ {
+ "epoch": 2.510438413361169,
+ "grad_norm": 0.8443610668182373,
+ "learning_rate": 3.3649879825209246e-06,
+ "loss": 0.455,
+ "step": 803
+ },
+ {
+ "epoch": 2.5135699373695197,
+ "grad_norm": 0.755649983882904,
+ "learning_rate": 3.3609244544184604e-06,
+ "loss": 0.4563,
+ "step": 804
+ },
+ {
+ "epoch": 2.5167014613778704,
+ "grad_norm": 0.728018045425415,
+ "learning_rate": 3.3568583441145765e-06,
+ "loss": 0.471,
+ "step": 805
+ },
+ {
+ "epoch": 2.519832985386221,
+ "grad_norm": 0.7777130603790283,
+ "learning_rate": 3.352789663804904e-06,
+ "loss": 0.4667,
+ "step": 806
+ },
+ {
+ "epoch": 2.522964509394572,
+ "grad_norm": 0.7545619606971741,
+ "learning_rate": 3.3487184256927785e-06,
+ "loss": 0.4915,
+ "step": 807
+ },
+ {
+ "epoch": 2.526096033402923,
+ "grad_norm": 0.8374579548835754,
+ "learning_rate": 3.3446446419892127e-06,
+ "loss": 0.485,
+ "step": 808
+ },
+ {
+ "epoch": 2.5292275574112733,
+ "grad_norm": 0.7354666590690613,
+ "learning_rate": 3.340568324912849e-06,
+ "loss": 0.5254,
+ "step": 809
+ },
+ {
+ "epoch": 2.5323590814196244,
+ "grad_norm": 0.7581545114517212,
+ "learning_rate": 3.3364894866899324e-06,
+ "loss": 0.4483,
+ "step": 810
+ },
+ {
+ "epoch": 2.535490605427975,
+ "grad_norm": 0.8077559471130371,
+ "learning_rate": 3.3324081395542662e-06,
+ "loss": 0.5022,
+ "step": 811
+ },
+ {
+ "epoch": 2.538622129436326,
+ "grad_norm": 0.8827865719795227,
+ "learning_rate": 3.3283242957471806e-06,
+ "loss": 0.4909,
+ "step": 812
+ },
+ {
+ "epoch": 2.5417536534446765,
+ "grad_norm": 0.9139482378959656,
+ "learning_rate": 3.3242379675174953e-06,
+ "loss": 0.5205,
+ "step": 813
+ },
+ {
+ "epoch": 2.5448851774530272,
+ "grad_norm": 0.7616812586784363,
+ "learning_rate": 3.3201491671214797e-06,
+ "loss": 0.4744,
+ "step": 814
+ },
+ {
+ "epoch": 2.548016701461378,
+ "grad_norm": 0.987173318862915,
+ "learning_rate": 3.3160579068228183e-06,
+ "loss": 0.4876,
+ "step": 815
+ },
+ {
+ "epoch": 2.5511482254697286,
+ "grad_norm": 1.259137749671936,
+ "learning_rate": 3.311964198892574e-06,
+ "loss": 0.454,
+ "step": 816
+ },
+ {
+ "epoch": 2.5542797494780793,
+ "grad_norm": 0.7866336703300476,
+ "learning_rate": 3.3078680556091513e-06,
+ "loss": 0.5107,
+ "step": 817
+ },
+ {
+ "epoch": 2.55741127348643,
+ "grad_norm": 0.9311352372169495,
+ "learning_rate": 3.303769489258258e-06,
+ "loss": 0.4843,
+ "step": 818
+ },
+ {
+ "epoch": 2.5605427974947808,
+ "grad_norm": 0.8556346893310547,
+ "learning_rate": 3.299668512132872e-06,
+ "loss": 0.5017,
+ "step": 819
+ },
+ {
+ "epoch": 2.5636743215031315,
+ "grad_norm": 2.810598373413086,
+ "learning_rate": 3.2955651365331988e-06,
+ "loss": 0.5223,
+ "step": 820
+ },
+ {
+ "epoch": 2.566805845511482,
+ "grad_norm": 1.0120766162872314,
+ "learning_rate": 3.29145937476664e-06,
+ "loss": 0.4959,
+ "step": 821
+ },
+ {
+ "epoch": 2.569937369519833,
+ "grad_norm": 0.751412034034729,
+ "learning_rate": 3.287351239147752e-06,
+ "loss": 0.4941,
+ "step": 822
+ },
+ {
+ "epoch": 2.5730688935281836,
+ "grad_norm": 1.9308148622512817,
+ "learning_rate": 3.2832407419982136e-06,
+ "loss": 0.4965,
+ "step": 823
+ },
+ {
+ "epoch": 2.5762004175365343,
+ "grad_norm": 0.9215649962425232,
+ "learning_rate": 3.279127895646786e-06,
+ "loss": 0.5071,
+ "step": 824
+ },
+ {
+ "epoch": 2.5793319415448854,
+ "grad_norm": 0.7599574327468872,
+ "learning_rate": 3.2750127124292754e-06,
+ "loss": 0.5191,
+ "step": 825
+ },
+ {
+ "epoch": 2.5824634655532357,
+ "grad_norm": 0.8234940767288208,
+ "learning_rate": 3.270895204688496e-06,
+ "loss": 0.4947,
+ "step": 826
+ },
+ {
+ "epoch": 2.585594989561587,
+ "grad_norm": 0.8401572704315186,
+ "learning_rate": 3.266775384774238e-06,
+ "loss": 0.4547,
+ "step": 827
+ },
+ {
+ "epoch": 2.588726513569937,
+ "grad_norm": 0.8927991986274719,
+ "learning_rate": 3.262653265043223e-06,
+ "loss": 0.4296,
+ "step": 828
+ },
+ {
+ "epoch": 2.5918580375782883,
+ "grad_norm": 0.8009241223335266,
+ "learning_rate": 3.2585288578590716e-06,
+ "loss": 0.4578,
+ "step": 829
+ },
+ {
+ "epoch": 2.594989561586639,
+ "grad_norm": 0.7982021570205688,
+ "learning_rate": 3.2544021755922663e-06,
+ "loss": 0.4961,
+ "step": 830
+ },
+ {
+ "epoch": 2.5981210855949897,
+ "grad_norm": 0.7096095681190491,
+ "learning_rate": 3.2502732306201112e-06,
+ "loss": 0.4975,
+ "step": 831
+ },
+ {
+ "epoch": 2.6012526096033404,
+ "grad_norm": 1.1092045307159424,
+ "learning_rate": 3.246142035326699e-06,
+ "loss": 0.4705,
+ "step": 832
+ },
+ {
+ "epoch": 2.604384133611691,
+ "grad_norm": 0.785799503326416,
+ "learning_rate": 3.24200860210287e-06,
+ "loss": 0.479,
+ "step": 833
+ },
+ {
+ "epoch": 2.607515657620042,
+ "grad_norm": 0.7315773367881775,
+ "learning_rate": 3.2378729433461804e-06,
+ "loss": 0.5036,
+ "step": 834
+ },
+ {
+ "epoch": 2.6106471816283925,
+ "grad_norm": 0.7840189337730408,
+ "learning_rate": 3.233735071460856e-06,
+ "loss": 0.4967,
+ "step": 835
+ },
+ {
+ "epoch": 2.613778705636743,
+ "grad_norm": 0.7186565399169922,
+ "learning_rate": 3.2295949988577655e-06,
+ "loss": 0.4889,
+ "step": 836
+ },
+ {
+ "epoch": 2.616910229645094,
+ "grad_norm": 0.766054093837738,
+ "learning_rate": 3.2254527379543747e-06,
+ "loss": 0.539,
+ "step": 837
+ },
+ {
+ "epoch": 2.6200417536534446,
+ "grad_norm": 0.7705381512641907,
+ "learning_rate": 3.2213083011747165e-06,
+ "loss": 0.4968,
+ "step": 838
+ },
+ {
+ "epoch": 2.6231732776617953,
+ "grad_norm": 1.3530604839324951,
+ "learning_rate": 3.217161700949346e-06,
+ "loss": 0.52,
+ "step": 839
+ },
+ {
+ "epoch": 2.626304801670146,
+ "grad_norm": 0.737389862537384,
+ "learning_rate": 3.2130129497153107e-06,
+ "loss": 0.4823,
+ "step": 840
+ },
+ {
+ "epoch": 2.6294363256784967,
+ "grad_norm": 0.9121193885803223,
+ "learning_rate": 3.2088620599161064e-06,
+ "loss": 0.4592,
+ "step": 841
+ },
+ {
+ "epoch": 2.632567849686848,
+ "grad_norm": 0.8869616389274597,
+ "learning_rate": 3.2047090440016464e-06,
+ "loss": 0.5001,
+ "step": 842
+ },
+ {
+ "epoch": 2.635699373695198,
+ "grad_norm": 0.8447219133377075,
+ "learning_rate": 3.200553914428219e-06,
+ "loss": 0.4969,
+ "step": 843
+ },
+ {
+ "epoch": 2.6388308977035493,
+ "grad_norm": 0.8877657055854797,
+ "learning_rate": 3.1963966836584524e-06,
+ "loss": 0.4718,
+ "step": 844
+ },
+ {
+ "epoch": 2.6419624217118995,
+ "grad_norm": 1.045272946357727,
+ "learning_rate": 3.192237364161277e-06,
+ "loss": 0.4864,
+ "step": 845
+ },
+ {
+ "epoch": 2.6450939457202507,
+ "grad_norm": 0.8485913276672363,
+ "learning_rate": 3.1880759684118876e-06,
+ "loss": 0.4688,
+ "step": 846
+ },
+ {
+ "epoch": 2.6482254697286014,
+ "grad_norm": 0.7328930497169495,
+ "learning_rate": 3.183912508891709e-06,
+ "loss": 0.4728,
+ "step": 847
+ },
+ {
+ "epoch": 2.651356993736952,
+ "grad_norm": 0.7377315759658813,
+ "learning_rate": 3.179746998088351e-06,
+ "loss": 0.4672,
+ "step": 848
+ },
+ {
+ "epoch": 2.654488517745303,
+ "grad_norm": 0.8017002940177917,
+ "learning_rate": 3.1755794484955817e-06,
+ "loss": 0.4884,
+ "step": 849
+ },
+ {
+ "epoch": 2.6576200417536535,
+ "grad_norm": 1.045470952987671,
+ "learning_rate": 3.171409872613278e-06,
+ "loss": 0.4789,
+ "step": 850
+ },
+ {
+ "epoch": 2.6607515657620042,
+ "grad_norm": 0.8823987245559692,
+ "learning_rate": 3.1672382829473997e-06,
+ "loss": 0.5117,
+ "step": 851
+ },
+ {
+ "epoch": 2.663883089770355,
+ "grad_norm": 0.7395204901695251,
+ "learning_rate": 3.163064692009944e-06,
+ "loss": 0.5476,
+ "step": 852
+ },
+ {
+ "epoch": 2.6670146137787056,
+ "grad_norm": 0.7778941988945007,
+ "learning_rate": 3.1588891123189103e-06,
+ "loss": 0.5092,
+ "step": 853
+ },
+ {
+ "epoch": 2.6701461377870563,
+ "grad_norm": 0.8072531819343567,
+ "learning_rate": 3.1547115563982643e-06,
+ "loss": 0.4961,
+ "step": 854
+ },
+ {
+ "epoch": 2.673277661795407,
+ "grad_norm": 0.9018139243125916,
+ "learning_rate": 3.1505320367778993e-06,
+ "loss": 0.4624,
+ "step": 855
+ },
+ {
+ "epoch": 2.6764091858037578,
+ "grad_norm": 0.8554450869560242,
+ "learning_rate": 3.1463505659935957e-06,
+ "loss": 0.4971,
+ "step": 856
+ },
+ {
+ "epoch": 2.6795407098121085,
+ "grad_norm": 0.7727259397506714,
+ "learning_rate": 3.14216715658699e-06,
+ "loss": 0.4544,
+ "step": 857
+ },
+ {
+ "epoch": 2.682672233820459,
+ "grad_norm": 0.9253409504890442,
+ "learning_rate": 3.137981821105529e-06,
+ "loss": 0.4893,
+ "step": 858
+ },
+ {
+ "epoch": 2.68580375782881,
+ "grad_norm": 0.8809456825256348,
+ "learning_rate": 3.1337945721024403e-06,
+ "loss": 0.5242,
+ "step": 859
+ },
+ {
+ "epoch": 2.6889352818371606,
+ "grad_norm": 0.981755256652832,
+ "learning_rate": 3.129605422136689e-06,
+ "loss": 0.4686,
+ "step": 860
+ },
+ {
+ "epoch": 2.6920668058455117,
+ "grad_norm": 1.1278467178344727,
+ "learning_rate": 3.1254143837729412e-06,
+ "loss": 0.4813,
+ "step": 861
+ },
+ {
+ "epoch": 2.695198329853862,
+ "grad_norm": 0.8529123663902283,
+ "learning_rate": 3.1212214695815285e-06,
+ "loss": 0.4723,
+ "step": 862
+ },
+ {
+ "epoch": 2.698329853862213,
+ "grad_norm": 0.7764189839363098,
+ "learning_rate": 3.1170266921384075e-06,
+ "loss": 0.4777,
+ "step": 863
+ },
+ {
+ "epoch": 2.701461377870564,
+ "grad_norm": 0.7364740967750549,
+ "learning_rate": 3.112830064025124e-06,
+ "loss": 0.4975,
+ "step": 864
+ },
+ {
+ "epoch": 2.7045929018789145,
+ "grad_norm": 0.7594549059867859,
+ "learning_rate": 3.108631597828774e-06,
+ "loss": 0.5083,
+ "step": 865
+ },
+ {
+ "epoch": 2.7077244258872653,
+ "grad_norm": 0.7337073683738708,
+ "learning_rate": 3.104431306141968e-06,
+ "loss": 0.4778,
+ "step": 866
+ },
+ {
+ "epoch": 2.710855949895616,
+ "grad_norm": 0.7709932327270508,
+ "learning_rate": 3.1002292015627894e-06,
+ "loss": 0.4754,
+ "step": 867
+ },
+ {
+ "epoch": 2.7139874739039667,
+ "grad_norm": 0.8001313209533691,
+ "learning_rate": 3.0960252966947605e-06,
+ "loss": 0.4489,
+ "step": 868
+ },
+ {
+ "epoch": 2.7171189979123174,
+ "grad_norm": 0.8280592560768127,
+ "learning_rate": 3.091819604146804e-06,
+ "loss": 0.4606,
+ "step": 869
+ },
+ {
+ "epoch": 2.720250521920668,
+ "grad_norm": 0.7463534474372864,
+ "learning_rate": 3.0876121365332024e-06,
+ "loss": 0.5168,
+ "step": 870
+ },
+ {
+ "epoch": 2.723382045929019,
+ "grad_norm": 0.9011222124099731,
+ "learning_rate": 3.0834029064735636e-06,
+ "loss": 0.5163,
+ "step": 871
+ },
+ {
+ "epoch": 2.7265135699373695,
+ "grad_norm": 0.7811456322669983,
+ "learning_rate": 3.0791919265927827e-06,
+ "loss": 0.5004,
+ "step": 872
+ },
+ {
+ "epoch": 2.72964509394572,
+ "grad_norm": 0.9251837134361267,
+ "learning_rate": 3.0749792095210003e-06,
+ "loss": 0.5081,
+ "step": 873
+ },
+ {
+ "epoch": 2.732776617954071,
+ "grad_norm": 0.8347085118293762,
+ "learning_rate": 3.0707647678935695e-06,
+ "loss": 0.4793,
+ "step": 874
+ },
+ {
+ "epoch": 2.7359081419624216,
+ "grad_norm": 0.9766442179679871,
+ "learning_rate": 3.0665486143510153e-06,
+ "loss": 0.493,
+ "step": 875
+ },
+ {
+ "epoch": 2.7390396659707723,
+ "grad_norm": 0.7692548036575317,
+ "learning_rate": 3.0623307615389975e-06,
+ "loss": 0.4874,
+ "step": 876
+ },
+ {
+ "epoch": 2.742171189979123,
+ "grad_norm": 0.7714599370956421,
+ "learning_rate": 3.0581112221082727e-06,
+ "loss": 0.4929,
+ "step": 877
+ },
+ {
+ "epoch": 2.745302713987474,
+ "grad_norm": 0.7797786593437195,
+ "learning_rate": 3.053890008714655e-06,
+ "loss": 0.4359,
+ "step": 878
+ },
+ {
+ "epoch": 2.7484342379958244,
+ "grad_norm": 5.118397235870361,
+ "learning_rate": 3.049667134018981e-06,
+ "loss": 0.4634,
+ "step": 879
+ },
+ {
+ "epoch": 2.7515657620041756,
+ "grad_norm": 0.7684539556503296,
+ "learning_rate": 3.04544261068707e-06,
+ "loss": 0.4688,
+ "step": 880
+ },
+ {
+ "epoch": 2.754697286012526,
+ "grad_norm": 0.8678610920906067,
+ "learning_rate": 3.0412164513896846e-06,
+ "loss": 0.5213,
+ "step": 881
+ },
+ {
+ "epoch": 2.757828810020877,
+ "grad_norm": 0.80293869972229,
+ "learning_rate": 3.0369886688024954e-06,
+ "loss": 0.4392,
+ "step": 882
+ },
+ {
+ "epoch": 2.7609603340292277,
+ "grad_norm": 0.7438644766807556,
+ "learning_rate": 3.0327592756060412e-06,
+ "loss": 0.528,
+ "step": 883
+ },
+ {
+ "epoch": 2.7640918580375784,
+ "grad_norm": 0.7701645493507385,
+ "learning_rate": 3.0285282844856917e-06,
+ "loss": 0.504,
+ "step": 884
+ },
+ {
+ "epoch": 2.767223382045929,
+ "grad_norm": 0.7113856673240662,
+ "learning_rate": 3.024295708131611e-06,
+ "loss": 0.4819,
+ "step": 885
+ },
+ {
+ "epoch": 2.77035490605428,
+ "grad_norm": 1.2697532176971436,
+ "learning_rate": 3.020061559238714e-06,
+ "loss": 0.5009,
+ "step": 886
+ },
+ {
+ "epoch": 2.7734864300626305,
+ "grad_norm": 1.0299439430236816,
+ "learning_rate": 3.015825850506636e-06,
+ "loss": 0.4707,
+ "step": 887
+ },
+ {
+ "epoch": 2.776617954070981,
+ "grad_norm": 0.9703660607337952,
+ "learning_rate": 3.011588594639688e-06,
+ "loss": 0.4102,
+ "step": 888
+ },
+ {
+ "epoch": 2.779749478079332,
+ "grad_norm": 0.7357314825057983,
+ "learning_rate": 3.0073498043468247e-06,
+ "loss": 0.4649,
+ "step": 889
+ },
+ {
+ "epoch": 2.7828810020876826,
+ "grad_norm": 0.7815471291542053,
+ "learning_rate": 3.0031094923415993e-06,
+ "loss": 0.469,
+ "step": 890
+ },
+ {
+ "epoch": 2.7860125260960333,
+ "grad_norm": 0.7856019139289856,
+ "learning_rate": 2.9988676713421318e-06,
+ "loss": 0.4241,
+ "step": 891
+ },
+ {
+ "epoch": 2.789144050104384,
+ "grad_norm": 0.7668167352676392,
+ "learning_rate": 2.994624354071066e-06,
+ "loss": 0.5309,
+ "step": 892
+ },
+ {
+ "epoch": 2.7922755741127347,
+ "grad_norm": 0.7485945820808411,
+ "learning_rate": 2.990379553255535e-06,
+ "loss": 0.5173,
+ "step": 893
+ },
+ {
+ "epoch": 2.7954070981210855,
+ "grad_norm": 0.8065824508666992,
+ "learning_rate": 2.986133281627123e-06,
+ "loss": 0.4995,
+ "step": 894
+ },
+ {
+ "epoch": 2.798538622129436,
+ "grad_norm": 0.7156995534896851,
+ "learning_rate": 2.9818855519218217e-06,
+ "loss": 0.4642,
+ "step": 895
+ },
+ {
+ "epoch": 2.801670146137787,
+ "grad_norm": 0.9115403890609741,
+ "learning_rate": 2.97763637688e-06,
+ "loss": 0.4799,
+ "step": 896
+ },
+ {
+ "epoch": 2.804801670146138,
+ "grad_norm": 0.7466689944267273,
+ "learning_rate": 2.9733857692463584e-06,
+ "loss": 0.4942,
+ "step": 897
+ },
+ {
+ "epoch": 2.8079331941544883,
+ "grad_norm": 0.7484914064407349,
+ "learning_rate": 2.9691337417698974e-06,
+ "loss": 0.4618,
+ "step": 898
+ },
+ {
+ "epoch": 2.8110647181628394,
+ "grad_norm": 0.816704511642456,
+ "learning_rate": 2.9648803072038736e-06,
+ "loss": 0.4748,
+ "step": 899
+ },
+ {
+ "epoch": 2.81419624217119,
+ "grad_norm": 0.7627584934234619,
+ "learning_rate": 2.9606254783057666e-06,
+ "loss": 0.4667,
+ "step": 900
+ },
+ {
+ "epoch": 2.817327766179541,
+ "grad_norm": 0.7341011166572571,
+ "learning_rate": 2.9563692678372342e-06,
+ "loss": 0.4802,
+ "step": 901
+ },
+ {
+ "epoch": 2.8204592901878915,
+ "grad_norm": 1.2541382312774658,
+ "learning_rate": 2.952111688564082e-06,
+ "loss": 0.5231,
+ "step": 902
+ },
+ {
+ "epoch": 2.8235908141962422,
+ "grad_norm": 0.7172819375991821,
+ "learning_rate": 2.9478527532562184e-06,
+ "loss": 0.4488,
+ "step": 903
+ },
+ {
+ "epoch": 2.826722338204593,
+ "grad_norm": 0.774529218673706,
+ "learning_rate": 2.943592474687621e-06,
+ "loss": 0.4964,
+ "step": 904
+ },
+ {
+ "epoch": 2.8298538622129437,
+ "grad_norm": 0.7315672636032104,
+ "learning_rate": 2.939330865636294e-06,
+ "loss": 0.4817,
+ "step": 905
+ },
+ {
+ "epoch": 2.8329853862212944,
+ "grad_norm": 0.7698234915733337,
+ "learning_rate": 2.9350679388842347e-06,
+ "loss": 0.5075,
+ "step": 906
+ },
+ {
+ "epoch": 2.836116910229645,
+ "grad_norm": 0.7717766761779785,
+ "learning_rate": 2.93080370721739e-06,
+ "loss": 0.4789,
+ "step": 907
+ },
+ {
+ "epoch": 2.8392484342379958,
+ "grad_norm": 0.7383570075035095,
+ "learning_rate": 2.926538183425622e-06,
+ "loss": 0.4992,
+ "step": 908
+ },
+ {
+ "epoch": 2.8423799582463465,
+ "grad_norm": 0.7858864068984985,
+ "learning_rate": 2.92227138030267e-06,
+ "loss": 0.4993,
+ "step": 909
+ },
+ {
+ "epoch": 2.845511482254697,
+ "grad_norm": 0.8220369219779968,
+ "learning_rate": 2.9180033106461076e-06,
+ "loss": 0.4929,
+ "step": 910
+ },
+ {
+ "epoch": 2.848643006263048,
+ "grad_norm": 0.7507152557373047,
+ "learning_rate": 2.9137339872573086e-06,
+ "loss": 0.4394,
+ "step": 911
+ },
+ {
+ "epoch": 2.8517745302713986,
+ "grad_norm": 0.7935269474983215,
+ "learning_rate": 2.9094634229414063e-06,
+ "loss": 0.4656,
+ "step": 912
+ },
+ {
+ "epoch": 2.8549060542797493,
+ "grad_norm": 0.9187721610069275,
+ "learning_rate": 2.9051916305072576e-06,
+ "loss": 0.4918,
+ "step": 913
+ },
+ {
+ "epoch": 2.8580375782881005,
+ "grad_norm": 0.8699706792831421,
+ "learning_rate": 2.9009186227674e-06,
+ "loss": 0.5106,
+ "step": 914
+ },
+ {
+ "epoch": 2.8611691022964507,
+ "grad_norm": 0.7175673246383667,
+ "learning_rate": 2.896644412538021e-06,
+ "loss": 0.5105,
+ "step": 915
+ },
+ {
+ "epoch": 2.864300626304802,
+ "grad_norm": 0.8563990592956543,
+ "learning_rate": 2.892369012638909e-06,
+ "loss": 0.4993,
+ "step": 916
+ },
+ {
+ "epoch": 2.867432150313152,
+ "grad_norm": 0.7891882658004761,
+ "learning_rate": 2.8880924358934246e-06,
+ "loss": 0.4983,
+ "step": 917
+ },
+ {
+ "epoch": 2.8705636743215033,
+ "grad_norm": 0.9247110486030579,
+ "learning_rate": 2.8838146951284575e-06,
+ "loss": 0.4789,
+ "step": 918
+ },
+ {
+ "epoch": 2.873695198329854,
+ "grad_norm": 0.7523055672645569,
+ "learning_rate": 2.879535803174387e-06,
+ "loss": 0.4982,
+ "step": 919
+ },
+ {
+ "epoch": 2.8768267223382047,
+ "grad_norm": 0.8096909523010254,
+ "learning_rate": 2.8752557728650467e-06,
+ "loss": 0.4958,
+ "step": 920
+ },
+ {
+ "epoch": 2.8799582463465554,
+ "grad_norm": 2.3476874828338623,
+ "learning_rate": 2.870974617037684e-06,
+ "loss": 0.491,
+ "step": 921
+ },
+ {
+ "epoch": 2.883089770354906,
+ "grad_norm": 0.8388578295707703,
+ "learning_rate": 2.8666923485329224e-06,
+ "loss": 0.5275,
+ "step": 922
+ },
+ {
+ "epoch": 2.886221294363257,
+ "grad_norm": 0.8162729144096375,
+ "learning_rate": 2.8624089801947234e-06,
+ "loss": 0.4776,
+ "step": 923
+ },
+ {
+ "epoch": 2.8893528183716075,
+ "grad_norm": 0.7306103110313416,
+ "learning_rate": 2.858124524870345e-06,
+ "loss": 0.4814,
+ "step": 924
+ },
+ {
+ "epoch": 2.892484342379958,
+ "grad_norm": 0.8736817836761475,
+ "learning_rate": 2.853838995410307e-06,
+ "loss": 0.5097,
+ "step": 925
+ },
+ {
+ "epoch": 2.895615866388309,
+ "grad_norm": 0.7771823406219482,
+ "learning_rate": 2.8495524046683525e-06,
+ "loss": 0.4806,
+ "step": 926
+ },
+ {
+ "epoch": 2.8987473903966596,
+ "grad_norm": 0.9421334862709045,
+ "learning_rate": 2.845264765501404e-06,
+ "loss": 0.5055,
+ "step": 927
+ },
+ {
+ "epoch": 2.9018789144050103,
+ "grad_norm": 0.8403921127319336,
+ "learning_rate": 2.8409760907695314e-06,
+ "loss": 0.4775,
+ "step": 928
+ },
+ {
+ "epoch": 2.905010438413361,
+ "grad_norm": 0.8095362186431885,
+ "learning_rate": 2.836686393335909e-06,
+ "loss": 0.4532,
+ "step": 929
+ },
+ {
+ "epoch": 2.9081419624217117,
+ "grad_norm": 0.7340645790100098,
+ "learning_rate": 2.8323956860667813e-06,
+ "loss": 0.4835,
+ "step": 930
+ },
+ {
+ "epoch": 2.911273486430063,
+ "grad_norm": 0.6970911026000977,
+ "learning_rate": 2.828103981831417e-06,
+ "loss": 0.4999,
+ "step": 931
+ },
+ {
+ "epoch": 2.914405010438413,
+ "grad_norm": 0.8136418461799622,
+ "learning_rate": 2.8238112935020794e-06,
+ "loss": 0.5038,
+ "step": 932
+ },
+ {
+ "epoch": 2.9175365344467643,
+ "grad_norm": 0.9045608043670654,
+ "learning_rate": 2.8195176339539816e-06,
+ "loss": 0.486,
+ "step": 933
+ },
+ {
+ "epoch": 2.9206680584551146,
+ "grad_norm": 1.14940345287323,
+ "learning_rate": 2.815223016065249e-06,
+ "loss": 0.5079,
+ "step": 934
+ },
+ {
+ "epoch": 2.9237995824634657,
+ "grad_norm": 0.7411190867424011,
+ "learning_rate": 2.8109274527168826e-06,
+ "loss": 0.4564,
+ "step": 935
+ },
+ {
+ "epoch": 2.9269311064718164,
+ "grad_norm": 0.8903455138206482,
+ "learning_rate": 2.806630956792719e-06,
+ "loss": 0.451,
+ "step": 936
+ },
+ {
+ "epoch": 2.930062630480167,
+ "grad_norm": 0.7865445017814636,
+ "learning_rate": 2.8023335411793904e-06,
+ "loss": 0.4658,
+ "step": 937
+ },
+ {
+ "epoch": 2.933194154488518,
+ "grad_norm": 0.8185790777206421,
+ "learning_rate": 2.798035218766292e-06,
+ "loss": 0.4776,
+ "step": 938
+ },
+ {
+ "epoch": 2.9363256784968685,
+ "grad_norm": 0.7516276836395264,
+ "learning_rate": 2.793736002445531e-06,
+ "loss": 0.4447,
+ "step": 939
+ },
+ {
+ "epoch": 2.9394572025052192,
+ "grad_norm": 0.738080620765686,
+ "learning_rate": 2.789435905111903e-06,
+ "loss": 0.4832,
+ "step": 940
+ },
+ {
+ "epoch": 2.94258872651357,
+ "grad_norm": 0.7971507906913757,
+ "learning_rate": 2.785134939662843e-06,
+ "loss": 0.4835,
+ "step": 941
+ },
+ {
+ "epoch": 2.9457202505219207,
+ "grad_norm": 0.7529093623161316,
+ "learning_rate": 2.78083311899839e-06,
+ "loss": 0.4759,
+ "step": 942
+ },
+ {
+ "epoch": 2.9488517745302714,
+ "grad_norm": 0.8222358226776123,
+ "learning_rate": 2.7765304560211482e-06,
+ "loss": 0.4365,
+ "step": 943
+ },
+ {
+ "epoch": 2.951983298538622,
+ "grad_norm": 0.729945182800293,
+ "learning_rate": 2.7722269636362462e-06,
+ "loss": 0.5026,
+ "step": 944
+ },
+ {
+ "epoch": 2.9551148225469728,
+ "grad_norm": 0.7287900447845459,
+ "learning_rate": 2.767922654751306e-06,
+ "loss": 0.4916,
+ "step": 945
+ },
+ {
+ "epoch": 2.9582463465553235,
+ "grad_norm": 0.869637131690979,
+ "learning_rate": 2.763617542276391e-06,
+ "loss": 0.5018,
+ "step": 946
+ },
+ {
+ "epoch": 2.961377870563674,
+ "grad_norm": 1.004909634590149,
+ "learning_rate": 2.7593116391239806e-06,
+ "loss": 0.5152,
+ "step": 947
+ },
+ {
+ "epoch": 2.964509394572025,
+ "grad_norm": 0.8263046145439148,
+ "learning_rate": 2.7550049582089235e-06,
+ "loss": 0.5249,
+ "step": 948
+ },
+ {
+ "epoch": 2.9676409185803756,
+ "grad_norm": 0.7963895797729492,
+ "learning_rate": 2.750697512448401e-06,
+ "loss": 0.5084,
+ "step": 949
+ },
+ {
+ "epoch": 2.9707724425887267,
+ "grad_norm": 0.7211249470710754,
+ "learning_rate": 2.7463893147618893e-06,
+ "loss": 0.4691,
+ "step": 950
+ },
+ {
+ "epoch": 2.973903966597077,
+ "grad_norm": 0.8010216951370239,
+ "learning_rate": 2.742080378071118e-06,
+ "loss": 0.5026,
+ "step": 951
+ },
+ {
+ "epoch": 2.977035490605428,
+ "grad_norm": 0.780078649520874,
+ "learning_rate": 2.7377707153000356e-06,
+ "loss": 0.4758,
+ "step": 952
+ },
+ {
+ "epoch": 2.980167014613779,
+ "grad_norm": 0.7728193998336792,
+ "learning_rate": 2.7334603393747684e-06,
+ "loss": 0.488,
+ "step": 953
+ },
+ {
+ "epoch": 2.9832985386221296,
+ "grad_norm": 0.836329996585846,
+ "learning_rate": 2.7291492632235777e-06,
+ "loss": 0.456,
+ "step": 954
+ },
+ {
+ "epoch": 2.9864300626304803,
+ "grad_norm": 0.7241990566253662,
+ "learning_rate": 2.724837499776831e-06,
+ "loss": 0.4953,
+ "step": 955
+ },
+ {
+ "epoch": 2.989561586638831,
+ "grad_norm": 0.7595076560974121,
+ "learning_rate": 2.7205250619669527e-06,
+ "loss": 0.446,
+ "step": 956
+ },
+ {
+ "epoch": 2.9926931106471817,
+ "grad_norm": 0.8177686333656311,
+ "learning_rate": 2.716211962728392e-06,
+ "loss": 0.5057,
+ "step": 957
+ }
+ ],
+ "logging_steps": 1,
+ "max_steps": 1914,
+ "num_input_tokens_seen": 0,
+ "num_train_epochs": 6,
+ "save_steps": 319,
+ "stateful_callbacks": {
+ "TrainerControl": {
+ "args": {
+ "should_epoch_stop": false,
+ "should_evaluate": false,
+ "should_log": false,
+ "should_save": true,
+ "should_training_stop": false
+ },
+ "attributes": {}
+ }
+ },
+ "total_flos": 2.9005238427891794e+19,
+ "train_batch_size": 4,
+ "trial_name": null,
+ "trial_params": null
+}
diff --git a/checkpoint-957/training_args.bin b/checkpoint-957/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..8067ee9c1c0bc752bdfd00cfcaf1a6e717d2356b
--- /dev/null
+++ b/checkpoint-957/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c650156a192cae0a2070c4323ee8a93e9b52fb76041d59ae0633b98389585727
+size 7928
diff --git a/checkpoint-957/zero_to_fp32.py b/checkpoint-957/zero_to_fp32.py
new file mode 100644
index 0000000000000000000000000000000000000000..24cc342e78d1a006c782b3a4cd68d9ce786d8fd8
--- /dev/null
+++ b/checkpoint-957/zero_to_fp32.py
@@ -0,0 +1,604 @@
+#!/usr/bin/env python
+
+# Copyright (c) Microsoft Corporation.
+# SPDX-License-Identifier: Apache-2.0
+
+# DeepSpeed Team
+
+# This script extracts fp32 consolidated weights from a zero 1, 2 and 3 DeepSpeed checkpoints. It gets
+# copied into the top level checkpoint dir, so the user can easily do the conversion at any point in
+# the future. Once extracted, the weights don't require DeepSpeed and can be used in any
+# application.
+#
+# example: python zero_to_fp32.py . pytorch_model.bin
+
+import argparse
+import torch
+import glob
+import math
+import os
+import re
+from collections import OrderedDict
+from dataclasses import dataclass
+
+# while this script doesn't use deepspeed to recover data, since the checkpoints are pickled with
+# DeepSpeed data structures it has to be available in the current python environment.
+from deepspeed.utils import logger
+from deepspeed.checkpoint.constants import (DS_VERSION, OPTIMIZER_STATE_DICT, SINGLE_PARTITION_OF_FP32_GROUPS,
+ FP32_FLAT_GROUPS, ZERO_STAGE, PARTITION_COUNT, PARAM_SHAPES, BUFFER_NAMES,
+ FROZEN_PARAM_SHAPES, FROZEN_PARAM_FRAGMENTS)
+
+
+# Per-rank snapshot of the model state parsed out of a DeepSpeed
+# *_model_states.pt file (constructed in parse_model_states below).
+# NOTE(review): upstream annotated these fields as `dict()` — a dict
+# *instance*, not a type. Harmless (dataclass only requires *some*
+# annotation), but the bare types below are the correct spelling.
+@dataclass
+class zero_model_state:
+    buffers: dict          # buffer name -> fp32 tensor (restored from fp16 on load)
+    param_shapes: dict     # per param-group mapping of param name -> shape; indexed like a list of dicts by consumers — confirm exact container
+    shared_params: list    # [alias_name, source_name] pairs re-linked after merging
+    ds_version: int        # value of DS_VERSION from the checkpoint (may be None); likely a version string — TODO confirm
+    frozen_param_shapes: dict      # frozen param name -> shape, or None when nothing is frozen
+    frozen_param_fragments: dict   # frozen param name -> local tensor fragment, or None
+
+
+# 0/1 verbosity flag: set to 1 to enable the tracing prints scattered
+# through the reconstruction helpers below.
+debug = 0
+
+# load to cpu — all checkpoint tensors are materialized in host memory
+device = torch.device('cpu')
+
+
+def atoi(text):
+    # Convert a purely-numeric token to int, pass anything else through
+    # unchanged. Helper for natural_keys: lets digit runs compare numerically.
+    return int(text) if text.isdigit() else text
+
+
+def natural_keys(text):
+    '''
+    alist.sort(key=natural_keys) sorts in human order
+    http://nedbatchelder.com/blog/200712/human_sorting.html
+    (See Toothy's implementation in the comments)
+
+    Splits `text` on digit runs (the capturing group keeps the digits),
+    so e.g. "rank_2" sorts before "rank_10".
+    '''
+    return [atoi(c) for c in re.split(r'(\d+)', text)]
+
+
+def get_model_state_file(checkpoint_dir, zero_stage):
+    # Return the path of the rank-0 model-states file for the given ZeRO
+    # stage; the filename differs between stages 1/2 and stage 3.
+    # Raises FileNotFoundError if the directory or file is missing.
+    if not os.path.isdir(checkpoint_dir):
+        raise FileNotFoundError(f"Directory '{checkpoint_dir}' doesn't exist")
+
+    # there should be only one file
+    if zero_stage <= 2:
+        file = os.path.join(checkpoint_dir, "mp_rank_00_model_states.pt")
+    elif zero_stage == 3:
+        file = os.path.join(checkpoint_dir, "zero_pp_rank_0_mp_rank_00_model_states.pt")
+    # NOTE(review): a zero_stage other than <=2 or ==3 leaves `file` unbound
+    # and the next line raises UnboundLocalError instead of a clear error.
+
+    if not os.path.exists(file):
+        raise FileNotFoundError(f"can't find model states file at '{file}'")
+
+    return file
+
+
+def get_checkpoint_files(checkpoint_dir, glob_pattern):
+    # Glob `glob_pattern` inside `checkpoint_dir` and return the matches in
+    # natural (human) sort order, so rank files line up as rank 0,1,...,N.
+    # Raises FileNotFoundError when nothing matches.
+    # XXX: need to test that this simple glob rule works for multi-node setup too
+    ckpt_files = sorted(glob.glob(os.path.join(checkpoint_dir, glob_pattern)), key=natural_keys)
+
+    if len(ckpt_files) == 0:
+        raise FileNotFoundError(f"can't find {glob_pattern} files in directory '{checkpoint_dir}'")
+
+    return ckpt_files
+
+
+def get_optim_files(checkpoint_dir):
+    # All per-rank optimizer-state files, naturally sorted by rank.
+    return get_checkpoint_files(checkpoint_dir, "*_optim_states.pt")
+
+
+def get_model_state_files(checkpoint_dir):
+    # All per-rank model-state files, naturally sorted by rank.
+    return get_checkpoint_files(checkpoint_dir, "*_model_states.pt")
+
+
+def parse_model_states(files):
+    """Load each *_model_states.pt file and distill it into a zero_model_state.
+
+    Args:
+        files: list of model-state checkpoint paths, one per rank.
+    Returns:
+        list of zero_model_state, in the same order as ``files``.
+    Raises:
+        ValueError: if a file lacks the buffer-names key (i.e. it is not a
+            model-state checkpoint).
+    """
+    zero_model_states = []
+    for file in files:
+        # NOTE(review): torch.load deserializes pickled data — only run this
+        # script on checkpoints from a trusted source.
+        state_dict = torch.load(file, map_location=device)
+
+        if BUFFER_NAMES not in state_dict:
+            raise ValueError(f"{file} is not a model state checkpoint")
+        buffer_names = state_dict[BUFFER_NAMES]
+        if debug:
+            print("Found buffers:", buffer_names)
+
+        # recover just the buffers while restoring them to fp32 if they were saved in fp16
+        buffers = {k: v.float() for k, v in state_dict["module"].items() if k in buffer_names}
+        param_shapes = state_dict[PARAM_SHAPES]
+
+        # collect parameters that are included in param_shapes
+        param_names = []
+        for s in param_shapes:
+            for name in s.keys():
+                param_names.append(name)
+
+        # update with frozen parameters
+        frozen_param_shapes = state_dict.get(FROZEN_PARAM_SHAPES, None)
+        if frozen_param_shapes is not None:
+            if debug:
+                print(f"Found frozen_param_shapes: {frozen_param_shapes}")
+            param_names += list(frozen_param_shapes.keys())
+        # NOTE(review): `param_names` is built above but never used afterwards
+        # in this function — dead work kept for parity with upstream.
+
+        # handle shared params
+        shared_params = [[k, v] for k, v in state_dict["shared_params"].items()]
+
+        ds_version = state_dict.get(DS_VERSION, None)
+
+        frozen_param_fragments = state_dict.get(FROZEN_PARAM_FRAGMENTS, None)
+
+        z_model_state = zero_model_state(buffers=buffers,
+                                         param_shapes=param_shapes,
+                                         shared_params=shared_params,
+                                         ds_version=ds_version,
+                                         frozen_param_shapes=frozen_param_shapes,
+                                         frozen_param_fragments=frozen_param_fragments)
+        zero_model_states.append(z_model_state)
+
+    return zero_model_states
+
+
+def parse_optim_states(files, ds_checkpoint_dir):
+    """Load the per-rank optimizer-state files and extract the fp32 weights.
+
+    Args:
+        files: per-rank ``*_optim_states.pt`` paths (one per dp rank).
+        ds_checkpoint_dir: checkpoint folder, used only for error messages.
+    Returns:
+        (zero_stage, world_size, fp32_flat_groups) where ``fp32_flat_groups``
+        holds, per rank, the fp32 master-weight partitions (for stage 3 the
+        per-group tensors are concatenated into one flat tensor per rank).
+    Raises:
+        ValueError: not a ZeRO checkpoint, unknown stage, or a mismatch
+            between the recorded world size and the number of files.
+    """
+
+    total_files = len(files)
+    state_dicts = []
+    for f in files:
+        state_dict = torch.load(f, map_location=device)
+        # immediately discard the potentially huge 2 optimizer states as we only care for fp32 master weights
+        # and also handle the case where it was already removed by another helper script
+        state_dict["optimizer_state_dict"].pop("optimizer_state_dict", None)
+        state_dicts.append(state_dict)
+
+    if not ZERO_STAGE in state_dicts[0][OPTIMIZER_STATE_DICT]:
+        raise ValueError(f"{files[0]} is not a zero checkpoint")
+    zero_stage = state_dicts[0][OPTIMIZER_STATE_DICT][ZERO_STAGE]
+    world_size = state_dicts[0][OPTIMIZER_STATE_DICT][PARTITION_COUNT]
+
+    # For ZeRO-2 each param group can have different partition_count as data parallelism for expert
+    # parameters can be different from data parallelism for non-expert parameters. So we can just
+    # use the max of the partition_count to get the dp world_size.
+
+    if type(world_size) is list:
+        world_size = max(world_size)
+
+    if world_size != total_files:
+        raise ValueError(
+            f"Expected {world_size} of '*_optim_states.pt' under '{ds_checkpoint_dir}' but found {total_files} files. "
+            "Possibly due to an overwrite of an old checkpoint, or a checkpoint didn't get saved by one or more processes."
+        )
+
+    # the groups are named differently in each stage
+    if zero_stage <= 2:
+        fp32_groups_key = SINGLE_PARTITION_OF_FP32_GROUPS
+    elif zero_stage == 3:
+        fp32_groups_key = FP32_FLAT_GROUPS
+    else:
+        raise ValueError(f"unknown zero stage {zero_stage}")
+
+    if zero_stage <= 2:
+        fp32_flat_groups = [state_dicts[i][OPTIMIZER_STATE_DICT][fp32_groups_key] for i in range(len(state_dicts))]
+    elif zero_stage == 3:
+        # if there is more than one param group, there will be multiple flattened tensors - one
+        # flattened tensor per group - for simplicity merge them into a single tensor
+        #
+        # XXX: could make the script more memory efficient for when there are multiple groups - it
+        # will require matching the sub-lists of param_shapes for each param group flattened tensor
+
+        fp32_flat_groups = [
+            torch.cat(state_dicts[i][OPTIMIZER_STATE_DICT][fp32_groups_key], 0) for i in range(len(state_dicts))
+        ]
+
+    return zero_stage, world_size, fp32_flat_groups
+
+
+def _get_fp32_state_dict_from_zero_checkpoint(ds_checkpoint_dir, exclude_frozen_parameters):
+    """
+    Returns fp32 state_dict reconstructed from ds checkpoint
+
+    Args:
+        - ``ds_checkpoint_dir``: path to the deepspeed checkpoint folder (where the optimizer files are)
+        - ``exclude_frozen_parameters``: when True, frozen parameters are left
+          out of the reconstructed state_dict
+
+    """
+    print(f"Processing zero checkpoint '{ds_checkpoint_dir}'")
+
+    optim_files = get_optim_files(ds_checkpoint_dir)
+    zero_stage, world_size, fp32_flat_groups = parse_optim_states(optim_files, ds_checkpoint_dir)
+    print(f"Detected checkpoint of type zero stage {zero_stage}, world_size: {world_size}")
+
+    model_files = get_model_state_files(ds_checkpoint_dir)
+
+    zero_model_states = parse_model_states(model_files)
+    print(f'Parsing checkpoint created by deepspeed=={zero_model_states[0].ds_version}')
+
+    # dispatch on stage; parse_optim_states has already rejected unknown stages
+    if zero_stage <= 2:
+        return _get_fp32_state_dict_from_zero2_checkpoint(world_size, fp32_flat_groups, zero_model_states,
+                                                          exclude_frozen_parameters)
+    elif zero_stage == 3:
+        return _get_fp32_state_dict_from_zero3_checkpoint(world_size, fp32_flat_groups, zero_model_states,
+                                                          exclude_frozen_parameters)
+
+
+def _zero2_merge_frozen_params(state_dict, zero_model_states):
+    # Copy frozen (untrained) parameters into `state_dict`. Under ZeRO-2 the
+    # frozen fragments on rank 0 are already complete, so no partition
+    # stitching is needed — each fragment is stored as-is.
+    # No-op when the checkpoint recorded no frozen parameters.
+    if zero_model_states[0].frozen_param_shapes is None or len(zero_model_states[0].frozen_param_shapes) == 0:
+        return
+
+    frozen_param_shapes = zero_model_states[0].frozen_param_shapes
+    frozen_param_fragments = zero_model_states[0].frozen_param_fragments
+
+    if debug:
+        num_elem = sum(s.numel() for s in frozen_param_shapes.values())
+        print(f'rank 0: {FROZEN_PARAM_SHAPES}.numel = {num_elem}')
+
+    wanted_params = len(frozen_param_shapes)
+    wanted_numel = sum(s.numel() for s in frozen_param_shapes.values())
+    avail_numel = sum([p.numel() for p in frozen_param_fragments.values()])
+    print(f'Frozen params: Have {avail_numel} numels to process.')
+    print(f'Frozen params: Need {wanted_numel} numels in {wanted_params} params')
+
+    total_params = 0
+    total_numel = 0
+    for name, shape in frozen_param_shapes.items():
+        total_params += 1
+        unpartitioned_numel = shape.numel()
+        total_numel += unpartitioned_numel
+
+        state_dict[name] = frozen_param_fragments[name]
+
+        if debug:
+            print(f"{name} full shape: {shape} unpartitioned numel {unpartitioned_numel} ")
+
+    print(f"Reconstructed Frozen fp32 state dict with {total_params} params {total_numel} elements")
+
+
+def _has_callable(obj, fn):
+    # True when `obj` has an attribute named `fn` that is callable; used to
+    # distinguish tensor-like shapes (with .numel()) from plain tuples/lists.
+    attr = getattr(obj, fn, None)
+    return callable(attr)
+
+
+def _zero2_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states):
+    """Reassemble trainable params from ZeRO-1/2 fp32 partitions into ``state_dict``.
+
+    For each param group the per-rank partitions are concatenated into one flat
+    fp32 vector, then sliced back into individual parameters using the shapes
+    recorded in ``param_shapes``. Mutates ``state_dict`` in place.
+    Raises ValueError when the consumed element count does not match the
+    available (alignment-padded) element count.
+    """
+    param_shapes = zero_model_states[0].param_shapes
+
+    # Reconstruction protocol:
+    #
+    # XXX: document this
+
+    if debug:
+        for i in range(world_size):
+            for j in range(len(fp32_flat_groups[0])):
+                print(f"{FP32_FLAT_GROUPS}[{i}][{j}].shape={fp32_flat_groups[i][j].shape}")
+
+    # XXX: memory usage doubles here (zero2)
+    num_param_groups = len(fp32_flat_groups[0])
+    merged_single_partition_of_fp32_groups = []
+    for i in range(num_param_groups):
+        merged_partitions = [sd[i] for sd in fp32_flat_groups]
+        full_single_fp32_vector = torch.cat(merged_partitions, 0)
+        merged_single_partition_of_fp32_groups.append(full_single_fp32_vector)
+    avail_numel = sum(
+        [full_single_fp32_vector.numel() for full_single_fp32_vector in merged_single_partition_of_fp32_groups])
+
+    if debug:
+        wanted_params = sum([len(shapes) for shapes in param_shapes])
+        wanted_numel = sum([sum(shape.numel() for shape in shapes.values()) for shapes in param_shapes])
+        # not asserting if there is a mismatch due to possible padding
+        print(f"Have {avail_numel} numels to process.")
+        print(f"Need {wanted_numel} numels in {wanted_params} params.")
+
+    # params
+    # XXX: for huge models that can't fit into the host's RAM we will have to recode this to support
+    # out-of-core computing solution
+    total_numel = 0
+    total_params = 0
+    for shapes, full_single_fp32_vector in zip(param_shapes, merged_single_partition_of_fp32_groups):
+        offset = 0
+        avail_numel = full_single_fp32_vector.numel()
+        for name, shape in shapes.items():
+
+            # shape may be a tensor-like (has .numel) or a plain tuple/list
+            unpartitioned_numel = shape.numel() if _has_callable(shape, 'numel') else math.prod(shape)
+            total_numel += unpartitioned_numel
+            total_params += 1
+
+            if debug:
+                print(f"{name} full shape: {shape} unpartitioned numel {unpartitioned_numel} ")
+            state_dict[name] = full_single_fp32_vector.narrow(0, offset, unpartitioned_numel).view(shape)
+            offset += unpartitioned_numel
+
+        # Z2 started to align to 2*world_size to improve nccl performance. Therefore both offset and
+        # avail_numel can differ by anywhere between 0..2*world_size. Due to two unrelated complex
+        # paddings performed in the code it's almost impossible to predict the exact numbers w/o the
+        # live optimizer object, so we are checking that the numbers are within the right range
+        align_to = 2 * world_size
+
+        def zero2_align(x):
+            # round x up to the nearest multiple of align_to
+            return align_to * math.ceil(x / align_to)
+
+        if debug:
+            print(f"original offset={offset}, avail_numel={avail_numel}")
+
+        offset = zero2_align(offset)
+        avail_numel = zero2_align(avail_numel)
+
+        if debug:
+            print(f"aligned offset={offset}, avail_numel={avail_numel}")
+
+        # Sanity check
+        if offset != avail_numel:
+            raise ValueError(f"consumed {offset} numels out of {avail_numel} - something is wrong")
+
+    print(f"Reconstructed fp32 state dict with {total_params} params {total_numel} elements")
+
+
+def _get_fp32_state_dict_from_zero2_checkpoint(world_size, fp32_flat_groups, zero_model_states,
+                                               exclude_frozen_parameters):
+    # Build the consolidated fp32 state_dict for a ZeRO-1/2 checkpoint:
+    # buffers first, then (optionally) frozen params, then the merged
+    # trainable params, and finally re-link shared-parameter aliases.
+    state_dict = OrderedDict()
+
+    # buffers
+    buffers = zero_model_states[0].buffers
+    state_dict.update(buffers)
+    if debug:
+        print(f"added {len(buffers)} buffers")
+
+    if not exclude_frozen_parameters:
+        _zero2_merge_frozen_params(state_dict, zero_model_states)
+
+    _zero2_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states)
+
+    # recover shared parameters
+    for pair in zero_model_states[0].shared_params:
+        if pair[1] in state_dict:
+            state_dict[pair[0]] = state_dict[pair[1]]
+
+    return state_dict
+
+
+def zero3_partitioned_param_info(unpartitioned_numel, world_size):
+    # For a ZeRO-3 param of `unpartitioned_numel` elements split across
+    # `world_size` ranks, return (elements per rank partition, number of
+    # padding elements added so the total divides evenly).
+    remainder = unpartitioned_numel % world_size
+    padding_numel = (world_size - remainder) if remainder else 0
+    partitioned_numel = math.ceil(unpartitioned_numel / world_size)
+    return partitioned_numel, padding_numel
+
+
+def _zero3_merge_frozen_params(state_dict, world_size, zero_model_states):
+    # Stitch frozen (untrained) ZeRO-3 parameters back together: unlike
+    # ZeRO-2, each rank holds only a fragment, so the per-rank fragments are
+    # concatenated and then trimmed of alignment padding before reshaping.
+    # Mutates `state_dict` in place; no-op when nothing is frozen.
+    if zero_model_states[0].frozen_param_shapes is None or len(zero_model_states[0].frozen_param_shapes) == 0:
+        return
+
+    if debug:
+        for i in range(world_size):
+            num_elem = sum(s.numel() for s in zero_model_states[i].frozen_param_fragments.values())
+            print(f'rank {i}: {FROZEN_PARAM_SHAPES}.numel = {num_elem}')
+
+    frozen_param_shapes = zero_model_states[0].frozen_param_shapes
+    wanted_params = len(frozen_param_shapes)
+    wanted_numel = sum(s.numel() for s in frozen_param_shapes.values())
+    avail_numel = sum([p.numel() for p in zero_model_states[0].frozen_param_fragments.values()]) * world_size
+    print(f'Frozen params: Have {avail_numel} numels to process.')
+    print(f'Frozen params: Need {wanted_numel} numels in {wanted_params} params')
+
+    total_params = 0
+    total_numel = 0
+    for name, shape in zero_model_states[0].frozen_param_shapes.items():
+        total_params += 1
+        unpartitioned_numel = shape.numel()
+        total_numel += unpartitioned_numel
+
+        # concatenate every rank's fragment, drop trailing padding, reshape
+        param_frags = tuple(model_state.frozen_param_fragments[name] for model_state in zero_model_states)
+        state_dict[name] = torch.cat(param_frags, 0).narrow(0, 0, unpartitioned_numel).view(shape)
+
+        partitioned_numel, partitioned_padding_numel = zero3_partitioned_param_info(unpartitioned_numel, world_size)
+
+        if debug:
+            print(
+                f"Frozen params: {total_params} {name} full shape: {shape} partition0 numel={partitioned_numel} partitioned_padding_numel={partitioned_padding_numel}"
+            )
+
+    print(f"Reconstructed Frozen fp32 state dict with {total_params} params {total_numel} elements")
+
+
+def _zero3_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states):
+    """Reassemble trainable params from ZeRO-3 fp32 flat partitions into ``state_dict``.
+
+    Each rank's flat tensor holds a slice of every parameter; for each param
+    the per-rank slices are concatenated, padding trimmed, and the result
+    reshaped. Mutates ``state_dict`` in place; raises ValueError on a
+    consumed/available element-count mismatch.
+    """
+    param_shapes = zero_model_states[0].param_shapes
+    # NOTE(review): this avail_numel is recomputed identically below before
+    # use — the first assignment is redundant but harmless.
+    avail_numel = fp32_flat_groups[0].numel() * world_size
+    # Reconstruction protocol: For zero3 we need to zip the partitions together at boundary of each
+    # param, re-consolidating each param, while dealing with padding if any
+
+    # merge list of dicts, preserving order
+    param_shapes = {k: v for d in param_shapes for k, v in d.items()}
+
+    if debug:
+        for i in range(world_size):
+            print(f"{FP32_FLAT_GROUPS}[{i}].shape={fp32_flat_groups[i].shape}")
+
+    wanted_params = len(param_shapes)
+    wanted_numel = sum(shape.numel() for shape in param_shapes.values())
+    # not asserting if there is a mismatch due to possible padding
+    avail_numel = fp32_flat_groups[0].numel() * world_size
+    print(f"Trainable params: Have {avail_numel} numels to process.")
+    print(f"Trainable params: Need {wanted_numel} numels in {wanted_params} params.")
+
+    # params
+    # XXX: for huge models that can't fit into the host's RAM we will have to recode this to support
+    # out-of-core computing solution
+    offset = 0
+    total_numel = 0
+    total_params = 0
+    for name, shape in param_shapes.items():
+
+        unpartitioned_numel = shape.numel()
+        total_numel += unpartitioned_numel
+        total_params += 1
+
+        partitioned_numel, partitioned_padding_numel = zero3_partitioned_param_info(unpartitioned_numel, world_size)
+
+        if debug:
+            print(
+                f"Trainable params: {total_params} {name} full shape: {shape} partition0 numel={partitioned_numel} partitioned_padding_numel={partitioned_padding_numel}"
+            )
+
+        # XXX: memory usage doubles here
+        state_dict[name] = torch.cat(
+            tuple(fp32_flat_groups[i].narrow(0, offset, partitioned_numel) for i in range(world_size)),
+            0).narrow(0, 0, unpartitioned_numel).view(shape)
+        offset += partitioned_numel
+
+    # offset advanced by the per-rank partition size; scale back up to totals
+    offset *= world_size
+
+    # Sanity check
+    if offset != avail_numel:
+        raise ValueError(f"consumed {offset} numels out of {avail_numel} - something is wrong")
+
+    print(f"Reconstructed Trainable fp32 state dict with {total_params} params {total_numel} elements")
+
+
+def _get_fp32_state_dict_from_zero3_checkpoint(world_size, fp32_flat_groups, zero_model_states,
+                                               exclude_frozen_parameters):
+    # Build the consolidated fp32 state_dict for a ZeRO-3 checkpoint:
+    # buffers first, then (optionally) frozen params, then the merged
+    # trainable params, and finally re-link shared-parameter aliases.
+    # Mirrors _get_fp32_state_dict_from_zero2_checkpoint above.
+    state_dict = OrderedDict()
+
+    # buffers
+    buffers = zero_model_states[0].buffers
+    state_dict.update(buffers)
+    if debug:
+        print(f"added {len(buffers)} buffers")
+
+    if not exclude_frozen_parameters:
+        _zero3_merge_frozen_params(state_dict, world_size, zero_model_states)
+
+    _zero3_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states)
+
+    # recover shared parameters
+    for pair in zero_model_states[0].shared_params:
+        if pair[1] in state_dict:
+            state_dict[pair[0]] = state_dict[pair[1]]
+
+    return state_dict
+
+
def get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag=None, exclude_frozen_parameters=False):
    """Return a single consolidated fp32 ``state_dict`` from a ZeRO 2/3 checkpoint.

    The result can be passed to ``model.load_state_dict()`` and used for
    training without DeepSpeed, or shared e.g. via a model hub. Everything is
    loaded into CPU memory; if that does not fit, use the offline
    ``zero_to_fp32.py`` script that is saved alongside the checkpoint.

    Args:
        checkpoint_dir: path to the desired checkpoint folder.
        tag: checkpoint tag used as a unique identifier (e.g. ``global_step14``).
            When omitted, the tag is read from the ``latest`` file in
            ``checkpoint_dir``.
        exclude_frozen_parameters: when True, frozen parameters are skipped.

    Returns:
        A pytorch ``state_dict`` (already on CPU).

    Raises:
        ValueError: if ``tag`` is None and no ``latest`` file exists.
        FileNotFoundError: if the resolved checkpoint directory is missing.

    A typical usage might be ::

        from deepspeed.utils.zero_to_fp32 import get_fp32_state_dict_from_zero_checkpoint
        # do the training and checkpoint saving
        state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir)  # already on cpu
        model = model.cpu()  # move to cpu
        model.load_state_dict(state_dict)
        # submit to model hub or save the model to share with others

    After ``load_state_dict`` the model is detached from DeepSpeed and the
    engine must be re-initialized to continue training with it. If you want
    it all done for you, use ``load_state_dict_from_zero_checkpoint`` instead.
    """
    if tag is None:
        # Resolve the tag from the 'latest' marker file DeepSpeed writes.
        latest_path = os.path.join(checkpoint_dir, 'latest')
        if not os.path.isfile(latest_path):
            raise ValueError(f"Unable to find 'latest' file at {latest_path}")
        with open(latest_path, 'r') as fd:
            tag = fd.read().strip()

    ds_checkpoint_dir = os.path.join(checkpoint_dir, tag)
    if not os.path.isdir(ds_checkpoint_dir):
        raise FileNotFoundError(f"Directory '{ds_checkpoint_dir}' doesn't exist")

    return _get_fp32_state_dict_from_zero_checkpoint(ds_checkpoint_dir, exclude_frozen_parameters)
+
+
def convert_zero_checkpoint_to_fp32_state_dict(checkpoint_dir, output_file, tag=None, exclude_frozen_parameters=False):
    """Consolidate a ZeRO 2/3 checkpoint into a single fp32 ``state_dict`` file.

    The written file can be loaded with ``torch.load(file)`` +
    ``load_state_dict()`` and used for training without DeepSpeed.

    Args:
        checkpoint_dir: checkpoint folder that contains the tag-folder
            (e.g. ``global_step14``).
        output_file: destination path for the fp32 state_dict output file
            (e.g. ``path/pytorch_model.bin``).
        tag: checkpoint tag; when omitted it is read from the file named
            ``latest`` in the checkpoint folder (e.g. ``global_step14``).
        exclude_frozen_parameters: when True, frozen parameters are skipped.
    """
    merged_state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag, exclude_frozen_parameters)
    print(f"Saving fp32 state dict to {output_file}")
    torch.save(merged_state_dict, output_file)
+
+
def load_state_dict_from_zero_checkpoint(model, checkpoint_dir, tag=None):
    """Consolidate a ZeRO 2/3 checkpoint to fp32 and load it into ``model``.

    Steps: (1) reconstruct a single fp32 ``state_dict`` from the checkpoint,
    (2) move ``model`` to CPU, (3) load the state dict (non-strict).

    Args:
        model: the model object to update.
        checkpoint_dir: checkpoint folder that contains the tag-folder
            (e.g. ``global_step14``).
        tag: checkpoint tag; when omitted it is read from the file named
            ``latest`` in the checkpoint folder (e.g. ``global_step14``).

    Returns:
        The modified ``model``.

    Make sure you have plenty of free CPU memory before calling this; if you
    don't, use the ``zero_to_fp32.py`` utility conveniently placed for you in
    the checkpoint folder instead.

    A typical usage might be ::

        from deepspeed.utils.zero_to_fp32 import load_state_dict_from_zero_checkpoint
        model = load_state_dict_from_zero_checkpoint(trainer.model, checkpoint_dir)
        # submit to model hub or save the model to share with others

    Note that after this call the ``model`` is no longer usable in the
    DeepSpeed context of the same application — re-initialize the engine,
    since ``model.load_state_dict(state_dict)`` removes all the DeepSpeed
    magic from it.
    """
    logger.info(f"Extracting fp32 weights")
    fp32_state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag)

    logger.info(f"Overwriting model with fp32 weights")
    model = model.cpu()
    model.load_state_dict(fp32_state_dict, strict=False)

    return model
+
+
if __name__ == "__main__":
    # CLI wrapper: convert a DeepSpeed checkpoint folder into one fp32 file.
    cli = argparse.ArgumentParser()
    cli.add_argument("checkpoint_dir",
                     type=str,
                     help="path to the desired checkpoint folder, e.g., path/checkpoint-12")
    cli.add_argument(
        "output_file",
        type=str,
        help="path to the pytorch fp32 state_dict output file (e.g. path/checkpoint-12/pytorch_model.bin)")
    cli.add_argument("-t",
                     "--tag",
                     type=str,
                     default=None,
                     help="checkpoint tag used as a unique identifier for checkpoint. e.g., global_step1")
    cli.add_argument("--exclude_frozen_parameters", action='store_true', help="exclude frozen parameters")
    cli.add_argument("-d", "--debug", action='store_true', help="enable debug")
    cli_args = cli.parse_args()

    # Module-level flag consulted by the merge helpers above.
    debug = cli_args.debug

    convert_zero_checkpoint_to_fp32_state_dict(cli_args.checkpoint_dir,
                                               cli_args.output_file,
                                               tag=cli_args.tag,
                                               exclude_frozen_parameters=cli_args.exclude_frozen_parameters)
diff --git a/config.json b/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..b7324bbcd3035a34c2ac96f0e2a46dd94a5db25c
--- /dev/null
+++ b/config.json
@@ -0,0 +1,1497 @@
+{
+ "_attn_implementation_autoset": true,
+ "_name_or_path": "nvidia/Llama-3_3-Nemotron-Super-49B-v1",
+ "architectures": [
+ "DeciLMForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "auto_map": {
+ "AutoConfig": "nvidia/Llama-3_3-Nemotron-Super-49B-v1--configuration_decilm.DeciLMConfig",
+ "AutoModelForCausalLM": "nvidia/Llama-3_3-Nemotron-Super-49B-v1--modeling_decilm.DeciLMForCausalLM"
+ },
+ "block_configs": [
+ {
+ "attention": {
+ "n_heads_in_group": 8,
+ "no_op": false,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 2.625,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": 8,
+ "no_op": false,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 5.25,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": 8,
+ "no_op": false,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 5.25,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": 8,
+ "no_op": false,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 5.25,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": 8,
+ "no_op": false,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 5.25,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": 8,
+ "no_op": false,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 5.25,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": null,
+ "no_op": true,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 2.625,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": null,
+ "no_op": true,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 2.625,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": 8,
+ "no_op": false,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 5.25,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": 8,
+ "no_op": false,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 5.25,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": 8,
+ "no_op": false,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 5.25,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": null,
+ "no_op": true,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 3.28125,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": 8,
+ "no_op": false,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 5.25,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": 8,
+ "no_op": false,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 5.25,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": 8,
+ "no_op": false,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 5.25,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": 8,
+ "no_op": false,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 5.25,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": 8,
+ "no_op": false,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 5.25,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": 8,
+ "no_op": false,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 5.25,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": 8,
+ "no_op": false,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 5.25,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": 8,
+ "no_op": false,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 5.25,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": 8,
+ "no_op": false,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 5.25,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": 8,
+ "no_op": false,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 5.25,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": 8,
+ "no_op": false,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 5.25,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": 8,
+ "no_op": false,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 5.25,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": 8,
+ "no_op": false,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 5.25,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": 8,
+ "no_op": false,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 5.25,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": 8,
+ "no_op": false,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 5.25,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": 8,
+ "no_op": false,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 5.25,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": 8,
+ "no_op": false,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 5.25,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": 8,
+ "no_op": false,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 5.25,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": 8,
+ "no_op": false,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 5.25,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": 8,
+ "no_op": false,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 5.25,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": 8,
+ "no_op": false,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 5.25,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": 8,
+ "no_op": false,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 5.25,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": 8,
+ "no_op": false,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 5.25,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": 8,
+ "no_op": false,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 5.25,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": 8,
+ "no_op": false,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 5.25,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": 8,
+ "no_op": false,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 5.25,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": 8,
+ "no_op": false,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 5.25,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": 8,
+ "no_op": false,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 5.25,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": 8,
+ "no_op": false,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 5.25,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": 8,
+ "no_op": false,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 5.25,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": null,
+ "no_op": true,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 1.3125,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": null,
+ "no_op": true,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 2.625,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": null,
+ "no_op": true,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 2.625,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": null,
+ "no_op": true,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 1.3125,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": null,
+ "no_op": true,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 5.25,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": null,
+ "no_op": true,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 1.3125,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": null,
+ "no_op": true,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 2.625,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": null,
+ "no_op": true,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 1.3125,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": null,
+ "no_op": true,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 1.3125,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": null,
+ "no_op": true,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 1.3125,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": 8,
+ "no_op": false,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 5.25,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": null,
+ "no_op": true,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 1.3125,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": null,
+ "no_op": true,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 1.0,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": null,
+ "no_op": true,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 1.0,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": null,
+ "no_op": true,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 1.3125,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": null,
+ "no_op": true,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 1.0,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": null,
+ "no_op": true,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 1.0,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": null,
+ "no_op": true,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 1.0,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": null,
+ "no_op": true,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 1.3125,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": null,
+ "no_op": true,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 1.3125,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": null,
+ "no_op": true,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 0.5,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": null,
+ "no_op": true,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 0.5,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": null,
+ "no_op": true,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 1.0,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": null,
+ "no_op": true,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 1.0,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": null,
+ "no_op": true,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 0.5,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": null,
+ "no_op": true,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 0.5,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": null,
+ "no_op": true,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 1.0,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": null,
+ "no_op": true,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 0.5,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": null,
+ "no_op": true,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 0.5,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": 8,
+ "no_op": false,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 5.25,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": 8,
+ "no_op": false,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 5.25,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": 8,
+ "no_op": false,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 5.25,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": 8,
+ "no_op": false,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 5.25,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": 8,
+ "no_op": false,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 5.25,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": 8,
+ "no_op": false,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 5.25,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": 8,
+ "no_op": false,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 5.25,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": 8,
+ "no_op": false,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 5.25,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ },
+ {
+ "attention": {
+ "n_heads_in_group": 8,
+ "no_op": false,
+ "num_sink_tokens": null,
+ "replace_with_linear": false,
+ "sparsify": null,
+ "unshifted_sink": false,
+ "use_prefill_window_in_sink_attention": false,
+ "window_length": null
+ },
+ "ffn": {
+ "ffn_mult": 5.25,
+ "no_op": false,
+ "replace_with_linear": false,
+ "sparsify": null
+ }
+ }
+ ],
+ "bos_token_id": 128000,
+ "eos_token_id": 128009,
+ "hidden_act": "silu",
+ "hidden_size": 8192,
+ "initializer_range": 0.02,
+ "intermediate_size": null,
+ "max_position_embeddings": 131072,
+ "mlp_bias": false,
+ "model_type": "nemotron-nas",
+ "num_attention_heads": 64,
+ "num_hidden_layers": 80,
+ "num_key_value_heads": null,
+ "pretraining_tp": 1,
+ "quantization_config": {
+ "_load_in_4bit": true,
+ "_load_in_8bit": false,
+ "bnb_4bit_compute_dtype": "bfloat16",
+ "bnb_4bit_quant_storage": "bfloat16",
+ "bnb_4bit_quant_type": "nf4",
+ "bnb_4bit_use_double_quant": true,
+ "llm_int8_enable_fp32_cpu_offload": false,
+ "llm_int8_has_fp16_weight": false,
+ "llm_int8_skip_modules": null,
+ "llm_int8_threshold": 6.0,
+ "load_in_4bit": true,
+ "load_in_8bit": false,
+ "quant_method": "bitsandbytes"
+ },
+ "rms_norm_eps": 1e-05,
+ "rope_scaling": {
+ "factor": 8.0,
+ "high_freq_factor": 4.0,
+ "low_freq_factor": 1.0,
+ "original_max_position_embeddings": 8192,
+ "rope_type": "llama3"
+ },
+ "rope_theta": 500000.0,
+ "tie_word_embeddings": false,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.49.0",
+ "use_cache": false,
+ "vocab_size": 128256
+}
diff --git a/special_tokens_map.json b/special_tokens_map.json
new file mode 100644
index 0000000000000000000000000000000000000000..278b7f0f84be865c4687700ee7b3c63d89a51e18
--- /dev/null
+++ b/special_tokens_map.json
@@ -0,0 +1,23 @@
+{
+ "bos_token": {
+ "content": "<|begin_of_text|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "<|eot_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": {
+ "content": "<|end_of_text|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+}
diff --git a/tokenizer.json b/tokenizer.json
new file mode 100644
index 0000000000000000000000000000000000000000..1c1d8d5c9024994f1d3b00f9662b8dd89ca13cf2
--- /dev/null
+++ b/tokenizer.json
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6b9e4e7fb171f92fd137b777cc2714bf87d11576700a1dcd7a399e7bbe39537b
+size 17209920
diff --git a/tokenizer_config.json b/tokenizer_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..edd01b980c1db496ea102a51c972ee8f5d1a2c74
--- /dev/null
+++ b/tokenizer_config.json
@@ -0,0 +1,2064 @@
+{
+ "added_tokens_decoder": {
+ "128000": {
+ "content": "<|begin_of_text|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128001": {
+ "content": "<|end_of_text|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128002": {
+ "content": "<|reserved_special_token_0|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128003": {
+ "content": "<|reserved_special_token_1|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128004": {
+ "content": "<|finetune_right_pad_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128005": {
+ "content": "<|reserved_special_token_2|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128006": {
+ "content": "<|start_header_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128007": {
+ "content": "<|end_header_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128008": {
+ "content": "<|eom_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128009": {
+ "content": "<|eot_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128010": {
+ "content": "<|python_tag|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128011": {
+ "content": "<|reserved_special_token_3|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128012": {
+ "content": "<|reserved_special_token_4|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128013": {
+ "content": "<|reserved_special_token_5|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128014": {
+ "content": "<|reserved_special_token_6|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128015": {
+ "content": "<|reserved_special_token_7|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128016": {
+ "content": "<|reserved_special_token_8|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128017": {
+ "content": "<|reserved_special_token_9|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128018": {
+ "content": "<|reserved_special_token_10|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128019": {
+ "content": "<|reserved_special_token_11|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128020": {
+ "content": "<|reserved_special_token_12|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128021": {
+ "content": "<|reserved_special_token_13|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128022": {
+ "content": "<|reserved_special_token_14|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128023": {
+ "content": "<|reserved_special_token_15|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128024": {
+ "content": "<|reserved_special_token_16|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128025": {
+ "content": "<|reserved_special_token_17|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128026": {
+ "content": "<|reserved_special_token_18|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128027": {
+ "content": "<|reserved_special_token_19|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128028": {
+ "content": "<|reserved_special_token_20|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128029": {
+ "content": "<|reserved_special_token_21|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128030": {
+ "content": "<|reserved_special_token_22|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128031": {
+ "content": "<|reserved_special_token_23|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128032": {
+ "content": "<|reserved_special_token_24|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128033": {
+ "content": "<|reserved_special_token_25|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128034": {
+ "content": "<|reserved_special_token_26|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128035": {
+ "content": "<|reserved_special_token_27|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128036": {
+ "content": "<|reserved_special_token_28|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128037": {
+ "content": "<|reserved_special_token_29|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128038": {
+ "content": "<|reserved_special_token_30|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128039": {
+ "content": "<|reserved_special_token_31|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128040": {
+ "content": "<|reserved_special_token_32|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128041": {
+ "content": "<|reserved_special_token_33|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128042": {
+ "content": "<|reserved_special_token_34|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128043": {
+ "content": "<|reserved_special_token_35|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128044": {
+ "content": "<|reserved_special_token_36|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128045": {
+ "content": "<|reserved_special_token_37|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128046": {
+ "content": "<|reserved_special_token_38|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128047": {
+ "content": "<|reserved_special_token_39|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128048": {
+ "content": "<|reserved_special_token_40|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128049": {
+ "content": "<|reserved_special_token_41|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128050": {
+ "content": "<|reserved_special_token_42|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128051": {
+ "content": "<|reserved_special_token_43|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128052": {
+ "content": "<|reserved_special_token_44|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128053": {
+ "content": "<|reserved_special_token_45|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128054": {
+ "content": "<|reserved_special_token_46|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128055": {
+ "content": "<|reserved_special_token_47|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128056": {
+ "content": "<|reserved_special_token_48|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128057": {
+ "content": "<|reserved_special_token_49|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128058": {
+ "content": "<|reserved_special_token_50|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128059": {
+ "content": "<|reserved_special_token_51|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128060": {
+ "content": "<|reserved_special_token_52|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128061": {
+ "content": "<|reserved_special_token_53|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128062": {
+ "content": "<|reserved_special_token_54|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128063": {
+ "content": "<|reserved_special_token_55|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128064": {
+ "content": "<|reserved_special_token_56|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128065": {
+ "content": "<|reserved_special_token_57|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128066": {
+ "content": "<|reserved_special_token_58|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128067": {
+ "content": "<|reserved_special_token_59|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128068": {
+ "content": "<|reserved_special_token_60|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128069": {
+ "content": "<|reserved_special_token_61|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128070": {
+ "content": "<|reserved_special_token_62|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128071": {
+ "content": "<|reserved_special_token_63|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128072": {
+ "content": "<|reserved_special_token_64|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128073": {
+ "content": "<|reserved_special_token_65|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128074": {
+ "content": "<|reserved_special_token_66|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128075": {
+ "content": "<|reserved_special_token_67|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128076": {
+ "content": "<|reserved_special_token_68|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128077": {
+ "content": "<|reserved_special_token_69|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128078": {
+ "content": "<|reserved_special_token_70|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128079": {
+ "content": "<|reserved_special_token_71|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128080": {
+ "content": "<|reserved_special_token_72|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128081": {
+ "content": "<|reserved_special_token_73|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128082": {
+ "content": "<|reserved_special_token_74|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128083": {
+ "content": "<|reserved_special_token_75|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128084": {
+ "content": "<|reserved_special_token_76|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128085": {
+ "content": "<|reserved_special_token_77|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128086": {
+ "content": "<|reserved_special_token_78|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128087": {
+ "content": "<|reserved_special_token_79|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128088": {
+ "content": "<|reserved_special_token_80|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128089": {
+ "content": "<|reserved_special_token_81|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128090": {
+ "content": "<|reserved_special_token_82|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128091": {
+ "content": "<|reserved_special_token_83|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128092": {
+ "content": "<|reserved_special_token_84|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128093": {
+ "content": "<|reserved_special_token_85|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128094": {
+ "content": "<|reserved_special_token_86|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128095": {
+ "content": "<|reserved_special_token_87|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128096": {
+ "content": "<|reserved_special_token_88|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128097": {
+ "content": "<|reserved_special_token_89|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128098": {
+ "content": "<|reserved_special_token_90|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128099": {
+ "content": "<|reserved_special_token_91|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128100": {
+ "content": "<|reserved_special_token_92|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128101": {
+ "content": "<|reserved_special_token_93|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128102": {
+ "content": "<|reserved_special_token_94|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128103": {
+ "content": "<|reserved_special_token_95|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128104": {
+ "content": "<|reserved_special_token_96|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128105": {
+ "content": "<|reserved_special_token_97|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128106": {
+ "content": "<|reserved_special_token_98|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128107": {
+ "content": "<|reserved_special_token_99|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128108": {
+ "content": "<|reserved_special_token_100|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128109": {
+ "content": "<|reserved_special_token_101|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128110": {
+ "content": "<|reserved_special_token_102|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128111": {
+ "content": "<|reserved_special_token_103|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128112": {
+ "content": "<|reserved_special_token_104|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128113": {
+ "content": "<|reserved_special_token_105|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128114": {
+ "content": "<|reserved_special_token_106|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128115": {
+ "content": "<|reserved_special_token_107|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128116": {
+ "content": "<|reserved_special_token_108|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128117": {
+ "content": "<|reserved_special_token_109|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128118": {
+ "content": "<|reserved_special_token_110|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128119": {
+ "content": "<|reserved_special_token_111|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128120": {
+ "content": "<|reserved_special_token_112|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128121": {
+ "content": "<|reserved_special_token_113|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128122": {
+ "content": "<|reserved_special_token_114|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128123": {
+ "content": "<|reserved_special_token_115|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128124": {
+ "content": "<|reserved_special_token_116|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128125": {
+ "content": "<|reserved_special_token_117|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128126": {
+ "content": "<|reserved_special_token_118|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128127": {
+ "content": "<|reserved_special_token_119|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128128": {
+ "content": "<|reserved_special_token_120|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128129": {
+ "content": "<|reserved_special_token_121|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128130": {
+ "content": "<|reserved_special_token_122|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128131": {
+ "content": "<|reserved_special_token_123|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128132": {
+ "content": "<|reserved_special_token_124|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128133": {
+ "content": "<|reserved_special_token_125|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128134": {
+ "content": "<|reserved_special_token_126|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128135": {
+ "content": "<|reserved_special_token_127|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128136": {
+ "content": "<|reserved_special_token_128|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128137": {
+ "content": "<|reserved_special_token_129|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128138": {
+ "content": "<|reserved_special_token_130|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128139": {
+ "content": "<|reserved_special_token_131|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128140": {
+ "content": "<|reserved_special_token_132|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128141": {
+ "content": "<|reserved_special_token_133|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128142": {
+ "content": "<|reserved_special_token_134|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128143": {
+ "content": "<|reserved_special_token_135|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128144": {
+ "content": "<|reserved_special_token_136|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128145": {
+ "content": "<|reserved_special_token_137|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128146": {
+ "content": "<|reserved_special_token_138|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128147": {
+ "content": "<|reserved_special_token_139|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128148": {
+ "content": "<|reserved_special_token_140|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128149": {
+ "content": "<|reserved_special_token_141|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128150": {
+ "content": "<|reserved_special_token_142|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128151": {
+ "content": "<|reserved_special_token_143|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128152": {
+ "content": "<|reserved_special_token_144|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128153": {
+ "content": "<|reserved_special_token_145|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128154": {
+ "content": "<|reserved_special_token_146|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128155": {
+ "content": "<|reserved_special_token_147|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128156": {
+ "content": "<|reserved_special_token_148|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128157": {
+ "content": "<|reserved_special_token_149|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128158": {
+ "content": "<|reserved_special_token_150|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128159": {
+ "content": "<|reserved_special_token_151|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128160": {
+ "content": "<|reserved_special_token_152|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128161": {
+ "content": "<|reserved_special_token_153|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128162": {
+ "content": "<|reserved_special_token_154|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128163": {
+ "content": "<|reserved_special_token_155|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128164": {
+ "content": "<|reserved_special_token_156|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128165": {
+ "content": "<|reserved_special_token_157|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128166": {
+ "content": "<|reserved_special_token_158|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128167": {
+ "content": "<|reserved_special_token_159|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128168": {
+ "content": "<|reserved_special_token_160|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128169": {
+ "content": "<|reserved_special_token_161|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128170": {
+ "content": "<|reserved_special_token_162|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128171": {
+ "content": "<|reserved_special_token_163|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128172": {
+ "content": "<|reserved_special_token_164|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128173": {
+ "content": "<|reserved_special_token_165|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128174": {
+ "content": "<|reserved_special_token_166|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128175": {
+ "content": "<|reserved_special_token_167|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128176": {
+ "content": "<|reserved_special_token_168|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128177": {
+ "content": "<|reserved_special_token_169|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128178": {
+ "content": "<|reserved_special_token_170|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128179": {
+ "content": "<|reserved_special_token_171|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128180": {
+ "content": "<|reserved_special_token_172|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128181": {
+ "content": "<|reserved_special_token_173|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128182": {
+ "content": "<|reserved_special_token_174|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128183": {
+ "content": "<|reserved_special_token_175|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128184": {
+ "content": "<|reserved_special_token_176|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128185": {
+ "content": "<|reserved_special_token_177|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128186": {
+ "content": "<|reserved_special_token_178|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128187": {
+ "content": "<|reserved_special_token_179|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128188": {
+ "content": "<|reserved_special_token_180|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128189": {
+ "content": "<|reserved_special_token_181|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128190": {
+ "content": "<|reserved_special_token_182|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128191": {
+ "content": "<|reserved_special_token_183|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128192": {
+ "content": "<|reserved_special_token_184|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128193": {
+ "content": "<|reserved_special_token_185|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128194": {
+ "content": "<|reserved_special_token_186|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128195": {
+ "content": "<|reserved_special_token_187|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128196": {
+ "content": "<|reserved_special_token_188|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128197": {
+ "content": "<|reserved_special_token_189|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128198": {
+ "content": "<|reserved_special_token_190|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128199": {
+ "content": "<|reserved_special_token_191|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128200": {
+ "content": "<|reserved_special_token_192|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128201": {
+ "content": "<|reserved_special_token_193|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128202": {
+ "content": "<|reserved_special_token_194|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128203": {
+ "content": "<|reserved_special_token_195|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128204": {
+ "content": "<|reserved_special_token_196|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128205": {
+ "content": "<|reserved_special_token_197|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128206": {
+ "content": "<|reserved_special_token_198|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128207": {
+ "content": "<|reserved_special_token_199|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128208": {
+ "content": "<|reserved_special_token_200|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128209": {
+ "content": "<|reserved_special_token_201|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128210": {
+ "content": "<|reserved_special_token_202|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128211": {
+ "content": "<|reserved_special_token_203|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128212": {
+ "content": "<|reserved_special_token_204|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128213": {
+ "content": "<|reserved_special_token_205|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128214": {
+ "content": "<|reserved_special_token_206|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128215": {
+ "content": "<|reserved_special_token_207|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128216": {
+ "content": "<|reserved_special_token_208|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128217": {
+ "content": "<|reserved_special_token_209|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128218": {
+ "content": "<|reserved_special_token_210|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128219": {
+ "content": "<|reserved_special_token_211|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128220": {
+ "content": "<|reserved_special_token_212|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128221": {
+ "content": "<|reserved_special_token_213|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128222": {
+ "content": "<|reserved_special_token_214|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128223": {
+ "content": "<|reserved_special_token_215|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128224": {
+ "content": "<|reserved_special_token_216|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128225": {
+ "content": "<|reserved_special_token_217|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128226": {
+ "content": "<|reserved_special_token_218|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128227": {
+ "content": "<|reserved_special_token_219|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128228": {
+ "content": "<|reserved_special_token_220|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128229": {
+ "content": "<|reserved_special_token_221|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128230": {
+ "content": "<|reserved_special_token_222|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128231": {
+ "content": "<|reserved_special_token_223|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128232": {
+ "content": "<|reserved_special_token_224|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128233": {
+ "content": "<|reserved_special_token_225|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128234": {
+ "content": "<|reserved_special_token_226|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128235": {
+ "content": "<|reserved_special_token_227|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128236": {
+ "content": "<|reserved_special_token_228|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128237": {
+ "content": "<|reserved_special_token_229|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128238": {
+ "content": "<|reserved_special_token_230|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128239": {
+ "content": "<|reserved_special_token_231|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128240": {
+ "content": "<|reserved_special_token_232|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128241": {
+ "content": "<|reserved_special_token_233|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128242": {
+ "content": "<|reserved_special_token_234|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128243": {
+ "content": "<|reserved_special_token_235|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128244": {
+ "content": "<|reserved_special_token_236|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128245": {
+ "content": "<|reserved_special_token_237|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128246": {
+ "content": "<|reserved_special_token_238|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128247": {
+ "content": "<|reserved_special_token_239|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128248": {
+ "content": "<|reserved_special_token_240|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128249": {
+ "content": "<|reserved_special_token_241|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128250": {
+ "content": "<|reserved_special_token_242|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128251": {
+ "content": "<|reserved_special_token_243|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128252": {
+ "content": "<|reserved_special_token_244|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128253": {
+ "content": "<|reserved_special_token_245|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128254": {
+ "content": "<|reserved_special_token_246|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128255": {
+ "content": "<|reserved_special_token_247|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ }
+ },
+ "bos_token": "<|begin_of_text|>",
+  "chat_template": "{{- bos_token }}{%- if messages[0]['role'] == 'system' %}{%- set system_message = messages[0]['content']|trim %}{%- set messages = messages[1:] %}{%- else %}{%- set system_message = \"\" %}{%- endif %}{{- \"<|start_header_id|>system<|end_header_id|>\\n\\n\" }}{{- system_message }}{{- \"<|eot_id|>\" }}{%- for message in messages %}{%- if message['role'] == 'assistant' and '</think>' in message['content'] %}{%- set content = message['content'].split('</think>')[-1].lstrip() %}{%- else %}{%- set content = message['content'] %}{%- endif %}{{- '<|start_header_id|>' + message['role'] + '<|end_header_id|>\\n\\n' + content | trim + '<|eot_id|>' }}{%- endfor %}{%- if add_generation_prompt %}{{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' }}{%- endif %}",
+ "clean_up_tokenization_spaces": true,
+ "eos_token": "<|eot_id|>",
+ "extra_special_tokens": {},
+ "model_input_names": [
+ "input_ids",
+ "attention_mask"
+ ],
+ "model_max_length": 131072,
+ "pad_token": "<|end_of_text|>",
+ "tokenizer_class": "PreTrainedTokenizer"
+}