diff --git a/.gitattributes b/.gitattributes
index a6344aac8c09253b3b630fb776ae94478aa0275b..abb730befc6630b5da88d8ddf4be35960b9ec0b9 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -33,3 +33,10 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
*.zip filter=lfs diff=lfs merge=lfs -text
*.zst filter=lfs diff=lfs merge=lfs -text
*tfevents* filter=lfs diff=lfs merge=lfs -text
+checkpoint-144/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+checkpoint-216/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+checkpoint-288/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+checkpoint-360/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+checkpoint-432/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+checkpoint-72/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+tokenizer.json filter=lfs diff=lfs merge=lfs -text
diff --git a/README.md b/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..af3e54c9d2a29d3856adc4e075f0d8fc7d7ab5f7
--- /dev/null
+++ b/README.md
@@ -0,0 +1,146 @@
+---
+library_name: peft
+license: llama3.3
+base_model: meta-llama/Llama-3.3-70B-Instruct
+tags:
+- generated_from_trainer
+datasets:
+- ugaoo/multimedqa_wrongllama3370
+model-index:
+- name: out/multimedqa_wrongllama3370
+ results: []
+---
+
+
+
+[
](https://github.com/axolotl-ai-cloud/axolotl)
+See axolotl config
+
+axolotl version: `0.8.0.dev0`
+```yaml
+base_model: meta-llama/Llama-3.3-70B-Instruct
+model_type: AutoModelForCausalLM
+tokenizer_type: AutoTokenizer
+trust_remote_code: true
+
+load_in_8bit: false
+load_in_4bit: true
+strict: false
+
+datasets:
+ - path: ugaoo/multimedqa_wrongllama3370
+ type: alpaca
+val_set_size: 0
+output_dir: ./out/multimedqa_wrongllama3370
+
+sequence_len: 4000
+sample_packing: true
+pad_to_sequence_len: true
+
+adapter: qlora
+lora_r: 256
+lora_alpha: 512
+lora_dropout: 0.05
+lora_target_linear: true
+lora_target_modules:
+ - q_proj
+ - k_proj
+ - v_proj
+ - o_proj
+ - up_proj
+ - down_proj
+ - gate_proj
+lora_modules_to_save:
+ - embed_tokens
+ - lm_head
+
+wandb_project: cosmosearch
+wandb_entity:
+wandb_watch:
+wandb_name: multimedqa_wrongllama3370_llama33
+wandb_log_model:
+
+gradient_accumulation_steps: 3
+micro_batch_size: 4
+num_epochs: 6
+optimizer: adamw_torch
+lr_scheduler: cosine
+learning_rate: 5e-6
+
+train_on_inputs: false
+group_by_length: false
+bf16: auto
+fp16: false
+tf32: false
+
+gradient_checkpointing: true
+early_stopping_patience:
+resume_from_checkpoint:
+logging_steps: 1
+xformers_attention:
+flash_attention: true
+
+warmup_steps: 100
+evals_per_epoch: 6
+eval_table_size:
+saves_per_epoch: 1
+debug:
+deepspeed:
+weight_decay: 0.0
+fsdp:
+fsdp_config:
+save_total_limit: 6
+special_tokens:
+ pad_token: <|end_of_text|>
+
+```
+
+
+
+# out/multimedqa_wrongllama3370
+
+This model is a fine-tuned version of [meta-llama/Llama-3.3-70B-Instruct](https://huggingface.co/meta-llama/Llama-3.3-70B-Instruct) on the ugaoo/multimedqa_wrongllama3370 dataset.
+
+## Model description
+
+More information needed
+
+## Intended uses & limitations
+
+More information needed
+
+## Training and evaluation data
+
+More information needed
+
+## Training procedure
+
+### Training hyperparameters
+
+The following hyperparameters were used during training:
+- learning_rate: 5e-06
+- train_batch_size: 4
+- eval_batch_size: 4
+- seed: 42
+- distributed_type: multi-GPU
+- num_devices: 3
+- gradient_accumulation_steps: 3
+- total_train_batch_size: 36
+- total_eval_batch_size: 12
+- optimizer: Use OptimizerNames.ADAMW_TORCH with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments
+- lr_scheduler_type: cosine
+- lr_scheduler_warmup_steps: 100
+- num_epochs: 6.0
+
+### Training results
+
+
+
+### Framework versions
+
+- PEFT 0.15.0
+- Transformers 4.49.0
+- Pytorch 2.5.1+cu124
+- Datasets 3.4.1
+- Tokenizers 0.21.1
\ No newline at end of file
diff --git a/adapter_config.json b/adapter_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..538b369b5129fb972c782e6ccfd589054540e1af
--- /dev/null
+++ b/adapter_config.json
@@ -0,0 +1,42 @@
+{
+ "alpha_pattern": {},
+ "auto_mapping": null,
+ "base_model_name_or_path": "meta-llama/Llama-3.3-70B-Instruct",
+ "bias": "none",
+ "corda_config": null,
+ "eva_config": null,
+ "exclude_modules": null,
+ "fan_in_fan_out": null,
+ "inference_mode": true,
+ "init_lora_weights": true,
+ "layer_replication": null,
+ "layers_pattern": null,
+ "layers_to_transform": null,
+ "loftq_config": {},
+ "lora_alpha": 512,
+ "lora_bias": false,
+ "lora_dropout": 0.05,
+ "megatron_config": null,
+ "megatron_core": "megatron.core",
+ "modules_to_save": [
+ "embed_tokens",
+ "lm_head"
+ ],
+ "peft_type": "LORA",
+ "r": 256,
+ "rank_pattern": {},
+ "revision": null,
+ "target_modules": [
+ "o_proj",
+ "q_proj",
+ "v_proj",
+ "k_proj",
+ "up_proj",
+ "down_proj",
+ "gate_proj"
+ ],
+ "task_type": "CAUSAL_LM",
+ "trainable_token_indices": null,
+ "use_dora": false,
+ "use_rslora": false
+}
\ No newline at end of file
diff --git a/adapter_model.safetensors b/adapter_model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..d1b4b5e90cad3f475a12c76299351098820ed1c8
--- /dev/null
+++ b/adapter_model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:88e1a2c48f01ab33b45cd092a26df5e1d3846df9491f36c1d0732fad8129233e
+size 10829849744
diff --git a/checkpoint-144/README.md b/checkpoint-144/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..1b184114a0c28ed3e4c082c18486736dc818166d
--- /dev/null
+++ b/checkpoint-144/README.md
@@ -0,0 +1,202 @@
+---
+base_model: meta-llama/Llama-3.3-70B-Instruct
+library_name: peft
+---
+
+# Model Card for Model ID
+
+
+
+
+
+## Model Details
+
+### Model Description
+
+
+
+
+
+- **Developed by:** [More Information Needed]
+- **Funded by [optional]:** [More Information Needed]
+- **Shared by [optional]:** [More Information Needed]
+- **Model type:** [More Information Needed]
+- **Language(s) (NLP):** [More Information Needed]
+- **License:** [More Information Needed]
+- **Finetuned from model [optional]:** [More Information Needed]
+
+### Model Sources [optional]
+
+
+
+- **Repository:** [More Information Needed]
+- **Paper [optional]:** [More Information Needed]
+- **Demo [optional]:** [More Information Needed]
+
+## Uses
+
+
+
+### Direct Use
+
+
+
+[More Information Needed]
+
+### Downstream Use [optional]
+
+
+
+[More Information Needed]
+
+### Out-of-Scope Use
+
+
+
+[More Information Needed]
+
+## Bias, Risks, and Limitations
+
+
+
+[More Information Needed]
+
+### Recommendations
+
+
+
+Users (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations.
+
+## How to Get Started with the Model
+
+Use the code below to get started with the model.
+
+[More Information Needed]
+
+## Training Details
+
+### Training Data
+
+
+
+[More Information Needed]
+
+### Training Procedure
+
+
+
+#### Preprocessing [optional]
+
+[More Information Needed]
+
+
+#### Training Hyperparameters
+
+- **Training regime:** [More Information Needed]
+
+#### Speeds, Sizes, Times [optional]
+
+
+
+[More Information Needed]
+
+## Evaluation
+
+
+
+### Testing Data, Factors & Metrics
+
+#### Testing Data
+
+
+
+[More Information Needed]
+
+#### Factors
+
+
+
+[More Information Needed]
+
+#### Metrics
+
+
+
+[More Information Needed]
+
+### Results
+
+[More Information Needed]
+
+#### Summary
+
+
+
+## Model Examination [optional]
+
+
+
+[More Information Needed]
+
+## Environmental Impact
+
+
+
+Carbon emissions can be estimated using the [Machine Learning Impact calculator](https://mlco2.github.io/impact#compute) presented in [Lacoste et al. (2019)](https://arxiv.org/abs/1910.09700).
+
+- **Hardware Type:** [More Information Needed]
+- **Hours used:** [More Information Needed]
+- **Cloud Provider:** [More Information Needed]
+- **Compute Region:** [More Information Needed]
+- **Carbon Emitted:** [More Information Needed]
+
+## Technical Specifications [optional]
+
+### Model Architecture and Objective
+
+[More Information Needed]
+
+### Compute Infrastructure
+
+[More Information Needed]
+
+#### Hardware
+
+[More Information Needed]
+
+#### Software
+
+[More Information Needed]
+
+## Citation [optional]
+
+
+
+**BibTeX:**
+
+[More Information Needed]
+
+**APA:**
+
+[More Information Needed]
+
+## Glossary [optional]
+
+
+
+[More Information Needed]
+
+## More Information [optional]
+
+[More Information Needed]
+
+## Model Card Authors [optional]
+
+[More Information Needed]
+
+## Model Card Contact
+
+[More Information Needed]
+### Framework versions
+
+- PEFT 0.15.0
\ No newline at end of file
diff --git a/checkpoint-144/adapter_config.json b/checkpoint-144/adapter_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..538b369b5129fb972c782e6ccfd589054540e1af
--- /dev/null
+++ b/checkpoint-144/adapter_config.json
@@ -0,0 +1,42 @@
+{
+ "alpha_pattern": {},
+ "auto_mapping": null,
+ "base_model_name_or_path": "meta-llama/Llama-3.3-70B-Instruct",
+ "bias": "none",
+ "corda_config": null,
+ "eva_config": null,
+ "exclude_modules": null,
+ "fan_in_fan_out": null,
+ "inference_mode": true,
+ "init_lora_weights": true,
+ "layer_replication": null,
+ "layers_pattern": null,
+ "layers_to_transform": null,
+ "loftq_config": {},
+ "lora_alpha": 512,
+ "lora_bias": false,
+ "lora_dropout": 0.05,
+ "megatron_config": null,
+ "megatron_core": "megatron.core",
+ "modules_to_save": [
+ "embed_tokens",
+ "lm_head"
+ ],
+ "peft_type": "LORA",
+ "r": 256,
+ "rank_pattern": {},
+ "revision": null,
+ "target_modules": [
+ "o_proj",
+ "q_proj",
+ "v_proj",
+ "k_proj",
+ "up_proj",
+ "down_proj",
+ "gate_proj"
+ ],
+ "task_type": "CAUSAL_LM",
+ "trainable_token_indices": null,
+ "use_dora": false,
+ "use_rslora": false
+}
\ No newline at end of file
diff --git a/checkpoint-144/adapter_model.safetensors b/checkpoint-144/adapter_model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..9356085104df79a5861529947dd0a02ec2ff896d
--- /dev/null
+++ b/checkpoint-144/adapter_model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:60ab0c477f0ba08a025de003f380f8d5e9502a0ccb30a23df3faa666e1879b15
+size 10829849744
diff --git a/checkpoint-144/global_step143/bf16_zero_pp_rank_0_mp_rank_00_optim_states.pt b/checkpoint-144/global_step143/bf16_zero_pp_rank_0_mp_rank_00_optim_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..52f73e33fdfcf63ccee8b0739f5e6ec967111c75
--- /dev/null
+++ b/checkpoint-144/global_step143/bf16_zero_pp_rank_0_mp_rank_00_optim_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:02b82f769bfa77ec3d95e9e00d37b4dd0561c17e1676fe7bdec045b225236168
+size 21659418140
diff --git a/checkpoint-144/global_step143/bf16_zero_pp_rank_1_mp_rank_00_optim_states.pt b/checkpoint-144/global_step143/bf16_zero_pp_rank_1_mp_rank_00_optim_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..4ffe5ff0a7dfde1ae74d0fbdd4e3c075d022e2b6
--- /dev/null
+++ b/checkpoint-144/global_step143/bf16_zero_pp_rank_1_mp_rank_00_optim_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3f219e10c3dcaf196f451d5c94f7874cf958238a876ad87cf061e1d4ad9a4b37
+size 21659457372
diff --git a/checkpoint-144/global_step143/bf16_zero_pp_rank_2_mp_rank_00_optim_states.pt b/checkpoint-144/global_step143/bf16_zero_pp_rank_2_mp_rank_00_optim_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..161e5a6ae147b044ac4e39075a4724eb55eb7af5
--- /dev/null
+++ b/checkpoint-144/global_step143/bf16_zero_pp_rank_2_mp_rank_00_optim_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:195aeb2c10e2dca1ea6fd44766ca0133bd1e7c27580aa08596c78f71edf5bc37
+size 21659417820
diff --git a/checkpoint-144/global_step143/mp_rank_00_model_states.pt b/checkpoint-144/global_step143/mp_rank_00_model_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..01dd78f3e0ced35836f3b57a0bc29ccbd246ebc7
--- /dev/null
+++ b/checkpoint-144/global_step143/mp_rank_00_model_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:30d6b60bec118293910887a16cdba1983bd5cf3ee771dc74c1f68534c152ccc4
+size 11918643933
diff --git a/checkpoint-144/latest b/checkpoint-144/latest
new file mode 100644
index 0000000000000000000000000000000000000000..93407f5a9fdef065b428ddd4b9440e88eb65a982
--- /dev/null
+++ b/checkpoint-144/latest
@@ -0,0 +1 @@
+global_step143
\ No newline at end of file
diff --git a/checkpoint-144/rng_state_0.pth b/checkpoint-144/rng_state_0.pth
new file mode 100644
index 0000000000000000000000000000000000000000..29303ed5d96f7125aa533e8f4f370b3a08291a34
--- /dev/null
+++ b/checkpoint-144/rng_state_0.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:29d6caec6c2fae12f920933e1910316ce6a7afaa0075ccfc80a77180b449b33a
+size 14768
diff --git a/checkpoint-144/rng_state_1.pth b/checkpoint-144/rng_state_1.pth
new file mode 100644
index 0000000000000000000000000000000000000000..34c409fff551eee52ee7d654ec0a4c063de422ce
--- /dev/null
+++ b/checkpoint-144/rng_state_1.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6888aeb8b95843a249b07863b54152098e1ced5d0f5c90f7d9c994720b136098
+size 14768
diff --git a/checkpoint-144/rng_state_2.pth b/checkpoint-144/rng_state_2.pth
new file mode 100644
index 0000000000000000000000000000000000000000..482b4d79a133e24aafa169426c286c433b9db70c
--- /dev/null
+++ b/checkpoint-144/rng_state_2.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f80b53aacb3e90646467b0a36827a0234a1881a7d774b197685ed3999fa2a55e
+size 14768
diff --git a/checkpoint-144/scheduler.pt b/checkpoint-144/scheduler.pt
new file mode 100644
index 0000000000000000000000000000000000000000..e2dde2b518bcd4fd63433ef1bc0e8e93df9dd450
--- /dev/null
+++ b/checkpoint-144/scheduler.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f3c70fd0b9904cd462c44b91c3a6304d50f808b742c89f710f481983baa6497c
+size 1064
diff --git a/checkpoint-144/special_tokens_map.json b/checkpoint-144/special_tokens_map.json
new file mode 100644
index 0000000000000000000000000000000000000000..278b7f0f84be865c4687700ee7b3c63d89a51e18
--- /dev/null
+++ b/checkpoint-144/special_tokens_map.json
@@ -0,0 +1,23 @@
+{
+ "bos_token": {
+ "content": "<|begin_of_text|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "<|eot_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": {
+ "content": "<|end_of_text|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+}
diff --git a/checkpoint-144/tokenizer.json b/checkpoint-144/tokenizer.json
new file mode 100644
index 0000000000000000000000000000000000000000..1c1d8d5c9024994f1d3b00f9662b8dd89ca13cf2
--- /dev/null
+++ b/checkpoint-144/tokenizer.json
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6b9e4e7fb171f92fd137b777cc2714bf87d11576700a1dcd7a399e7bbe39537b
+size 17209920
diff --git a/checkpoint-144/tokenizer_config.json b/checkpoint-144/tokenizer_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..ca91a2ef55f4239a7af81d7c9abb05f53621a07b
--- /dev/null
+++ b/checkpoint-144/tokenizer_config.json
@@ -0,0 +1,2064 @@
+{
+ "added_tokens_decoder": {
+ "128000": {
+ "content": "<|begin_of_text|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128001": {
+ "content": "<|end_of_text|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128002": {
+ "content": "<|reserved_special_token_0|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128003": {
+ "content": "<|reserved_special_token_1|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128004": {
+ "content": "<|finetune_right_pad_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128005": {
+ "content": "<|reserved_special_token_2|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128006": {
+ "content": "<|start_header_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128007": {
+ "content": "<|end_header_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128008": {
+ "content": "<|eom_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128009": {
+ "content": "<|eot_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128010": {
+ "content": "<|python_tag|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128011": {
+ "content": "<|reserved_special_token_3|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128012": {
+ "content": "<|reserved_special_token_4|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128013": {
+ "content": "<|reserved_special_token_5|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128014": {
+ "content": "<|reserved_special_token_6|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128015": {
+ "content": "<|reserved_special_token_7|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128016": {
+ "content": "<|reserved_special_token_8|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128017": {
+ "content": "<|reserved_special_token_9|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128018": {
+ "content": "<|reserved_special_token_10|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128019": {
+ "content": "<|reserved_special_token_11|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128020": {
+ "content": "<|reserved_special_token_12|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128021": {
+ "content": "<|reserved_special_token_13|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128022": {
+ "content": "<|reserved_special_token_14|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128023": {
+ "content": "<|reserved_special_token_15|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128024": {
+ "content": "<|reserved_special_token_16|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128025": {
+ "content": "<|reserved_special_token_17|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128026": {
+ "content": "<|reserved_special_token_18|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128027": {
+ "content": "<|reserved_special_token_19|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128028": {
+ "content": "<|reserved_special_token_20|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128029": {
+ "content": "<|reserved_special_token_21|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128030": {
+ "content": "<|reserved_special_token_22|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128031": {
+ "content": "<|reserved_special_token_23|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128032": {
+ "content": "<|reserved_special_token_24|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128033": {
+ "content": "<|reserved_special_token_25|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128034": {
+ "content": "<|reserved_special_token_26|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128035": {
+ "content": "<|reserved_special_token_27|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128036": {
+ "content": "<|reserved_special_token_28|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128037": {
+ "content": "<|reserved_special_token_29|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128038": {
+ "content": "<|reserved_special_token_30|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128039": {
+ "content": "<|reserved_special_token_31|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128040": {
+ "content": "<|reserved_special_token_32|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128041": {
+ "content": "<|reserved_special_token_33|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128042": {
+ "content": "<|reserved_special_token_34|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128043": {
+ "content": "<|reserved_special_token_35|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128044": {
+ "content": "<|reserved_special_token_36|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128045": {
+ "content": "<|reserved_special_token_37|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128046": {
+ "content": "<|reserved_special_token_38|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128047": {
+ "content": "<|reserved_special_token_39|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128048": {
+ "content": "<|reserved_special_token_40|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128049": {
+ "content": "<|reserved_special_token_41|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128050": {
+ "content": "<|reserved_special_token_42|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128051": {
+ "content": "<|reserved_special_token_43|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128052": {
+ "content": "<|reserved_special_token_44|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128053": {
+ "content": "<|reserved_special_token_45|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128054": {
+ "content": "<|reserved_special_token_46|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128055": {
+ "content": "<|reserved_special_token_47|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128056": {
+ "content": "<|reserved_special_token_48|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128057": {
+ "content": "<|reserved_special_token_49|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128058": {
+ "content": "<|reserved_special_token_50|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128059": {
+ "content": "<|reserved_special_token_51|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128060": {
+ "content": "<|reserved_special_token_52|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128061": {
+ "content": "<|reserved_special_token_53|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128062": {
+ "content": "<|reserved_special_token_54|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128063": {
+ "content": "<|reserved_special_token_55|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128064": {
+ "content": "<|reserved_special_token_56|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128065": {
+ "content": "<|reserved_special_token_57|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128066": {
+ "content": "<|reserved_special_token_58|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128067": {
+ "content": "<|reserved_special_token_59|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128068": {
+ "content": "<|reserved_special_token_60|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128069": {
+ "content": "<|reserved_special_token_61|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128070": {
+ "content": "<|reserved_special_token_62|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128071": {
+ "content": "<|reserved_special_token_63|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128072": {
+ "content": "<|reserved_special_token_64|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128073": {
+ "content": "<|reserved_special_token_65|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128074": {
+ "content": "<|reserved_special_token_66|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128075": {
+ "content": "<|reserved_special_token_67|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128076": {
+ "content": "<|reserved_special_token_68|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128077": {
+ "content": "<|reserved_special_token_69|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128078": {
+ "content": "<|reserved_special_token_70|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128079": {
+ "content": "<|reserved_special_token_71|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128080": {
+ "content": "<|reserved_special_token_72|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128081": {
+ "content": "<|reserved_special_token_73|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128082": {
+ "content": "<|reserved_special_token_74|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128083": {
+ "content": "<|reserved_special_token_75|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128084": {
+ "content": "<|reserved_special_token_76|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128085": {
+ "content": "<|reserved_special_token_77|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128086": {
+ "content": "<|reserved_special_token_78|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128087": {
+ "content": "<|reserved_special_token_79|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128088": {
+ "content": "<|reserved_special_token_80|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128089": {
+ "content": "<|reserved_special_token_81|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128090": {
+ "content": "<|reserved_special_token_82|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128091": {
+ "content": "<|reserved_special_token_83|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128092": {
+ "content": "<|reserved_special_token_84|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128093": {
+ "content": "<|reserved_special_token_85|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128094": {
+ "content": "<|reserved_special_token_86|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128095": {
+ "content": "<|reserved_special_token_87|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128096": {
+ "content": "<|reserved_special_token_88|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128097": {
+ "content": "<|reserved_special_token_89|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128098": {
+ "content": "<|reserved_special_token_90|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128099": {
+ "content": "<|reserved_special_token_91|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128100": {
+ "content": "<|reserved_special_token_92|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128101": {
+ "content": "<|reserved_special_token_93|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128102": {
+ "content": "<|reserved_special_token_94|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128103": {
+ "content": "<|reserved_special_token_95|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128104": {
+ "content": "<|reserved_special_token_96|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128105": {
+ "content": "<|reserved_special_token_97|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128106": {
+ "content": "<|reserved_special_token_98|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128107": {
+ "content": "<|reserved_special_token_99|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128108": {
+ "content": "<|reserved_special_token_100|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128109": {
+ "content": "<|reserved_special_token_101|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128110": {
+ "content": "<|reserved_special_token_102|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128111": {
+ "content": "<|reserved_special_token_103|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128112": {
+ "content": "<|reserved_special_token_104|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128113": {
+ "content": "<|reserved_special_token_105|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128114": {
+ "content": "<|reserved_special_token_106|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128115": {
+ "content": "<|reserved_special_token_107|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128116": {
+ "content": "<|reserved_special_token_108|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128117": {
+ "content": "<|reserved_special_token_109|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128118": {
+ "content": "<|reserved_special_token_110|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128119": {
+ "content": "<|reserved_special_token_111|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128120": {
+ "content": "<|reserved_special_token_112|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128121": {
+ "content": "<|reserved_special_token_113|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128122": {
+ "content": "<|reserved_special_token_114|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128123": {
+ "content": "<|reserved_special_token_115|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128124": {
+ "content": "<|reserved_special_token_116|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128125": {
+ "content": "<|reserved_special_token_117|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128126": {
+ "content": "<|reserved_special_token_118|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128127": {
+ "content": "<|reserved_special_token_119|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128128": {
+ "content": "<|reserved_special_token_120|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128129": {
+ "content": "<|reserved_special_token_121|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128130": {
+ "content": "<|reserved_special_token_122|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128131": {
+ "content": "<|reserved_special_token_123|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128132": {
+ "content": "<|reserved_special_token_124|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128133": {
+ "content": "<|reserved_special_token_125|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128134": {
+ "content": "<|reserved_special_token_126|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128135": {
+ "content": "<|reserved_special_token_127|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128136": {
+ "content": "<|reserved_special_token_128|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128137": {
+ "content": "<|reserved_special_token_129|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128138": {
+ "content": "<|reserved_special_token_130|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128139": {
+ "content": "<|reserved_special_token_131|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128140": {
+ "content": "<|reserved_special_token_132|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128141": {
+ "content": "<|reserved_special_token_133|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128142": {
+ "content": "<|reserved_special_token_134|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128143": {
+ "content": "<|reserved_special_token_135|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128144": {
+ "content": "<|reserved_special_token_136|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128145": {
+ "content": "<|reserved_special_token_137|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128146": {
+ "content": "<|reserved_special_token_138|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128147": {
+ "content": "<|reserved_special_token_139|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128148": {
+ "content": "<|reserved_special_token_140|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128149": {
+ "content": "<|reserved_special_token_141|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128150": {
+ "content": "<|reserved_special_token_142|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128151": {
+ "content": "<|reserved_special_token_143|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128152": {
+ "content": "<|reserved_special_token_144|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128153": {
+ "content": "<|reserved_special_token_145|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128154": {
+ "content": "<|reserved_special_token_146|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128155": {
+ "content": "<|reserved_special_token_147|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128156": {
+ "content": "<|reserved_special_token_148|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128157": {
+ "content": "<|reserved_special_token_149|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128158": {
+ "content": "<|reserved_special_token_150|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128159": {
+ "content": "<|reserved_special_token_151|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128160": {
+ "content": "<|reserved_special_token_152|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128161": {
+ "content": "<|reserved_special_token_153|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128162": {
+ "content": "<|reserved_special_token_154|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128163": {
+ "content": "<|reserved_special_token_155|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128164": {
+ "content": "<|reserved_special_token_156|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128165": {
+ "content": "<|reserved_special_token_157|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128166": {
+ "content": "<|reserved_special_token_158|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128167": {
+ "content": "<|reserved_special_token_159|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128168": {
+ "content": "<|reserved_special_token_160|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128169": {
+ "content": "<|reserved_special_token_161|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128170": {
+ "content": "<|reserved_special_token_162|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128171": {
+ "content": "<|reserved_special_token_163|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128172": {
+ "content": "<|reserved_special_token_164|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128173": {
+ "content": "<|reserved_special_token_165|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128174": {
+ "content": "<|reserved_special_token_166|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128175": {
+ "content": "<|reserved_special_token_167|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128176": {
+ "content": "<|reserved_special_token_168|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128177": {
+ "content": "<|reserved_special_token_169|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128178": {
+ "content": "<|reserved_special_token_170|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128179": {
+ "content": "<|reserved_special_token_171|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128180": {
+ "content": "<|reserved_special_token_172|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128181": {
+ "content": "<|reserved_special_token_173|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128182": {
+ "content": "<|reserved_special_token_174|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128183": {
+ "content": "<|reserved_special_token_175|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128184": {
+ "content": "<|reserved_special_token_176|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128185": {
+ "content": "<|reserved_special_token_177|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128186": {
+ "content": "<|reserved_special_token_178|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128187": {
+ "content": "<|reserved_special_token_179|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128188": {
+ "content": "<|reserved_special_token_180|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128189": {
+ "content": "<|reserved_special_token_181|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128190": {
+ "content": "<|reserved_special_token_182|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128191": {
+ "content": "<|reserved_special_token_183|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128192": {
+ "content": "<|reserved_special_token_184|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128193": {
+ "content": "<|reserved_special_token_185|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128194": {
+ "content": "<|reserved_special_token_186|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128195": {
+ "content": "<|reserved_special_token_187|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128196": {
+ "content": "<|reserved_special_token_188|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128197": {
+ "content": "<|reserved_special_token_189|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128198": {
+ "content": "<|reserved_special_token_190|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128199": {
+ "content": "<|reserved_special_token_191|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128200": {
+ "content": "<|reserved_special_token_192|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128201": {
+ "content": "<|reserved_special_token_193|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128202": {
+ "content": "<|reserved_special_token_194|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128203": {
+ "content": "<|reserved_special_token_195|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128204": {
+ "content": "<|reserved_special_token_196|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128205": {
+ "content": "<|reserved_special_token_197|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128206": {
+ "content": "<|reserved_special_token_198|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128207": {
+ "content": "<|reserved_special_token_199|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128208": {
+ "content": "<|reserved_special_token_200|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128209": {
+ "content": "<|reserved_special_token_201|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128210": {
+ "content": "<|reserved_special_token_202|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128211": {
+ "content": "<|reserved_special_token_203|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128212": {
+ "content": "<|reserved_special_token_204|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128213": {
+ "content": "<|reserved_special_token_205|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128214": {
+ "content": "<|reserved_special_token_206|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128215": {
+ "content": "<|reserved_special_token_207|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128216": {
+ "content": "<|reserved_special_token_208|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128217": {
+ "content": "<|reserved_special_token_209|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128218": {
+ "content": "<|reserved_special_token_210|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128219": {
+ "content": "<|reserved_special_token_211|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128220": {
+ "content": "<|reserved_special_token_212|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128221": {
+ "content": "<|reserved_special_token_213|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128222": {
+ "content": "<|reserved_special_token_214|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128223": {
+ "content": "<|reserved_special_token_215|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128224": {
+ "content": "<|reserved_special_token_216|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128225": {
+ "content": "<|reserved_special_token_217|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128226": {
+ "content": "<|reserved_special_token_218|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128227": {
+ "content": "<|reserved_special_token_219|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128228": {
+ "content": "<|reserved_special_token_220|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128229": {
+ "content": "<|reserved_special_token_221|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128230": {
+ "content": "<|reserved_special_token_222|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128231": {
+ "content": "<|reserved_special_token_223|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128232": {
+ "content": "<|reserved_special_token_224|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128233": {
+ "content": "<|reserved_special_token_225|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128234": {
+ "content": "<|reserved_special_token_226|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128235": {
+ "content": "<|reserved_special_token_227|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128236": {
+ "content": "<|reserved_special_token_228|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128237": {
+ "content": "<|reserved_special_token_229|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128238": {
+ "content": "<|reserved_special_token_230|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128239": {
+ "content": "<|reserved_special_token_231|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128240": {
+ "content": "<|reserved_special_token_232|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128241": {
+ "content": "<|reserved_special_token_233|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128242": {
+ "content": "<|reserved_special_token_234|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128243": {
+ "content": "<|reserved_special_token_235|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128244": {
+ "content": "<|reserved_special_token_236|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128245": {
+ "content": "<|reserved_special_token_237|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128246": {
+ "content": "<|reserved_special_token_238|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128247": {
+ "content": "<|reserved_special_token_239|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128248": {
+ "content": "<|reserved_special_token_240|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128249": {
+ "content": "<|reserved_special_token_241|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128250": {
+ "content": "<|reserved_special_token_242|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128251": {
+ "content": "<|reserved_special_token_243|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128252": {
+ "content": "<|reserved_special_token_244|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128253": {
+ "content": "<|reserved_special_token_245|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128254": {
+ "content": "<|reserved_special_token_246|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128255": {
+ "content": "<|reserved_special_token_247|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ }
+ },
+ "bos_token": "<|begin_of_text|>",
+ "chat_template": "{{- bos_token }}\n{%- if custom_tools is defined %}\n {%- set tools = custom_tools %}\n{%- endif %}\n{%- if not tools_in_user_message is defined %}\n {%- set tools_in_user_message = true %}\n{%- endif %}\n{%- if not date_string is defined %}\n {%- set date_string = \"26 Jul 2024\" %}\n{%- endif %}\n{%- if not tools is defined %}\n {%- set tools = none %}\n{%- endif %}\n\n{#- This block extracts the system message, so we can slot it into the right place. #}\n{%- if messages[0]['role'] == 'system' %}\n {%- set system_message = messages[0]['content']|trim %}\n {%- set messages = messages[1:] %}\n{%- else %}\n {%- set system_message = \"\" %}\n{%- endif %}\n\n{#- System message + builtin tools #}\n{{- \"<|start_header_id|>system<|end_header_id|>\\n\\n\" }}\n{%- if builtin_tools is defined or tools is not none %}\n {{- \"Environment: ipython\\n\" }}\n{%- endif %}\n{%- if builtin_tools is defined %}\n {{- \"Tools: \" + builtin_tools | reject('equalto', 'code_interpreter') | join(\", \") + \"\\n\\n\"}}\n{%- endif %}\n{{- \"Cutting Knowledge Date: December 2023\\n\" }}\n{{- \"Today Date: \" + date_string + \"\\n\\n\" }}\n{%- if tools is not none and not tools_in_user_message %}\n {{- \"You have access to the following functions. To call a function, please respond with JSON for a function call.\" }}\n {{- 'Respond in the format {\"name\": function name, \"parameters\": dictionary of argument name and its value}.' 
}}\n {{- \"Do not use variables.\\n\\n\" }}\n {%- for t in tools %}\n {{- t | tojson(indent=4) }}\n {{- \"\\n\\n\" }}\n {%- endfor %}\n{%- endif %}\n{{- system_message }}\n{{- \"<|eot_id|>\" }}\n\n{#- Custom tools are passed in a user message with some extra guidance #}\n{%- if tools_in_user_message and not tools is none %}\n {#- Extract the first user message so we can plug it in here #}\n {%- if messages | length != 0 %}\n {%- set first_user_message = messages[0]['content']|trim %}\n {%- set messages = messages[1:] %}\n {%- else %}\n {{- raise_exception(\"Cannot put tools in the first user message when there's no first user message!\") }}\n{%- endif %}\n {{- '<|start_header_id|>user<|end_header_id|>\\n\\n' -}}\n {{- \"Given the following functions, please respond with a JSON for a function call \" }}\n {{- \"with its proper arguments that best answers the given prompt.\\n\\n\" }}\n {{- 'Respond in the format {\"name\": function name, \"parameters\": dictionary of argument name and its value}.' 
}}\n {{- \"Do not use variables.\\n\\n\" }}\n {%- for t in tools %}\n {{- t | tojson(indent=4) }}\n {{- \"\\n\\n\" }}\n {%- endfor %}\n {{- first_user_message + \"<|eot_id|>\"}}\n{%- endif %}\n\n{%- for message in messages %}\n {%- if not (message.role == 'ipython' or message.role == 'tool' or 'tool_calls' in message) %}\n {{- '<|start_header_id|>' + message['role'] + '<|end_header_id|>\\n\\n'+ message['content'] | trim + '<|eot_id|>' }}\n {%- elif 'tool_calls' in message %}\n {%- if not message.tool_calls|length == 1 %}\n {{- raise_exception(\"This model only supports single tool-calls at once!\") }}\n {%- endif %}\n {%- set tool_call = message.tool_calls[0].function %}\n {%- if builtin_tools is defined and tool_call.name in builtin_tools %}\n {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' -}}\n {{- \"<|python_tag|>\" + tool_call.name + \".call(\" }}\n {%- for arg_name, arg_val in tool_call.arguments | items %}\n {{- arg_name + '=\"' + arg_val + '\"' }}\n {%- if not loop.last %}\n {{- \", \" }}\n {%- endif %}\n {%- endfor %}\n {{- \")\" }}\n {%- else %}\n {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' -}}\n {{- '{\"name\": \"' + tool_call.name + '\", ' }}\n {{- '\"parameters\": ' }}\n {{- tool_call.arguments | tojson }}\n {{- \"}\" }}\n {%- endif %}\n {%- if builtin_tools is defined %}\n {#- This means we're in ipython mode #}\n {{- \"<|eom_id|>\" }}\n {%- else %}\n {{- \"<|eot_id|>\" }}\n {%- endif %}\n {%- elif message.role == \"tool\" or message.role == \"ipython\" %}\n {{- \"<|start_header_id|>ipython<|end_header_id|>\\n\\n\" }}\n {%- if message.content is mapping or message.content is iterable %}\n {{- message.content | tojson }}\n {%- else %}\n {{- message.content }}\n {%- endif %}\n {{- \"<|eot_id|>\" }}\n {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' }}\n{%- endif %}\n",
+ "clean_up_tokenization_spaces": true,
+ "eos_token": "<|eot_id|>",
+ "extra_special_tokens": {},
+ "model_input_names": [
+ "input_ids",
+ "attention_mask"
+ ],
+ "model_max_length": 131072,
+ "pad_token": "<|end_of_text|>",
+ "tokenizer_class": "PreTrainedTokenizer"
+}
diff --git a/checkpoint-144/trainer_state.json b/checkpoint-144/trainer_state.json
new file mode 100644
index 0000000000000000000000000000000000000000..e2255b837d08bb2221da44a73db1e3ed25c93074
--- /dev/null
+++ b/checkpoint-144/trainer_state.json
@@ -0,0 +1,1041 @@
+{
+ "best_metric": null,
+ "best_model_checkpoint": null,
+ "epoch": 1.9770642201834863,
+ "eval_steps": 500,
+ "global_step": 144,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ {
+ "epoch": 0.013761467889908258,
+ "grad_norm": 38.02450942993164,
+ "learning_rate": 5.0000000000000004e-08,
+ "loss": 3.125,
+ "step": 1
+ },
+ {
+ "epoch": 0.027522935779816515,
+ "grad_norm": 37.864768981933594,
+ "learning_rate": 1.0000000000000001e-07,
+ "loss": 3.0998,
+ "step": 2
+ },
+ {
+ "epoch": 0.04128440366972477,
+ "grad_norm": 38.34700012207031,
+ "learning_rate": 1.5000000000000002e-07,
+ "loss": 3.1533,
+ "step": 3
+ },
+ {
+ "epoch": 0.05504587155963303,
+ "grad_norm": 38.33641815185547,
+ "learning_rate": 2.0000000000000002e-07,
+ "loss": 3.1542,
+ "step": 4
+ },
+ {
+ "epoch": 0.06880733944954129,
+ "grad_norm": 38.064449310302734,
+ "learning_rate": 2.5000000000000004e-07,
+ "loss": 3.1153,
+ "step": 5
+ },
+ {
+ "epoch": 0.08256880733944955,
+ "grad_norm": 37.92089080810547,
+ "learning_rate": 3.0000000000000004e-07,
+ "loss": 3.0867,
+ "step": 6
+ },
+ {
+ "epoch": 0.0963302752293578,
+ "grad_norm": 38.120323181152344,
+ "learning_rate": 3.5000000000000004e-07,
+ "loss": 3.093,
+ "step": 7
+ },
+ {
+ "epoch": 0.11009174311926606,
+ "grad_norm": 38.47222900390625,
+ "learning_rate": 4.0000000000000003e-07,
+ "loss": 3.1056,
+ "step": 8
+ },
+ {
+ "epoch": 0.12385321100917432,
+ "grad_norm": 38.013702392578125,
+ "learning_rate": 4.5000000000000003e-07,
+ "loss": 3.0474,
+ "step": 9
+ },
+ {
+ "epoch": 0.13761467889908258,
+ "grad_norm": 38.17593002319336,
+ "learning_rate": 5.000000000000001e-07,
+ "loss": 3.0264,
+ "step": 10
+ },
+ {
+ "epoch": 0.15137614678899083,
+ "grad_norm": 38.60066604614258,
+ "learning_rate": 5.5e-07,
+ "loss": 2.9404,
+ "step": 11
+ },
+ {
+ "epoch": 0.1651376146788991,
+ "grad_norm": 38.83498764038086,
+ "learning_rate": 6.000000000000001e-07,
+ "loss": 2.9571,
+ "step": 12
+ },
+ {
+ "epoch": 0.17889908256880735,
+ "grad_norm": 38.942047119140625,
+ "learning_rate": 6.5e-07,
+ "loss": 2.8849,
+ "step": 13
+ },
+ {
+ "epoch": 0.1926605504587156,
+ "grad_norm": 38.0286865234375,
+ "learning_rate": 7.000000000000001e-07,
+ "loss": 2.7486,
+ "step": 14
+ },
+ {
+ "epoch": 0.20642201834862386,
+ "grad_norm": 38.31856155395508,
+ "learning_rate": 7.5e-07,
+ "loss": 2.6876,
+ "step": 15
+ },
+ {
+ "epoch": 0.22018348623853212,
+ "grad_norm": 38.124759674072266,
+ "learning_rate": 8.000000000000001e-07,
+ "loss": 2.5992,
+ "step": 16
+ },
+ {
+ "epoch": 0.23394495412844038,
+ "grad_norm": 36.59762191772461,
+ "learning_rate": 8.500000000000001e-07,
+ "loss": 2.4063,
+ "step": 17
+ },
+ {
+ "epoch": 0.24770642201834864,
+ "grad_norm": 36.63874435424805,
+ "learning_rate": 9.000000000000001e-07,
+ "loss": 2.3109,
+ "step": 18
+ },
+ {
+ "epoch": 0.26146788990825687,
+ "grad_norm": 36.768577575683594,
+ "learning_rate": 9.500000000000001e-07,
+ "loss": 2.1677,
+ "step": 19
+ },
+ {
+ "epoch": 0.27522935779816515,
+ "grad_norm": 36.187137603759766,
+ "learning_rate": 1.0000000000000002e-06,
+ "loss": 1.9551,
+ "step": 20
+ },
+ {
+ "epoch": 0.2889908256880734,
+ "grad_norm": 35.55617141723633,
+ "learning_rate": 1.0500000000000001e-06,
+ "loss": 1.8053,
+ "step": 21
+ },
+ {
+ "epoch": 0.30275229357798167,
+ "grad_norm": 34.60952377319336,
+ "learning_rate": 1.1e-06,
+ "loss": 1.5654,
+ "step": 22
+ },
+ {
+ "epoch": 0.3165137614678899,
+ "grad_norm": 33.69620895385742,
+ "learning_rate": 1.1500000000000002e-06,
+ "loss": 1.3454,
+ "step": 23
+ },
+ {
+ "epoch": 0.3302752293577982,
+ "grad_norm": 34.33642578125,
+ "learning_rate": 1.2000000000000002e-06,
+ "loss": 1.2417,
+ "step": 24
+ },
+ {
+ "epoch": 0.3440366972477064,
+ "grad_norm": 31.23066520690918,
+ "learning_rate": 1.25e-06,
+ "loss": 0.9839,
+ "step": 25
+ },
+ {
+ "epoch": 0.3577981651376147,
+ "grad_norm": 25.810237884521484,
+ "learning_rate": 1.3e-06,
+ "loss": 0.793,
+ "step": 26
+ },
+ {
+ "epoch": 0.37155963302752293,
+ "grad_norm": 23.06692886352539,
+ "learning_rate": 1.3500000000000002e-06,
+ "loss": 0.6082,
+ "step": 27
+ },
+ {
+ "epoch": 0.3853211009174312,
+ "grad_norm": 19.828439712524414,
+ "learning_rate": 1.4000000000000001e-06,
+ "loss": 0.4845,
+ "step": 28
+ },
+ {
+ "epoch": 0.39908256880733944,
+ "grad_norm": 14.150300025939941,
+ "learning_rate": 1.45e-06,
+ "loss": 0.348,
+ "step": 29
+ },
+ {
+ "epoch": 0.41284403669724773,
+ "grad_norm": 9.044266700744629,
+ "learning_rate": 1.5e-06,
+ "loss": 0.2516,
+ "step": 30
+ },
+ {
+ "epoch": 0.42660550458715596,
+ "grad_norm": 5.704404354095459,
+ "learning_rate": 1.5500000000000002e-06,
+ "loss": 0.177,
+ "step": 31
+ },
+ {
+ "epoch": 0.44036697247706424,
+ "grad_norm": 3.2953503131866455,
+ "learning_rate": 1.6000000000000001e-06,
+ "loss": 0.1391,
+ "step": 32
+ },
+ {
+ "epoch": 0.4541284403669725,
+ "grad_norm": 2.453219413757324,
+ "learning_rate": 1.6500000000000003e-06,
+ "loss": 0.0982,
+ "step": 33
+ },
+ {
+ "epoch": 0.46788990825688076,
+ "grad_norm": 2.0325512886047363,
+ "learning_rate": 1.7000000000000002e-06,
+ "loss": 0.0807,
+ "step": 34
+ },
+ {
+ "epoch": 0.481651376146789,
+ "grad_norm": 1.6322681903839111,
+ "learning_rate": 1.75e-06,
+ "loss": 0.0725,
+ "step": 35
+ },
+ {
+ "epoch": 0.4954128440366973,
+ "grad_norm": 0.9713364839553833,
+ "learning_rate": 1.8000000000000001e-06,
+ "loss": 0.067,
+ "step": 36
+ },
+ {
+ "epoch": 0.5091743119266054,
+ "grad_norm": 0.7980225682258606,
+ "learning_rate": 1.85e-06,
+ "loss": 0.0582,
+ "step": 37
+ },
+ {
+ "epoch": 0.5229357798165137,
+ "grad_norm": 1.0616590976715088,
+ "learning_rate": 1.9000000000000002e-06,
+ "loss": 0.0562,
+ "step": 38
+ },
+ {
+ "epoch": 0.536697247706422,
+ "grad_norm": 1.053462028503418,
+ "learning_rate": 1.9500000000000004e-06,
+ "loss": 0.0537,
+ "step": 39
+ },
+ {
+ "epoch": 0.5504587155963303,
+ "grad_norm": 0.9452660083770752,
+ "learning_rate": 2.0000000000000003e-06,
+ "loss": 0.0602,
+ "step": 40
+ },
+ {
+ "epoch": 0.5642201834862385,
+ "grad_norm": 0.830368161201477,
+ "learning_rate": 2.05e-06,
+ "loss": 0.0549,
+ "step": 41
+ },
+ {
+ "epoch": 0.5779816513761468,
+ "grad_norm": 0.5791187882423401,
+ "learning_rate": 2.1000000000000002e-06,
+ "loss": 0.0479,
+ "step": 42
+ },
+ {
+ "epoch": 0.591743119266055,
+ "grad_norm": 0.44175243377685547,
+ "learning_rate": 2.15e-06,
+ "loss": 0.0461,
+ "step": 43
+ },
+ {
+ "epoch": 0.6055045871559633,
+ "grad_norm": 0.37655699253082275,
+ "learning_rate": 2.2e-06,
+ "loss": 0.043,
+ "step": 44
+ },
+ {
+ "epoch": 0.6192660550458715,
+ "grad_norm": 0.34382495284080505,
+ "learning_rate": 2.25e-06,
+ "loss": 0.0454,
+ "step": 45
+ },
+ {
+ "epoch": 0.6330275229357798,
+ "grad_norm": 0.5047216415405273,
+ "learning_rate": 2.3000000000000004e-06,
+ "loss": 0.0437,
+ "step": 46
+ },
+ {
+ "epoch": 0.6467889908256881,
+ "grad_norm": 0.6318779587745667,
+ "learning_rate": 2.35e-06,
+ "loss": 0.0468,
+ "step": 47
+ },
+ {
+ "epoch": 0.6605504587155964,
+ "grad_norm": 0.5135455131530762,
+ "learning_rate": 2.4000000000000003e-06,
+ "loss": 0.0494,
+ "step": 48
+ },
+ {
+ "epoch": 0.6743119266055045,
+ "grad_norm": 0.4802612066268921,
+ "learning_rate": 2.4500000000000003e-06,
+ "loss": 0.0441,
+ "step": 49
+ },
+ {
+ "epoch": 0.6880733944954128,
+ "grad_norm": 0.6157718300819397,
+ "learning_rate": 2.5e-06,
+ "loss": 0.0398,
+ "step": 50
+ },
+ {
+ "epoch": 0.7018348623853211,
+ "grad_norm": 0.4327130913734436,
+ "learning_rate": 2.55e-06,
+ "loss": 0.0438,
+ "step": 51
+ },
+ {
+ "epoch": 0.7155963302752294,
+ "grad_norm": 0.46133658289909363,
+ "learning_rate": 2.6e-06,
+ "loss": 0.041,
+ "step": 52
+ },
+ {
+ "epoch": 0.7293577981651376,
+ "grad_norm": 0.5729146003723145,
+ "learning_rate": 2.6500000000000005e-06,
+ "loss": 0.0406,
+ "step": 53
+ },
+ {
+ "epoch": 0.7431192660550459,
+ "grad_norm": 0.32373812794685364,
+ "learning_rate": 2.7000000000000004e-06,
+ "loss": 0.0419,
+ "step": 54
+ },
+ {
+ "epoch": 0.7568807339449541,
+ "grad_norm": 0.29006752371788025,
+ "learning_rate": 2.7500000000000004e-06,
+ "loss": 0.0415,
+ "step": 55
+ },
+ {
+ "epoch": 0.7706422018348624,
+ "grad_norm": 0.31038960814476013,
+ "learning_rate": 2.8000000000000003e-06,
+ "loss": 0.0344,
+ "step": 56
+ },
+ {
+ "epoch": 0.7844036697247706,
+ "grad_norm": 0.2324836701154709,
+ "learning_rate": 2.85e-06,
+ "loss": 0.0374,
+ "step": 57
+ },
+ {
+ "epoch": 0.7981651376146789,
+ "grad_norm": 0.5083625912666321,
+ "learning_rate": 2.9e-06,
+ "loss": 0.0324,
+ "step": 58
+ },
+ {
+ "epoch": 0.8119266055045872,
+ "grad_norm": 0.2873130142688751,
+ "learning_rate": 2.95e-06,
+ "loss": 0.0403,
+ "step": 59
+ },
+ {
+ "epoch": 0.8256880733944955,
+ "grad_norm": 0.437663197517395,
+ "learning_rate": 3e-06,
+ "loss": 0.0368,
+ "step": 60
+ },
+ {
+ "epoch": 0.8394495412844036,
+ "grad_norm": 0.5645247101783752,
+ "learning_rate": 3.05e-06,
+ "loss": 0.0386,
+ "step": 61
+ },
+ {
+ "epoch": 0.8532110091743119,
+ "grad_norm": 0.40374210476875305,
+ "learning_rate": 3.1000000000000004e-06,
+ "loss": 0.0425,
+ "step": 62
+ },
+ {
+ "epoch": 0.8669724770642202,
+ "grad_norm": 0.46468955278396606,
+ "learning_rate": 3.1500000000000003e-06,
+ "loss": 0.0323,
+ "step": 63
+ },
+ {
+ "epoch": 0.8807339449541285,
+ "grad_norm": 0.29952895641326904,
+ "learning_rate": 3.2000000000000003e-06,
+ "loss": 0.0325,
+ "step": 64
+ },
+ {
+ "epoch": 0.8944954128440367,
+ "grad_norm": 0.3678436279296875,
+ "learning_rate": 3.2500000000000002e-06,
+ "loss": 0.036,
+ "step": 65
+ },
+ {
+ "epoch": 0.908256880733945,
+ "grad_norm": 0.5068934559822083,
+ "learning_rate": 3.3000000000000006e-06,
+ "loss": 0.0357,
+ "step": 66
+ },
+ {
+ "epoch": 0.9220183486238532,
+ "grad_norm": 0.2723177671432495,
+ "learning_rate": 3.3500000000000005e-06,
+ "loss": 0.0333,
+ "step": 67
+ },
+ {
+ "epoch": 0.9357798165137615,
+ "grad_norm": 0.41696834564208984,
+ "learning_rate": 3.4000000000000005e-06,
+ "loss": 0.0347,
+ "step": 68
+ },
+ {
+ "epoch": 0.9495412844036697,
+ "grad_norm": 0.2582981288433075,
+ "learning_rate": 3.45e-06,
+ "loss": 0.0283,
+ "step": 69
+ },
+ {
+ "epoch": 0.963302752293578,
+ "grad_norm": 0.40648311376571655,
+ "learning_rate": 3.5e-06,
+ "loss": 0.0293,
+ "step": 70
+ },
+ {
+ "epoch": 0.9770642201834863,
+ "grad_norm": 0.4149394631385803,
+ "learning_rate": 3.5500000000000003e-06,
+ "loss": 0.0311,
+ "step": 71
+ },
+ {
+ "epoch": 0.9908256880733946,
+ "grad_norm": 0.3800952136516571,
+ "learning_rate": 3.6000000000000003e-06,
+ "loss": 0.0346,
+ "step": 72
+ },
+ {
+ "epoch": 1.0,
+ "grad_norm": 0.3800952136516571,
+ "learning_rate": 3.65e-06,
+ "loss": 0.0291,
+ "step": 73
+ },
+ {
+ "epoch": 1.0137614678899083,
+ "grad_norm": 0.5016496181488037,
+ "learning_rate": 3.7e-06,
+ "loss": 0.0286,
+ "step": 74
+ },
+ {
+ "epoch": 1.0275229357798166,
+ "grad_norm": 0.3533766567707062,
+ "learning_rate": 3.7500000000000005e-06,
+ "loss": 0.0321,
+ "step": 75
+ },
+ {
+ "epoch": 1.0412844036697249,
+ "grad_norm": 0.2785470485687256,
+ "learning_rate": 3.8000000000000005e-06,
+ "loss": 0.0277,
+ "step": 76
+ },
+ {
+ "epoch": 1.0550458715596331,
+ "grad_norm": 0.4530641436576843,
+ "learning_rate": 3.85e-06,
+ "loss": 0.0294,
+ "step": 77
+ },
+ {
+ "epoch": 1.0688073394495412,
+ "grad_norm": 0.3170749843120575,
+ "learning_rate": 3.900000000000001e-06,
+ "loss": 0.0274,
+ "step": 78
+ },
+ {
+ "epoch": 1.0825688073394495,
+ "grad_norm": 0.26502758264541626,
+ "learning_rate": 3.95e-06,
+ "loss": 0.0284,
+ "step": 79
+ },
+ {
+ "epoch": 1.0963302752293578,
+ "grad_norm": 0.5486436486244202,
+ "learning_rate": 4.000000000000001e-06,
+ "loss": 0.0285,
+ "step": 80
+ },
+ {
+ "epoch": 1.110091743119266,
+ "grad_norm": 0.24868083000183105,
+ "learning_rate": 4.05e-06,
+ "loss": 0.0301,
+ "step": 81
+ },
+ {
+ "epoch": 1.1238532110091743,
+ "grad_norm": 0.3448987305164337,
+ "learning_rate": 4.1e-06,
+ "loss": 0.0261,
+ "step": 82
+ },
+ {
+ "epoch": 1.1376146788990826,
+ "grad_norm": 0.3330553472042084,
+ "learning_rate": 4.15e-06,
+ "loss": 0.03,
+ "step": 83
+ },
+ {
+ "epoch": 1.151376146788991,
+ "grad_norm": 0.3379852771759033,
+ "learning_rate": 4.2000000000000004e-06,
+ "loss": 0.0286,
+ "step": 84
+ },
+ {
+ "epoch": 1.165137614678899,
+ "grad_norm": 0.23678433895111084,
+ "learning_rate": 4.25e-06,
+ "loss": 0.0245,
+ "step": 85
+ },
+ {
+ "epoch": 1.1788990825688073,
+ "grad_norm": 0.24502314627170563,
+ "learning_rate": 4.3e-06,
+ "loss": 0.0267,
+ "step": 86
+ },
+ {
+ "epoch": 1.1926605504587156,
+ "grad_norm": 0.34288597106933594,
+ "learning_rate": 4.350000000000001e-06,
+ "loss": 0.0259,
+ "step": 87
+ },
+ {
+ "epoch": 1.2064220183486238,
+ "grad_norm": 0.20595045387744904,
+ "learning_rate": 4.4e-06,
+ "loss": 0.0241,
+ "step": 88
+ },
+ {
+ "epoch": 1.2201834862385321,
+ "grad_norm": 0.28399360179901123,
+ "learning_rate": 4.450000000000001e-06,
+ "loss": 0.0277,
+ "step": 89
+ },
+ {
+ "epoch": 1.2339449541284404,
+ "grad_norm": 0.273929238319397,
+ "learning_rate": 4.5e-06,
+ "loss": 0.0261,
+ "step": 90
+ },
+ {
+ "epoch": 1.2477064220183487,
+ "grad_norm": 0.24288330972194672,
+ "learning_rate": 4.5500000000000005e-06,
+ "loss": 0.0267,
+ "step": 91
+ },
+ {
+ "epoch": 1.261467889908257,
+ "grad_norm": 0.42502400279045105,
+ "learning_rate": 4.600000000000001e-06,
+ "loss": 0.0252,
+ "step": 92
+ },
+ {
+ "epoch": 1.2752293577981653,
+ "grad_norm": 0.17670072615146637,
+ "learning_rate": 4.65e-06,
+ "loss": 0.0231,
+ "step": 93
+ },
+ {
+ "epoch": 1.2889908256880733,
+ "grad_norm": 0.23585423827171326,
+ "learning_rate": 4.7e-06,
+ "loss": 0.0213,
+ "step": 94
+ },
+ {
+ "epoch": 1.3027522935779816,
+ "grad_norm": 0.32558879256248474,
+ "learning_rate": 4.75e-06,
+ "loss": 0.0226,
+ "step": 95
+ },
+ {
+ "epoch": 1.31651376146789,
+ "grad_norm": 0.2908780872821808,
+ "learning_rate": 4.800000000000001e-06,
+ "loss": 0.0274,
+ "step": 96
+ },
+ {
+ "epoch": 1.3302752293577982,
+ "grad_norm": 0.3725607991218567,
+ "learning_rate": 4.85e-06,
+ "loss": 0.0241,
+ "step": 97
+ },
+ {
+ "epoch": 1.3440366972477065,
+ "grad_norm": 0.3833301067352295,
+ "learning_rate": 4.9000000000000005e-06,
+ "loss": 0.0252,
+ "step": 98
+ },
+ {
+ "epoch": 1.3577981651376148,
+ "grad_norm": 0.32000771164894104,
+ "learning_rate": 4.95e-06,
+ "loss": 0.0236,
+ "step": 99
+ },
+ {
+ "epoch": 1.3715596330275228,
+ "grad_norm": 0.3203510344028473,
+ "learning_rate": 5e-06,
+ "loss": 0.0235,
+ "step": 100
+ },
+ {
+ "epoch": 1.385321100917431,
+ "grad_norm": 0.20016217231750488,
+ "learning_rate": 4.999888074163108e-06,
+ "loss": 0.0218,
+ "step": 101
+ },
+ {
+ "epoch": 1.3990825688073394,
+ "grad_norm": 0.3012026250362396,
+ "learning_rate": 4.999552306674345e-06,
+ "loss": 0.0234,
+ "step": 102
+ },
+ {
+ "epoch": 1.4128440366972477,
+ "grad_norm": 0.22818222641944885,
+ "learning_rate": 4.998992727598557e-06,
+ "loss": 0.0212,
+ "step": 103
+ },
+ {
+ "epoch": 1.426605504587156,
+ "grad_norm": 0.24128392338752747,
+ "learning_rate": 4.998209387040829e-06,
+ "loss": 0.0207,
+ "step": 104
+ },
+ {
+ "epoch": 1.4403669724770642,
+ "grad_norm": 0.3475555181503296,
+ "learning_rate": 4.9972023551419995e-06,
+ "loss": 0.0246,
+ "step": 105
+ },
+ {
+ "epoch": 1.4541284403669725,
+ "grad_norm": 0.21223627030849457,
+ "learning_rate": 4.995971722072379e-06,
+ "loss": 0.0237,
+ "step": 106
+ },
+ {
+ "epoch": 1.4678899082568808,
+ "grad_norm": 0.3776336908340454,
+ "learning_rate": 4.9945175980236745e-06,
+ "loss": 0.0218,
+ "step": 107
+ },
+ {
+ "epoch": 1.481651376146789,
+ "grad_norm": 0.25027793645858765,
+ "learning_rate": 4.992840113199131e-06,
+ "loss": 0.0214,
+ "step": 108
+ },
+ {
+ "epoch": 1.4954128440366974,
+ "grad_norm": 0.2559281885623932,
+ "learning_rate": 4.990939417801859e-06,
+ "loss": 0.0213,
+ "step": 109
+ },
+ {
+ "epoch": 1.5091743119266054,
+ "grad_norm": 0.33694687485694885,
+ "learning_rate": 4.988815682021398e-06,
+ "loss": 0.0182,
+ "step": 110
+ },
+ {
+ "epoch": 1.5229357798165137,
+ "grad_norm": 0.3175147473812103,
+ "learning_rate": 4.986469096018472e-06,
+ "loss": 0.0213,
+ "step": 111
+ },
+ {
+ "epoch": 1.536697247706422,
+ "grad_norm": 0.48777177929878235,
+ "learning_rate": 4.983899869907963e-06,
+ "loss": 0.0186,
+ "step": 112
+ },
+ {
+ "epoch": 1.5504587155963303,
+ "grad_norm": 0.34633246064186096,
+ "learning_rate": 4.981108233740096e-06,
+ "loss": 0.0224,
+ "step": 113
+ },
+ {
+ "epoch": 1.5642201834862384,
+ "grad_norm": 0.17302758991718292,
+ "learning_rate": 4.978094437479843e-06,
+ "loss": 0.0189,
+ "step": 114
+ },
+ {
+ "epoch": 1.5779816513761467,
+ "grad_norm": 0.24110931158065796,
+ "learning_rate": 4.97485875098454e-06,
+ "loss": 0.0174,
+ "step": 115
+ },
+ {
+ "epoch": 1.591743119266055,
+ "grad_norm": 0.2965283989906311,
+ "learning_rate": 4.971401463979722e-06,
+ "loss": 0.0174,
+ "step": 116
+ },
+ {
+ "epoch": 1.6055045871559632,
+ "grad_norm": 0.34184500575065613,
+ "learning_rate": 4.967722886033181e-06,
+ "loss": 0.0162,
+ "step": 117
+ },
+ {
+ "epoch": 1.6192660550458715,
+ "grad_norm": 0.3942873179912567,
+ "learning_rate": 4.963823346527249e-06,
+ "loss": 0.0176,
+ "step": 118
+ },
+ {
+ "epoch": 1.6330275229357798,
+ "grad_norm": 0.21840929985046387,
+ "learning_rate": 4.959703194629304e-06,
+ "loss": 0.018,
+ "step": 119
+ },
+ {
+ "epoch": 1.646788990825688,
+ "grad_norm": 0.23672759532928467,
+ "learning_rate": 4.955362799260507e-06,
+ "loss": 0.0204,
+ "step": 120
+ },
+ {
+ "epoch": 1.6605504587155964,
+ "grad_norm": 0.18776445090770721,
+ "learning_rate": 4.950802549062764e-06,
+ "loss": 0.0173,
+ "step": 121
+ },
+ {
+ "epoch": 1.6743119266055047,
+ "grad_norm": 0.279297411441803,
+ "learning_rate": 4.946022852363932e-06,
+ "loss": 0.0177,
+ "step": 122
+ },
+ {
+ "epoch": 1.688073394495413,
+ "grad_norm": 0.20893588662147522,
+ "learning_rate": 4.9410241371412525e-06,
+ "loss": 0.0189,
+ "step": 123
+ },
+ {
+ "epoch": 1.7018348623853212,
+ "grad_norm": 0.26409876346588135,
+ "learning_rate": 4.935806850983034e-06,
+ "loss": 0.0195,
+ "step": 124
+ },
+ {
+ "epoch": 1.7155963302752295,
+ "grad_norm": 0.23863324522972107,
+ "learning_rate": 4.9303714610485705e-06,
+ "loss": 0.0151,
+ "step": 125
+ },
+ {
+ "epoch": 1.7293577981651376,
+ "grad_norm": 0.25934213399887085,
+ "learning_rate": 4.924718454026318e-06,
+ "loss": 0.0157,
+ "step": 126
+ },
+ {
+ "epoch": 1.7431192660550459,
+ "grad_norm": 0.2923693358898163,
+ "learning_rate": 4.918848336090309e-06,
+ "loss": 0.0155,
+ "step": 127
+ },
+ {
+ "epoch": 1.7568807339449541,
+ "grad_norm": 0.16973069310188293,
+ "learning_rate": 4.912761632854834e-06,
+ "loss": 0.0156,
+ "step": 128
+ },
+ {
+ "epoch": 1.7706422018348624,
+ "grad_norm": 0.25908610224723816,
+ "learning_rate": 4.906458889327375e-06,
+ "loss": 0.0159,
+ "step": 129
+ },
+ {
+ "epoch": 1.7844036697247705,
+ "grad_norm": 0.27444136142730713,
+ "learning_rate": 4.899940669859807e-06,
+ "loss": 0.0159,
+ "step": 130
+ },
+ {
+ "epoch": 1.7981651376146788,
+ "grad_norm": 0.25167539715766907,
+ "learning_rate": 4.893207558097867e-06,
+ "loss": 0.0147,
+ "step": 131
+ },
+ {
+ "epoch": 1.811926605504587,
+ "grad_norm": 0.25777608156204224,
+ "learning_rate": 4.8862601569288885e-06,
+ "loss": 0.016,
+ "step": 132
+ },
+ {
+ "epoch": 1.8256880733944953,
+ "grad_norm": 0.24190428853034973,
+ "learning_rate": 4.879099088427824e-06,
+ "loss": 0.0128,
+ "step": 133
+ },
+ {
+ "epoch": 1.8394495412844036,
+ "grad_norm": 0.17028410732746124,
+ "learning_rate": 4.871724993801541e-06,
+ "loss": 0.0145,
+ "step": 134
+ },
+ {
+ "epoch": 1.853211009174312,
+ "grad_norm": 0.25141075253486633,
+ "learning_rate": 4.864138533331411e-06,
+ "loss": 0.0144,
+ "step": 135
+ },
+ {
+ "epoch": 1.8669724770642202,
+ "grad_norm": 0.6400424838066101,
+ "learning_rate": 4.8563403863141825e-06,
+ "loss": 0.0128,
+ "step": 136
+ },
+ {
+ "epoch": 1.8807339449541285,
+ "grad_norm": 0.22450514137744904,
+ "learning_rate": 4.84833125100116e-06,
+ "loss": 0.0167,
+ "step": 137
+ },
+ {
+ "epoch": 1.8944954128440368,
+ "grad_norm": 0.19940154254436493,
+ "learning_rate": 4.840111844535682e-06,
+ "loss": 0.0154,
+ "step": 138
+ },
+ {
+ "epoch": 1.908256880733945,
+ "grad_norm": 0.2946206033229828,
+ "learning_rate": 4.8316829028889076e-06,
+ "loss": 0.0158,
+ "step": 139
+ },
+ {
+ "epoch": 1.9220183486238533,
+ "grad_norm": 0.4694023132324219,
+ "learning_rate": 4.823045180793914e-06,
+ "loss": 0.0153,
+ "step": 140
+ },
+ {
+ "epoch": 1.9357798165137616,
+ "grad_norm": 0.23655226826667786,
+ "learning_rate": 4.8141994516781196e-06,
+ "loss": 0.0109,
+ "step": 141
+ },
+ {
+ "epoch": 1.9495412844036697,
+ "grad_norm": 0.23846553266048431,
+ "learning_rate": 4.805146507594034e-06,
+ "loss": 0.0129,
+ "step": 142
+ },
+ {
+ "epoch": 1.963302752293578,
+ "grad_norm": 0.2763686180114746,
+ "learning_rate": 4.7958871591483305e-06,
+ "loss": 0.0124,
+ "step": 143
+ },
+ {
+ "epoch": 1.9770642201834863,
+ "grad_norm": 0.23522883653640747,
+ "learning_rate": 4.786422235429269e-06,
+ "loss": 0.0125,
+ "step": 144
+ }
+ ],
+ "logging_steps": 1,
+ "max_steps": 432,
+ "num_input_tokens_seen": 0,
+ "num_train_epochs": 6,
+ "save_steps": 72,
+ "stateful_callbacks": {
+ "TrainerControl": {
+ "args": {
+ "should_epoch_stop": false,
+ "should_evaluate": false,
+ "should_log": false,
+ "should_save": true,
+ "should_training_stop": false
+ },
+ "attributes": {}
+ }
+ },
+ "total_flos": 9.238765964147818e+18,
+ "train_batch_size": 4,
+ "trial_name": null,
+ "trial_params": null
+}
diff --git a/checkpoint-144/training_args.bin b/checkpoint-144/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..6b48f4b52f5bfff81ec8534f6510460a8014f336
--- /dev/null
+++ b/checkpoint-144/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:cbafef0713d4b794ca3a92a04d378baaf3fa7647c9af95948bfb2ef7c0e02eda
+size 7928
diff --git a/checkpoint-144/zero_to_fp32.py b/checkpoint-144/zero_to_fp32.py
new file mode 100644
index 0000000000000000000000000000000000000000..24cc342e78d1a006c782b3a4cd68d9ce786d8fd8
--- /dev/null
+++ b/checkpoint-144/zero_to_fp32.py
@@ -0,0 +1,604 @@
+#!/usr/bin/env python
+
+# Copyright (c) Microsoft Corporation.
+# SPDX-License-Identifier: Apache-2.0
+
+# DeepSpeed Team
+
+# This script extracts fp32 consolidated weights from ZeRO stage 1, 2 and 3 DeepSpeed checkpoints. It gets
+# copied into the top level checkpoint dir, so the user can easily do the conversion at any point in
+# the future. Once extracted, the weights don't require DeepSpeed and can be used in any
+# application.
+#
+# example: python zero_to_fp32.py . pytorch_model.bin
+
+import argparse
+import torch
+import glob
+import math
+import os
+import re
+from collections import OrderedDict
+from dataclasses import dataclass
+
+# while this script doesn't use deepspeed to recover data, since the checkpoints are pickled with
+# DeepSpeed data structures it has to be available in the current python environment.
+from deepspeed.utils import logger
+from deepspeed.checkpoint.constants import (DS_VERSION, OPTIMIZER_STATE_DICT, SINGLE_PARTITION_OF_FP32_GROUPS,
+ FP32_FLAT_GROUPS, ZERO_STAGE, PARTITION_COUNT, PARAM_SHAPES, BUFFER_NAMES,
+ FROZEN_PARAM_SHAPES, FROZEN_PARAM_FRAGMENTS)
+
+
+@dataclass
+class zero_model_state:
+    # Container for the pieces of one model-states checkpoint file that are
+    # needed to reconstruct fp32 weights (see parse_model_states below).
+    # NOTE(review): the field annotations use `dict()` (a dict *instance*)
+    # rather than the `dict` type — harmless to @dataclass, which only needs
+    # the annotations to exist, but unconventional; kept as-is (upstream code).
+    buffers: dict()
+    param_shapes: dict()
+    shared_params: list
+    ds_version: int
+    frozen_param_shapes: dict()
+    frozen_param_fragments: dict()
+
+
+# Module-level verbosity switch: set to a truthy value for extra diagnostic prints.
+debug = 0
+
+# load to cpu
+device = torch.device('cpu')
+
+
+def atoi(text):
+    # Convert a digit-only string to int; leave any other string unchanged.
+    # Building block for natural_keys() below.
+    return int(text) if text.isdigit() else text
+
+
+def natural_keys(text):
+    '''
+    alist.sort(key=natural_keys) sorts in human order
+    http://nedbatchelder.com/blog/200712/human_sorting.html
+    (See Toothy's implementation in the comments)
+    '''
+    # Split on runs of digits so that e.g. "rank_10" sorts after "rank_2".
+    return [atoi(c) for c in re.split(r'(\d+)', text)]
+
+
+def get_model_state_file(checkpoint_dir, zero_stage):
+    # Return the path of the single rank-0 model-states file in checkpoint_dir.
+    # Raises FileNotFoundError if the directory or the expected file is missing.
+    if not os.path.isdir(checkpoint_dir):
+        raise FileNotFoundError(f"Directory '{checkpoint_dir}' doesn't exist")
+
+    # there should be only one file
+    # The file name layout differs between ZeRO-1/2 and ZeRO-3 checkpoints.
+    if zero_stage <= 2:
+        file = os.path.join(checkpoint_dir, "mp_rank_00_model_states.pt")
+    elif zero_stage == 3:
+        file = os.path.join(checkpoint_dir, "zero_pp_rank_0_mp_rank_00_model_states.pt")
+
+    if not os.path.exists(file):
+        raise FileNotFoundError(f"can't find model states file at '{file}'")
+
+    return file
+
+
+def get_checkpoint_files(checkpoint_dir, glob_pattern):
+    # Return every file under checkpoint_dir matching glob_pattern, in natural
+    # (human) sort order so per-rank indices order numerically, not lexically.
+    # Raises FileNotFoundError when nothing matches.
+    # XXX: need to test that this simple glob rule works for multi-node setup too
+    ckpt_files = sorted(glob.glob(os.path.join(checkpoint_dir, glob_pattern)), key=natural_keys)
+
+    if len(ckpt_files) == 0:
+        raise FileNotFoundError(f"can't find {glob_pattern} files in directory '{checkpoint_dir}'")
+
+    return ckpt_files
+
+
+def get_optim_files(checkpoint_dir):
+    # All per-rank optimizer-states files, natural-sorted by rank.
+    return get_checkpoint_files(checkpoint_dir, "*_optim_states.pt")
+
+
+def get_model_state_files(checkpoint_dir):
+    # All per-rank model-states files, natural-sorted by rank.
+    return get_checkpoint_files(checkpoint_dir, "*_model_states.pt")
+
+
+def parse_model_states(files):
+    """
+    Load each per-rank model-states file and extract what fp32 reconstruction
+    needs, returning a list of zero_model_state records (one per file/rank).
+
+    Raises ValueError if a file lacks the buffer-names key, i.e. is not a
+    model-states checkpoint.
+    """
+    zero_model_states = []
+    for file in files:
+        state_dict = torch.load(file, map_location=device)
+
+        if BUFFER_NAMES not in state_dict:
+            raise ValueError(f"{file} is not a model state checkpoint")
+        buffer_names = state_dict[BUFFER_NAMES]
+        if debug:
+            print("Found buffers:", buffer_names)
+
+        # recover just the buffers while restoring them to fp32 if they were saved in fp16
+        buffers = {k: v.float() for k, v in state_dict["module"].items() if k in buffer_names}
+        param_shapes = state_dict[PARAM_SHAPES]
+
+        # collect parameters that are included in param_shapes
+        param_names = []
+        for s in param_shapes:
+            for name in s.keys():
+                param_names.append(name)
+
+        # update with frozen parameters
+        frozen_param_shapes = state_dict.get(FROZEN_PARAM_SHAPES, None)
+        if frozen_param_shapes is not None:
+            if debug:
+                print(f"Found frozen_param_shapes: {frozen_param_shapes}")
+            param_names += list(frozen_param_shapes.keys())
+
+        # handle shared params
+        shared_params = [[k, v] for k, v in state_dict["shared_params"].items()]
+
+        # ds_version / frozen-param entries may be absent in older checkpoints.
+        ds_version = state_dict.get(DS_VERSION, None)
+
+        frozen_param_fragments = state_dict.get(FROZEN_PARAM_FRAGMENTS, None)
+
+        z_model_state = zero_model_state(buffers=buffers,
+                                         param_shapes=param_shapes,
+                                         shared_params=shared_params,
+                                         ds_version=ds_version,
+                                         frozen_param_shapes=frozen_param_shapes,
+                                         frozen_param_fragments=frozen_param_fragments)
+        zero_model_states.append(z_model_state)
+
+    return zero_model_states
+
+
+def parse_optim_states(files, ds_checkpoint_dir):
+    """
+    Load every per-rank optimizer-states file and return the pieces needed to
+    rebuild fp32 weights: a tuple (zero_stage, world_size, fp32_flat_groups).
+
+    Raises ValueError when the files are not ZeRO checkpoints, the file count
+    does not match the recorded partition count, or the stage is unknown.
+    """
+
+    total_files = len(files)
+    state_dicts = []
+    for f in files:
+        state_dict = torch.load(f, map_location=device)
+        # immediately discard the potentially huge optimizer states as we only care for the fp32 master weights
+        # and also handle the case where it was already removed by another helper script
+        state_dict["optimizer_state_dict"].pop("optimizer_state_dict", None)
+        state_dicts.append(state_dict)
+
+    if not ZERO_STAGE in state_dicts[0][OPTIMIZER_STATE_DICT]:
+        raise ValueError(f"{files[0]} is not a zero checkpoint")
+    zero_stage = state_dicts[0][OPTIMIZER_STATE_DICT][ZERO_STAGE]
+    world_size = state_dicts[0][OPTIMIZER_STATE_DICT][PARTITION_COUNT]
+
+    # For ZeRO-2 each param group can have different partition_count as data parallelism for expert
+    # parameters can be different from data parallelism for non-expert parameters. So we can just
+    # use the max of the partition_count to get the dp world_size.
+
+    if type(world_size) is list:
+        world_size = max(world_size)
+
+    if world_size != total_files:
+        raise ValueError(
+            f"Expected {world_size} of '*_optim_states.pt' under '{ds_checkpoint_dir}' but found {total_files} files. "
+            "Possibly due to an overwrite of an old checkpoint, or a checkpoint didn't get saved by one or more processes."
+        )
+
+    # the groups are named differently in each stage
+    if zero_stage <= 2:
+        fp32_groups_key = SINGLE_PARTITION_OF_FP32_GROUPS
+    elif zero_stage == 3:
+        fp32_groups_key = FP32_FLAT_GROUPS
+    else:
+        raise ValueError(f"unknown zero stage {zero_stage}")
+
+    if zero_stage <= 2:
+        # One entry per rank; each entry is a list of per-param-group partitions.
+        fp32_flat_groups = [state_dicts[i][OPTIMIZER_STATE_DICT][fp32_groups_key] for i in range(len(state_dicts))]
+    elif zero_stage == 3:
+        # if there is more than one param group, there will be multiple flattened tensors - one
+        # flattened tensor per group - for simplicity merge them into a single tensor
+        #
+        # XXX: could make the script more memory efficient for when there are multiple groups - it
+        # will require matching the sub-lists of param_shapes for each param group flattened tensor
+
+        fp32_flat_groups = [
+            torch.cat(state_dicts[i][OPTIMIZER_STATE_DICT][fp32_groups_key], 0) for i in range(len(state_dicts))
+        ]
+
+    return zero_stage, world_size, fp32_flat_groups
+
+
+def _get_fp32_state_dict_from_zero_checkpoint(ds_checkpoint_dir, exclude_frozen_parameters):
+    """
+    Returns fp32 state_dict reconstructed from ds checkpoint
+
+    Args:
+        - ``ds_checkpoint_dir``: path to the deepspeed checkpoint folder (where the optimizer files are)
+        - ``exclude_frozen_parameters``: when truthy, frozen (non-trainable) parameters are left out
+          of the reconstructed state_dict (forwarded to the stage-specific helper)
+
+    """
+    print(f"Processing zero checkpoint '{ds_checkpoint_dir}'")
+
+    # Optimizer files hold the fp32 flat partitions; model files hold shapes/buffers.
+    optim_files = get_optim_files(ds_checkpoint_dir)
+    zero_stage, world_size, fp32_flat_groups = parse_optim_states(optim_files, ds_checkpoint_dir)
+    print(f"Detected checkpoint of type zero stage {zero_stage}, world_size: {world_size}")
+
+    model_files = get_model_state_files(ds_checkpoint_dir)
+
+    zero_model_states = parse_model_states(model_files)
+    print(f'Parsing checkpoint created by deepspeed=={zero_model_states[0].ds_version}')
+
+    # Dispatch to the stage-specific reconstruction routine.
+    if zero_stage <= 2:
+        return _get_fp32_state_dict_from_zero2_checkpoint(world_size, fp32_flat_groups, zero_model_states,
+                                                          exclude_frozen_parameters)
+    elif zero_stage == 3:
+        return _get_fp32_state_dict_from_zero3_checkpoint(world_size, fp32_flat_groups, zero_model_states,
+                                                          exclude_frozen_parameters)
+
+
+def _zero2_merge_frozen_params(state_dict, zero_model_states):
+    """
+    Copy frozen (non-trainable) parameter tensors from the rank-0 model state
+    into ``state_dict``. No-op when the checkpoint has no frozen parameters.
+    """
+    if zero_model_states[0].frozen_param_shapes is None or len(zero_model_states[0].frozen_param_shapes) == 0:
+        return
+
+    # Frozen params are not partitioned by ZeRO-2, so rank 0's copy is complete.
+    frozen_param_shapes = zero_model_states[0].frozen_param_shapes
+    frozen_param_fragments = zero_model_states[0].frozen_param_fragments
+
+    if debug:
+        num_elem = sum(s.numel() for s in frozen_param_shapes.values())
+        print(f'rank 0: {FROZEN_PARAM_SHAPES}.numel = {num_elem}')
+
+    wanted_params = len(frozen_param_shapes)
+    wanted_numel = sum(s.numel() for s in frozen_param_shapes.values())
+    avail_numel = sum([p.numel() for p in frozen_param_fragments.values()])
+    print(f'Frozen params: Have {avail_numel} numels to process.')
+    print(f'Frozen params: Need {wanted_numel} numels in {wanted_params} params')
+
+    total_params = 0
+    total_numel = 0
+    for name, shape in frozen_param_shapes.items():
+        total_params += 1
+        unpartitioned_numel = shape.numel()
+        total_numel += unpartitioned_numel
+
+        state_dict[name] = frozen_param_fragments[name]
+
+        if debug:
+            print(f"{name} full shape: {shape} unpartitioned numel {unpartitioned_numel} ")
+
+    print(f"Reconstructed Frozen fp32 state dict with {total_params} params {total_numel} elements")
+
+
+def _has_callable(obj, fn):
+    # True when `obj` has an attribute named `fn` that is callable
+    # (e.g. distinguishes a torch.Size-like `.numel()` from a plain tuple shape).
+    attr = getattr(obj, fn, None)
+    return callable(attr)
+
+
+def _zero2_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states):
+    """
+    Rebuild full fp32 trainable parameters from the per-rank flat ZeRO-2
+    partitions and store them in ``state_dict`` keyed by parameter name.
+
+    Raises ValueError when, after padding alignment, the consumed element
+    count does not match the available element count.
+    """
+    param_shapes = zero_model_states[0].param_shapes
+
+    # Reconstruction protocol:
+    #
+    # XXX: document this
+
+    if debug:
+        for i in range(world_size):
+            for j in range(len(fp32_flat_groups[0])):
+                print(f"{FP32_FLAT_GROUPS}[{i}][{j}].shape={fp32_flat_groups[i][j].shape}")
+
+    # XXX: memory usage doubles here (zero2)
+    # Concatenate each param group's partitions across ranks into one flat vector.
+    num_param_groups = len(fp32_flat_groups[0])
+    merged_single_partition_of_fp32_groups = []
+    for i in range(num_param_groups):
+        merged_partitions = [sd[i] for sd in fp32_flat_groups]
+        full_single_fp32_vector = torch.cat(merged_partitions, 0)
+        merged_single_partition_of_fp32_groups.append(full_single_fp32_vector)
+    avail_numel = sum(
+        [full_single_fp32_vector.numel() for full_single_fp32_vector in merged_single_partition_of_fp32_groups])
+
+    if debug:
+        wanted_params = sum([len(shapes) for shapes in param_shapes])
+        wanted_numel = sum([sum(shape.numel() for shape in shapes.values()) for shapes in param_shapes])
+        # not asserting if there is a mismatch due to possible padding
+        print(f"Have {avail_numel} numels to process.")
+        print(f"Need {wanted_numel} numels in {wanted_params} params.")
+
+    # params
+    # XXX: for huge models that can't fit into the host's RAM we will have to recode this to support
+    # out-of-core computing solution
+    total_numel = 0
+    total_params = 0
+    for shapes, full_single_fp32_vector in zip(param_shapes, merged_single_partition_of_fp32_groups):
+        offset = 0
+        avail_numel = full_single_fp32_vector.numel()
+        for name, shape in shapes.items():
+
+            # `shape` may be a torch.Size (has .numel()) or a plain tuple.
+            unpartitioned_numel = shape.numel() if _has_callable(shape, 'numel') else math.prod(shape)
+            total_numel += unpartitioned_numel
+            total_params += 1
+
+            if debug:
+                print(f"{name} full shape: {shape} unpartitioned numel {unpartitioned_numel} ")
+            # Slice this parameter's elements out of the flat vector and reshape.
+            state_dict[name] = full_single_fp32_vector.narrow(0, offset, unpartitioned_numel).view(shape)
+            offset += unpartitioned_numel
+
+        # Z2 started to align to 2*world_size to improve nccl performance. Therefore both offset and
+        # avail_numel can differ by anywhere between 0..2*world_size. Due to two unrelated complex
+        # paddings performed in the code it's almost impossible to predict the exact numbers w/o the
+        # live optimizer object, so we are checking that the numbers are within the right range
+        align_to = 2 * world_size
+
+        def zero2_align(x):
+            # Round x up to the nearest multiple of align_to.
+            return align_to * math.ceil(x / align_to)
+
+        if debug:
+            print(f"original offset={offset}, avail_numel={avail_numel}")
+
+        offset = zero2_align(offset)
+        avail_numel = zero2_align(avail_numel)
+
+        if debug:
+            print(f"aligned offset={offset}, avail_numel={avail_numel}")
+
+        # Sanity check
+        if offset != avail_numel:
+            raise ValueError(f"consumed {offset} numels out of {avail_numel} - something is wrong")
+
+    print(f"Reconstructed fp32 state dict with {total_params} params {total_numel} elements")
+
+
+def _get_fp32_state_dict_from_zero2_checkpoint(world_size, fp32_flat_groups, zero_model_states,
+ exclude_frozen_parameters):
+ state_dict = OrderedDict()
+
+ # buffers
+ buffers = zero_model_states[0].buffers
+ state_dict.update(buffers)
+ if debug:
+ print(f"added {len(buffers)} buffers")
+
+ if not exclude_frozen_parameters:
+ _zero2_merge_frozen_params(state_dict, zero_model_states)
+
+ _zero2_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states)
+
+ # recover shared parameters
+ for pair in zero_model_states[0].shared_params:
+ if pair[1] in state_dict:
+ state_dict[pair[0]] = state_dict[pair[1]]
+
+ return state_dict
+
+
+def zero3_partitioned_param_info(unpartitioned_numel, world_size):
+ remainder = unpartitioned_numel % world_size
+ padding_numel = (world_size - remainder) if remainder else 0
+ partitioned_numel = math.ceil(unpartitioned_numel / world_size)
+ return partitioned_numel, padding_numel
+
+
+def _zero3_merge_frozen_params(state_dict, world_size, zero_model_states):
+ if zero_model_states[0].frozen_param_shapes is None or len(zero_model_states[0].frozen_param_shapes) == 0:
+ return
+
+ if debug:
+ for i in range(world_size):
+ num_elem = sum(s.numel() for s in zero_model_states[i].frozen_param_fragments.values())
+ print(f'rank {i}: {FROZEN_PARAM_SHAPES}.numel = {num_elem}')
+
+ frozen_param_shapes = zero_model_states[0].frozen_param_shapes
+ wanted_params = len(frozen_param_shapes)
+ wanted_numel = sum(s.numel() for s in frozen_param_shapes.values())
+ avail_numel = sum([p.numel() for p in zero_model_states[0].frozen_param_fragments.values()]) * world_size
+ print(f'Frozen params: Have {avail_numel} numels to process.')
+ print(f'Frozen params: Need {wanted_numel} numels in {wanted_params} params')
+
+ total_params = 0
+ total_numel = 0
+ for name, shape in zero_model_states[0].frozen_param_shapes.items():
+ total_params += 1
+ unpartitioned_numel = shape.numel()
+ total_numel += unpartitioned_numel
+
+ param_frags = tuple(model_state.frozen_param_fragments[name] for model_state in zero_model_states)
+ state_dict[name] = torch.cat(param_frags, 0).narrow(0, 0, unpartitioned_numel).view(shape)
+
+ partitioned_numel, partitioned_padding_numel = zero3_partitioned_param_info(unpartitioned_numel, world_size)
+
+ if debug:
+ print(
+ f"Frozen params: {total_params} {name} full shape: {shape} partition0 numel={partitioned_numel} partitioned_padding_numel={partitioned_padding_numel}"
+ )
+
+ print(f"Reconstructed Frozen fp32 state dict with {total_params} params {total_numel} elements")
+
+
+def _zero3_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states):
+ param_shapes = zero_model_states[0].param_shapes
+ avail_numel = fp32_flat_groups[0].numel() * world_size
+ # Reconstruction protocol: For zero3 we need to zip the partitions together at boundary of each
+ # param, re-consolidating each param, while dealing with padding if any
+
+ # merge list of dicts, preserving order
+ param_shapes = {k: v for d in param_shapes for k, v in d.items()}
+
+ if debug:
+ for i in range(world_size):
+ print(f"{FP32_FLAT_GROUPS}[{i}].shape={fp32_flat_groups[i].shape}")
+
+ wanted_params = len(param_shapes)
+ wanted_numel = sum(shape.numel() for shape in param_shapes.values())
+ # not asserting if there is a mismatch due to possible padding
+ avail_numel = fp32_flat_groups[0].numel() * world_size
+ print(f"Trainable params: Have {avail_numel} numels to process.")
+ print(f"Trainable params: Need {wanted_numel} numels in {wanted_params} params.")
+
+ # params
+ # XXX: for huge models that can't fit into the host's RAM we will have to recode this to support
+ # out-of-core computing solution
+ offset = 0
+ total_numel = 0
+ total_params = 0
+ for name, shape in param_shapes.items():
+
+ unpartitioned_numel = shape.numel()
+ total_numel += unpartitioned_numel
+ total_params += 1
+
+ partitioned_numel, partitioned_padding_numel = zero3_partitioned_param_info(unpartitioned_numel, world_size)
+
+ if debug:
+ print(
+ f"Trainable params: {total_params} {name} full shape: {shape} partition0 numel={partitioned_numel} partitioned_padding_numel={partitioned_padding_numel}"
+ )
+
+ # XXX: memory usage doubles here
+ state_dict[name] = torch.cat(
+ tuple(fp32_flat_groups[i].narrow(0, offset, partitioned_numel) for i in range(world_size)),
+ 0).narrow(0, 0, unpartitioned_numel).view(shape)
+ offset += partitioned_numel
+
+ offset *= world_size
+
+ # Sanity check
+ if offset != avail_numel:
+ raise ValueError(f"consumed {offset} numels out of {avail_numel} - something is wrong")
+
+ print(f"Reconstructed Trainable fp32 state dict with {total_params} params {total_numel} elements")
+
+
+def _get_fp32_state_dict_from_zero3_checkpoint(world_size, fp32_flat_groups, zero_model_states,
+ exclude_frozen_parameters):
+ state_dict = OrderedDict()
+
+ # buffers
+ buffers = zero_model_states[0].buffers
+ state_dict.update(buffers)
+ if debug:
+ print(f"added {len(buffers)} buffers")
+
+ if not exclude_frozen_parameters:
+ _zero3_merge_frozen_params(state_dict, world_size, zero_model_states)
+
+ _zero3_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states)
+
+ # recover shared parameters
+ for pair in zero_model_states[0].shared_params:
+ if pair[1] in state_dict:
+ state_dict[pair[0]] = state_dict[pair[1]]
+
+ return state_dict
+
+
+def get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag=None, exclude_frozen_parameters=False):
+ """
+ Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated state_dict that can be loaded with
+ ``load_state_dict()`` and used for training without DeepSpeed or shared with others, for example
+ via a model hub.
+
+ Args:
+ - ``checkpoint_dir``: path to the desired checkpoint folder
+ - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in 'latest' file. e.g., ``global_step14``
+ - ``exclude_frozen_parameters``: exclude frozen parameters
+
+ Returns:
+ - pytorch ``state_dict``
+
+ Note: this approach may not work if your application doesn't have sufficient free CPU memory and
+ you may need to use the offline approach using the ``zero_to_fp32.py`` script that is saved with
+ the checkpoint.
+
+ A typical usage might be ::
+
+ from deepspeed.utils.zero_to_fp32 import get_fp32_state_dict_from_zero_checkpoint
+ # do the training and checkpoint saving
+ state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir) # already on cpu
+ model = model.cpu() # move to cpu
+ model.load_state_dict(state_dict)
+ # submit to model hub or save the model to share with others
+
+ In this example the ``model`` will no longer be usable in the deepspeed context of the same
+ application. i.e. you will need to re-initialize the deepspeed engine, since
+ ``model.load_state_dict(state_dict)`` will remove all the deepspeed magic from it.
+
+ If you want it all done for you, use ``load_state_dict_from_zero_checkpoint`` instead.
+
+ """
+ if tag is None:
+ latest_path = os.path.join(checkpoint_dir, 'latest')
+ if os.path.isfile(latest_path):
+ with open(latest_path, 'r') as fd:
+ tag = fd.read().strip()
+ else:
+ raise ValueError(f"Unable to find 'latest' file at {latest_path}")
+
+ ds_checkpoint_dir = os.path.join(checkpoint_dir, tag)
+
+ if not os.path.isdir(ds_checkpoint_dir):
+ raise FileNotFoundError(f"Directory '{ds_checkpoint_dir}' doesn't exist")
+
+ return _get_fp32_state_dict_from_zero_checkpoint(ds_checkpoint_dir, exclude_frozen_parameters)
+
+
+def convert_zero_checkpoint_to_fp32_state_dict(checkpoint_dir, output_file, tag=None, exclude_frozen_parameters=False):
+ """
+ Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated ``state_dict`` file that can be
+ loaded with ``torch.load(file)`` + ``load_state_dict()`` and used for training without DeepSpeed.
+
+ Args:
+ - ``checkpoint_dir``: path to the desired checkpoint folder. (one that contains the tag-folder, like ``global_step14``)
+ - ``output_file``: path to the pytorch fp32 state_dict output file (e.g. path/pytorch_model.bin)
+ - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in the file named ``latest`` in the checkpoint folder, e.g., ``global_step14``
+ - ``exclude_frozen_parameters``: exclude frozen parameters
+ """
+
+ state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag, exclude_frozen_parameters)
+ print(f"Saving fp32 state dict to {output_file}")
+ torch.save(state_dict, output_file)
+
+
+def load_state_dict_from_zero_checkpoint(model, checkpoint_dir, tag=None):
+ """
+ 1. Put the provided model to cpu
+ 2. Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated ``state_dict``
+ 3. Load it into the provided model
+
+ Args:
+ - ``model``: the model object to update
+ - ``checkpoint_dir``: path to the desired checkpoint folder. (one that contains the tag-folder, like ``global_step14``)
+ - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in the file named ``latest`` in the checkpoint folder, e.g., ``global_step14``
+
+ Returns:
+    - ``model``: modified model
+
+ Make sure you have plenty of CPU memory available before you call this function. If you don't
+ have enough use the ``zero_to_fp32.py`` utility to do the conversion. You will find it
+ conveniently placed for you in the checkpoint folder.
+
+ A typical usage might be ::
+
+ from deepspeed.utils.zero_to_fp32 import load_state_dict_from_zero_checkpoint
+ model = load_state_dict_from_zero_checkpoint(trainer.model, checkpoint_dir)
+ # submit to model hub or save the model to share with others
+
+ Note, that once this was run, the ``model`` will no longer be usable in the deepspeed context
+ of the same application. i.e. you will need to re-initialize the deepspeed engine, since
+ ``model.load_state_dict(state_dict)`` will remove all the deepspeed magic from it.
+
+ """
+ logger.info(f"Extracting fp32 weights")
+ state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag)
+
+ logger.info(f"Overwriting model with fp32 weights")
+ model = model.cpu()
+ model.load_state_dict(state_dict, strict=False)
+
+ return model
+
+
+if __name__ == "__main__":
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument("checkpoint_dir",
+ type=str,
+ help="path to the desired checkpoint folder, e.g., path/checkpoint-12")
+ parser.add_argument(
+ "output_file",
+ type=str,
+ help="path to the pytorch fp32 state_dict output file (e.g. path/checkpoint-12/pytorch_model.bin)")
+ parser.add_argument("-t",
+ "--tag",
+ type=str,
+ default=None,
+ help="checkpoint tag used as a unique identifier for checkpoint. e.g., global_step1")
+ parser.add_argument("--exclude_frozen_parameters", action='store_true', help="exclude frozen parameters")
+ parser.add_argument("-d", "--debug", action='store_true', help="enable debug")
+ args = parser.parse_args()
+
+ debug = args.debug
+
+ convert_zero_checkpoint_to_fp32_state_dict(args.checkpoint_dir,
+ args.output_file,
+ tag=args.tag,
+ exclude_frozen_parameters=args.exclude_frozen_parameters)
diff --git a/checkpoint-216/README.md b/checkpoint-216/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..1b184114a0c28ed3e4c082c18486736dc818166d
--- /dev/null
+++ b/checkpoint-216/README.md
@@ -0,0 +1,202 @@
+---
+base_model: meta-llama/Llama-3.3-70B-Instruct
+library_name: peft
+---
+
+# Model Card for Model ID
+
+
+
+
+
+## Model Details
+
+### Model Description
+
+
+
+
+
+- **Developed by:** [More Information Needed]
+- **Funded by [optional]:** [More Information Needed]
+- **Shared by [optional]:** [More Information Needed]
+- **Model type:** [More Information Needed]
+- **Language(s) (NLP):** [More Information Needed]
+- **License:** [More Information Needed]
+- **Finetuned from model [optional]:** [More Information Needed]
+
+### Model Sources [optional]
+
+
+
+- **Repository:** [More Information Needed]
+- **Paper [optional]:** [More Information Needed]
+- **Demo [optional]:** [More Information Needed]
+
+## Uses
+
+
+
+### Direct Use
+
+
+
+[More Information Needed]
+
+### Downstream Use [optional]
+
+
+
+[More Information Needed]
+
+### Out-of-Scope Use
+
+
+
+[More Information Needed]
+
+## Bias, Risks, and Limitations
+
+
+
+[More Information Needed]
+
+### Recommendations
+
+
+
+Users (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations.
+
+## How to Get Started with the Model
+
+Use the code below to get started with the model.
+
+[More Information Needed]
+
+## Training Details
+
+### Training Data
+
+
+
+[More Information Needed]
+
+### Training Procedure
+
+
+
+#### Preprocessing [optional]
+
+[More Information Needed]
+
+
+#### Training Hyperparameters
+
+- **Training regime:** [More Information Needed]
+
+#### Speeds, Sizes, Times [optional]
+
+
+
+[More Information Needed]
+
+## Evaluation
+
+
+
+### Testing Data, Factors & Metrics
+
+#### Testing Data
+
+
+
+[More Information Needed]
+
+#### Factors
+
+
+
+[More Information Needed]
+
+#### Metrics
+
+
+
+[More Information Needed]
+
+### Results
+
+[More Information Needed]
+
+#### Summary
+
+
+
+## Model Examination [optional]
+
+
+
+[More Information Needed]
+
+## Environmental Impact
+
+
+
+Carbon emissions can be estimated using the [Machine Learning Impact calculator](https://mlco2.github.io/impact#compute) presented in [Lacoste et al. (2019)](https://arxiv.org/abs/1910.09700).
+
+- **Hardware Type:** [More Information Needed]
+- **Hours used:** [More Information Needed]
+- **Cloud Provider:** [More Information Needed]
+- **Compute Region:** [More Information Needed]
+- **Carbon Emitted:** [More Information Needed]
+
+## Technical Specifications [optional]
+
+### Model Architecture and Objective
+
+[More Information Needed]
+
+### Compute Infrastructure
+
+[More Information Needed]
+
+#### Hardware
+
+[More Information Needed]
+
+#### Software
+
+[More Information Needed]
+
+## Citation [optional]
+
+
+
+**BibTeX:**
+
+[More Information Needed]
+
+**APA:**
+
+[More Information Needed]
+
+## Glossary [optional]
+
+
+
+[More Information Needed]
+
+## More Information [optional]
+
+[More Information Needed]
+
+## Model Card Authors [optional]
+
+[More Information Needed]
+
+## Model Card Contact
+
+[More Information Needed]
+### Framework versions
+
+- PEFT 0.15.0
\ No newline at end of file
diff --git a/checkpoint-216/adapter_config.json b/checkpoint-216/adapter_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..538b369b5129fb972c782e6ccfd589054540e1af
--- /dev/null
+++ b/checkpoint-216/adapter_config.json
@@ -0,0 +1,42 @@
+{
+ "alpha_pattern": {},
+ "auto_mapping": null,
+ "base_model_name_or_path": "meta-llama/Llama-3.3-70B-Instruct",
+ "bias": "none",
+ "corda_config": null,
+ "eva_config": null,
+ "exclude_modules": null,
+ "fan_in_fan_out": null,
+ "inference_mode": true,
+ "init_lora_weights": true,
+ "layer_replication": null,
+ "layers_pattern": null,
+ "layers_to_transform": null,
+ "loftq_config": {},
+ "lora_alpha": 512,
+ "lora_bias": false,
+ "lora_dropout": 0.05,
+ "megatron_config": null,
+ "megatron_core": "megatron.core",
+ "modules_to_save": [
+ "embed_tokens",
+ "lm_head"
+ ],
+ "peft_type": "LORA",
+ "r": 256,
+ "rank_pattern": {},
+ "revision": null,
+ "target_modules": [
+ "o_proj",
+ "q_proj",
+ "v_proj",
+ "k_proj",
+ "up_proj",
+ "down_proj",
+ "gate_proj"
+ ],
+ "task_type": "CAUSAL_LM",
+ "trainable_token_indices": null,
+ "use_dora": false,
+ "use_rslora": false
+}
\ No newline at end of file
diff --git a/checkpoint-216/adapter_model.safetensors b/checkpoint-216/adapter_model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..13310dd3a00ae75b6fd894781dd0e46363806b4c
--- /dev/null
+++ b/checkpoint-216/adapter_model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:575a73ba151bcb38cebf7d58a749006b844d696ce4a43052c187e800e3475bce
+size 10829849744
diff --git a/checkpoint-216/global_step215/bf16_zero_pp_rank_0_mp_rank_00_optim_states.pt b/checkpoint-216/global_step215/bf16_zero_pp_rank_0_mp_rank_00_optim_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..0efa1759e68ed830fceab622bc5c9887c8a017dd
--- /dev/null
+++ b/checkpoint-216/global_step215/bf16_zero_pp_rank_0_mp_rank_00_optim_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:fe1a41121b772d1b7da837df8089ddaadef2860828167b36c67bc24437996362
+size 21659418140
diff --git a/checkpoint-216/global_step215/bf16_zero_pp_rank_1_mp_rank_00_optim_states.pt b/checkpoint-216/global_step215/bf16_zero_pp_rank_1_mp_rank_00_optim_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..4e7993dc92bbe2740a6136d24d161212afc40428
--- /dev/null
+++ b/checkpoint-216/global_step215/bf16_zero_pp_rank_1_mp_rank_00_optim_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:792c2a6e66fc5b6ddcaea95063e4c5b938cb46227d0495be294e5ee0e3bb0849
+size 21659457372
diff --git a/checkpoint-216/global_step215/bf16_zero_pp_rank_2_mp_rank_00_optim_states.pt b/checkpoint-216/global_step215/bf16_zero_pp_rank_2_mp_rank_00_optim_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..260ad9dcd847336f3438616b247d9ed9794a5ffe
--- /dev/null
+++ b/checkpoint-216/global_step215/bf16_zero_pp_rank_2_mp_rank_00_optim_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e26fc97ac1a1915ddd70c3879b01ce3eb9ec4d7c35ea76fa4d92360d851c8017
+size 21659417820
diff --git a/checkpoint-216/global_step215/mp_rank_00_model_states.pt b/checkpoint-216/global_step215/mp_rank_00_model_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..c21b8830a7642f1f5251aa4819b205a97aaa6b7f
--- /dev/null
+++ b/checkpoint-216/global_step215/mp_rank_00_model_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8929d0facbe0b81ac571a626a4dc71fd06c1efef99560e21cfb72a199ea230ab
+size 11918643933
diff --git a/checkpoint-216/latest b/checkpoint-216/latest
new file mode 100644
index 0000000000000000000000000000000000000000..38df052cdc535cda9868110b8dd4a49ba3a2a4f9
--- /dev/null
+++ b/checkpoint-216/latest
@@ -0,0 +1 @@
+global_step215
\ No newline at end of file
diff --git a/checkpoint-216/rng_state_0.pth b/checkpoint-216/rng_state_0.pth
new file mode 100644
index 0000000000000000000000000000000000000000..ad72c19d0ccff0a7121ed17f3844b1eb1b1ba644
--- /dev/null
+++ b/checkpoint-216/rng_state_0.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:01b8982cb00376a2305d1408b1b6f10a9f5ac6bb57899e19a07d6d9b6f813294
+size 14768
diff --git a/checkpoint-216/rng_state_1.pth b/checkpoint-216/rng_state_1.pth
new file mode 100644
index 0000000000000000000000000000000000000000..733c2b8647a655449931bad65d8780fcbe83351b
--- /dev/null
+++ b/checkpoint-216/rng_state_1.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b2fd5f45668d232171745b201c3f58e0384b062f6e10224b2e7193afbecb9181
+size 14768
diff --git a/checkpoint-216/rng_state_2.pth b/checkpoint-216/rng_state_2.pth
new file mode 100644
index 0000000000000000000000000000000000000000..4fc271be79dbb0cad803465df20f92ebbf4b326a
--- /dev/null
+++ b/checkpoint-216/rng_state_2.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3e0dc7a48d742c5e839e5cb65cf084dd83de3b6c0b4453af6d8fa977411aaa9d
+size 14768
diff --git a/checkpoint-216/scheduler.pt b/checkpoint-216/scheduler.pt
new file mode 100644
index 0000000000000000000000000000000000000000..5aa48f9e2224074123d70cbfd49c476a8ca21ea3
--- /dev/null
+++ b/checkpoint-216/scheduler.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8a177484072060bf319ca9a44b8c986d20ca392d8b2158584c14221fe24d8381
+size 1064
diff --git a/checkpoint-216/special_tokens_map.json b/checkpoint-216/special_tokens_map.json
new file mode 100644
index 0000000000000000000000000000000000000000..278b7f0f84be865c4687700ee7b3c63d89a51e18
--- /dev/null
+++ b/checkpoint-216/special_tokens_map.json
@@ -0,0 +1,23 @@
+{
+ "bos_token": {
+ "content": "<|begin_of_text|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "<|eot_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": {
+ "content": "<|end_of_text|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+}
diff --git a/checkpoint-216/tokenizer.json b/checkpoint-216/tokenizer.json
new file mode 100644
index 0000000000000000000000000000000000000000..1c1d8d5c9024994f1d3b00f9662b8dd89ca13cf2
--- /dev/null
+++ b/checkpoint-216/tokenizer.json
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6b9e4e7fb171f92fd137b777cc2714bf87d11576700a1dcd7a399e7bbe39537b
+size 17209920
diff --git a/checkpoint-216/tokenizer_config.json b/checkpoint-216/tokenizer_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..ca91a2ef55f4239a7af81d7c9abb05f53621a07b
--- /dev/null
+++ b/checkpoint-216/tokenizer_config.json
@@ -0,0 +1,2064 @@
+{
+ "added_tokens_decoder": {
+ "128000": {
+ "content": "<|begin_of_text|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128001": {
+ "content": "<|end_of_text|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128002": {
+ "content": "<|reserved_special_token_0|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128003": {
+ "content": "<|reserved_special_token_1|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128004": {
+ "content": "<|finetune_right_pad_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128005": {
+ "content": "<|reserved_special_token_2|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128006": {
+ "content": "<|start_header_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128007": {
+ "content": "<|end_header_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128008": {
+ "content": "<|eom_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128009": {
+ "content": "<|eot_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128010": {
+ "content": "<|python_tag|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128011": {
+ "content": "<|reserved_special_token_3|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128012": {
+ "content": "<|reserved_special_token_4|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128013": {
+ "content": "<|reserved_special_token_5|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128014": {
+ "content": "<|reserved_special_token_6|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128015": {
+ "content": "<|reserved_special_token_7|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128016": {
+ "content": "<|reserved_special_token_8|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128017": {
+ "content": "<|reserved_special_token_9|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128018": {
+ "content": "<|reserved_special_token_10|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128019": {
+ "content": "<|reserved_special_token_11|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128020": {
+ "content": "<|reserved_special_token_12|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128021": {
+ "content": "<|reserved_special_token_13|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128022": {
+ "content": "<|reserved_special_token_14|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128023": {
+ "content": "<|reserved_special_token_15|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128024": {
+ "content": "<|reserved_special_token_16|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128025": {
+ "content": "<|reserved_special_token_17|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128026": {
+ "content": "<|reserved_special_token_18|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128027": {
+ "content": "<|reserved_special_token_19|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128028": {
+ "content": "<|reserved_special_token_20|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128029": {
+ "content": "<|reserved_special_token_21|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128030": {
+ "content": "<|reserved_special_token_22|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128031": {
+ "content": "<|reserved_special_token_23|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128032": {
+ "content": "<|reserved_special_token_24|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128033": {
+ "content": "<|reserved_special_token_25|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128034": {
+ "content": "<|reserved_special_token_26|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128035": {
+ "content": "<|reserved_special_token_27|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128036": {
+ "content": "<|reserved_special_token_28|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128037": {
+ "content": "<|reserved_special_token_29|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128038": {
+ "content": "<|reserved_special_token_30|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128039": {
+ "content": "<|reserved_special_token_31|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128040": {
+ "content": "<|reserved_special_token_32|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128041": {
+ "content": "<|reserved_special_token_33|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128042": {
+ "content": "<|reserved_special_token_34|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128043": {
+ "content": "<|reserved_special_token_35|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128044": {
+ "content": "<|reserved_special_token_36|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128045": {
+ "content": "<|reserved_special_token_37|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128046": {
+ "content": "<|reserved_special_token_38|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128047": {
+ "content": "<|reserved_special_token_39|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128048": {
+ "content": "<|reserved_special_token_40|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128049": {
+ "content": "<|reserved_special_token_41|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128050": {
+ "content": "<|reserved_special_token_42|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128051": {
+ "content": "<|reserved_special_token_43|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128052": {
+ "content": "<|reserved_special_token_44|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128053": {
+ "content": "<|reserved_special_token_45|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128054": {
+ "content": "<|reserved_special_token_46|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128055": {
+ "content": "<|reserved_special_token_47|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128056": {
+ "content": "<|reserved_special_token_48|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128057": {
+ "content": "<|reserved_special_token_49|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128058": {
+ "content": "<|reserved_special_token_50|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128059": {
+ "content": "<|reserved_special_token_51|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128060": {
+ "content": "<|reserved_special_token_52|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128061": {
+ "content": "<|reserved_special_token_53|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128062": {
+ "content": "<|reserved_special_token_54|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128063": {
+ "content": "<|reserved_special_token_55|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128064": {
+ "content": "<|reserved_special_token_56|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128065": {
+ "content": "<|reserved_special_token_57|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128066": {
+ "content": "<|reserved_special_token_58|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128067": {
+ "content": "<|reserved_special_token_59|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128068": {
+ "content": "<|reserved_special_token_60|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128069": {
+ "content": "<|reserved_special_token_61|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128070": {
+ "content": "<|reserved_special_token_62|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128071": {
+ "content": "<|reserved_special_token_63|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128072": {
+ "content": "<|reserved_special_token_64|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128073": {
+ "content": "<|reserved_special_token_65|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128074": {
+ "content": "<|reserved_special_token_66|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128075": {
+ "content": "<|reserved_special_token_67|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128076": {
+ "content": "<|reserved_special_token_68|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128077": {
+ "content": "<|reserved_special_token_69|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128078": {
+ "content": "<|reserved_special_token_70|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128079": {
+ "content": "<|reserved_special_token_71|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128080": {
+ "content": "<|reserved_special_token_72|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128081": {
+ "content": "<|reserved_special_token_73|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128082": {
+ "content": "<|reserved_special_token_74|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128083": {
+ "content": "<|reserved_special_token_75|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128084": {
+ "content": "<|reserved_special_token_76|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128085": {
+ "content": "<|reserved_special_token_77|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128086": {
+ "content": "<|reserved_special_token_78|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128087": {
+ "content": "<|reserved_special_token_79|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128088": {
+ "content": "<|reserved_special_token_80|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128089": {
+ "content": "<|reserved_special_token_81|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128090": {
+ "content": "<|reserved_special_token_82|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128091": {
+ "content": "<|reserved_special_token_83|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128092": {
+ "content": "<|reserved_special_token_84|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128093": {
+ "content": "<|reserved_special_token_85|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128094": {
+ "content": "<|reserved_special_token_86|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128095": {
+ "content": "<|reserved_special_token_87|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128096": {
+ "content": "<|reserved_special_token_88|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128097": {
+ "content": "<|reserved_special_token_89|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128098": {
+ "content": "<|reserved_special_token_90|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128099": {
+ "content": "<|reserved_special_token_91|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128100": {
+ "content": "<|reserved_special_token_92|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128101": {
+ "content": "<|reserved_special_token_93|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128102": {
+ "content": "<|reserved_special_token_94|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128103": {
+ "content": "<|reserved_special_token_95|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128104": {
+ "content": "<|reserved_special_token_96|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128105": {
+ "content": "<|reserved_special_token_97|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128106": {
+ "content": "<|reserved_special_token_98|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128107": {
+ "content": "<|reserved_special_token_99|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128108": {
+ "content": "<|reserved_special_token_100|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128109": {
+ "content": "<|reserved_special_token_101|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128110": {
+ "content": "<|reserved_special_token_102|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128111": {
+ "content": "<|reserved_special_token_103|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128112": {
+ "content": "<|reserved_special_token_104|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128113": {
+ "content": "<|reserved_special_token_105|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128114": {
+ "content": "<|reserved_special_token_106|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128115": {
+ "content": "<|reserved_special_token_107|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128116": {
+ "content": "<|reserved_special_token_108|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128117": {
+ "content": "<|reserved_special_token_109|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128118": {
+ "content": "<|reserved_special_token_110|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128119": {
+ "content": "<|reserved_special_token_111|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128120": {
+ "content": "<|reserved_special_token_112|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128121": {
+ "content": "<|reserved_special_token_113|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128122": {
+ "content": "<|reserved_special_token_114|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128123": {
+ "content": "<|reserved_special_token_115|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128124": {
+ "content": "<|reserved_special_token_116|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128125": {
+ "content": "<|reserved_special_token_117|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128126": {
+ "content": "<|reserved_special_token_118|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128127": {
+ "content": "<|reserved_special_token_119|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128128": {
+ "content": "<|reserved_special_token_120|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128129": {
+ "content": "<|reserved_special_token_121|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128130": {
+ "content": "<|reserved_special_token_122|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128131": {
+ "content": "<|reserved_special_token_123|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128132": {
+ "content": "<|reserved_special_token_124|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128133": {
+ "content": "<|reserved_special_token_125|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128134": {
+ "content": "<|reserved_special_token_126|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128135": {
+ "content": "<|reserved_special_token_127|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128136": {
+ "content": "<|reserved_special_token_128|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128137": {
+ "content": "<|reserved_special_token_129|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128138": {
+ "content": "<|reserved_special_token_130|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128139": {
+ "content": "<|reserved_special_token_131|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128140": {
+ "content": "<|reserved_special_token_132|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128141": {
+ "content": "<|reserved_special_token_133|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128142": {
+ "content": "<|reserved_special_token_134|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128143": {
+ "content": "<|reserved_special_token_135|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128144": {
+ "content": "<|reserved_special_token_136|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128145": {
+ "content": "<|reserved_special_token_137|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128146": {
+ "content": "<|reserved_special_token_138|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128147": {
+ "content": "<|reserved_special_token_139|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128148": {
+ "content": "<|reserved_special_token_140|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128149": {
+ "content": "<|reserved_special_token_141|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128150": {
+ "content": "<|reserved_special_token_142|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128151": {
+ "content": "<|reserved_special_token_143|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128152": {
+ "content": "<|reserved_special_token_144|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128153": {
+ "content": "<|reserved_special_token_145|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128154": {
+ "content": "<|reserved_special_token_146|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128155": {
+ "content": "<|reserved_special_token_147|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128156": {
+ "content": "<|reserved_special_token_148|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128157": {
+ "content": "<|reserved_special_token_149|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128158": {
+ "content": "<|reserved_special_token_150|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128159": {
+ "content": "<|reserved_special_token_151|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128160": {
+ "content": "<|reserved_special_token_152|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128161": {
+ "content": "<|reserved_special_token_153|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128162": {
+ "content": "<|reserved_special_token_154|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128163": {
+ "content": "<|reserved_special_token_155|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128164": {
+ "content": "<|reserved_special_token_156|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128165": {
+ "content": "<|reserved_special_token_157|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128166": {
+ "content": "<|reserved_special_token_158|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128167": {
+ "content": "<|reserved_special_token_159|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128168": {
+ "content": "<|reserved_special_token_160|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128169": {
+ "content": "<|reserved_special_token_161|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128170": {
+ "content": "<|reserved_special_token_162|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128171": {
+ "content": "<|reserved_special_token_163|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128172": {
+ "content": "<|reserved_special_token_164|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128173": {
+ "content": "<|reserved_special_token_165|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128174": {
+ "content": "<|reserved_special_token_166|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128175": {
+ "content": "<|reserved_special_token_167|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128176": {
+ "content": "<|reserved_special_token_168|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128177": {
+ "content": "<|reserved_special_token_169|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128178": {
+ "content": "<|reserved_special_token_170|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128179": {
+ "content": "<|reserved_special_token_171|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128180": {
+ "content": "<|reserved_special_token_172|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128181": {
+ "content": "<|reserved_special_token_173|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128182": {
+ "content": "<|reserved_special_token_174|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128183": {
+ "content": "<|reserved_special_token_175|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128184": {
+ "content": "<|reserved_special_token_176|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128185": {
+ "content": "<|reserved_special_token_177|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128186": {
+ "content": "<|reserved_special_token_178|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128187": {
+ "content": "<|reserved_special_token_179|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128188": {
+ "content": "<|reserved_special_token_180|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128189": {
+ "content": "<|reserved_special_token_181|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128190": {
+ "content": "<|reserved_special_token_182|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128191": {
+ "content": "<|reserved_special_token_183|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128192": {
+ "content": "<|reserved_special_token_184|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128193": {
+ "content": "<|reserved_special_token_185|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128194": {
+ "content": "<|reserved_special_token_186|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128195": {
+ "content": "<|reserved_special_token_187|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128196": {
+ "content": "<|reserved_special_token_188|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128197": {
+ "content": "<|reserved_special_token_189|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128198": {
+ "content": "<|reserved_special_token_190|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128199": {
+ "content": "<|reserved_special_token_191|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128200": {
+ "content": "<|reserved_special_token_192|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128201": {
+ "content": "<|reserved_special_token_193|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128202": {
+ "content": "<|reserved_special_token_194|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128203": {
+ "content": "<|reserved_special_token_195|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128204": {
+ "content": "<|reserved_special_token_196|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128205": {
+ "content": "<|reserved_special_token_197|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128206": {
+ "content": "<|reserved_special_token_198|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128207": {
+ "content": "<|reserved_special_token_199|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128208": {
+ "content": "<|reserved_special_token_200|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128209": {
+ "content": "<|reserved_special_token_201|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128210": {
+ "content": "<|reserved_special_token_202|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128211": {
+ "content": "<|reserved_special_token_203|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128212": {
+ "content": "<|reserved_special_token_204|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128213": {
+ "content": "<|reserved_special_token_205|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128214": {
+ "content": "<|reserved_special_token_206|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128215": {
+ "content": "<|reserved_special_token_207|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128216": {
+ "content": "<|reserved_special_token_208|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128217": {
+ "content": "<|reserved_special_token_209|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128218": {
+ "content": "<|reserved_special_token_210|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128219": {
+ "content": "<|reserved_special_token_211|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128220": {
+ "content": "<|reserved_special_token_212|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128221": {
+ "content": "<|reserved_special_token_213|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128222": {
+ "content": "<|reserved_special_token_214|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128223": {
+ "content": "<|reserved_special_token_215|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128224": {
+ "content": "<|reserved_special_token_216|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128225": {
+ "content": "<|reserved_special_token_217|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128226": {
+ "content": "<|reserved_special_token_218|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128227": {
+ "content": "<|reserved_special_token_219|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128228": {
+ "content": "<|reserved_special_token_220|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128229": {
+ "content": "<|reserved_special_token_221|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128230": {
+ "content": "<|reserved_special_token_222|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128231": {
+ "content": "<|reserved_special_token_223|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128232": {
+ "content": "<|reserved_special_token_224|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128233": {
+ "content": "<|reserved_special_token_225|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128234": {
+ "content": "<|reserved_special_token_226|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128235": {
+ "content": "<|reserved_special_token_227|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128236": {
+ "content": "<|reserved_special_token_228|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128237": {
+ "content": "<|reserved_special_token_229|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128238": {
+ "content": "<|reserved_special_token_230|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128239": {
+ "content": "<|reserved_special_token_231|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128240": {
+ "content": "<|reserved_special_token_232|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128241": {
+ "content": "<|reserved_special_token_233|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128242": {
+ "content": "<|reserved_special_token_234|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128243": {
+ "content": "<|reserved_special_token_235|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128244": {
+ "content": "<|reserved_special_token_236|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128245": {
+ "content": "<|reserved_special_token_237|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128246": {
+ "content": "<|reserved_special_token_238|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128247": {
+ "content": "<|reserved_special_token_239|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128248": {
+ "content": "<|reserved_special_token_240|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128249": {
+ "content": "<|reserved_special_token_241|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128250": {
+ "content": "<|reserved_special_token_242|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128251": {
+ "content": "<|reserved_special_token_243|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128252": {
+ "content": "<|reserved_special_token_244|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128253": {
+ "content": "<|reserved_special_token_245|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128254": {
+ "content": "<|reserved_special_token_246|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128255": {
+ "content": "<|reserved_special_token_247|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ }
+ },
+ "bos_token": "<|begin_of_text|>",
+ "chat_template": "{{- bos_token }}\n{%- if custom_tools is defined %}\n {%- set tools = custom_tools %}\n{%- endif %}\n{%- if not tools_in_user_message is defined %}\n {%- set tools_in_user_message = true %}\n{%- endif %}\n{%- if not date_string is defined %}\n {%- set date_string = \"26 Jul 2024\" %}\n{%- endif %}\n{%- if not tools is defined %}\n {%- set tools = none %}\n{%- endif %}\n\n{#- This block extracts the system message, so we can slot it into the right place. #}\n{%- if messages[0]['role'] == 'system' %}\n {%- set system_message = messages[0]['content']|trim %}\n {%- set messages = messages[1:] %}\n{%- else %}\n {%- set system_message = \"\" %}\n{%- endif %}\n\n{#- System message + builtin tools #}\n{{- \"<|start_header_id|>system<|end_header_id|>\\n\\n\" }}\n{%- if builtin_tools is defined or tools is not none %}\n {{- \"Environment: ipython\\n\" }}\n{%- endif %}\n{%- if builtin_tools is defined %}\n {{- \"Tools: \" + builtin_tools | reject('equalto', 'code_interpreter') | join(\", \") + \"\\n\\n\"}}\n{%- endif %}\n{{- \"Cutting Knowledge Date: December 2023\\n\" }}\n{{- \"Today Date: \" + date_string + \"\\n\\n\" }}\n{%- if tools is not none and not tools_in_user_message %}\n {{- \"You have access to the following functions. To call a function, please respond with JSON for a function call.\" }}\n {{- 'Respond in the format {\"name\": function name, \"parameters\": dictionary of argument name and its value}.' 
}}\n {{- \"Do not use variables.\\n\\n\" }}\n {%- for t in tools %}\n {{- t | tojson(indent=4) }}\n {{- \"\\n\\n\" }}\n {%- endfor %}\n{%- endif %}\n{{- system_message }}\n{{- \"<|eot_id|>\" }}\n\n{#- Custom tools are passed in a user message with some extra guidance #}\n{%- if tools_in_user_message and not tools is none %}\n {#- Extract the first user message so we can plug it in here #}\n {%- if messages | length != 0 %}\n {%- set first_user_message = messages[0]['content']|trim %}\n {%- set messages = messages[1:] %}\n {%- else %}\n {{- raise_exception(\"Cannot put tools in the first user message when there's no first user message!\") }}\n{%- endif %}\n {{- '<|start_header_id|>user<|end_header_id|>\\n\\n' -}}\n {{- \"Given the following functions, please respond with a JSON for a function call \" }}\n {{- \"with its proper arguments that best answers the given prompt.\\n\\n\" }}\n {{- 'Respond in the format {\"name\": function name, \"parameters\": dictionary of argument name and its value}.' 
}}\n {{- \"Do not use variables.\\n\\n\" }}\n {%- for t in tools %}\n {{- t | tojson(indent=4) }}\n {{- \"\\n\\n\" }}\n {%- endfor %}\n {{- first_user_message + \"<|eot_id|>\"}}\n{%- endif %}\n\n{%- for message in messages %}\n {%- if not (message.role == 'ipython' or message.role == 'tool' or 'tool_calls' in message) %}\n {{- '<|start_header_id|>' + message['role'] + '<|end_header_id|>\\n\\n'+ message['content'] | trim + '<|eot_id|>' }}\n {%- elif 'tool_calls' in message %}\n {%- if not message.tool_calls|length == 1 %}\n {{- raise_exception(\"This model only supports single tool-calls at once!\") }}\n {%- endif %}\n {%- set tool_call = message.tool_calls[0].function %}\n {%- if builtin_tools is defined and tool_call.name in builtin_tools %}\n {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' -}}\n {{- \"<|python_tag|>\" + tool_call.name + \".call(\" }}\n {%- for arg_name, arg_val in tool_call.arguments | items %}\n {{- arg_name + '=\"' + arg_val + '\"' }}\n {%- if not loop.last %}\n {{- \", \" }}\n {%- endif %}\n {%- endfor %}\n {{- \")\" }}\n {%- else %}\n {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' -}}\n {{- '{\"name\": \"' + tool_call.name + '\", ' }}\n {{- '\"parameters\": ' }}\n {{- tool_call.arguments | tojson }}\n {{- \"}\" }}\n {%- endif %}\n {%- if builtin_tools is defined %}\n {#- This means we're in ipython mode #}\n {{- \"<|eom_id|>\" }}\n {%- else %}\n {{- \"<|eot_id|>\" }}\n {%- endif %}\n {%- elif message.role == \"tool\" or message.role == \"ipython\" %}\n {{- \"<|start_header_id|>ipython<|end_header_id|>\\n\\n\" }}\n {%- if message.content is mapping or message.content is iterable %}\n {{- message.content | tojson }}\n {%- else %}\n {{- message.content }}\n {%- endif %}\n {{- \"<|eot_id|>\" }}\n {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' }}\n{%- endif %}\n",
+ "clean_up_tokenization_spaces": true,
+ "eos_token": "<|eot_id|>",
+ "extra_special_tokens": {},
+ "model_input_names": [
+ "input_ids",
+ "attention_mask"
+ ],
+ "model_max_length": 131072,
+ "pad_token": "<|end_of_text|>",
+ "tokenizer_class": "PreTrainedTokenizer"
+}
diff --git a/checkpoint-216/trainer_state.json b/checkpoint-216/trainer_state.json
new file mode 100644
index 0000000000000000000000000000000000000000..fc0a215506bef35bd9e202c52ba6d419b40d7e7b
--- /dev/null
+++ b/checkpoint-216/trainer_state.json
@@ -0,0 +1,1545 @@
+{
+ "best_metric": null,
+ "best_model_checkpoint": null,
+ "epoch": 2.963302752293578,
+ "eval_steps": 500,
+ "global_step": 216,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ {
+ "epoch": 0.013761467889908258,
+ "grad_norm": 38.02450942993164,
+ "learning_rate": 5.0000000000000004e-08,
+ "loss": 3.125,
+ "step": 1
+ },
+ {
+ "epoch": 0.027522935779816515,
+ "grad_norm": 37.864768981933594,
+ "learning_rate": 1.0000000000000001e-07,
+ "loss": 3.0998,
+ "step": 2
+ },
+ {
+ "epoch": 0.04128440366972477,
+ "grad_norm": 38.34700012207031,
+ "learning_rate": 1.5000000000000002e-07,
+ "loss": 3.1533,
+ "step": 3
+ },
+ {
+ "epoch": 0.05504587155963303,
+ "grad_norm": 38.33641815185547,
+ "learning_rate": 2.0000000000000002e-07,
+ "loss": 3.1542,
+ "step": 4
+ },
+ {
+ "epoch": 0.06880733944954129,
+ "grad_norm": 38.064449310302734,
+ "learning_rate": 2.5000000000000004e-07,
+ "loss": 3.1153,
+ "step": 5
+ },
+ {
+ "epoch": 0.08256880733944955,
+ "grad_norm": 37.92089080810547,
+ "learning_rate": 3.0000000000000004e-07,
+ "loss": 3.0867,
+ "step": 6
+ },
+ {
+ "epoch": 0.0963302752293578,
+ "grad_norm": 38.120323181152344,
+ "learning_rate": 3.5000000000000004e-07,
+ "loss": 3.093,
+ "step": 7
+ },
+ {
+ "epoch": 0.11009174311926606,
+ "grad_norm": 38.47222900390625,
+ "learning_rate": 4.0000000000000003e-07,
+ "loss": 3.1056,
+ "step": 8
+ },
+ {
+ "epoch": 0.12385321100917432,
+ "grad_norm": 38.013702392578125,
+ "learning_rate": 4.5000000000000003e-07,
+ "loss": 3.0474,
+ "step": 9
+ },
+ {
+ "epoch": 0.13761467889908258,
+ "grad_norm": 38.17593002319336,
+ "learning_rate": 5.000000000000001e-07,
+ "loss": 3.0264,
+ "step": 10
+ },
+ {
+ "epoch": 0.15137614678899083,
+ "grad_norm": 38.60066604614258,
+ "learning_rate": 5.5e-07,
+ "loss": 2.9404,
+ "step": 11
+ },
+ {
+ "epoch": 0.1651376146788991,
+ "grad_norm": 38.83498764038086,
+ "learning_rate": 6.000000000000001e-07,
+ "loss": 2.9571,
+ "step": 12
+ },
+ {
+ "epoch": 0.17889908256880735,
+ "grad_norm": 38.942047119140625,
+ "learning_rate": 6.5e-07,
+ "loss": 2.8849,
+ "step": 13
+ },
+ {
+ "epoch": 0.1926605504587156,
+ "grad_norm": 38.0286865234375,
+ "learning_rate": 7.000000000000001e-07,
+ "loss": 2.7486,
+ "step": 14
+ },
+ {
+ "epoch": 0.20642201834862386,
+ "grad_norm": 38.31856155395508,
+ "learning_rate": 7.5e-07,
+ "loss": 2.6876,
+ "step": 15
+ },
+ {
+ "epoch": 0.22018348623853212,
+ "grad_norm": 38.124759674072266,
+ "learning_rate": 8.000000000000001e-07,
+ "loss": 2.5992,
+ "step": 16
+ },
+ {
+ "epoch": 0.23394495412844038,
+ "grad_norm": 36.59762191772461,
+ "learning_rate": 8.500000000000001e-07,
+ "loss": 2.4063,
+ "step": 17
+ },
+ {
+ "epoch": 0.24770642201834864,
+ "grad_norm": 36.63874435424805,
+ "learning_rate": 9.000000000000001e-07,
+ "loss": 2.3109,
+ "step": 18
+ },
+ {
+ "epoch": 0.26146788990825687,
+ "grad_norm": 36.768577575683594,
+ "learning_rate": 9.500000000000001e-07,
+ "loss": 2.1677,
+ "step": 19
+ },
+ {
+ "epoch": 0.27522935779816515,
+ "grad_norm": 36.187137603759766,
+ "learning_rate": 1.0000000000000002e-06,
+ "loss": 1.9551,
+ "step": 20
+ },
+ {
+ "epoch": 0.2889908256880734,
+ "grad_norm": 35.55617141723633,
+ "learning_rate": 1.0500000000000001e-06,
+ "loss": 1.8053,
+ "step": 21
+ },
+ {
+ "epoch": 0.30275229357798167,
+ "grad_norm": 34.60952377319336,
+ "learning_rate": 1.1e-06,
+ "loss": 1.5654,
+ "step": 22
+ },
+ {
+ "epoch": 0.3165137614678899,
+ "grad_norm": 33.69620895385742,
+ "learning_rate": 1.1500000000000002e-06,
+ "loss": 1.3454,
+ "step": 23
+ },
+ {
+ "epoch": 0.3302752293577982,
+ "grad_norm": 34.33642578125,
+ "learning_rate": 1.2000000000000002e-06,
+ "loss": 1.2417,
+ "step": 24
+ },
+ {
+ "epoch": 0.3440366972477064,
+ "grad_norm": 31.23066520690918,
+ "learning_rate": 1.25e-06,
+ "loss": 0.9839,
+ "step": 25
+ },
+ {
+ "epoch": 0.3577981651376147,
+ "grad_norm": 25.810237884521484,
+ "learning_rate": 1.3e-06,
+ "loss": 0.793,
+ "step": 26
+ },
+ {
+ "epoch": 0.37155963302752293,
+ "grad_norm": 23.06692886352539,
+ "learning_rate": 1.3500000000000002e-06,
+ "loss": 0.6082,
+ "step": 27
+ },
+ {
+ "epoch": 0.3853211009174312,
+ "grad_norm": 19.828439712524414,
+ "learning_rate": 1.4000000000000001e-06,
+ "loss": 0.4845,
+ "step": 28
+ },
+ {
+ "epoch": 0.39908256880733944,
+ "grad_norm": 14.150300025939941,
+ "learning_rate": 1.45e-06,
+ "loss": 0.348,
+ "step": 29
+ },
+ {
+ "epoch": 0.41284403669724773,
+ "grad_norm": 9.044266700744629,
+ "learning_rate": 1.5e-06,
+ "loss": 0.2516,
+ "step": 30
+ },
+ {
+ "epoch": 0.42660550458715596,
+ "grad_norm": 5.704404354095459,
+ "learning_rate": 1.5500000000000002e-06,
+ "loss": 0.177,
+ "step": 31
+ },
+ {
+ "epoch": 0.44036697247706424,
+ "grad_norm": 3.2953503131866455,
+ "learning_rate": 1.6000000000000001e-06,
+ "loss": 0.1391,
+ "step": 32
+ },
+ {
+ "epoch": 0.4541284403669725,
+ "grad_norm": 2.453219413757324,
+ "learning_rate": 1.6500000000000003e-06,
+ "loss": 0.0982,
+ "step": 33
+ },
+ {
+ "epoch": 0.46788990825688076,
+ "grad_norm": 2.0325512886047363,
+ "learning_rate": 1.7000000000000002e-06,
+ "loss": 0.0807,
+ "step": 34
+ },
+ {
+ "epoch": 0.481651376146789,
+ "grad_norm": 1.6322681903839111,
+ "learning_rate": 1.75e-06,
+ "loss": 0.0725,
+ "step": 35
+ },
+ {
+ "epoch": 0.4954128440366973,
+ "grad_norm": 0.9713364839553833,
+ "learning_rate": 1.8000000000000001e-06,
+ "loss": 0.067,
+ "step": 36
+ },
+ {
+ "epoch": 0.5091743119266054,
+ "grad_norm": 0.7980225682258606,
+ "learning_rate": 1.85e-06,
+ "loss": 0.0582,
+ "step": 37
+ },
+ {
+ "epoch": 0.5229357798165137,
+ "grad_norm": 1.0616590976715088,
+ "learning_rate": 1.9000000000000002e-06,
+ "loss": 0.0562,
+ "step": 38
+ },
+ {
+ "epoch": 0.536697247706422,
+ "grad_norm": 1.053462028503418,
+ "learning_rate": 1.9500000000000004e-06,
+ "loss": 0.0537,
+ "step": 39
+ },
+ {
+ "epoch": 0.5504587155963303,
+ "grad_norm": 0.9452660083770752,
+ "learning_rate": 2.0000000000000003e-06,
+ "loss": 0.0602,
+ "step": 40
+ },
+ {
+ "epoch": 0.5642201834862385,
+ "grad_norm": 0.830368161201477,
+ "learning_rate": 2.05e-06,
+ "loss": 0.0549,
+ "step": 41
+ },
+ {
+ "epoch": 0.5779816513761468,
+ "grad_norm": 0.5791187882423401,
+ "learning_rate": 2.1000000000000002e-06,
+ "loss": 0.0479,
+ "step": 42
+ },
+ {
+ "epoch": 0.591743119266055,
+ "grad_norm": 0.44175243377685547,
+ "learning_rate": 2.15e-06,
+ "loss": 0.0461,
+ "step": 43
+ },
+ {
+ "epoch": 0.6055045871559633,
+ "grad_norm": 0.37655699253082275,
+ "learning_rate": 2.2e-06,
+ "loss": 0.043,
+ "step": 44
+ },
+ {
+ "epoch": 0.6192660550458715,
+ "grad_norm": 0.34382495284080505,
+ "learning_rate": 2.25e-06,
+ "loss": 0.0454,
+ "step": 45
+ },
+ {
+ "epoch": 0.6330275229357798,
+ "grad_norm": 0.5047216415405273,
+ "learning_rate": 2.3000000000000004e-06,
+ "loss": 0.0437,
+ "step": 46
+ },
+ {
+ "epoch": 0.6467889908256881,
+ "grad_norm": 0.6318779587745667,
+ "learning_rate": 2.35e-06,
+ "loss": 0.0468,
+ "step": 47
+ },
+ {
+ "epoch": 0.6605504587155964,
+ "grad_norm": 0.5135455131530762,
+ "learning_rate": 2.4000000000000003e-06,
+ "loss": 0.0494,
+ "step": 48
+ },
+ {
+ "epoch": 0.6743119266055045,
+ "grad_norm": 0.4802612066268921,
+ "learning_rate": 2.4500000000000003e-06,
+ "loss": 0.0441,
+ "step": 49
+ },
+ {
+ "epoch": 0.6880733944954128,
+ "grad_norm": 0.6157718300819397,
+ "learning_rate": 2.5e-06,
+ "loss": 0.0398,
+ "step": 50
+ },
+ {
+ "epoch": 0.7018348623853211,
+ "grad_norm": 0.4327130913734436,
+ "learning_rate": 2.55e-06,
+ "loss": 0.0438,
+ "step": 51
+ },
+ {
+ "epoch": 0.7155963302752294,
+ "grad_norm": 0.46133658289909363,
+ "learning_rate": 2.6e-06,
+ "loss": 0.041,
+ "step": 52
+ },
+ {
+ "epoch": 0.7293577981651376,
+ "grad_norm": 0.5729146003723145,
+ "learning_rate": 2.6500000000000005e-06,
+ "loss": 0.0406,
+ "step": 53
+ },
+ {
+ "epoch": 0.7431192660550459,
+ "grad_norm": 0.32373812794685364,
+ "learning_rate": 2.7000000000000004e-06,
+ "loss": 0.0419,
+ "step": 54
+ },
+ {
+ "epoch": 0.7568807339449541,
+ "grad_norm": 0.29006752371788025,
+ "learning_rate": 2.7500000000000004e-06,
+ "loss": 0.0415,
+ "step": 55
+ },
+ {
+ "epoch": 0.7706422018348624,
+ "grad_norm": 0.31038960814476013,
+ "learning_rate": 2.8000000000000003e-06,
+ "loss": 0.0344,
+ "step": 56
+ },
+ {
+ "epoch": 0.7844036697247706,
+ "grad_norm": 0.2324836701154709,
+ "learning_rate": 2.85e-06,
+ "loss": 0.0374,
+ "step": 57
+ },
+ {
+ "epoch": 0.7981651376146789,
+ "grad_norm": 0.5083625912666321,
+ "learning_rate": 2.9e-06,
+ "loss": 0.0324,
+ "step": 58
+ },
+ {
+ "epoch": 0.8119266055045872,
+ "grad_norm": 0.2873130142688751,
+ "learning_rate": 2.95e-06,
+ "loss": 0.0403,
+ "step": 59
+ },
+ {
+ "epoch": 0.8256880733944955,
+ "grad_norm": 0.437663197517395,
+ "learning_rate": 3e-06,
+ "loss": 0.0368,
+ "step": 60
+ },
+ {
+ "epoch": 0.8394495412844036,
+ "grad_norm": 0.5645247101783752,
+ "learning_rate": 3.05e-06,
+ "loss": 0.0386,
+ "step": 61
+ },
+ {
+ "epoch": 0.8532110091743119,
+ "grad_norm": 0.40374210476875305,
+ "learning_rate": 3.1000000000000004e-06,
+ "loss": 0.0425,
+ "step": 62
+ },
+ {
+ "epoch": 0.8669724770642202,
+ "grad_norm": 0.46468955278396606,
+ "learning_rate": 3.1500000000000003e-06,
+ "loss": 0.0323,
+ "step": 63
+ },
+ {
+ "epoch": 0.8807339449541285,
+ "grad_norm": 0.29952895641326904,
+ "learning_rate": 3.2000000000000003e-06,
+ "loss": 0.0325,
+ "step": 64
+ },
+ {
+ "epoch": 0.8944954128440367,
+ "grad_norm": 0.3678436279296875,
+ "learning_rate": 3.2500000000000002e-06,
+ "loss": 0.036,
+ "step": 65
+ },
+ {
+ "epoch": 0.908256880733945,
+ "grad_norm": 0.5068934559822083,
+ "learning_rate": 3.3000000000000006e-06,
+ "loss": 0.0357,
+ "step": 66
+ },
+ {
+ "epoch": 0.9220183486238532,
+ "grad_norm": 0.2723177671432495,
+ "learning_rate": 3.3500000000000005e-06,
+ "loss": 0.0333,
+ "step": 67
+ },
+ {
+ "epoch": 0.9357798165137615,
+ "grad_norm": 0.41696834564208984,
+ "learning_rate": 3.4000000000000005e-06,
+ "loss": 0.0347,
+ "step": 68
+ },
+ {
+ "epoch": 0.9495412844036697,
+ "grad_norm": 0.2582981288433075,
+ "learning_rate": 3.45e-06,
+ "loss": 0.0283,
+ "step": 69
+ },
+ {
+ "epoch": 0.963302752293578,
+ "grad_norm": 0.40648311376571655,
+ "learning_rate": 3.5e-06,
+ "loss": 0.0293,
+ "step": 70
+ },
+ {
+ "epoch": 0.9770642201834863,
+ "grad_norm": 0.4149394631385803,
+ "learning_rate": 3.5500000000000003e-06,
+ "loss": 0.0311,
+ "step": 71
+ },
+ {
+ "epoch": 0.9908256880733946,
+ "grad_norm": 0.3800952136516571,
+ "learning_rate": 3.6000000000000003e-06,
+ "loss": 0.0346,
+ "step": 72
+ },
+ {
+ "epoch": 1.0,
+ "grad_norm": 0.3800952136516571,
+ "learning_rate": 3.65e-06,
+ "loss": 0.0291,
+ "step": 73
+ },
+ {
+ "epoch": 1.0137614678899083,
+ "grad_norm": 0.5016496181488037,
+ "learning_rate": 3.7e-06,
+ "loss": 0.0286,
+ "step": 74
+ },
+ {
+ "epoch": 1.0275229357798166,
+ "grad_norm": 0.3533766567707062,
+ "learning_rate": 3.7500000000000005e-06,
+ "loss": 0.0321,
+ "step": 75
+ },
+ {
+ "epoch": 1.0412844036697249,
+ "grad_norm": 0.2785470485687256,
+ "learning_rate": 3.8000000000000005e-06,
+ "loss": 0.0277,
+ "step": 76
+ },
+ {
+ "epoch": 1.0550458715596331,
+ "grad_norm": 0.4530641436576843,
+ "learning_rate": 3.85e-06,
+ "loss": 0.0294,
+ "step": 77
+ },
+ {
+ "epoch": 1.0688073394495412,
+ "grad_norm": 0.3170749843120575,
+ "learning_rate": 3.900000000000001e-06,
+ "loss": 0.0274,
+ "step": 78
+ },
+ {
+ "epoch": 1.0825688073394495,
+ "grad_norm": 0.26502758264541626,
+ "learning_rate": 3.95e-06,
+ "loss": 0.0284,
+ "step": 79
+ },
+ {
+ "epoch": 1.0963302752293578,
+ "grad_norm": 0.5486436486244202,
+ "learning_rate": 4.000000000000001e-06,
+ "loss": 0.0285,
+ "step": 80
+ },
+ {
+ "epoch": 1.110091743119266,
+ "grad_norm": 0.24868083000183105,
+ "learning_rate": 4.05e-06,
+ "loss": 0.0301,
+ "step": 81
+ },
+ {
+ "epoch": 1.1238532110091743,
+ "grad_norm": 0.3448987305164337,
+ "learning_rate": 4.1e-06,
+ "loss": 0.0261,
+ "step": 82
+ },
+ {
+ "epoch": 1.1376146788990826,
+ "grad_norm": 0.3330553472042084,
+ "learning_rate": 4.15e-06,
+ "loss": 0.03,
+ "step": 83
+ },
+ {
+ "epoch": 1.151376146788991,
+ "grad_norm": 0.3379852771759033,
+ "learning_rate": 4.2000000000000004e-06,
+ "loss": 0.0286,
+ "step": 84
+ },
+ {
+ "epoch": 1.165137614678899,
+ "grad_norm": 0.23678433895111084,
+ "learning_rate": 4.25e-06,
+ "loss": 0.0245,
+ "step": 85
+ },
+ {
+ "epoch": 1.1788990825688073,
+ "grad_norm": 0.24502314627170563,
+ "learning_rate": 4.3e-06,
+ "loss": 0.0267,
+ "step": 86
+ },
+ {
+ "epoch": 1.1926605504587156,
+ "grad_norm": 0.34288597106933594,
+ "learning_rate": 4.350000000000001e-06,
+ "loss": 0.0259,
+ "step": 87
+ },
+ {
+ "epoch": 1.2064220183486238,
+ "grad_norm": 0.20595045387744904,
+ "learning_rate": 4.4e-06,
+ "loss": 0.0241,
+ "step": 88
+ },
+ {
+ "epoch": 1.2201834862385321,
+ "grad_norm": 0.28399360179901123,
+ "learning_rate": 4.450000000000001e-06,
+ "loss": 0.0277,
+ "step": 89
+ },
+ {
+ "epoch": 1.2339449541284404,
+ "grad_norm": 0.273929238319397,
+ "learning_rate": 4.5e-06,
+ "loss": 0.0261,
+ "step": 90
+ },
+ {
+ "epoch": 1.2477064220183487,
+ "grad_norm": 0.24288330972194672,
+ "learning_rate": 4.5500000000000005e-06,
+ "loss": 0.0267,
+ "step": 91
+ },
+ {
+ "epoch": 1.261467889908257,
+ "grad_norm": 0.42502400279045105,
+ "learning_rate": 4.600000000000001e-06,
+ "loss": 0.0252,
+ "step": 92
+ },
+ {
+ "epoch": 1.2752293577981653,
+ "grad_norm": 0.17670072615146637,
+ "learning_rate": 4.65e-06,
+ "loss": 0.0231,
+ "step": 93
+ },
+ {
+ "epoch": 1.2889908256880733,
+ "grad_norm": 0.23585423827171326,
+ "learning_rate": 4.7e-06,
+ "loss": 0.0213,
+ "step": 94
+ },
+ {
+ "epoch": 1.3027522935779816,
+ "grad_norm": 0.32558879256248474,
+ "learning_rate": 4.75e-06,
+ "loss": 0.0226,
+ "step": 95
+ },
+ {
+ "epoch": 1.31651376146789,
+ "grad_norm": 0.2908780872821808,
+ "learning_rate": 4.800000000000001e-06,
+ "loss": 0.0274,
+ "step": 96
+ },
+ {
+ "epoch": 1.3302752293577982,
+ "grad_norm": 0.3725607991218567,
+ "learning_rate": 4.85e-06,
+ "loss": 0.0241,
+ "step": 97
+ },
+ {
+ "epoch": 1.3440366972477065,
+ "grad_norm": 0.3833301067352295,
+ "learning_rate": 4.9000000000000005e-06,
+ "loss": 0.0252,
+ "step": 98
+ },
+ {
+ "epoch": 1.3577981651376148,
+ "grad_norm": 0.32000771164894104,
+ "learning_rate": 4.95e-06,
+ "loss": 0.0236,
+ "step": 99
+ },
+ {
+ "epoch": 1.3715596330275228,
+ "grad_norm": 0.3203510344028473,
+ "learning_rate": 5e-06,
+ "loss": 0.0235,
+ "step": 100
+ },
+ {
+ "epoch": 1.385321100917431,
+ "grad_norm": 0.20016217231750488,
+ "learning_rate": 4.999888074163108e-06,
+ "loss": 0.0218,
+ "step": 101
+ },
+ {
+ "epoch": 1.3990825688073394,
+ "grad_norm": 0.3012026250362396,
+ "learning_rate": 4.999552306674345e-06,
+ "loss": 0.0234,
+ "step": 102
+ },
+ {
+ "epoch": 1.4128440366972477,
+ "grad_norm": 0.22818222641944885,
+ "learning_rate": 4.998992727598557e-06,
+ "loss": 0.0212,
+ "step": 103
+ },
+ {
+ "epoch": 1.426605504587156,
+ "grad_norm": 0.24128392338752747,
+ "learning_rate": 4.998209387040829e-06,
+ "loss": 0.0207,
+ "step": 104
+ },
+ {
+ "epoch": 1.4403669724770642,
+ "grad_norm": 0.3475555181503296,
+ "learning_rate": 4.9972023551419995e-06,
+ "loss": 0.0246,
+ "step": 105
+ },
+ {
+ "epoch": 1.4541284403669725,
+ "grad_norm": 0.21223627030849457,
+ "learning_rate": 4.995971722072379e-06,
+ "loss": 0.0237,
+ "step": 106
+ },
+ {
+ "epoch": 1.4678899082568808,
+ "grad_norm": 0.3776336908340454,
+ "learning_rate": 4.9945175980236745e-06,
+ "loss": 0.0218,
+ "step": 107
+ },
+ {
+ "epoch": 1.481651376146789,
+ "grad_norm": 0.25027793645858765,
+ "learning_rate": 4.992840113199131e-06,
+ "loss": 0.0214,
+ "step": 108
+ },
+ {
+ "epoch": 1.4954128440366974,
+ "grad_norm": 0.2559281885623932,
+ "learning_rate": 4.990939417801859e-06,
+ "loss": 0.0213,
+ "step": 109
+ },
+ {
+ "epoch": 1.5091743119266054,
+ "grad_norm": 0.33694687485694885,
+ "learning_rate": 4.988815682021398e-06,
+ "loss": 0.0182,
+ "step": 110
+ },
+ {
+ "epoch": 1.5229357798165137,
+ "grad_norm": 0.3175147473812103,
+ "learning_rate": 4.986469096018472e-06,
+ "loss": 0.0213,
+ "step": 111
+ },
+ {
+ "epoch": 1.536697247706422,
+ "grad_norm": 0.48777177929878235,
+ "learning_rate": 4.983899869907963e-06,
+ "loss": 0.0186,
+ "step": 112
+ },
+ {
+ "epoch": 1.5504587155963303,
+ "grad_norm": 0.34633246064186096,
+ "learning_rate": 4.981108233740096e-06,
+ "loss": 0.0224,
+ "step": 113
+ },
+ {
+ "epoch": 1.5642201834862384,
+ "grad_norm": 0.17302758991718292,
+ "learning_rate": 4.978094437479843e-06,
+ "loss": 0.0189,
+ "step": 114
+ },
+ {
+ "epoch": 1.5779816513761467,
+ "grad_norm": 0.24110931158065796,
+ "learning_rate": 4.97485875098454e-06,
+ "loss": 0.0174,
+ "step": 115
+ },
+ {
+ "epoch": 1.591743119266055,
+ "grad_norm": 0.2965283989906311,
+ "learning_rate": 4.971401463979722e-06,
+ "loss": 0.0174,
+ "step": 116
+ },
+ {
+ "epoch": 1.6055045871559632,
+ "grad_norm": 0.34184500575065613,
+ "learning_rate": 4.967722886033181e-06,
+ "loss": 0.0162,
+ "step": 117
+ },
+ {
+ "epoch": 1.6192660550458715,
+ "grad_norm": 0.3942873179912567,
+ "learning_rate": 4.963823346527249e-06,
+ "loss": 0.0176,
+ "step": 118
+ },
+ {
+ "epoch": 1.6330275229357798,
+ "grad_norm": 0.21840929985046387,
+ "learning_rate": 4.959703194629304e-06,
+ "loss": 0.018,
+ "step": 119
+ },
+ {
+ "epoch": 1.646788990825688,
+ "grad_norm": 0.23672759532928467,
+ "learning_rate": 4.955362799260507e-06,
+ "loss": 0.0204,
+ "step": 120
+ },
+ {
+ "epoch": 1.6605504587155964,
+ "grad_norm": 0.18776445090770721,
+ "learning_rate": 4.950802549062764e-06,
+ "loss": 0.0173,
+ "step": 121
+ },
+ {
+ "epoch": 1.6743119266055047,
+ "grad_norm": 0.279297411441803,
+ "learning_rate": 4.946022852363932e-06,
+ "loss": 0.0177,
+ "step": 122
+ },
+ {
+ "epoch": 1.688073394495413,
+ "grad_norm": 0.20893588662147522,
+ "learning_rate": 4.9410241371412525e-06,
+ "loss": 0.0189,
+ "step": 123
+ },
+ {
+ "epoch": 1.7018348623853212,
+ "grad_norm": 0.26409876346588135,
+ "learning_rate": 4.935806850983034e-06,
+ "loss": 0.0195,
+ "step": 124
+ },
+ {
+ "epoch": 1.7155963302752295,
+ "grad_norm": 0.23863324522972107,
+ "learning_rate": 4.9303714610485705e-06,
+ "loss": 0.0151,
+ "step": 125
+ },
+ {
+ "epoch": 1.7293577981651376,
+ "grad_norm": 0.25934213399887085,
+ "learning_rate": 4.924718454026318e-06,
+ "loss": 0.0157,
+ "step": 126
+ },
+ {
+ "epoch": 1.7431192660550459,
+ "grad_norm": 0.2923693358898163,
+ "learning_rate": 4.918848336090309e-06,
+ "loss": 0.0155,
+ "step": 127
+ },
+ {
+ "epoch": 1.7568807339449541,
+ "grad_norm": 0.16973069310188293,
+ "learning_rate": 4.912761632854834e-06,
+ "loss": 0.0156,
+ "step": 128
+ },
+ {
+ "epoch": 1.7706422018348624,
+ "grad_norm": 0.25908610224723816,
+ "learning_rate": 4.906458889327375e-06,
+ "loss": 0.0159,
+ "step": 129
+ },
+ {
+ "epoch": 1.7844036697247705,
+ "grad_norm": 0.27444136142730713,
+ "learning_rate": 4.899940669859807e-06,
+ "loss": 0.0159,
+ "step": 130
+ },
+ {
+ "epoch": 1.7981651376146788,
+ "grad_norm": 0.25167539715766907,
+ "learning_rate": 4.893207558097867e-06,
+ "loss": 0.0147,
+ "step": 131
+ },
+ {
+ "epoch": 1.811926605504587,
+ "grad_norm": 0.25777608156204224,
+ "learning_rate": 4.8862601569288885e-06,
+ "loss": 0.016,
+ "step": 132
+ },
+ {
+ "epoch": 1.8256880733944953,
+ "grad_norm": 0.24190428853034973,
+ "learning_rate": 4.879099088427824e-06,
+ "loss": 0.0128,
+ "step": 133
+ },
+ {
+ "epoch": 1.8394495412844036,
+ "grad_norm": 0.17028410732746124,
+ "learning_rate": 4.871724993801541e-06,
+ "loss": 0.0145,
+ "step": 134
+ },
+ {
+ "epoch": 1.853211009174312,
+ "grad_norm": 0.25141075253486633,
+ "learning_rate": 4.864138533331411e-06,
+ "loss": 0.0144,
+ "step": 135
+ },
+ {
+ "epoch": 1.8669724770642202,
+ "grad_norm": 0.6400424838066101,
+ "learning_rate": 4.8563403863141825e-06,
+ "loss": 0.0128,
+ "step": 136
+ },
+ {
+ "epoch": 1.8807339449541285,
+ "grad_norm": 0.22450514137744904,
+ "learning_rate": 4.84833125100116e-06,
+ "loss": 0.0167,
+ "step": 137
+ },
+ {
+ "epoch": 1.8944954128440368,
+ "grad_norm": 0.19940154254436493,
+ "learning_rate": 4.840111844535682e-06,
+ "loss": 0.0154,
+ "step": 138
+ },
+ {
+ "epoch": 1.908256880733945,
+ "grad_norm": 0.2946206033229828,
+ "learning_rate": 4.8316829028889076e-06,
+ "loss": 0.0158,
+ "step": 139
+ },
+ {
+ "epoch": 1.9220183486238533,
+ "grad_norm": 0.4694023132324219,
+ "learning_rate": 4.823045180793914e-06,
+ "loss": 0.0153,
+ "step": 140
+ },
+ {
+ "epoch": 1.9357798165137616,
+ "grad_norm": 0.23655226826667786,
+ "learning_rate": 4.8141994516781196e-06,
+ "loss": 0.0109,
+ "step": 141
+ },
+ {
+ "epoch": 1.9495412844036697,
+ "grad_norm": 0.23846553266048431,
+ "learning_rate": 4.805146507594034e-06,
+ "loss": 0.0129,
+ "step": 142
+ },
+ {
+ "epoch": 1.963302752293578,
+ "grad_norm": 0.2763686180114746,
+ "learning_rate": 4.7958871591483305e-06,
+ "loss": 0.0124,
+ "step": 143
+ },
+ {
+ "epoch": 1.9770642201834863,
+ "grad_norm": 0.23522883653640747,
+ "learning_rate": 4.786422235429269e-06,
+ "loss": 0.0125,
+ "step": 144
+ },
+ {
+ "epoch": 1.9908256880733946,
+ "grad_norm": 0.27468597888946533,
+ "learning_rate": 4.776752583932455e-06,
+ "loss": 0.0137,
+ "step": 145
+ },
+ {
+ "epoch": 2.0,
+ "grad_norm": 0.26614058017730713,
+ "learning_rate": 4.766879070484957e-06,
+ "loss": 0.0113,
+ "step": 146
+ },
+ {
+ "epoch": 2.0137614678899083,
+ "grad_norm": 0.2862620949745178,
+ "learning_rate": 4.756802579167781e-06,
+ "loss": 0.0104,
+ "step": 147
+ },
+ {
+ "epoch": 2.0275229357798166,
+ "grad_norm": 0.1770494133234024,
+ "learning_rate": 4.746524012236706e-06,
+ "loss": 0.0102,
+ "step": 148
+ },
+ {
+ "epoch": 2.041284403669725,
+ "grad_norm": 0.1850830465555191,
+ "learning_rate": 4.736044290041496e-06,
+ "loss": 0.0125,
+ "step": 149
+ },
+ {
+ "epoch": 2.055045871559633,
+ "grad_norm": 0.20552438497543335,
+ "learning_rate": 4.725364350943492e-06,
+ "loss": 0.0091,
+ "step": 150
+ },
+ {
+ "epoch": 2.0688073394495414,
+ "grad_norm": 0.12979158759117126,
+ "learning_rate": 4.714485151231593e-06,
+ "loss": 0.0113,
+ "step": 151
+ },
+ {
+ "epoch": 2.0825688073394497,
+ "grad_norm": 0.16992178559303284,
+ "learning_rate": 4.703407665036622e-06,
+ "loss": 0.0075,
+ "step": 152
+ },
+ {
+ "epoch": 2.096330275229358,
+ "grad_norm": 0.1473352611064911,
+ "learning_rate": 4.692132884244113e-06,
+ "loss": 0.0091,
+ "step": 153
+ },
+ {
+ "epoch": 2.1100917431192663,
+ "grad_norm": 0.24198868870735168,
+ "learning_rate": 4.680661818405485e-06,
+ "loss": 0.0073,
+ "step": 154
+ },
+ {
+ "epoch": 2.123853211009174,
+ "grad_norm": 0.25488734245300293,
+ "learning_rate": 4.668995494647653e-06,
+ "loss": 0.0098,
+ "step": 155
+ },
+ {
+ "epoch": 2.1376146788990824,
+ "grad_norm": 0.17986920475959778,
+ "learning_rate": 4.657134957581057e-06,
+ "loss": 0.0081,
+ "step": 156
+ },
+ {
+ "epoch": 2.1513761467889907,
+ "grad_norm": 0.22465726733207703,
+ "learning_rate": 4.645081269206128e-06,
+ "loss": 0.0074,
+ "step": 157
+ },
+ {
+ "epoch": 2.165137614678899,
+ "grad_norm": 0.19391046464443207,
+ "learning_rate": 4.632835508818192e-06,
+ "loss": 0.0077,
+ "step": 158
+ },
+ {
+ "epoch": 2.1788990825688073,
+ "grad_norm": 0.25997886061668396,
+ "learning_rate": 4.620398772910833e-06,
+ "loss": 0.0074,
+ "step": 159
+ },
+ {
+ "epoch": 2.1926605504587156,
+ "grad_norm": 0.2422141134738922,
+ "learning_rate": 4.607772175077712e-06,
+ "loss": 0.0103,
+ "step": 160
+ },
+ {
+ "epoch": 2.206422018348624,
+ "grad_norm": 0.2785587012767792,
+ "learning_rate": 4.59495684591285e-06,
+ "loss": 0.0079,
+ "step": 161
+ },
+ {
+ "epoch": 2.220183486238532,
+ "grad_norm": 0.19141560792922974,
+ "learning_rate": 4.581953932909403e-06,
+ "loss": 0.0068,
+ "step": 162
+ },
+ {
+ "epoch": 2.2339449541284404,
+ "grad_norm": 0.152049720287323,
+ "learning_rate": 4.5687646003569055e-06,
+ "loss": 0.0063,
+ "step": 163
+ },
+ {
+ "epoch": 2.2477064220183487,
+ "grad_norm": 0.19313585758209229,
+ "learning_rate": 4.555390029237026e-06,
+ "loss": 0.0079,
+ "step": 164
+ },
+ {
+ "epoch": 2.261467889908257,
+ "grad_norm": 0.19979022443294525,
+ "learning_rate": 4.541831417117815e-06,
+ "loss": 0.0085,
+ "step": 165
+ },
+ {
+ "epoch": 2.2752293577981653,
+ "grad_norm": 0.2902522683143616,
+ "learning_rate": 4.528089978046481e-06,
+ "loss": 0.0082,
+ "step": 166
+ },
+ {
+ "epoch": 2.2889908256880735,
+ "grad_norm": 0.5130491256713867,
+ "learning_rate": 4.514166942440679e-06,
+ "loss": 0.0067,
+ "step": 167
+ },
+ {
+ "epoch": 2.302752293577982,
+ "grad_norm": 0.15980036556720734,
+ "learning_rate": 4.5000635569783365e-06,
+ "loss": 0.0093,
+ "step": 168
+ },
+ {
+ "epoch": 2.31651376146789,
+ "grad_norm": 0.19247184693813324,
+ "learning_rate": 4.4857810844860325e-06,
+ "loss": 0.0101,
+ "step": 169
+ },
+ {
+ "epoch": 2.330275229357798,
+ "grad_norm": 0.19173133373260498,
+ "learning_rate": 4.471320803825915e-06,
+ "loss": 0.0065,
+ "step": 170
+ },
+ {
+ "epoch": 2.3440366972477062,
+ "grad_norm": 0.1779325157403946,
+ "learning_rate": 4.4566840097811956e-06,
+ "loss": 0.0059,
+ "step": 171
+ },
+ {
+ "epoch": 2.3577981651376145,
+ "grad_norm": 0.1639624983072281,
+ "learning_rate": 4.4418720129402145e-06,
+ "loss": 0.0052,
+ "step": 172
+ },
+ {
+ "epoch": 2.371559633027523,
+ "grad_norm": 0.2355070263147354,
+ "learning_rate": 4.426886139579083e-06,
+ "loss": 0.0046,
+ "step": 173
+ },
+ {
+ "epoch": 2.385321100917431,
+ "grad_norm": 0.20461603999137878,
+ "learning_rate": 4.411727731542937e-06,
+ "loss": 0.0071,
+ "step": 174
+ },
+ {
+ "epoch": 2.3990825688073394,
+ "grad_norm": 0.20251843333244324,
+ "learning_rate": 4.39639814612578e-06,
+ "loss": 0.0054,
+ "step": 175
+ },
+ {
+ "epoch": 2.4128440366972477,
+ "grad_norm": 0.17055197060108185,
+ "learning_rate": 4.3808987559489536e-06,
+ "loss": 0.0052,
+ "step": 176
+ },
+ {
+ "epoch": 2.426605504587156,
+ "grad_norm": 0.2508833706378937,
+ "learning_rate": 4.365230948838232e-06,
+ "loss": 0.0071,
+ "step": 177
+ },
+ {
+ "epoch": 2.4403669724770642,
+ "grad_norm": 0.21865351498126984,
+ "learning_rate": 4.349396127699552e-06,
+ "loss": 0.0068,
+ "step": 178
+ },
+ {
+ "epoch": 2.4541284403669725,
+ "grad_norm": 0.225867360830307,
+ "learning_rate": 4.3333957103934025e-06,
+ "loss": 0.0072,
+ "step": 179
+ },
+ {
+ "epoch": 2.467889908256881,
+ "grad_norm": 0.48035845160484314,
+ "learning_rate": 4.317231129607859e-06,
+ "loss": 0.006,
+ "step": 180
+ },
+ {
+ "epoch": 2.481651376146789,
+ "grad_norm": 0.17027413845062256,
+ "learning_rate": 4.30090383273031e-06,
+ "loss": 0.0043,
+ "step": 181
+ },
+ {
+ "epoch": 2.4954128440366974,
+ "grad_norm": 0.2660333216190338,
+ "learning_rate": 4.2844152817178476e-06,
+ "loss": 0.0082,
+ "step": 182
+ },
+ {
+ "epoch": 2.5091743119266052,
+ "grad_norm": 0.2485552728176117,
+ "learning_rate": 4.267766952966369e-06,
+ "loss": 0.0059,
+ "step": 183
+ },
+ {
+ "epoch": 2.522935779816514,
+ "grad_norm": 0.2048122137784958,
+ "learning_rate": 4.2509603371783776e-06,
+ "loss": 0.0071,
+ "step": 184
+ },
+ {
+ "epoch": 2.536697247706422,
+ "grad_norm": 0.1725567877292633,
+ "learning_rate": 4.233996939229502e-06,
+ "loss": 0.0053,
+ "step": 185
+ },
+ {
+ "epoch": 2.5504587155963305,
+ "grad_norm": 0.14592835307121277,
+ "learning_rate": 4.216878278033753e-06,
+ "loss": 0.0044,
+ "step": 186
+ },
+ {
+ "epoch": 2.5642201834862384,
+ "grad_norm": 0.2868310213088989,
+ "learning_rate": 4.199605886407515e-06,
+ "loss": 0.0057,
+ "step": 187
+ },
+ {
+ "epoch": 2.5779816513761467,
+ "grad_norm": 0.5002567172050476,
+ "learning_rate": 4.1821813109322975e-06,
+ "loss": 0.0072,
+ "step": 188
+ },
+ {
+ "epoch": 2.591743119266055,
+ "grad_norm": 0.199919655919075,
+ "learning_rate": 4.164606111816256e-06,
+ "loss": 0.0043,
+ "step": 189
+ },
+ {
+ "epoch": 2.6055045871559632,
+ "grad_norm": 0.17280228435993195,
+ "learning_rate": 4.146881862754485e-06,
+ "loss": 0.0048,
+ "step": 190
+ },
+ {
+ "epoch": 2.6192660550458715,
+ "grad_norm": 0.17505577206611633,
+ "learning_rate": 4.129010150788112e-06,
+ "loss": 0.0037,
+ "step": 191
+ },
+ {
+ "epoch": 2.63302752293578,
+ "grad_norm": 0.21955189108848572,
+ "learning_rate": 4.110992576162193e-06,
+ "loss": 0.0044,
+ "step": 192
+ },
+ {
+ "epoch": 2.646788990825688,
+ "grad_norm": 0.22081787884235382,
+ "learning_rate": 4.092830752182423e-06,
+ "loss": 0.0036,
+ "step": 193
+ },
+ {
+ "epoch": 2.6605504587155964,
+ "grad_norm": 0.26361310482025146,
+ "learning_rate": 4.074526305070679e-06,
+ "loss": 0.0062,
+ "step": 194
+ },
+ {
+ "epoch": 2.6743119266055047,
+ "grad_norm": 0.15528841316699982,
+ "learning_rate": 4.056080873819412e-06,
+ "loss": 0.0029,
+ "step": 195
+ },
+ {
+ "epoch": 2.688073394495413,
+ "grad_norm": 6.002184867858887,
+ "learning_rate": 4.037496110044885e-06,
+ "loss": 0.0062,
+ "step": 196
+ },
+ {
+ "epoch": 2.7018348623853212,
+ "grad_norm": 0.12827160954475403,
+ "learning_rate": 4.018773677839289e-06,
+ "loss": 0.0025,
+ "step": 197
+ },
+ {
+ "epoch": 2.7155963302752295,
+ "grad_norm": 0.21857935190200806,
+ "learning_rate": 3.999915253621739e-06,
+ "loss": 0.004,
+ "step": 198
+ },
+ {
+ "epoch": 2.729357798165138,
+ "grad_norm": 0.231048583984375,
+ "learning_rate": 3.980922525988167e-06,
+ "loss": 0.0034,
+ "step": 199
+ },
+ {
+ "epoch": 2.7431192660550456,
+ "grad_norm": 0.16589054465293884,
+ "learning_rate": 3.961797195560118e-06,
+ "loss": 0.0045,
+ "step": 200
+ },
+ {
+ "epoch": 2.7568807339449544,
+ "grad_norm": 0.2721656858921051,
+ "learning_rate": 3.942540974832486e-06,
+ "loss": 0.0056,
+ "step": 201
+ },
+ {
+ "epoch": 2.770642201834862,
+ "grad_norm": 0.12930598855018616,
+ "learning_rate": 3.9231555880201655e-06,
+ "loss": 0.0037,
+ "step": 202
+ },
+ {
+ "epoch": 2.7844036697247705,
+ "grad_norm": 0.299055814743042,
+ "learning_rate": 3.903642770903671e-06,
+ "loss": 0.0036,
+ "step": 203
+ },
+ {
+ "epoch": 2.7981651376146788,
+ "grad_norm": 0.15592966973781586,
+ "learning_rate": 3.884004270673711e-06,
+ "loss": 0.0023,
+ "step": 204
+ },
+ {
+ "epoch": 2.811926605504587,
+ "grad_norm": 0.10496195405721664,
+ "learning_rate": 3.864241845774746e-06,
+ "loss": 0.0029,
+ "step": 205
+ },
+ {
+ "epoch": 2.8256880733944953,
+ "grad_norm": 0.1604638695716858,
+ "learning_rate": 3.844357265747531e-06,
+ "loss": 0.0032,
+ "step": 206
+ },
+ {
+ "epoch": 2.8394495412844036,
+ "grad_norm": 0.1436438113451004,
+ "learning_rate": 3.8243523110706736e-06,
+ "loss": 0.0035,
+ "step": 207
+ },
+ {
+ "epoch": 2.853211009174312,
+ "grad_norm": 0.176204651594162,
+ "learning_rate": 3.8042287730012117e-06,
+ "loss": 0.0022,
+ "step": 208
+ },
+ {
+ "epoch": 2.86697247706422,
+ "grad_norm": 0.3500923216342926,
+ "learning_rate": 3.7839884534142157e-06,
+ "loss": 0.0022,
+ "step": 209
+ },
+ {
+ "epoch": 2.8807339449541285,
+ "grad_norm": 0.2319999784231186,
+ "learning_rate": 3.7636331646414524e-06,
+ "loss": 0.003,
+ "step": 210
+ },
+ {
+ "epoch": 2.8944954128440368,
+ "grad_norm": 0.1358226090669632,
+ "learning_rate": 3.7431647293091076e-06,
+ "loss": 0.0023,
+ "step": 211
+ },
+ {
+ "epoch": 2.908256880733945,
+ "grad_norm": 0.1323612779378891,
+ "learning_rate": 3.7225849801745835e-06,
+ "loss": 0.0021,
+ "step": 212
+ },
+ {
+ "epoch": 2.9220183486238533,
+ "grad_norm": 0.10968377441167831,
+ "learning_rate": 3.701895759962397e-06,
+ "loss": 0.002,
+ "step": 213
+ },
+ {
+ "epoch": 2.9357798165137616,
+ "grad_norm": 0.11163649708032608,
+ "learning_rate": 3.6810989211991777e-06,
+ "loss": 0.0015,
+ "step": 214
+ },
+ {
+ "epoch": 2.9495412844036695,
+ "grad_norm": 0.6103344559669495,
+ "learning_rate": 3.6601963260477923e-06,
+ "loss": 0.0051,
+ "step": 215
+ },
+ {
+ "epoch": 2.963302752293578,
+ "grad_norm": 0.1300484985113144,
+ "learning_rate": 3.6391898461406045e-06,
+ "loss": 0.0018,
+ "step": 216
+ }
+ ],
+ "logging_steps": 1,
+ "max_steps": 432,
+ "num_input_tokens_seen": 0,
+ "num_train_epochs": 6,
+ "save_steps": 72,
+ "stateful_callbacks": {
+ "TrainerControl": {
+ "args": {
+ "should_epoch_stop": false,
+ "should_evaluate": false,
+ "should_log": false,
+ "should_save": true,
+ "should_training_stop": false
+ },
+ "attributes": {}
+ }
+ },
+ "total_flos": 1.384563997852618e+19,
+ "train_batch_size": 4,
+ "trial_name": null,
+ "trial_params": null
+}
diff --git a/checkpoint-216/training_args.bin b/checkpoint-216/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..6b48f4b52f5bfff81ec8534f6510460a8014f336
--- /dev/null
+++ b/checkpoint-216/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:cbafef0713d4b794ca3a92a04d378baaf3fa7647c9af95948bfb2ef7c0e02eda
+size 7928
diff --git a/checkpoint-216/zero_to_fp32.py b/checkpoint-216/zero_to_fp32.py
new file mode 100644
index 0000000000000000000000000000000000000000..24cc342e78d1a006c782b3a4cd68d9ce786d8fd8
--- /dev/null
+++ b/checkpoint-216/zero_to_fp32.py
@@ -0,0 +1,604 @@
+#!/usr/bin/env python
+
+# Copyright (c) Microsoft Corporation.
+# SPDX-License-Identifier: Apache-2.0
+
+# DeepSpeed Team
+
+# This script extracts fp32 consolidated weights from ZeRO 1, 2 and 3 DeepSpeed checkpoints. It gets
+# copied into the top level checkpoint dir, so the user can easily do the conversion at any point in
+# the future. Once extracted, the weights don't require DeepSpeed and can be used in any
+# application.
+#
+# example: python zero_to_fp32.py . pytorch_model.bin
+
+import argparse
+import torch
+import glob
+import math
+import os
+import re
+from collections import OrderedDict
+from dataclasses import dataclass
+
+# while this script doesn't use deepspeed to recover data, since the checkpoints are pickled with
+# DeepSpeed data structures it has to be available in the current python environment.
+from deepspeed.utils import logger
+from deepspeed.checkpoint.constants import (DS_VERSION, OPTIMIZER_STATE_DICT, SINGLE_PARTITION_OF_FP32_GROUPS,
+ FP32_FLAT_GROUPS, ZERO_STAGE, PARTITION_COUNT, PARAM_SHAPES, BUFFER_NAMES,
+ FROZEN_PARAM_SHAPES, FROZEN_PARAM_FRAGMENTS)
+
+
+@dataclass
+class zero_model_state:
+ buffers: dict()
+ param_shapes: dict()
+ shared_params: list
+ ds_version: int
+ frozen_param_shapes: dict()
+ frozen_param_fragments: dict()
+
+
+debug = 0
+
+# load to cpu
+device = torch.device('cpu')
+
+
+def atoi(text):
+ return int(text) if text.isdigit() else text
+
+
+def natural_keys(text):
+ '''
+ alist.sort(key=natural_keys) sorts in human order
+ http://nedbatchelder.com/blog/200712/human_sorting.html
+ (See Toothy's implementation in the comments)
+ '''
+ return [atoi(c) for c in re.split(r'(\d+)', text)]
+
+
+def get_model_state_file(checkpoint_dir, zero_stage):
+ if not os.path.isdir(checkpoint_dir):
+ raise FileNotFoundError(f"Directory '{checkpoint_dir}' doesn't exist")
+
+ # there should be only one file
+ if zero_stage <= 2:
+ file = os.path.join(checkpoint_dir, "mp_rank_00_model_states.pt")
+ elif zero_stage == 3:
+ file = os.path.join(checkpoint_dir, "zero_pp_rank_0_mp_rank_00_model_states.pt")
+
+ if not os.path.exists(file):
+ raise FileNotFoundError(f"can't find model states file at '{file}'")
+
+ return file
+
+
+def get_checkpoint_files(checkpoint_dir, glob_pattern):
+ # XXX: need to test that this simple glob rule works for multi-node setup too
+ ckpt_files = sorted(glob.glob(os.path.join(checkpoint_dir, glob_pattern)), key=natural_keys)
+
+ if len(ckpt_files) == 0:
+ raise FileNotFoundError(f"can't find {glob_pattern} files in directory '{checkpoint_dir}'")
+
+ return ckpt_files
+
+
+def get_optim_files(checkpoint_dir):
+ return get_checkpoint_files(checkpoint_dir, "*_optim_states.pt")
+
+
+def get_model_state_files(checkpoint_dir):
+ return get_checkpoint_files(checkpoint_dir, "*_model_states.pt")
+
+
+def parse_model_states(files):
+ zero_model_states = []
+ for file in files:
+ state_dict = torch.load(file, map_location=device)
+
+ if BUFFER_NAMES not in state_dict:
+ raise ValueError(f"{file} is not a model state checkpoint")
+ buffer_names = state_dict[BUFFER_NAMES]
+ if debug:
+ print("Found buffers:", buffer_names)
+
+ # recover just the buffers while restoring them to fp32 if they were saved in fp16
+ buffers = {k: v.float() for k, v in state_dict["module"].items() if k in buffer_names}
+ param_shapes = state_dict[PARAM_SHAPES]
+
+ # collect parameters that are included in param_shapes
+ param_names = []
+ for s in param_shapes:
+ for name in s.keys():
+ param_names.append(name)
+
+ # update with frozen parameters
+ frozen_param_shapes = state_dict.get(FROZEN_PARAM_SHAPES, None)
+ if frozen_param_shapes is not None:
+ if debug:
+ print(f"Found frozen_param_shapes: {frozen_param_shapes}")
+ param_names += list(frozen_param_shapes.keys())
+
+ # handle shared params
+ shared_params = [[k, v] for k, v in state_dict["shared_params"].items()]
+
+ ds_version = state_dict.get(DS_VERSION, None)
+
+ frozen_param_fragments = state_dict.get(FROZEN_PARAM_FRAGMENTS, None)
+
+ z_model_state = zero_model_state(buffers=buffers,
+ param_shapes=param_shapes,
+ shared_params=shared_params,
+ ds_version=ds_version,
+ frozen_param_shapes=frozen_param_shapes,
+ frozen_param_fragments=frozen_param_fragments)
+ zero_model_states.append(z_model_state)
+
+ return zero_model_states
+
+
+def parse_optim_states(files, ds_checkpoint_dir):
+
+ total_files = len(files)
+ state_dicts = []
+ for f in files:
+ state_dict = torch.load(f, map_location=device)
+        # immediately discard the two potentially huge optimizer states as we only care for fp32 master weights
+ # and also handle the case where it was already removed by another helper script
+ state_dict["optimizer_state_dict"].pop("optimizer_state_dict", None)
+ state_dicts.append(state_dict)
+
+ if not ZERO_STAGE in state_dicts[0][OPTIMIZER_STATE_DICT]:
+ raise ValueError(f"{files[0]} is not a zero checkpoint")
+ zero_stage = state_dicts[0][OPTIMIZER_STATE_DICT][ZERO_STAGE]
+ world_size = state_dicts[0][OPTIMIZER_STATE_DICT][PARTITION_COUNT]
+
+ # For ZeRO-2 each param group can have different partition_count as data parallelism for expert
+ # parameters can be different from data parallelism for non-expert parameters. So we can just
+ # use the max of the partition_count to get the dp world_size.
+
+ if type(world_size) is list:
+ world_size = max(world_size)
+
+ if world_size != total_files:
+ raise ValueError(
+ f"Expected {world_size} of '*_optim_states.pt' under '{ds_checkpoint_dir}' but found {total_files} files. "
+ "Possibly due to an overwrite of an old checkpoint, or a checkpoint didn't get saved by one or more processes."
+ )
+
+ # the groups are named differently in each stage
+ if zero_stage <= 2:
+ fp32_groups_key = SINGLE_PARTITION_OF_FP32_GROUPS
+ elif zero_stage == 3:
+ fp32_groups_key = FP32_FLAT_GROUPS
+ else:
+ raise ValueError(f"unknown zero stage {zero_stage}")
+
+ if zero_stage <= 2:
+ fp32_flat_groups = [state_dicts[i][OPTIMIZER_STATE_DICT][fp32_groups_key] for i in range(len(state_dicts))]
+ elif zero_stage == 3:
+ # if there is more than one param group, there will be multiple flattened tensors - one
+ # flattened tensor per group - for simplicity merge them into a single tensor
+ #
+ # XXX: could make the script more memory efficient for when there are multiple groups - it
+ # will require matching the sub-lists of param_shapes for each param group flattened tensor
+
+ fp32_flat_groups = [
+ torch.cat(state_dicts[i][OPTIMIZER_STATE_DICT][fp32_groups_key], 0) for i in range(len(state_dicts))
+ ]
+
+ return zero_stage, world_size, fp32_flat_groups
+
+
+def _get_fp32_state_dict_from_zero_checkpoint(ds_checkpoint_dir, exclude_frozen_parameters):
+ """
+ Returns fp32 state_dict reconstructed from ds checkpoint
+
+ Args:
+ - ``ds_checkpoint_dir``: path to the deepspeed checkpoint folder (where the optimizer files are)
+
+ """
+ print(f"Processing zero checkpoint '{ds_checkpoint_dir}'")
+
+ optim_files = get_optim_files(ds_checkpoint_dir)
+ zero_stage, world_size, fp32_flat_groups = parse_optim_states(optim_files, ds_checkpoint_dir)
+ print(f"Detected checkpoint of type zero stage {zero_stage}, world_size: {world_size}")
+
+ model_files = get_model_state_files(ds_checkpoint_dir)
+
+ zero_model_states = parse_model_states(model_files)
+ print(f'Parsing checkpoint created by deepspeed=={zero_model_states[0].ds_version}')
+
+ if zero_stage <= 2:
+ return _get_fp32_state_dict_from_zero2_checkpoint(world_size, fp32_flat_groups, zero_model_states,
+ exclude_frozen_parameters)
+ elif zero_stage == 3:
+ return _get_fp32_state_dict_from_zero3_checkpoint(world_size, fp32_flat_groups, zero_model_states,
+ exclude_frozen_parameters)
+
+
+def _zero2_merge_frozen_params(state_dict, zero_model_states):
+ if zero_model_states[0].frozen_param_shapes is None or len(zero_model_states[0].frozen_param_shapes) == 0:
+ return
+
+ frozen_param_shapes = zero_model_states[0].frozen_param_shapes
+ frozen_param_fragments = zero_model_states[0].frozen_param_fragments
+
+ if debug:
+ num_elem = sum(s.numel() for s in frozen_param_shapes.values())
+ print(f'rank 0: {FROZEN_PARAM_SHAPES}.numel = {num_elem}')
+
+ wanted_params = len(frozen_param_shapes)
+ wanted_numel = sum(s.numel() for s in frozen_param_shapes.values())
+ avail_numel = sum([p.numel() for p in frozen_param_fragments.values()])
+ print(f'Frozen params: Have {avail_numel} numels to process.')
+ print(f'Frozen params: Need {wanted_numel} numels in {wanted_params} params')
+
+ total_params = 0
+ total_numel = 0
+ for name, shape in frozen_param_shapes.items():
+ total_params += 1
+ unpartitioned_numel = shape.numel()
+ total_numel += unpartitioned_numel
+
+ state_dict[name] = frozen_param_fragments[name]
+
+ if debug:
+ print(f"{name} full shape: {shape} unpartitioned numel {unpartitioned_numel} ")
+
+ print(f"Reconstructed Frozen fp32 state dict with {total_params} params {total_numel} elements")
+
+
+def _has_callable(obj, fn):
+ attr = getattr(obj, fn, None)
+ return callable(attr)
+
+
+def _zero2_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states):
+ param_shapes = zero_model_states[0].param_shapes
+
+ # Reconstruction protocol:
+ #
+ # XXX: document this
+
+ if debug:
+ for i in range(world_size):
+ for j in range(len(fp32_flat_groups[0])):
+ print(f"{FP32_FLAT_GROUPS}[{i}][{j}].shape={fp32_flat_groups[i][j].shape}")
+
+ # XXX: memory usage doubles here (zero2)
+ num_param_groups = len(fp32_flat_groups[0])
+ merged_single_partition_of_fp32_groups = []
+ for i in range(num_param_groups):
+ merged_partitions = [sd[i] for sd in fp32_flat_groups]
+ full_single_fp32_vector = torch.cat(merged_partitions, 0)
+ merged_single_partition_of_fp32_groups.append(full_single_fp32_vector)
+ avail_numel = sum(
+ [full_single_fp32_vector.numel() for full_single_fp32_vector in merged_single_partition_of_fp32_groups])
+
+ if debug:
+ wanted_params = sum([len(shapes) for shapes in param_shapes])
+ wanted_numel = sum([sum(shape.numel() for shape in shapes.values()) for shapes in param_shapes])
+ # not asserting if there is a mismatch due to possible padding
+ print(f"Have {avail_numel} numels to process.")
+ print(f"Need {wanted_numel} numels in {wanted_params} params.")
+
+ # params
+ # XXX: for huge models that can't fit into the host's RAM we will have to recode this to support
+ # out-of-core computing solution
+ total_numel = 0
+ total_params = 0
+ for shapes, full_single_fp32_vector in zip(param_shapes, merged_single_partition_of_fp32_groups):
+ offset = 0
+ avail_numel = full_single_fp32_vector.numel()
+ for name, shape in shapes.items():
+
+ unpartitioned_numel = shape.numel() if _has_callable(shape, 'numel') else math.prod(shape)
+ total_numel += unpartitioned_numel
+ total_params += 1
+
+ if debug:
+ print(f"{name} full shape: {shape} unpartitioned numel {unpartitioned_numel} ")
+ state_dict[name] = full_single_fp32_vector.narrow(0, offset, unpartitioned_numel).view(shape)
+ offset += unpartitioned_numel
+
+ # Z2 started to align to 2*world_size to improve nccl performance. Therefore both offset and
+ # avail_numel can differ by anywhere between 0..2*world_size. Due to two unrelated complex
+ # paddings performed in the code it's almost impossible to predict the exact numbers w/o the
+ # live optimizer object, so we are checking that the numbers are within the right range
+ align_to = 2 * world_size
+
+ def zero2_align(x):
+ return align_to * math.ceil(x / align_to)
+
+ if debug:
+ print(f"original offset={offset}, avail_numel={avail_numel}")
+
+ offset = zero2_align(offset)
+ avail_numel = zero2_align(avail_numel)
+
+ if debug:
+ print(f"aligned offset={offset}, avail_numel={avail_numel}")
+
+ # Sanity check
+ if offset != avail_numel:
+ raise ValueError(f"consumed {offset} numels out of {avail_numel} - something is wrong")
+
+ print(f"Reconstructed fp32 state dict with {total_params} params {total_numel} elements")
+
+
+def _get_fp32_state_dict_from_zero2_checkpoint(world_size, fp32_flat_groups, zero_model_states,
+ exclude_frozen_parameters):
+ state_dict = OrderedDict()
+
+ # buffers
+ buffers = zero_model_states[0].buffers
+ state_dict.update(buffers)
+ if debug:
+ print(f"added {len(buffers)} buffers")
+
+ if not exclude_frozen_parameters:
+ _zero2_merge_frozen_params(state_dict, zero_model_states)
+
+ _zero2_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states)
+
+ # recover shared parameters
+ for pair in zero_model_states[0].shared_params:
+ if pair[1] in state_dict:
+ state_dict[pair[0]] = state_dict[pair[1]]
+
+ return state_dict
+
+
+def zero3_partitioned_param_info(unpartitioned_numel, world_size):
+ remainder = unpartitioned_numel % world_size
+ padding_numel = (world_size - remainder) if remainder else 0
+ partitioned_numel = math.ceil(unpartitioned_numel / world_size)
+ return partitioned_numel, padding_numel
+
+
+def _zero3_merge_frozen_params(state_dict, world_size, zero_model_states):
+ if zero_model_states[0].frozen_param_shapes is None or len(zero_model_states[0].frozen_param_shapes) == 0:
+ return
+
+ if debug:
+ for i in range(world_size):
+ num_elem = sum(s.numel() for s in zero_model_states[i].frozen_param_fragments.values())
+ print(f'rank {i}: {FROZEN_PARAM_SHAPES}.numel = {num_elem}')
+
+ frozen_param_shapes = zero_model_states[0].frozen_param_shapes
+ wanted_params = len(frozen_param_shapes)
+ wanted_numel = sum(s.numel() for s in frozen_param_shapes.values())
+ avail_numel = sum([p.numel() for p in zero_model_states[0].frozen_param_fragments.values()]) * world_size
+ print(f'Frozen params: Have {avail_numel} numels to process.')
+ print(f'Frozen params: Need {wanted_numel} numels in {wanted_params} params')
+
+ total_params = 0
+ total_numel = 0
+ for name, shape in zero_model_states[0].frozen_param_shapes.items():
+ total_params += 1
+ unpartitioned_numel = shape.numel()
+ total_numel += unpartitioned_numel
+
+ param_frags = tuple(model_state.frozen_param_fragments[name] for model_state in zero_model_states)
+ state_dict[name] = torch.cat(param_frags, 0).narrow(0, 0, unpartitioned_numel).view(shape)
+
+ partitioned_numel, partitioned_padding_numel = zero3_partitioned_param_info(unpartitioned_numel, world_size)
+
+ if debug:
+ print(
+ f"Frozen params: {total_params} {name} full shape: {shape} partition0 numel={partitioned_numel} partitioned_padding_numel={partitioned_padding_numel}"
+ )
+
+ print(f"Reconstructed Frozen fp32 state dict with {total_params} params {total_numel} elements")
+
+
+def _zero3_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states):
+ param_shapes = zero_model_states[0].param_shapes
+ avail_numel = fp32_flat_groups[0].numel() * world_size
+ # Reconstruction protocol: For zero3 we need to zip the partitions together at boundary of each
+ # param, re-consolidating each param, while dealing with padding if any
+
+ # merge list of dicts, preserving order
+ param_shapes = {k: v for d in param_shapes for k, v in d.items()}
+
+ if debug:
+ for i in range(world_size):
+ print(f"{FP32_FLAT_GROUPS}[{i}].shape={fp32_flat_groups[i].shape}")
+
+ wanted_params = len(param_shapes)
+ wanted_numel = sum(shape.numel() for shape in param_shapes.values())
+ # not asserting if there is a mismatch due to possible padding
+ avail_numel = fp32_flat_groups[0].numel() * world_size
+ print(f"Trainable params: Have {avail_numel} numels to process.")
+ print(f"Trainable params: Need {wanted_numel} numels in {wanted_params} params.")
+
+ # params
+ # XXX: for huge models that can't fit into the host's RAM we will have to recode this to support
+ # out-of-core computing solution
+ offset = 0
+ total_numel = 0
+ total_params = 0
+ for name, shape in param_shapes.items():
+
+ unpartitioned_numel = shape.numel()
+ total_numel += unpartitioned_numel
+ total_params += 1
+
+ partitioned_numel, partitioned_padding_numel = zero3_partitioned_param_info(unpartitioned_numel, world_size)
+
+ if debug:
+ print(
+ f"Trainable params: {total_params} {name} full shape: {shape} partition0 numel={partitioned_numel} partitioned_padding_numel={partitioned_padding_numel}"
+ )
+
+ # XXX: memory usage doubles here
+ state_dict[name] = torch.cat(
+ tuple(fp32_flat_groups[i].narrow(0, offset, partitioned_numel) for i in range(world_size)),
+ 0).narrow(0, 0, unpartitioned_numel).view(shape)
+ offset += partitioned_numel
+
+ offset *= world_size
+
+ # Sanity check
+ if offset != avail_numel:
+ raise ValueError(f"consumed {offset} numels out of {avail_numel} - something is wrong")
+
+ print(f"Reconstructed Trainable fp32 state dict with {total_params} params {total_numel} elements")
+
+
+def _get_fp32_state_dict_from_zero3_checkpoint(world_size, fp32_flat_groups, zero_model_states,
+ exclude_frozen_parameters):
+ state_dict = OrderedDict()
+
+ # buffers
+ buffers = zero_model_states[0].buffers
+ state_dict.update(buffers)
+ if debug:
+ print(f"added {len(buffers)} buffers")
+
+ if not exclude_frozen_parameters:
+ _zero3_merge_frozen_params(state_dict, world_size, zero_model_states)
+
+ _zero3_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states)
+
+ # recover shared parameters
+ for pair in zero_model_states[0].shared_params:
+ if pair[1] in state_dict:
+ state_dict[pair[0]] = state_dict[pair[1]]
+
+ return state_dict
+
+
+def get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag=None, exclude_frozen_parameters=False):
+ """
+ Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated state_dict that can be loaded with
+ ``load_state_dict()`` and used for training without DeepSpeed or shared with others, for example
+ via a model hub.
+
+ Args:
+ - ``checkpoint_dir``: path to the desired checkpoint folder
+ - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in 'latest' file. e.g., ``global_step14``
+ - ``exclude_frozen_parameters``: exclude frozen parameters
+
+ Returns:
+ - pytorch ``state_dict``
+
+ Note: this approach may not work if your application doesn't have sufficient free CPU memory and
+ you may need to use the offline approach using the ``zero_to_fp32.py`` script that is saved with
+ the checkpoint.
+
+ A typical usage might be ::
+
+ from deepspeed.utils.zero_to_fp32 import get_fp32_state_dict_from_zero_checkpoint
+ # do the training and checkpoint saving
+ state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir) # already on cpu
+ model = model.cpu() # move to cpu
+ model.load_state_dict(state_dict)
+ # submit to model hub or save the model to share with others
+
+ In this example the ``model`` will no longer be usable in the deepspeed context of the same
+ application. i.e. you will need to re-initialize the deepspeed engine, since
+ ``model.load_state_dict(state_dict)`` will remove all the deepspeed magic from it.
+
+ If you want it all done for you, use ``load_state_dict_from_zero_checkpoint`` instead.
+
+ """
+ if tag is None:
+ latest_path = os.path.join(checkpoint_dir, 'latest')
+ if os.path.isfile(latest_path):
+ with open(latest_path, 'r') as fd:
+ tag = fd.read().strip()
+ else:
+ raise ValueError(f"Unable to find 'latest' file at {latest_path}")
+
+ ds_checkpoint_dir = os.path.join(checkpoint_dir, tag)
+
+ if not os.path.isdir(ds_checkpoint_dir):
+ raise FileNotFoundError(f"Directory '{ds_checkpoint_dir}' doesn't exist")
+
+ return _get_fp32_state_dict_from_zero_checkpoint(ds_checkpoint_dir, exclude_frozen_parameters)
+
+
+def convert_zero_checkpoint_to_fp32_state_dict(checkpoint_dir, output_file, tag=None, exclude_frozen_parameters=False):
+ """
+ Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated ``state_dict`` file that can be
+ loaded with ``torch.load(file)`` + ``load_state_dict()`` and used for training without DeepSpeed.
+
+ Args:
+ - ``checkpoint_dir``: path to the desired checkpoint folder. (one that contains the tag-folder, like ``global_step14``)
+ - ``output_file``: path to the pytorch fp32 state_dict output file (e.g. path/pytorch_model.bin)
+ - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in the file named ``latest`` in the checkpoint folder, e.g., ``global_step14``
+ - ``exclude_frozen_parameters``: exclude frozen parameters
+ """
+
+ state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag, exclude_frozen_parameters)
+ print(f"Saving fp32 state dict to {output_file}")
+ torch.save(state_dict, output_file)
+
+
+def load_state_dict_from_zero_checkpoint(model, checkpoint_dir, tag=None):
+ """
+ 1. Put the provided model to cpu
+ 2. Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated ``state_dict``
+ 3. Load it into the provided model
+
+ Args:
+ - ``model``: the model object to update
+ - ``checkpoint_dir``: path to the desired checkpoint folder. (one that contains the tag-folder, like ``global_step14``)
+ - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in the file named ``latest`` in the checkpoint folder, e.g., ``global_step14``
+
+ Returns:
+        - ``model``: modified model
+
+ Make sure you have plenty of CPU memory available before you call this function. If you don't
+ have enough use the ``zero_to_fp32.py`` utility to do the conversion. You will find it
+ conveniently placed for you in the checkpoint folder.
+
+ A typical usage might be ::
+
+ from deepspeed.utils.zero_to_fp32 import load_state_dict_from_zero_checkpoint
+ model = load_state_dict_from_zero_checkpoint(trainer.model, checkpoint_dir)
+ # submit to model hub or save the model to share with others
+
+ Note, that once this was run, the ``model`` will no longer be usable in the deepspeed context
+ of the same application. i.e. you will need to re-initialize the deepspeed engine, since
+ ``model.load_state_dict(state_dict)`` will remove all the deepspeed magic from it.
+
+ """
+ logger.info(f"Extracting fp32 weights")
+ state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag)
+
+ logger.info(f"Overwriting model with fp32 weights")
+ model = model.cpu()
+ model.load_state_dict(state_dict, strict=False)
+
+ return model
+
+
+if __name__ == "__main__":
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument("checkpoint_dir",
+ type=str,
+ help="path to the desired checkpoint folder, e.g., path/checkpoint-12")
+ parser.add_argument(
+ "output_file",
+ type=str,
+ help="path to the pytorch fp32 state_dict output file (e.g. path/checkpoint-12/pytorch_model.bin)")
+ parser.add_argument("-t",
+ "--tag",
+ type=str,
+ default=None,
+ help="checkpoint tag used as a unique identifier for checkpoint. e.g., global_step1")
+ parser.add_argument("--exclude_frozen_parameters", action='store_true', help="exclude frozen parameters")
+ parser.add_argument("-d", "--debug", action='store_true', help="enable debug")
+ args = parser.parse_args()
+
+ debug = args.debug
+
+ convert_zero_checkpoint_to_fp32_state_dict(args.checkpoint_dir,
+ args.output_file,
+ tag=args.tag,
+ exclude_frozen_parameters=args.exclude_frozen_parameters)
diff --git a/checkpoint-288/README.md b/checkpoint-288/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..1b184114a0c28ed3e4c082c18486736dc818166d
--- /dev/null
+++ b/checkpoint-288/README.md
@@ -0,0 +1,202 @@
+---
+base_model: meta-llama/Llama-3.3-70B-Instruct
+library_name: peft
+---
+
+# Model Card for Model ID
+
+
+
+
+
+## Model Details
+
+### Model Description
+
+
+
+
+
+- **Developed by:** [More Information Needed]
+- **Funded by [optional]:** [More Information Needed]
+- **Shared by [optional]:** [More Information Needed]
+- **Model type:** [More Information Needed]
+- **Language(s) (NLP):** [More Information Needed]
+- **License:** [More Information Needed]
+- **Finetuned from model [optional]:** [More Information Needed]
+
+### Model Sources [optional]
+
+
+
+- **Repository:** [More Information Needed]
+- **Paper [optional]:** [More Information Needed]
+- **Demo [optional]:** [More Information Needed]
+
+## Uses
+
+
+
+### Direct Use
+
+
+
+[More Information Needed]
+
+### Downstream Use [optional]
+
+
+
+[More Information Needed]
+
+### Out-of-Scope Use
+
+
+
+[More Information Needed]
+
+## Bias, Risks, and Limitations
+
+
+
+[More Information Needed]
+
+### Recommendations
+
+
+
+Users (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations.
+
+## How to Get Started with the Model
+
+Use the code below to get started with the model.
+
+[More Information Needed]
+
+## Training Details
+
+### Training Data
+
+
+
+[More Information Needed]
+
+### Training Procedure
+
+
+
+#### Preprocessing [optional]
+
+[More Information Needed]
+
+
+#### Training Hyperparameters
+
+- **Training regime:** [More Information Needed]
+
+#### Speeds, Sizes, Times [optional]
+
+
+
+[More Information Needed]
+
+## Evaluation
+
+
+
+### Testing Data, Factors & Metrics
+
+#### Testing Data
+
+
+
+[More Information Needed]
+
+#### Factors
+
+
+
+[More Information Needed]
+
+#### Metrics
+
+
+
+[More Information Needed]
+
+### Results
+
+[More Information Needed]
+
+#### Summary
+
+
+
+## Model Examination [optional]
+
+
+
+[More Information Needed]
+
+## Environmental Impact
+
+
+
+Carbon emissions can be estimated using the [Machine Learning Impact calculator](https://mlco2.github.io/impact#compute) presented in [Lacoste et al. (2019)](https://arxiv.org/abs/1910.09700).
+
+- **Hardware Type:** [More Information Needed]
+- **Hours used:** [More Information Needed]
+- **Cloud Provider:** [More Information Needed]
+- **Compute Region:** [More Information Needed]
+- **Carbon Emitted:** [More Information Needed]
+
+## Technical Specifications [optional]
+
+### Model Architecture and Objective
+
+[More Information Needed]
+
+### Compute Infrastructure
+
+[More Information Needed]
+
+#### Hardware
+
+[More Information Needed]
+
+#### Software
+
+[More Information Needed]
+
+## Citation [optional]
+
+
+
+**BibTeX:**
+
+[More Information Needed]
+
+**APA:**
+
+[More Information Needed]
+
+## Glossary [optional]
+
+
+
+[More Information Needed]
+
+## More Information [optional]
+
+[More Information Needed]
+
+## Model Card Authors [optional]
+
+[More Information Needed]
+
+## Model Card Contact
+
+[More Information Needed]
+### Framework versions
+
+- PEFT 0.15.0
\ No newline at end of file
diff --git a/checkpoint-288/adapter_config.json b/checkpoint-288/adapter_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..538b369b5129fb972c782e6ccfd589054540e1af
--- /dev/null
+++ b/checkpoint-288/adapter_config.json
@@ -0,0 +1,42 @@
+{
+ "alpha_pattern": {},
+ "auto_mapping": null,
+ "base_model_name_or_path": "meta-llama/Llama-3.3-70B-Instruct",
+ "bias": "none",
+ "corda_config": null,
+ "eva_config": null,
+ "exclude_modules": null,
+ "fan_in_fan_out": null,
+ "inference_mode": true,
+ "init_lora_weights": true,
+ "layer_replication": null,
+ "layers_pattern": null,
+ "layers_to_transform": null,
+ "loftq_config": {},
+ "lora_alpha": 512,
+ "lora_bias": false,
+ "lora_dropout": 0.05,
+ "megatron_config": null,
+ "megatron_core": "megatron.core",
+ "modules_to_save": [
+ "embed_tokens",
+ "lm_head"
+ ],
+ "peft_type": "LORA",
+ "r": 256,
+ "rank_pattern": {},
+ "revision": null,
+ "target_modules": [
+ "o_proj",
+ "q_proj",
+ "v_proj",
+ "k_proj",
+ "up_proj",
+ "down_proj",
+ "gate_proj"
+ ],
+ "task_type": "CAUSAL_LM",
+ "trainable_token_indices": null,
+ "use_dora": false,
+ "use_rslora": false
+}
\ No newline at end of file
diff --git a/checkpoint-288/adapter_model.safetensors b/checkpoint-288/adapter_model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..a1945d56e7a2de5476cf60d110054ff748876880
--- /dev/null
+++ b/checkpoint-288/adapter_model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:243a7168bf8ca03ab60c5efb71887b7da8e0f26155c80686d082d44a0075463e
+size 10829849744
diff --git a/checkpoint-288/global_step287/bf16_zero_pp_rank_0_mp_rank_00_optim_states.pt b/checkpoint-288/global_step287/bf16_zero_pp_rank_0_mp_rank_00_optim_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..7629797d77d0f3f0817680a8e09a65f9299a1bd9
--- /dev/null
+++ b/checkpoint-288/global_step287/bf16_zero_pp_rank_0_mp_rank_00_optim_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:53668369e598a6ddb1af4f6f61596b2bae7b9aa52391a2db3e2721e877744f38
+size 21659418140
diff --git a/checkpoint-288/global_step287/bf16_zero_pp_rank_1_mp_rank_00_optim_states.pt b/checkpoint-288/global_step287/bf16_zero_pp_rank_1_mp_rank_00_optim_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..a8cb9dd85f0086f9f65d562d2a6339ab8ec307a0
--- /dev/null
+++ b/checkpoint-288/global_step287/bf16_zero_pp_rank_1_mp_rank_00_optim_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f030e11e29cebe953fccacd0b9239059fe2278cdfa6b185f8fe228c544148c02
+size 21659457372
diff --git a/checkpoint-288/global_step287/bf16_zero_pp_rank_2_mp_rank_00_optim_states.pt b/checkpoint-288/global_step287/bf16_zero_pp_rank_2_mp_rank_00_optim_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..1e9800b59bdb8e34ffb58a8f01fafbb7da059d58
--- /dev/null
+++ b/checkpoint-288/global_step287/bf16_zero_pp_rank_2_mp_rank_00_optim_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8d7e206c6dc71c81cbdd2af1fe8e0c9b17dec8447a2928cb0f218fc211b59cb8
+size 21659417820
diff --git a/checkpoint-288/global_step287/mp_rank_00_model_states.pt b/checkpoint-288/global_step287/mp_rank_00_model_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..0240cfc3ce198f75dcba83842abe0339d9f4a0ab
--- /dev/null
+++ b/checkpoint-288/global_step287/mp_rank_00_model_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5ff0128130e879f814ee22343fcd5c10dd2e39d2e5dbccb106c566e97280b801
+size 11918643933
diff --git a/checkpoint-288/latest b/checkpoint-288/latest
new file mode 100644
index 0000000000000000000000000000000000000000..1d8bd23b1166fdb4341879dbe673e1dcadd02176
--- /dev/null
+++ b/checkpoint-288/latest
@@ -0,0 +1 @@
+global_step287
\ No newline at end of file
diff --git a/checkpoint-288/rng_state_0.pth b/checkpoint-288/rng_state_0.pth
new file mode 100644
index 0000000000000000000000000000000000000000..a9d12a2b7059294a9ee5a2e76cff72b52575ea21
--- /dev/null
+++ b/checkpoint-288/rng_state_0.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6e99f196c1b4b706c8375e927028b56de544c5213116c1c6ea455e1989af6f32
+size 14768
diff --git a/checkpoint-288/rng_state_1.pth b/checkpoint-288/rng_state_1.pth
new file mode 100644
index 0000000000000000000000000000000000000000..38ee89fa1599169236c496f61b58a6086e7cd527
--- /dev/null
+++ b/checkpoint-288/rng_state_1.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7d42007cbc8740f85d57679704c16977e5ae7a0ae106fe5f18dce5146122f99e
+size 14768
diff --git a/checkpoint-288/rng_state_2.pth b/checkpoint-288/rng_state_2.pth
new file mode 100644
index 0000000000000000000000000000000000000000..5af2ff66691c9aff3a568be3e6b93628d75e0ea4
--- /dev/null
+++ b/checkpoint-288/rng_state_2.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b7e0f6d2ed89823737f1766899e98d801268cc6e6854b8c93d2ae884f8796015
+size 14768
diff --git a/checkpoint-288/scheduler.pt b/checkpoint-288/scheduler.pt
new file mode 100644
index 0000000000000000000000000000000000000000..7628c0024597416a8211ffbd5a68418d83a678eb
--- /dev/null
+++ b/checkpoint-288/scheduler.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b69b4d3a079847fc3286aa16b458c66149593b0a314e964b178b2d2904a96b7b
+size 1064
diff --git a/checkpoint-288/special_tokens_map.json b/checkpoint-288/special_tokens_map.json
new file mode 100644
index 0000000000000000000000000000000000000000..278b7f0f84be865c4687700ee7b3c63d89a51e18
--- /dev/null
+++ b/checkpoint-288/special_tokens_map.json
@@ -0,0 +1,23 @@
+{
+ "bos_token": {
+ "content": "<|begin_of_text|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "<|eot_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": {
+ "content": "<|end_of_text|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+}
diff --git a/checkpoint-288/tokenizer.json b/checkpoint-288/tokenizer.json
new file mode 100644
index 0000000000000000000000000000000000000000..1c1d8d5c9024994f1d3b00f9662b8dd89ca13cf2
--- /dev/null
+++ b/checkpoint-288/tokenizer.json
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6b9e4e7fb171f92fd137b777cc2714bf87d11576700a1dcd7a399e7bbe39537b
+size 17209920
diff --git a/checkpoint-288/tokenizer_config.json b/checkpoint-288/tokenizer_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..ca91a2ef55f4239a7af81d7c9abb05f53621a07b
--- /dev/null
+++ b/checkpoint-288/tokenizer_config.json
@@ -0,0 +1,2064 @@
+{
+ "added_tokens_decoder": {
+ "128000": {
+ "content": "<|begin_of_text|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128001": {
+ "content": "<|end_of_text|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128002": {
+ "content": "<|reserved_special_token_0|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128003": {
+ "content": "<|reserved_special_token_1|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128004": {
+ "content": "<|finetune_right_pad_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128005": {
+ "content": "<|reserved_special_token_2|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128006": {
+ "content": "<|start_header_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128007": {
+ "content": "<|end_header_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128008": {
+ "content": "<|eom_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128009": {
+ "content": "<|eot_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128010": {
+ "content": "<|python_tag|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128011": {
+ "content": "<|reserved_special_token_3|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128012": {
+ "content": "<|reserved_special_token_4|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128013": {
+ "content": "<|reserved_special_token_5|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128014": {
+ "content": "<|reserved_special_token_6|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128015": {
+ "content": "<|reserved_special_token_7|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128016": {
+ "content": "<|reserved_special_token_8|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128017": {
+ "content": "<|reserved_special_token_9|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128018": {
+ "content": "<|reserved_special_token_10|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128019": {
+ "content": "<|reserved_special_token_11|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128020": {
+ "content": "<|reserved_special_token_12|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128021": {
+ "content": "<|reserved_special_token_13|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128022": {
+ "content": "<|reserved_special_token_14|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128023": {
+ "content": "<|reserved_special_token_15|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128024": {
+ "content": "<|reserved_special_token_16|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128025": {
+ "content": "<|reserved_special_token_17|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128026": {
+ "content": "<|reserved_special_token_18|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128027": {
+ "content": "<|reserved_special_token_19|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128028": {
+ "content": "<|reserved_special_token_20|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128029": {
+ "content": "<|reserved_special_token_21|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128030": {
+ "content": "<|reserved_special_token_22|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128031": {
+ "content": "<|reserved_special_token_23|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128032": {
+ "content": "<|reserved_special_token_24|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128033": {
+ "content": "<|reserved_special_token_25|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128034": {
+ "content": "<|reserved_special_token_26|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128035": {
+ "content": "<|reserved_special_token_27|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128036": {
+ "content": "<|reserved_special_token_28|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128037": {
+ "content": "<|reserved_special_token_29|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128038": {
+ "content": "<|reserved_special_token_30|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128039": {
+ "content": "<|reserved_special_token_31|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128040": {
+ "content": "<|reserved_special_token_32|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128041": {
+ "content": "<|reserved_special_token_33|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128042": {
+ "content": "<|reserved_special_token_34|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128043": {
+ "content": "<|reserved_special_token_35|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128044": {
+ "content": "<|reserved_special_token_36|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128045": {
+ "content": "<|reserved_special_token_37|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128046": {
+ "content": "<|reserved_special_token_38|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128047": {
+ "content": "<|reserved_special_token_39|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128048": {
+ "content": "<|reserved_special_token_40|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128049": {
+ "content": "<|reserved_special_token_41|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128050": {
+ "content": "<|reserved_special_token_42|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128051": {
+ "content": "<|reserved_special_token_43|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128052": {
+ "content": "<|reserved_special_token_44|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128053": {
+ "content": "<|reserved_special_token_45|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128054": {
+ "content": "<|reserved_special_token_46|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128055": {
+ "content": "<|reserved_special_token_47|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128056": {
+ "content": "<|reserved_special_token_48|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128057": {
+ "content": "<|reserved_special_token_49|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128058": {
+ "content": "<|reserved_special_token_50|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128059": {
+ "content": "<|reserved_special_token_51|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128060": {
+ "content": "<|reserved_special_token_52|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128061": {
+ "content": "<|reserved_special_token_53|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128062": {
+ "content": "<|reserved_special_token_54|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128063": {
+ "content": "<|reserved_special_token_55|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128064": {
+ "content": "<|reserved_special_token_56|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128065": {
+ "content": "<|reserved_special_token_57|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128066": {
+ "content": "<|reserved_special_token_58|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128067": {
+ "content": "<|reserved_special_token_59|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128068": {
+ "content": "<|reserved_special_token_60|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128069": {
+ "content": "<|reserved_special_token_61|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128070": {
+ "content": "<|reserved_special_token_62|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128071": {
+ "content": "<|reserved_special_token_63|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128072": {
+ "content": "<|reserved_special_token_64|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128073": {
+ "content": "<|reserved_special_token_65|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128074": {
+ "content": "<|reserved_special_token_66|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128075": {
+ "content": "<|reserved_special_token_67|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128076": {
+ "content": "<|reserved_special_token_68|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128077": {
+ "content": "<|reserved_special_token_69|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128078": {
+ "content": "<|reserved_special_token_70|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128079": {
+ "content": "<|reserved_special_token_71|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128080": {
+ "content": "<|reserved_special_token_72|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128081": {
+ "content": "<|reserved_special_token_73|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128082": {
+ "content": "<|reserved_special_token_74|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128083": {
+ "content": "<|reserved_special_token_75|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128084": {
+ "content": "<|reserved_special_token_76|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128085": {
+ "content": "<|reserved_special_token_77|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128086": {
+ "content": "<|reserved_special_token_78|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128087": {
+ "content": "<|reserved_special_token_79|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128088": {
+ "content": "<|reserved_special_token_80|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128089": {
+ "content": "<|reserved_special_token_81|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128090": {
+ "content": "<|reserved_special_token_82|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128091": {
+ "content": "<|reserved_special_token_83|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128092": {
+ "content": "<|reserved_special_token_84|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128093": {
+ "content": "<|reserved_special_token_85|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128094": {
+ "content": "<|reserved_special_token_86|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128095": {
+ "content": "<|reserved_special_token_87|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128096": {
+ "content": "<|reserved_special_token_88|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128097": {
+ "content": "<|reserved_special_token_89|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128098": {
+ "content": "<|reserved_special_token_90|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128099": {
+ "content": "<|reserved_special_token_91|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128100": {
+ "content": "<|reserved_special_token_92|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128101": {
+ "content": "<|reserved_special_token_93|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128102": {
+ "content": "<|reserved_special_token_94|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128103": {
+ "content": "<|reserved_special_token_95|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128104": {
+ "content": "<|reserved_special_token_96|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128105": {
+ "content": "<|reserved_special_token_97|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128106": {
+ "content": "<|reserved_special_token_98|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128107": {
+ "content": "<|reserved_special_token_99|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128108": {
+ "content": "<|reserved_special_token_100|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128109": {
+ "content": "<|reserved_special_token_101|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128110": {
+ "content": "<|reserved_special_token_102|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128111": {
+ "content": "<|reserved_special_token_103|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128112": {
+ "content": "<|reserved_special_token_104|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128113": {
+ "content": "<|reserved_special_token_105|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128114": {
+ "content": "<|reserved_special_token_106|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128115": {
+ "content": "<|reserved_special_token_107|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128116": {
+ "content": "<|reserved_special_token_108|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128117": {
+ "content": "<|reserved_special_token_109|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128118": {
+ "content": "<|reserved_special_token_110|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128119": {
+ "content": "<|reserved_special_token_111|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128120": {
+ "content": "<|reserved_special_token_112|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128121": {
+ "content": "<|reserved_special_token_113|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128122": {
+ "content": "<|reserved_special_token_114|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128123": {
+ "content": "<|reserved_special_token_115|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128124": {
+ "content": "<|reserved_special_token_116|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128125": {
+ "content": "<|reserved_special_token_117|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128126": {
+ "content": "<|reserved_special_token_118|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128127": {
+ "content": "<|reserved_special_token_119|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128128": {
+ "content": "<|reserved_special_token_120|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128129": {
+ "content": "<|reserved_special_token_121|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128130": {
+ "content": "<|reserved_special_token_122|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128131": {
+ "content": "<|reserved_special_token_123|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128132": {
+ "content": "<|reserved_special_token_124|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128133": {
+ "content": "<|reserved_special_token_125|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128134": {
+ "content": "<|reserved_special_token_126|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128135": {
+ "content": "<|reserved_special_token_127|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128136": {
+ "content": "<|reserved_special_token_128|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128137": {
+ "content": "<|reserved_special_token_129|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128138": {
+ "content": "<|reserved_special_token_130|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128139": {
+ "content": "<|reserved_special_token_131|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128140": {
+ "content": "<|reserved_special_token_132|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128141": {
+ "content": "<|reserved_special_token_133|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128142": {
+ "content": "<|reserved_special_token_134|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128143": {
+ "content": "<|reserved_special_token_135|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128144": {
+ "content": "<|reserved_special_token_136|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128145": {
+ "content": "<|reserved_special_token_137|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128146": {
+ "content": "<|reserved_special_token_138|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128147": {
+ "content": "<|reserved_special_token_139|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128148": {
+ "content": "<|reserved_special_token_140|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128149": {
+ "content": "<|reserved_special_token_141|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128150": {
+ "content": "<|reserved_special_token_142|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128151": {
+ "content": "<|reserved_special_token_143|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128152": {
+ "content": "<|reserved_special_token_144|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128153": {
+ "content": "<|reserved_special_token_145|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128154": {
+ "content": "<|reserved_special_token_146|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128155": {
+ "content": "<|reserved_special_token_147|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128156": {
+ "content": "<|reserved_special_token_148|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128157": {
+ "content": "<|reserved_special_token_149|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128158": {
+ "content": "<|reserved_special_token_150|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128159": {
+ "content": "<|reserved_special_token_151|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128160": {
+ "content": "<|reserved_special_token_152|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128161": {
+ "content": "<|reserved_special_token_153|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128162": {
+ "content": "<|reserved_special_token_154|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128163": {
+ "content": "<|reserved_special_token_155|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128164": {
+ "content": "<|reserved_special_token_156|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128165": {
+ "content": "<|reserved_special_token_157|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128166": {
+ "content": "<|reserved_special_token_158|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128167": {
+ "content": "<|reserved_special_token_159|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128168": {
+ "content": "<|reserved_special_token_160|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128169": {
+ "content": "<|reserved_special_token_161|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128170": {
+ "content": "<|reserved_special_token_162|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128171": {
+ "content": "<|reserved_special_token_163|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128172": {
+ "content": "<|reserved_special_token_164|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128173": {
+ "content": "<|reserved_special_token_165|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128174": {
+ "content": "<|reserved_special_token_166|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128175": {
+ "content": "<|reserved_special_token_167|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128176": {
+ "content": "<|reserved_special_token_168|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128177": {
+ "content": "<|reserved_special_token_169|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128178": {
+ "content": "<|reserved_special_token_170|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128179": {
+ "content": "<|reserved_special_token_171|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128180": {
+ "content": "<|reserved_special_token_172|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128181": {
+ "content": "<|reserved_special_token_173|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128182": {
+ "content": "<|reserved_special_token_174|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128183": {
+ "content": "<|reserved_special_token_175|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128184": {
+ "content": "<|reserved_special_token_176|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128185": {
+ "content": "<|reserved_special_token_177|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128186": {
+ "content": "<|reserved_special_token_178|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128187": {
+ "content": "<|reserved_special_token_179|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128188": {
+ "content": "<|reserved_special_token_180|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128189": {
+ "content": "<|reserved_special_token_181|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128190": {
+ "content": "<|reserved_special_token_182|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128191": {
+ "content": "<|reserved_special_token_183|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128192": {
+ "content": "<|reserved_special_token_184|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128193": {
+ "content": "<|reserved_special_token_185|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128194": {
+ "content": "<|reserved_special_token_186|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128195": {
+ "content": "<|reserved_special_token_187|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128196": {
+ "content": "<|reserved_special_token_188|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128197": {
+ "content": "<|reserved_special_token_189|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128198": {
+ "content": "<|reserved_special_token_190|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128199": {
+ "content": "<|reserved_special_token_191|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128200": {
+ "content": "<|reserved_special_token_192|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128201": {
+ "content": "<|reserved_special_token_193|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128202": {
+ "content": "<|reserved_special_token_194|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128203": {
+ "content": "<|reserved_special_token_195|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128204": {
+ "content": "<|reserved_special_token_196|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128205": {
+ "content": "<|reserved_special_token_197|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128206": {
+ "content": "<|reserved_special_token_198|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128207": {
+ "content": "<|reserved_special_token_199|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128208": {
+ "content": "<|reserved_special_token_200|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128209": {
+ "content": "<|reserved_special_token_201|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128210": {
+ "content": "<|reserved_special_token_202|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128211": {
+ "content": "<|reserved_special_token_203|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128212": {
+ "content": "<|reserved_special_token_204|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128213": {
+ "content": "<|reserved_special_token_205|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128214": {
+ "content": "<|reserved_special_token_206|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128215": {
+ "content": "<|reserved_special_token_207|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128216": {
+ "content": "<|reserved_special_token_208|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128217": {
+ "content": "<|reserved_special_token_209|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128218": {
+ "content": "<|reserved_special_token_210|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128219": {
+ "content": "<|reserved_special_token_211|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128220": {
+ "content": "<|reserved_special_token_212|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128221": {
+ "content": "<|reserved_special_token_213|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128222": {
+ "content": "<|reserved_special_token_214|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128223": {
+ "content": "<|reserved_special_token_215|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128224": {
+ "content": "<|reserved_special_token_216|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128225": {
+ "content": "<|reserved_special_token_217|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128226": {
+ "content": "<|reserved_special_token_218|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128227": {
+ "content": "<|reserved_special_token_219|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128228": {
+ "content": "<|reserved_special_token_220|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128229": {
+ "content": "<|reserved_special_token_221|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128230": {
+ "content": "<|reserved_special_token_222|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128231": {
+ "content": "<|reserved_special_token_223|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128232": {
+ "content": "<|reserved_special_token_224|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128233": {
+ "content": "<|reserved_special_token_225|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128234": {
+ "content": "<|reserved_special_token_226|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128235": {
+ "content": "<|reserved_special_token_227|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128236": {
+ "content": "<|reserved_special_token_228|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128237": {
+ "content": "<|reserved_special_token_229|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128238": {
+ "content": "<|reserved_special_token_230|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128239": {
+ "content": "<|reserved_special_token_231|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128240": {
+ "content": "<|reserved_special_token_232|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128241": {
+ "content": "<|reserved_special_token_233|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128242": {
+ "content": "<|reserved_special_token_234|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128243": {
+ "content": "<|reserved_special_token_235|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128244": {
+ "content": "<|reserved_special_token_236|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128245": {
+ "content": "<|reserved_special_token_237|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128246": {
+ "content": "<|reserved_special_token_238|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128247": {
+ "content": "<|reserved_special_token_239|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128248": {
+ "content": "<|reserved_special_token_240|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128249": {
+ "content": "<|reserved_special_token_241|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128250": {
+ "content": "<|reserved_special_token_242|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128251": {
+ "content": "<|reserved_special_token_243|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128252": {
+ "content": "<|reserved_special_token_244|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128253": {
+ "content": "<|reserved_special_token_245|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128254": {
+ "content": "<|reserved_special_token_246|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128255": {
+ "content": "<|reserved_special_token_247|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ }
+ },
+ "bos_token": "<|begin_of_text|>",
+ "chat_template": "{{- bos_token }}\n{%- if custom_tools is defined %}\n {%- set tools = custom_tools %}\n{%- endif %}\n{%- if not tools_in_user_message is defined %}\n {%- set tools_in_user_message = true %}\n{%- endif %}\n{%- if not date_string is defined %}\n {%- set date_string = \"26 Jul 2024\" %}\n{%- endif %}\n{%- if not tools is defined %}\n {%- set tools = none %}\n{%- endif %}\n\n{#- This block extracts the system message, so we can slot it into the right place. #}\n{%- if messages[0]['role'] == 'system' %}\n {%- set system_message = messages[0]['content']|trim %}\n {%- set messages = messages[1:] %}\n{%- else %}\n {%- set system_message = \"\" %}\n{%- endif %}\n\n{#- System message + builtin tools #}\n{{- \"<|start_header_id|>system<|end_header_id|>\\n\\n\" }}\n{%- if builtin_tools is defined or tools is not none %}\n {{- \"Environment: ipython\\n\" }}\n{%- endif %}\n{%- if builtin_tools is defined %}\n {{- \"Tools: \" + builtin_tools | reject('equalto', 'code_interpreter') | join(\", \") + \"\\n\\n\"}}\n{%- endif %}\n{{- \"Cutting Knowledge Date: December 2023\\n\" }}\n{{- \"Today Date: \" + date_string + \"\\n\\n\" }}\n{%- if tools is not none and not tools_in_user_message %}\n {{- \"You have access to the following functions. To call a function, please respond with JSON for a function call.\" }}\n {{- 'Respond in the format {\"name\": function name, \"parameters\": dictionary of argument name and its value}.' 
}}\n {{- \"Do not use variables.\\n\\n\" }}\n {%- for t in tools %}\n {{- t | tojson(indent=4) }}\n {{- \"\\n\\n\" }}\n {%- endfor %}\n{%- endif %}\n{{- system_message }}\n{{- \"<|eot_id|>\" }}\n\n{#- Custom tools are passed in a user message with some extra guidance #}\n{%- if tools_in_user_message and not tools is none %}\n {#- Extract the first user message so we can plug it in here #}\n {%- if messages | length != 0 %}\n {%- set first_user_message = messages[0]['content']|trim %}\n {%- set messages = messages[1:] %}\n {%- else %}\n {{- raise_exception(\"Cannot put tools in the first user message when there's no first user message!\") }}\n{%- endif %}\n {{- '<|start_header_id|>user<|end_header_id|>\\n\\n' -}}\n {{- \"Given the following functions, please respond with a JSON for a function call \" }}\n {{- \"with its proper arguments that best answers the given prompt.\\n\\n\" }}\n {{- 'Respond in the format {\"name\": function name, \"parameters\": dictionary of argument name and its value}.' 
}}\n {{- \"Do not use variables.\\n\\n\" }}\n {%- for t in tools %}\n {{- t | tojson(indent=4) }}\n {{- \"\\n\\n\" }}\n {%- endfor %}\n {{- first_user_message + \"<|eot_id|>\"}}\n{%- endif %}\n\n{%- for message in messages %}\n {%- if not (message.role == 'ipython' or message.role == 'tool' or 'tool_calls' in message) %}\n {{- '<|start_header_id|>' + message['role'] + '<|end_header_id|>\\n\\n'+ message['content'] | trim + '<|eot_id|>' }}\n {%- elif 'tool_calls' in message %}\n {%- if not message.tool_calls|length == 1 %}\n {{- raise_exception(\"This model only supports single tool-calls at once!\") }}\n {%- endif %}\n {%- set tool_call = message.tool_calls[0].function %}\n {%- if builtin_tools is defined and tool_call.name in builtin_tools %}\n {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' -}}\n {{- \"<|python_tag|>\" + tool_call.name + \".call(\" }}\n {%- for arg_name, arg_val in tool_call.arguments | items %}\n {{- arg_name + '=\"' + arg_val + '\"' }}\n {%- if not loop.last %}\n {{- \", \" }}\n {%- endif %}\n {%- endfor %}\n {{- \")\" }}\n {%- else %}\n {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' -}}\n {{- '{\"name\": \"' + tool_call.name + '\", ' }}\n {{- '\"parameters\": ' }}\n {{- tool_call.arguments | tojson }}\n {{- \"}\" }}\n {%- endif %}\n {%- if builtin_tools is defined %}\n {#- This means we're in ipython mode #}\n {{- \"<|eom_id|>\" }}\n {%- else %}\n {{- \"<|eot_id|>\" }}\n {%- endif %}\n {%- elif message.role == \"tool\" or message.role == \"ipython\" %}\n {{- \"<|start_header_id|>ipython<|end_header_id|>\\n\\n\" }}\n {%- if message.content is mapping or message.content is iterable %}\n {{- message.content | tojson }}\n {%- else %}\n {{- message.content }}\n {%- endif %}\n {{- \"<|eot_id|>\" }}\n {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' }}\n{%- endif %}\n",
+ "clean_up_tokenization_spaces": true,
+ "eos_token": "<|eot_id|>",
+ "extra_special_tokens": {},
+ "model_input_names": [
+ "input_ids",
+ "attention_mask"
+ ],
+ "model_max_length": 131072,
+ "pad_token": "<|end_of_text|>",
+ "tokenizer_class": "PreTrainedTokenizer"
+}
diff --git a/checkpoint-288/trainer_state.json b/checkpoint-288/trainer_state.json
new file mode 100644
index 0000000000000000000000000000000000000000..47730772699ca703ba9d7cfc61dab11406357114
--- /dev/null
+++ b/checkpoint-288/trainer_state.json
@@ -0,0 +1,2049 @@
+{
+ "best_metric": null,
+ "best_model_checkpoint": null,
+ "epoch": 3.9495412844036695,
+ "eval_steps": 500,
+ "global_step": 288,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ {
+ "epoch": 0.013761467889908258,
+ "grad_norm": 38.02450942993164,
+ "learning_rate": 5.0000000000000004e-08,
+ "loss": 3.125,
+ "step": 1
+ },
+ {
+ "epoch": 0.027522935779816515,
+ "grad_norm": 37.864768981933594,
+ "learning_rate": 1.0000000000000001e-07,
+ "loss": 3.0998,
+ "step": 2
+ },
+ {
+ "epoch": 0.04128440366972477,
+ "grad_norm": 38.34700012207031,
+ "learning_rate": 1.5000000000000002e-07,
+ "loss": 3.1533,
+ "step": 3
+ },
+ {
+ "epoch": 0.05504587155963303,
+ "grad_norm": 38.33641815185547,
+ "learning_rate": 2.0000000000000002e-07,
+ "loss": 3.1542,
+ "step": 4
+ },
+ {
+ "epoch": 0.06880733944954129,
+ "grad_norm": 38.064449310302734,
+ "learning_rate": 2.5000000000000004e-07,
+ "loss": 3.1153,
+ "step": 5
+ },
+ {
+ "epoch": 0.08256880733944955,
+ "grad_norm": 37.92089080810547,
+ "learning_rate": 3.0000000000000004e-07,
+ "loss": 3.0867,
+ "step": 6
+ },
+ {
+ "epoch": 0.0963302752293578,
+ "grad_norm": 38.120323181152344,
+ "learning_rate": 3.5000000000000004e-07,
+ "loss": 3.093,
+ "step": 7
+ },
+ {
+ "epoch": 0.11009174311926606,
+ "grad_norm": 38.47222900390625,
+ "learning_rate": 4.0000000000000003e-07,
+ "loss": 3.1056,
+ "step": 8
+ },
+ {
+ "epoch": 0.12385321100917432,
+ "grad_norm": 38.013702392578125,
+ "learning_rate": 4.5000000000000003e-07,
+ "loss": 3.0474,
+ "step": 9
+ },
+ {
+ "epoch": 0.13761467889908258,
+ "grad_norm": 38.17593002319336,
+ "learning_rate": 5.000000000000001e-07,
+ "loss": 3.0264,
+ "step": 10
+ },
+ {
+ "epoch": 0.15137614678899083,
+ "grad_norm": 38.60066604614258,
+ "learning_rate": 5.5e-07,
+ "loss": 2.9404,
+ "step": 11
+ },
+ {
+ "epoch": 0.1651376146788991,
+ "grad_norm": 38.83498764038086,
+ "learning_rate": 6.000000000000001e-07,
+ "loss": 2.9571,
+ "step": 12
+ },
+ {
+ "epoch": 0.17889908256880735,
+ "grad_norm": 38.942047119140625,
+ "learning_rate": 6.5e-07,
+ "loss": 2.8849,
+ "step": 13
+ },
+ {
+ "epoch": 0.1926605504587156,
+ "grad_norm": 38.0286865234375,
+ "learning_rate": 7.000000000000001e-07,
+ "loss": 2.7486,
+ "step": 14
+ },
+ {
+ "epoch": 0.20642201834862386,
+ "grad_norm": 38.31856155395508,
+ "learning_rate": 7.5e-07,
+ "loss": 2.6876,
+ "step": 15
+ },
+ {
+ "epoch": 0.22018348623853212,
+ "grad_norm": 38.124759674072266,
+ "learning_rate": 8.000000000000001e-07,
+ "loss": 2.5992,
+ "step": 16
+ },
+ {
+ "epoch": 0.23394495412844038,
+ "grad_norm": 36.59762191772461,
+ "learning_rate": 8.500000000000001e-07,
+ "loss": 2.4063,
+ "step": 17
+ },
+ {
+ "epoch": 0.24770642201834864,
+ "grad_norm": 36.63874435424805,
+ "learning_rate": 9.000000000000001e-07,
+ "loss": 2.3109,
+ "step": 18
+ },
+ {
+ "epoch": 0.26146788990825687,
+ "grad_norm": 36.768577575683594,
+ "learning_rate": 9.500000000000001e-07,
+ "loss": 2.1677,
+ "step": 19
+ },
+ {
+ "epoch": 0.27522935779816515,
+ "grad_norm": 36.187137603759766,
+ "learning_rate": 1.0000000000000002e-06,
+ "loss": 1.9551,
+ "step": 20
+ },
+ {
+ "epoch": 0.2889908256880734,
+ "grad_norm": 35.55617141723633,
+ "learning_rate": 1.0500000000000001e-06,
+ "loss": 1.8053,
+ "step": 21
+ },
+ {
+ "epoch": 0.30275229357798167,
+ "grad_norm": 34.60952377319336,
+ "learning_rate": 1.1e-06,
+ "loss": 1.5654,
+ "step": 22
+ },
+ {
+ "epoch": 0.3165137614678899,
+ "grad_norm": 33.69620895385742,
+ "learning_rate": 1.1500000000000002e-06,
+ "loss": 1.3454,
+ "step": 23
+ },
+ {
+ "epoch": 0.3302752293577982,
+ "grad_norm": 34.33642578125,
+ "learning_rate": 1.2000000000000002e-06,
+ "loss": 1.2417,
+ "step": 24
+ },
+ {
+ "epoch": 0.3440366972477064,
+ "grad_norm": 31.23066520690918,
+ "learning_rate": 1.25e-06,
+ "loss": 0.9839,
+ "step": 25
+ },
+ {
+ "epoch": 0.3577981651376147,
+ "grad_norm": 25.810237884521484,
+ "learning_rate": 1.3e-06,
+ "loss": 0.793,
+ "step": 26
+ },
+ {
+ "epoch": 0.37155963302752293,
+ "grad_norm": 23.06692886352539,
+ "learning_rate": 1.3500000000000002e-06,
+ "loss": 0.6082,
+ "step": 27
+ },
+ {
+ "epoch": 0.3853211009174312,
+ "grad_norm": 19.828439712524414,
+ "learning_rate": 1.4000000000000001e-06,
+ "loss": 0.4845,
+ "step": 28
+ },
+ {
+ "epoch": 0.39908256880733944,
+ "grad_norm": 14.150300025939941,
+ "learning_rate": 1.45e-06,
+ "loss": 0.348,
+ "step": 29
+ },
+ {
+ "epoch": 0.41284403669724773,
+ "grad_norm": 9.044266700744629,
+ "learning_rate": 1.5e-06,
+ "loss": 0.2516,
+ "step": 30
+ },
+ {
+ "epoch": 0.42660550458715596,
+ "grad_norm": 5.704404354095459,
+ "learning_rate": 1.5500000000000002e-06,
+ "loss": 0.177,
+ "step": 31
+ },
+ {
+ "epoch": 0.44036697247706424,
+ "grad_norm": 3.2953503131866455,
+ "learning_rate": 1.6000000000000001e-06,
+ "loss": 0.1391,
+ "step": 32
+ },
+ {
+ "epoch": 0.4541284403669725,
+ "grad_norm": 2.453219413757324,
+ "learning_rate": 1.6500000000000003e-06,
+ "loss": 0.0982,
+ "step": 33
+ },
+ {
+ "epoch": 0.46788990825688076,
+ "grad_norm": 2.0325512886047363,
+ "learning_rate": 1.7000000000000002e-06,
+ "loss": 0.0807,
+ "step": 34
+ },
+ {
+ "epoch": 0.481651376146789,
+ "grad_norm": 1.6322681903839111,
+ "learning_rate": 1.75e-06,
+ "loss": 0.0725,
+ "step": 35
+ },
+ {
+ "epoch": 0.4954128440366973,
+ "grad_norm": 0.9713364839553833,
+ "learning_rate": 1.8000000000000001e-06,
+ "loss": 0.067,
+ "step": 36
+ },
+ {
+ "epoch": 0.5091743119266054,
+ "grad_norm": 0.7980225682258606,
+ "learning_rate": 1.85e-06,
+ "loss": 0.0582,
+ "step": 37
+ },
+ {
+ "epoch": 0.5229357798165137,
+ "grad_norm": 1.0616590976715088,
+ "learning_rate": 1.9000000000000002e-06,
+ "loss": 0.0562,
+ "step": 38
+ },
+ {
+ "epoch": 0.536697247706422,
+ "grad_norm": 1.053462028503418,
+ "learning_rate": 1.9500000000000004e-06,
+ "loss": 0.0537,
+ "step": 39
+ },
+ {
+ "epoch": 0.5504587155963303,
+ "grad_norm": 0.9452660083770752,
+ "learning_rate": 2.0000000000000003e-06,
+ "loss": 0.0602,
+ "step": 40
+ },
+ {
+ "epoch": 0.5642201834862385,
+ "grad_norm": 0.830368161201477,
+ "learning_rate": 2.05e-06,
+ "loss": 0.0549,
+ "step": 41
+ },
+ {
+ "epoch": 0.5779816513761468,
+ "grad_norm": 0.5791187882423401,
+ "learning_rate": 2.1000000000000002e-06,
+ "loss": 0.0479,
+ "step": 42
+ },
+ {
+ "epoch": 0.591743119266055,
+ "grad_norm": 0.44175243377685547,
+ "learning_rate": 2.15e-06,
+ "loss": 0.0461,
+ "step": 43
+ },
+ {
+ "epoch": 0.6055045871559633,
+ "grad_norm": 0.37655699253082275,
+ "learning_rate": 2.2e-06,
+ "loss": 0.043,
+ "step": 44
+ },
+ {
+ "epoch": 0.6192660550458715,
+ "grad_norm": 0.34382495284080505,
+ "learning_rate": 2.25e-06,
+ "loss": 0.0454,
+ "step": 45
+ },
+ {
+ "epoch": 0.6330275229357798,
+ "grad_norm": 0.5047216415405273,
+ "learning_rate": 2.3000000000000004e-06,
+ "loss": 0.0437,
+ "step": 46
+ },
+ {
+ "epoch": 0.6467889908256881,
+ "grad_norm": 0.6318779587745667,
+ "learning_rate": 2.35e-06,
+ "loss": 0.0468,
+ "step": 47
+ },
+ {
+ "epoch": 0.6605504587155964,
+ "grad_norm": 0.5135455131530762,
+ "learning_rate": 2.4000000000000003e-06,
+ "loss": 0.0494,
+ "step": 48
+ },
+ {
+ "epoch": 0.6743119266055045,
+ "grad_norm": 0.4802612066268921,
+ "learning_rate": 2.4500000000000003e-06,
+ "loss": 0.0441,
+ "step": 49
+ },
+ {
+ "epoch": 0.6880733944954128,
+ "grad_norm": 0.6157718300819397,
+ "learning_rate": 2.5e-06,
+ "loss": 0.0398,
+ "step": 50
+ },
+ {
+ "epoch": 0.7018348623853211,
+ "grad_norm": 0.4327130913734436,
+ "learning_rate": 2.55e-06,
+ "loss": 0.0438,
+ "step": 51
+ },
+ {
+ "epoch": 0.7155963302752294,
+ "grad_norm": 0.46133658289909363,
+ "learning_rate": 2.6e-06,
+ "loss": 0.041,
+ "step": 52
+ },
+ {
+ "epoch": 0.7293577981651376,
+ "grad_norm": 0.5729146003723145,
+ "learning_rate": 2.6500000000000005e-06,
+ "loss": 0.0406,
+ "step": 53
+ },
+ {
+ "epoch": 0.7431192660550459,
+ "grad_norm": 0.32373812794685364,
+ "learning_rate": 2.7000000000000004e-06,
+ "loss": 0.0419,
+ "step": 54
+ },
+ {
+ "epoch": 0.7568807339449541,
+ "grad_norm": 0.29006752371788025,
+ "learning_rate": 2.7500000000000004e-06,
+ "loss": 0.0415,
+ "step": 55
+ },
+ {
+ "epoch": 0.7706422018348624,
+ "grad_norm": 0.31038960814476013,
+ "learning_rate": 2.8000000000000003e-06,
+ "loss": 0.0344,
+ "step": 56
+ },
+ {
+ "epoch": 0.7844036697247706,
+ "grad_norm": 0.2324836701154709,
+ "learning_rate": 2.85e-06,
+ "loss": 0.0374,
+ "step": 57
+ },
+ {
+ "epoch": 0.7981651376146789,
+ "grad_norm": 0.5083625912666321,
+ "learning_rate": 2.9e-06,
+ "loss": 0.0324,
+ "step": 58
+ },
+ {
+ "epoch": 0.8119266055045872,
+ "grad_norm": 0.2873130142688751,
+ "learning_rate": 2.95e-06,
+ "loss": 0.0403,
+ "step": 59
+ },
+ {
+ "epoch": 0.8256880733944955,
+ "grad_norm": 0.437663197517395,
+ "learning_rate": 3e-06,
+ "loss": 0.0368,
+ "step": 60
+ },
+ {
+ "epoch": 0.8394495412844036,
+ "grad_norm": 0.5645247101783752,
+ "learning_rate": 3.05e-06,
+ "loss": 0.0386,
+ "step": 61
+ },
+ {
+ "epoch": 0.8532110091743119,
+ "grad_norm": 0.40374210476875305,
+ "learning_rate": 3.1000000000000004e-06,
+ "loss": 0.0425,
+ "step": 62
+ },
+ {
+ "epoch": 0.8669724770642202,
+ "grad_norm": 0.46468955278396606,
+ "learning_rate": 3.1500000000000003e-06,
+ "loss": 0.0323,
+ "step": 63
+ },
+ {
+ "epoch": 0.8807339449541285,
+ "grad_norm": 0.29952895641326904,
+ "learning_rate": 3.2000000000000003e-06,
+ "loss": 0.0325,
+ "step": 64
+ },
+ {
+ "epoch": 0.8944954128440367,
+ "grad_norm": 0.3678436279296875,
+ "learning_rate": 3.2500000000000002e-06,
+ "loss": 0.036,
+ "step": 65
+ },
+ {
+ "epoch": 0.908256880733945,
+ "grad_norm": 0.5068934559822083,
+ "learning_rate": 3.3000000000000006e-06,
+ "loss": 0.0357,
+ "step": 66
+ },
+ {
+ "epoch": 0.9220183486238532,
+ "grad_norm": 0.2723177671432495,
+ "learning_rate": 3.3500000000000005e-06,
+ "loss": 0.0333,
+ "step": 67
+ },
+ {
+ "epoch": 0.9357798165137615,
+ "grad_norm": 0.41696834564208984,
+ "learning_rate": 3.4000000000000005e-06,
+ "loss": 0.0347,
+ "step": 68
+ },
+ {
+ "epoch": 0.9495412844036697,
+ "grad_norm": 0.2582981288433075,
+ "learning_rate": 3.45e-06,
+ "loss": 0.0283,
+ "step": 69
+ },
+ {
+ "epoch": 0.963302752293578,
+ "grad_norm": 0.40648311376571655,
+ "learning_rate": 3.5e-06,
+ "loss": 0.0293,
+ "step": 70
+ },
+ {
+ "epoch": 0.9770642201834863,
+ "grad_norm": 0.4149394631385803,
+ "learning_rate": 3.5500000000000003e-06,
+ "loss": 0.0311,
+ "step": 71
+ },
+ {
+ "epoch": 0.9908256880733946,
+ "grad_norm": 0.3800952136516571,
+ "learning_rate": 3.6000000000000003e-06,
+ "loss": 0.0346,
+ "step": 72
+ },
+ {
+ "epoch": 1.0,
+ "grad_norm": 0.3800952136516571,
+ "learning_rate": 3.65e-06,
+ "loss": 0.0291,
+ "step": 73
+ },
+ {
+ "epoch": 1.0137614678899083,
+ "grad_norm": 0.5016496181488037,
+ "learning_rate": 3.7e-06,
+ "loss": 0.0286,
+ "step": 74
+ },
+ {
+ "epoch": 1.0275229357798166,
+ "grad_norm": 0.3533766567707062,
+ "learning_rate": 3.7500000000000005e-06,
+ "loss": 0.0321,
+ "step": 75
+ },
+ {
+ "epoch": 1.0412844036697249,
+ "grad_norm": 0.2785470485687256,
+ "learning_rate": 3.8000000000000005e-06,
+ "loss": 0.0277,
+ "step": 76
+ },
+ {
+ "epoch": 1.0550458715596331,
+ "grad_norm": 0.4530641436576843,
+ "learning_rate": 3.85e-06,
+ "loss": 0.0294,
+ "step": 77
+ },
+ {
+ "epoch": 1.0688073394495412,
+ "grad_norm": 0.3170749843120575,
+ "learning_rate": 3.900000000000001e-06,
+ "loss": 0.0274,
+ "step": 78
+ },
+ {
+ "epoch": 1.0825688073394495,
+ "grad_norm": 0.26502758264541626,
+ "learning_rate": 3.95e-06,
+ "loss": 0.0284,
+ "step": 79
+ },
+ {
+ "epoch": 1.0963302752293578,
+ "grad_norm": 0.5486436486244202,
+ "learning_rate": 4.000000000000001e-06,
+ "loss": 0.0285,
+ "step": 80
+ },
+ {
+ "epoch": 1.110091743119266,
+ "grad_norm": 0.24868083000183105,
+ "learning_rate": 4.05e-06,
+ "loss": 0.0301,
+ "step": 81
+ },
+ {
+ "epoch": 1.1238532110091743,
+ "grad_norm": 0.3448987305164337,
+ "learning_rate": 4.1e-06,
+ "loss": 0.0261,
+ "step": 82
+ },
+ {
+ "epoch": 1.1376146788990826,
+ "grad_norm": 0.3330553472042084,
+ "learning_rate": 4.15e-06,
+ "loss": 0.03,
+ "step": 83
+ },
+ {
+ "epoch": 1.151376146788991,
+ "grad_norm": 0.3379852771759033,
+ "learning_rate": 4.2000000000000004e-06,
+ "loss": 0.0286,
+ "step": 84
+ },
+ {
+ "epoch": 1.165137614678899,
+ "grad_norm": 0.23678433895111084,
+ "learning_rate": 4.25e-06,
+ "loss": 0.0245,
+ "step": 85
+ },
+ {
+ "epoch": 1.1788990825688073,
+ "grad_norm": 0.24502314627170563,
+ "learning_rate": 4.3e-06,
+ "loss": 0.0267,
+ "step": 86
+ },
+ {
+ "epoch": 1.1926605504587156,
+ "grad_norm": 0.34288597106933594,
+ "learning_rate": 4.350000000000001e-06,
+ "loss": 0.0259,
+ "step": 87
+ },
+ {
+ "epoch": 1.2064220183486238,
+ "grad_norm": 0.20595045387744904,
+ "learning_rate": 4.4e-06,
+ "loss": 0.0241,
+ "step": 88
+ },
+ {
+ "epoch": 1.2201834862385321,
+ "grad_norm": 0.28399360179901123,
+ "learning_rate": 4.450000000000001e-06,
+ "loss": 0.0277,
+ "step": 89
+ },
+ {
+ "epoch": 1.2339449541284404,
+ "grad_norm": 0.273929238319397,
+ "learning_rate": 4.5e-06,
+ "loss": 0.0261,
+ "step": 90
+ },
+ {
+ "epoch": 1.2477064220183487,
+ "grad_norm": 0.24288330972194672,
+ "learning_rate": 4.5500000000000005e-06,
+ "loss": 0.0267,
+ "step": 91
+ },
+ {
+ "epoch": 1.261467889908257,
+ "grad_norm": 0.42502400279045105,
+ "learning_rate": 4.600000000000001e-06,
+ "loss": 0.0252,
+ "step": 92
+ },
+ {
+ "epoch": 1.2752293577981653,
+ "grad_norm": 0.17670072615146637,
+ "learning_rate": 4.65e-06,
+ "loss": 0.0231,
+ "step": 93
+ },
+ {
+ "epoch": 1.2889908256880733,
+ "grad_norm": 0.23585423827171326,
+ "learning_rate": 4.7e-06,
+ "loss": 0.0213,
+ "step": 94
+ },
+ {
+ "epoch": 1.3027522935779816,
+ "grad_norm": 0.32558879256248474,
+ "learning_rate": 4.75e-06,
+ "loss": 0.0226,
+ "step": 95
+ },
+ {
+ "epoch": 1.31651376146789,
+ "grad_norm": 0.2908780872821808,
+ "learning_rate": 4.800000000000001e-06,
+ "loss": 0.0274,
+ "step": 96
+ },
+ {
+ "epoch": 1.3302752293577982,
+ "grad_norm": 0.3725607991218567,
+ "learning_rate": 4.85e-06,
+ "loss": 0.0241,
+ "step": 97
+ },
+ {
+ "epoch": 1.3440366972477065,
+ "grad_norm": 0.3833301067352295,
+ "learning_rate": 4.9000000000000005e-06,
+ "loss": 0.0252,
+ "step": 98
+ },
+ {
+ "epoch": 1.3577981651376148,
+ "grad_norm": 0.32000771164894104,
+ "learning_rate": 4.95e-06,
+ "loss": 0.0236,
+ "step": 99
+ },
+ {
+ "epoch": 1.3715596330275228,
+ "grad_norm": 0.3203510344028473,
+ "learning_rate": 5e-06,
+ "loss": 0.0235,
+ "step": 100
+ },
+ {
+ "epoch": 1.385321100917431,
+ "grad_norm": 0.20016217231750488,
+ "learning_rate": 4.999888074163108e-06,
+ "loss": 0.0218,
+ "step": 101
+ },
+ {
+ "epoch": 1.3990825688073394,
+ "grad_norm": 0.3012026250362396,
+ "learning_rate": 4.999552306674345e-06,
+ "loss": 0.0234,
+ "step": 102
+ },
+ {
+ "epoch": 1.4128440366972477,
+ "grad_norm": 0.22818222641944885,
+ "learning_rate": 4.998992727598557e-06,
+ "loss": 0.0212,
+ "step": 103
+ },
+ {
+ "epoch": 1.426605504587156,
+ "grad_norm": 0.24128392338752747,
+ "learning_rate": 4.998209387040829e-06,
+ "loss": 0.0207,
+ "step": 104
+ },
+ {
+ "epoch": 1.4403669724770642,
+ "grad_norm": 0.3475555181503296,
+ "learning_rate": 4.9972023551419995e-06,
+ "loss": 0.0246,
+ "step": 105
+ },
+ {
+ "epoch": 1.4541284403669725,
+ "grad_norm": 0.21223627030849457,
+ "learning_rate": 4.995971722072379e-06,
+ "loss": 0.0237,
+ "step": 106
+ },
+ {
+ "epoch": 1.4678899082568808,
+ "grad_norm": 0.3776336908340454,
+ "learning_rate": 4.9945175980236745e-06,
+ "loss": 0.0218,
+ "step": 107
+ },
+ {
+ "epoch": 1.481651376146789,
+ "grad_norm": 0.25027793645858765,
+ "learning_rate": 4.992840113199131e-06,
+ "loss": 0.0214,
+ "step": 108
+ },
+ {
+ "epoch": 1.4954128440366974,
+ "grad_norm": 0.2559281885623932,
+ "learning_rate": 4.990939417801859e-06,
+ "loss": 0.0213,
+ "step": 109
+ },
+ {
+ "epoch": 1.5091743119266054,
+ "grad_norm": 0.33694687485694885,
+ "learning_rate": 4.988815682021398e-06,
+ "loss": 0.0182,
+ "step": 110
+ },
+ {
+ "epoch": 1.5229357798165137,
+ "grad_norm": 0.3175147473812103,
+ "learning_rate": 4.986469096018472e-06,
+ "loss": 0.0213,
+ "step": 111
+ },
+ {
+ "epoch": 1.536697247706422,
+ "grad_norm": 0.48777177929878235,
+ "learning_rate": 4.983899869907963e-06,
+ "loss": 0.0186,
+ "step": 112
+ },
+ {
+ "epoch": 1.5504587155963303,
+ "grad_norm": 0.34633246064186096,
+ "learning_rate": 4.981108233740096e-06,
+ "loss": 0.0224,
+ "step": 113
+ },
+ {
+ "epoch": 1.5642201834862384,
+ "grad_norm": 0.17302758991718292,
+ "learning_rate": 4.978094437479843e-06,
+ "loss": 0.0189,
+ "step": 114
+ },
+ {
+ "epoch": 1.5779816513761467,
+ "grad_norm": 0.24110931158065796,
+ "learning_rate": 4.97485875098454e-06,
+ "loss": 0.0174,
+ "step": 115
+ },
+ {
+ "epoch": 1.591743119266055,
+ "grad_norm": 0.2965283989906311,
+ "learning_rate": 4.971401463979722e-06,
+ "loss": 0.0174,
+ "step": 116
+ },
+ {
+ "epoch": 1.6055045871559632,
+ "grad_norm": 0.34184500575065613,
+ "learning_rate": 4.967722886033181e-06,
+ "loss": 0.0162,
+ "step": 117
+ },
+ {
+ "epoch": 1.6192660550458715,
+ "grad_norm": 0.3942873179912567,
+ "learning_rate": 4.963823346527249e-06,
+ "loss": 0.0176,
+ "step": 118
+ },
+ {
+ "epoch": 1.6330275229357798,
+ "grad_norm": 0.21840929985046387,
+ "learning_rate": 4.959703194629304e-06,
+ "loss": 0.018,
+ "step": 119
+ },
+ {
+ "epoch": 1.646788990825688,
+ "grad_norm": 0.23672759532928467,
+ "learning_rate": 4.955362799260507e-06,
+ "loss": 0.0204,
+ "step": 120
+ },
+ {
+ "epoch": 1.6605504587155964,
+ "grad_norm": 0.18776445090770721,
+ "learning_rate": 4.950802549062764e-06,
+ "loss": 0.0173,
+ "step": 121
+ },
+ {
+ "epoch": 1.6743119266055047,
+ "grad_norm": 0.279297411441803,
+ "learning_rate": 4.946022852363932e-06,
+ "loss": 0.0177,
+ "step": 122
+ },
+ {
+ "epoch": 1.688073394495413,
+ "grad_norm": 0.20893588662147522,
+ "learning_rate": 4.9410241371412525e-06,
+ "loss": 0.0189,
+ "step": 123
+ },
+ {
+ "epoch": 1.7018348623853212,
+ "grad_norm": 0.26409876346588135,
+ "learning_rate": 4.935806850983034e-06,
+ "loss": 0.0195,
+ "step": 124
+ },
+ {
+ "epoch": 1.7155963302752295,
+ "grad_norm": 0.23863324522972107,
+ "learning_rate": 4.9303714610485705e-06,
+ "loss": 0.0151,
+ "step": 125
+ },
+ {
+ "epoch": 1.7293577981651376,
+ "grad_norm": 0.25934213399887085,
+ "learning_rate": 4.924718454026318e-06,
+ "loss": 0.0157,
+ "step": 126
+ },
+ {
+ "epoch": 1.7431192660550459,
+ "grad_norm": 0.2923693358898163,
+ "learning_rate": 4.918848336090309e-06,
+ "loss": 0.0155,
+ "step": 127
+ },
+ {
+ "epoch": 1.7568807339449541,
+ "grad_norm": 0.16973069310188293,
+ "learning_rate": 4.912761632854834e-06,
+ "loss": 0.0156,
+ "step": 128
+ },
+ {
+ "epoch": 1.7706422018348624,
+ "grad_norm": 0.25908610224723816,
+ "learning_rate": 4.906458889327375e-06,
+ "loss": 0.0159,
+ "step": 129
+ },
+ {
+ "epoch": 1.7844036697247705,
+ "grad_norm": 0.27444136142730713,
+ "learning_rate": 4.899940669859807e-06,
+ "loss": 0.0159,
+ "step": 130
+ },
+ {
+ "epoch": 1.7981651376146788,
+ "grad_norm": 0.25167539715766907,
+ "learning_rate": 4.893207558097867e-06,
+ "loss": 0.0147,
+ "step": 131
+ },
+ {
+ "epoch": 1.811926605504587,
+ "grad_norm": 0.25777608156204224,
+ "learning_rate": 4.8862601569288885e-06,
+ "loss": 0.016,
+ "step": 132
+ },
+ {
+ "epoch": 1.8256880733944953,
+ "grad_norm": 0.24190428853034973,
+ "learning_rate": 4.879099088427824e-06,
+ "loss": 0.0128,
+ "step": 133
+ },
+ {
+ "epoch": 1.8394495412844036,
+ "grad_norm": 0.17028410732746124,
+ "learning_rate": 4.871724993801541e-06,
+ "loss": 0.0145,
+ "step": 134
+ },
+ {
+ "epoch": 1.853211009174312,
+ "grad_norm": 0.25141075253486633,
+ "learning_rate": 4.864138533331411e-06,
+ "loss": 0.0144,
+ "step": 135
+ },
+ {
+ "epoch": 1.8669724770642202,
+ "grad_norm": 0.6400424838066101,
+ "learning_rate": 4.8563403863141825e-06,
+ "loss": 0.0128,
+ "step": 136
+ },
+ {
+ "epoch": 1.8807339449541285,
+ "grad_norm": 0.22450514137744904,
+ "learning_rate": 4.84833125100116e-06,
+ "loss": 0.0167,
+ "step": 137
+ },
+ {
+ "epoch": 1.8944954128440368,
+ "grad_norm": 0.19940154254436493,
+ "learning_rate": 4.840111844535682e-06,
+ "loss": 0.0154,
+ "step": 138
+ },
+ {
+ "epoch": 1.908256880733945,
+ "grad_norm": 0.2946206033229828,
+ "learning_rate": 4.8316829028889076e-06,
+ "loss": 0.0158,
+ "step": 139
+ },
+ {
+ "epoch": 1.9220183486238533,
+ "grad_norm": 0.4694023132324219,
+ "learning_rate": 4.823045180793914e-06,
+ "loss": 0.0153,
+ "step": 140
+ },
+ {
+ "epoch": 1.9357798165137616,
+ "grad_norm": 0.23655226826667786,
+ "learning_rate": 4.8141994516781196e-06,
+ "loss": 0.0109,
+ "step": 141
+ },
+ {
+ "epoch": 1.9495412844036697,
+ "grad_norm": 0.23846553266048431,
+ "learning_rate": 4.805146507594034e-06,
+ "loss": 0.0129,
+ "step": 142
+ },
+ {
+ "epoch": 1.963302752293578,
+ "grad_norm": 0.2763686180114746,
+ "learning_rate": 4.7958871591483305e-06,
+ "loss": 0.0124,
+ "step": 143
+ },
+ {
+ "epoch": 1.9770642201834863,
+ "grad_norm": 0.23522883653640747,
+ "learning_rate": 4.786422235429269e-06,
+ "loss": 0.0125,
+ "step": 144
+ },
+ {
+ "epoch": 1.9908256880733946,
+ "grad_norm": 0.27468597888946533,
+ "learning_rate": 4.776752583932455e-06,
+ "loss": 0.0137,
+ "step": 145
+ },
+ {
+ "epoch": 2.0,
+ "grad_norm": 0.26614058017730713,
+ "learning_rate": 4.766879070484957e-06,
+ "loss": 0.0113,
+ "step": 146
+ },
+ {
+ "epoch": 2.0137614678899083,
+ "grad_norm": 0.2862620949745178,
+ "learning_rate": 4.756802579167781e-06,
+ "loss": 0.0104,
+ "step": 147
+ },
+ {
+ "epoch": 2.0275229357798166,
+ "grad_norm": 0.1770494133234024,
+ "learning_rate": 4.746524012236706e-06,
+ "loss": 0.0102,
+ "step": 148
+ },
+ {
+ "epoch": 2.041284403669725,
+ "grad_norm": 0.1850830465555191,
+ "learning_rate": 4.736044290041496e-06,
+ "loss": 0.0125,
+ "step": 149
+ },
+ {
+ "epoch": 2.055045871559633,
+ "grad_norm": 0.20552438497543335,
+ "learning_rate": 4.725364350943492e-06,
+ "loss": 0.0091,
+ "step": 150
+ },
+ {
+ "epoch": 2.0688073394495414,
+ "grad_norm": 0.12979158759117126,
+ "learning_rate": 4.714485151231593e-06,
+ "loss": 0.0113,
+ "step": 151
+ },
+ {
+ "epoch": 2.0825688073394497,
+ "grad_norm": 0.16992178559303284,
+ "learning_rate": 4.703407665036622e-06,
+ "loss": 0.0075,
+ "step": 152
+ },
+ {
+ "epoch": 2.096330275229358,
+ "grad_norm": 0.1473352611064911,
+ "learning_rate": 4.692132884244113e-06,
+ "loss": 0.0091,
+ "step": 153
+ },
+ {
+ "epoch": 2.1100917431192663,
+ "grad_norm": 0.24198868870735168,
+ "learning_rate": 4.680661818405485e-06,
+ "loss": 0.0073,
+ "step": 154
+ },
+ {
+ "epoch": 2.123853211009174,
+ "grad_norm": 0.25488734245300293,
+ "learning_rate": 4.668995494647653e-06,
+ "loss": 0.0098,
+ "step": 155
+ },
+ {
+ "epoch": 2.1376146788990824,
+ "grad_norm": 0.17986920475959778,
+ "learning_rate": 4.657134957581057e-06,
+ "loss": 0.0081,
+ "step": 156
+ },
+ {
+ "epoch": 2.1513761467889907,
+ "grad_norm": 0.22465726733207703,
+ "learning_rate": 4.645081269206128e-06,
+ "loss": 0.0074,
+ "step": 157
+ },
+ {
+ "epoch": 2.165137614678899,
+ "grad_norm": 0.19391046464443207,
+ "learning_rate": 4.632835508818192e-06,
+ "loss": 0.0077,
+ "step": 158
+ },
+ {
+ "epoch": 2.1788990825688073,
+ "grad_norm": 0.25997886061668396,
+ "learning_rate": 4.620398772910833e-06,
+ "loss": 0.0074,
+ "step": 159
+ },
+ {
+ "epoch": 2.1926605504587156,
+ "grad_norm": 0.2422141134738922,
+ "learning_rate": 4.607772175077712e-06,
+ "loss": 0.0103,
+ "step": 160
+ },
+ {
+ "epoch": 2.206422018348624,
+ "grad_norm": 0.2785587012767792,
+ "learning_rate": 4.59495684591285e-06,
+ "loss": 0.0079,
+ "step": 161
+ },
+ {
+ "epoch": 2.220183486238532,
+ "grad_norm": 0.19141560792922974,
+ "learning_rate": 4.581953932909403e-06,
+ "loss": 0.0068,
+ "step": 162
+ },
+ {
+ "epoch": 2.2339449541284404,
+ "grad_norm": 0.152049720287323,
+ "learning_rate": 4.5687646003569055e-06,
+ "loss": 0.0063,
+ "step": 163
+ },
+ {
+ "epoch": 2.2477064220183487,
+ "grad_norm": 0.19313585758209229,
+ "learning_rate": 4.555390029237026e-06,
+ "loss": 0.0079,
+ "step": 164
+ },
+ {
+ "epoch": 2.261467889908257,
+ "grad_norm": 0.19979022443294525,
+ "learning_rate": 4.541831417117815e-06,
+ "loss": 0.0085,
+ "step": 165
+ },
+ {
+ "epoch": 2.2752293577981653,
+ "grad_norm": 0.2902522683143616,
+ "learning_rate": 4.528089978046481e-06,
+ "loss": 0.0082,
+ "step": 166
+ },
+ {
+ "epoch": 2.2889908256880735,
+ "grad_norm": 0.5130491256713867,
+ "learning_rate": 4.514166942440679e-06,
+ "loss": 0.0067,
+ "step": 167
+ },
+ {
+ "epoch": 2.302752293577982,
+ "grad_norm": 0.15980036556720734,
+ "learning_rate": 4.5000635569783365e-06,
+ "loss": 0.0093,
+ "step": 168
+ },
+ {
+ "epoch": 2.31651376146789,
+ "grad_norm": 0.19247184693813324,
+ "learning_rate": 4.4857810844860325e-06,
+ "loss": 0.0101,
+ "step": 169
+ },
+ {
+ "epoch": 2.330275229357798,
+ "grad_norm": 0.19173133373260498,
+ "learning_rate": 4.471320803825915e-06,
+ "loss": 0.0065,
+ "step": 170
+ },
+ {
+ "epoch": 2.3440366972477062,
+ "grad_norm": 0.1779325157403946,
+ "learning_rate": 4.4566840097811956e-06,
+ "loss": 0.0059,
+ "step": 171
+ },
+ {
+ "epoch": 2.3577981651376145,
+ "grad_norm": 0.1639624983072281,
+ "learning_rate": 4.4418720129402145e-06,
+ "loss": 0.0052,
+ "step": 172
+ },
+ {
+ "epoch": 2.371559633027523,
+ "grad_norm": 0.2355070263147354,
+ "learning_rate": 4.426886139579083e-06,
+ "loss": 0.0046,
+ "step": 173
+ },
+ {
+ "epoch": 2.385321100917431,
+ "grad_norm": 0.20461603999137878,
+ "learning_rate": 4.411727731542937e-06,
+ "loss": 0.0071,
+ "step": 174
+ },
+ {
+ "epoch": 2.3990825688073394,
+ "grad_norm": 0.20251843333244324,
+ "learning_rate": 4.39639814612578e-06,
+ "loss": 0.0054,
+ "step": 175
+ },
+ {
+ "epoch": 2.4128440366972477,
+ "grad_norm": 0.17055197060108185,
+ "learning_rate": 4.3808987559489536e-06,
+ "loss": 0.0052,
+ "step": 176
+ },
+ {
+ "epoch": 2.426605504587156,
+ "grad_norm": 0.2508833706378937,
+ "learning_rate": 4.365230948838232e-06,
+ "loss": 0.0071,
+ "step": 177
+ },
+ {
+ "epoch": 2.4403669724770642,
+ "grad_norm": 0.21865351498126984,
+ "learning_rate": 4.349396127699552e-06,
+ "loss": 0.0068,
+ "step": 178
+ },
+ {
+ "epoch": 2.4541284403669725,
+ "grad_norm": 0.225867360830307,
+ "learning_rate": 4.3333957103934025e-06,
+ "loss": 0.0072,
+ "step": 179
+ },
+ {
+ "epoch": 2.467889908256881,
+ "grad_norm": 0.48035845160484314,
+ "learning_rate": 4.317231129607859e-06,
+ "loss": 0.006,
+ "step": 180
+ },
+ {
+ "epoch": 2.481651376146789,
+ "grad_norm": 0.17027413845062256,
+ "learning_rate": 4.30090383273031e-06,
+ "loss": 0.0043,
+ "step": 181
+ },
+ {
+ "epoch": 2.4954128440366974,
+ "grad_norm": 0.2660333216190338,
+ "learning_rate": 4.2844152817178476e-06,
+ "loss": 0.0082,
+ "step": 182
+ },
+ {
+ "epoch": 2.5091743119266052,
+ "grad_norm": 0.2485552728176117,
+ "learning_rate": 4.267766952966369e-06,
+ "loss": 0.0059,
+ "step": 183
+ },
+ {
+ "epoch": 2.522935779816514,
+ "grad_norm": 0.2048122137784958,
+ "learning_rate": 4.2509603371783776e-06,
+ "loss": 0.0071,
+ "step": 184
+ },
+ {
+ "epoch": 2.536697247706422,
+ "grad_norm": 0.1725567877292633,
+ "learning_rate": 4.233996939229502e-06,
+ "loss": 0.0053,
+ "step": 185
+ },
+ {
+ "epoch": 2.5504587155963305,
+ "grad_norm": 0.14592835307121277,
+ "learning_rate": 4.216878278033753e-06,
+ "loss": 0.0044,
+ "step": 186
+ },
+ {
+ "epoch": 2.5642201834862384,
+ "grad_norm": 0.2868310213088989,
+ "learning_rate": 4.199605886407515e-06,
+ "loss": 0.0057,
+ "step": 187
+ },
+ {
+ "epoch": 2.5779816513761467,
+ "grad_norm": 0.5002567172050476,
+ "learning_rate": 4.1821813109322975e-06,
+ "loss": 0.0072,
+ "step": 188
+ },
+ {
+ "epoch": 2.591743119266055,
+ "grad_norm": 0.199919655919075,
+ "learning_rate": 4.164606111816256e-06,
+ "loss": 0.0043,
+ "step": 189
+ },
+ {
+ "epoch": 2.6055045871559632,
+ "grad_norm": 0.17280228435993195,
+ "learning_rate": 4.146881862754485e-06,
+ "loss": 0.0048,
+ "step": 190
+ },
+ {
+ "epoch": 2.6192660550458715,
+ "grad_norm": 0.17505577206611633,
+ "learning_rate": 4.129010150788112e-06,
+ "loss": 0.0037,
+ "step": 191
+ },
+ {
+ "epoch": 2.63302752293578,
+ "grad_norm": 0.21955189108848572,
+ "learning_rate": 4.110992576162193e-06,
+ "loss": 0.0044,
+ "step": 192
+ },
+ {
+ "epoch": 2.646788990825688,
+ "grad_norm": 0.22081787884235382,
+ "learning_rate": 4.092830752182423e-06,
+ "loss": 0.0036,
+ "step": 193
+ },
+ {
+ "epoch": 2.6605504587155964,
+ "grad_norm": 0.26361310482025146,
+ "learning_rate": 4.074526305070679e-06,
+ "loss": 0.0062,
+ "step": 194
+ },
+ {
+ "epoch": 2.6743119266055047,
+ "grad_norm": 0.15528841316699982,
+ "learning_rate": 4.056080873819412e-06,
+ "loss": 0.0029,
+ "step": 195
+ },
+ {
+ "epoch": 2.688073394495413,
+ "grad_norm": 6.002184867858887,
+ "learning_rate": 4.037496110044885e-06,
+ "loss": 0.0062,
+ "step": 196
+ },
+ {
+ "epoch": 2.7018348623853212,
+ "grad_norm": 0.12827160954475403,
+ "learning_rate": 4.018773677839289e-06,
+ "loss": 0.0025,
+ "step": 197
+ },
+ {
+ "epoch": 2.7155963302752295,
+ "grad_norm": 0.21857935190200806,
+ "learning_rate": 3.999915253621739e-06,
+ "loss": 0.004,
+ "step": 198
+ },
+ {
+ "epoch": 2.729357798165138,
+ "grad_norm": 0.231048583984375,
+ "learning_rate": 3.980922525988167e-06,
+ "loss": 0.0034,
+ "step": 199
+ },
+ {
+ "epoch": 2.7431192660550456,
+ "grad_norm": 0.16589054465293884,
+ "learning_rate": 3.961797195560118e-06,
+ "loss": 0.0045,
+ "step": 200
+ },
+ {
+ "epoch": 2.7568807339449544,
+ "grad_norm": 0.2721656858921051,
+ "learning_rate": 3.942540974832486e-06,
+ "loss": 0.0056,
+ "step": 201
+ },
+ {
+ "epoch": 2.770642201834862,
+ "grad_norm": 0.12930598855018616,
+ "learning_rate": 3.9231555880201655e-06,
+ "loss": 0.0037,
+ "step": 202
+ },
+ {
+ "epoch": 2.7844036697247705,
+ "grad_norm": 0.299055814743042,
+ "learning_rate": 3.903642770903671e-06,
+ "loss": 0.0036,
+ "step": 203
+ },
+ {
+ "epoch": 2.7981651376146788,
+ "grad_norm": 0.15592966973781586,
+ "learning_rate": 3.884004270673711e-06,
+ "loss": 0.0023,
+ "step": 204
+ },
+ {
+ "epoch": 2.811926605504587,
+ "grad_norm": 0.10496195405721664,
+ "learning_rate": 3.864241845774746e-06,
+ "loss": 0.0029,
+ "step": 205
+ },
+ {
+ "epoch": 2.8256880733944953,
+ "grad_norm": 0.1604638695716858,
+ "learning_rate": 3.844357265747531e-06,
+ "loss": 0.0032,
+ "step": 206
+ },
+ {
+ "epoch": 2.8394495412844036,
+ "grad_norm": 0.1436438113451004,
+ "learning_rate": 3.8243523110706736e-06,
+ "loss": 0.0035,
+ "step": 207
+ },
+ {
+ "epoch": 2.853211009174312,
+ "grad_norm": 0.176204651594162,
+ "learning_rate": 3.8042287730012117e-06,
+ "loss": 0.0022,
+ "step": 208
+ },
+ {
+ "epoch": 2.86697247706422,
+ "grad_norm": 0.3500923216342926,
+ "learning_rate": 3.7839884534142157e-06,
+ "loss": 0.0022,
+ "step": 209
+ },
+ {
+ "epoch": 2.8807339449541285,
+ "grad_norm": 0.2319999784231186,
+ "learning_rate": 3.7636331646414524e-06,
+ "loss": 0.003,
+ "step": 210
+ },
+ {
+ "epoch": 2.8944954128440368,
+ "grad_norm": 0.1358226090669632,
+ "learning_rate": 3.7431647293091076e-06,
+ "loss": 0.0023,
+ "step": 211
+ },
+ {
+ "epoch": 2.908256880733945,
+ "grad_norm": 0.1323612779378891,
+ "learning_rate": 3.7225849801745835e-06,
+ "loss": 0.0021,
+ "step": 212
+ },
+ {
+ "epoch": 2.9220183486238533,
+ "grad_norm": 0.10968377441167831,
+ "learning_rate": 3.701895759962397e-06,
+ "loss": 0.002,
+ "step": 213
+ },
+ {
+ "epoch": 2.9357798165137616,
+ "grad_norm": 0.11163649708032608,
+ "learning_rate": 3.6810989211991777e-06,
+ "loss": 0.0015,
+ "step": 214
+ },
+ {
+ "epoch": 2.9495412844036695,
+ "grad_norm": 0.6103344559669495,
+ "learning_rate": 3.6601963260477923e-06,
+ "loss": 0.0051,
+ "step": 215
+ },
+ {
+ "epoch": 2.963302752293578,
+ "grad_norm": 0.1300484985113144,
+ "learning_rate": 3.6391898461406045e-06,
+ "loss": 0.0018,
+ "step": 216
+ },
+ {
+ "epoch": 2.977064220183486,
+ "grad_norm": 0.11599847674369812,
+ "learning_rate": 3.6180813624118898e-06,
+ "loss": 0.0021,
+ "step": 217
+ },
+ {
+ "epoch": 2.9908256880733948,
+ "grad_norm": 0.14168186485767365,
+ "learning_rate": 3.5968727649294134e-06,
+ "loss": 0.0018,
+ "step": 218
+ },
+ {
+ "epoch": 3.0,
+ "grad_norm": 0.16039852797985077,
+ "learning_rate": 3.575565952725193e-06,
+ "loss": 0.0014,
+ "step": 219
+ },
+ {
+ "epoch": 3.0137614678899083,
+ "grad_norm": 0.08175123482942581,
+ "learning_rate": 3.55416283362546e-06,
+ "loss": 0.0008,
+ "step": 220
+ },
+ {
+ "epoch": 3.0275229357798166,
+ "grad_norm": 0.07637064158916473,
+ "learning_rate": 3.5326653240798283e-06,
+ "loss": 0.0007,
+ "step": 221
+ },
+ {
+ "epoch": 3.041284403669725,
+ "grad_norm": 0.061755988746881485,
+ "learning_rate": 3.5110753489896924e-06,
+ "loss": 0.0007,
+ "step": 222
+ },
+ {
+ "epoch": 3.055045871559633,
+ "grad_norm": 0.05573924630880356,
+ "learning_rate": 3.4893948415358803e-06,
+ "loss": 0.0008,
+ "step": 223
+ },
+ {
+ "epoch": 3.0688073394495414,
+ "grad_norm": 0.18670693039894104,
+ "learning_rate": 3.4676257430055438e-06,
+ "loss": 0.0007,
+ "step": 224
+ },
+ {
+ "epoch": 3.0825688073394497,
+ "grad_norm": 0.05674147605895996,
+ "learning_rate": 3.4457700026183378e-06,
+ "loss": 0.0008,
+ "step": 225
+ },
+ {
+ "epoch": 3.096330275229358,
+ "grad_norm": 0.08101407438516617,
+ "learning_rate": 3.4238295773518924e-06,
+ "loss": 0.0006,
+ "step": 226
+ },
+ {
+ "epoch": 3.1100917431192663,
+ "grad_norm": 0.09028138220310211,
+ "learning_rate": 3.4018064317665745e-06,
+ "loss": 0.0011,
+ "step": 227
+ },
+ {
+ "epoch": 3.123853211009174,
+ "grad_norm": 0.0955042913556099,
+ "learning_rate": 3.3797025378295826e-06,
+ "loss": 0.0008,
+ "step": 228
+ },
+ {
+ "epoch": 3.1376146788990824,
+ "grad_norm": 0.09355667978525162,
+ "learning_rate": 3.357519874738382e-06,
+ "loss": 0.0011,
+ "step": 229
+ },
+ {
+ "epoch": 3.1513761467889907,
+ "grad_norm": 0.06623287498950958,
+ "learning_rate": 3.3352604287434752e-06,
+ "loss": 0.0006,
+ "step": 230
+ },
+ {
+ "epoch": 3.165137614678899,
+ "grad_norm": 0.07880504429340363,
+ "learning_rate": 3.31292619297056e-06,
+ "loss": 0.0007,
+ "step": 231
+ },
+ {
+ "epoch": 3.1788990825688073,
+ "grad_norm": 0.06904889643192291,
+ "learning_rate": 3.29051916724206e-06,
+ "loss": 0.0005,
+ "step": 232
+ },
+ {
+ "epoch": 3.1926605504587156,
+ "grad_norm": 0.23911181092262268,
+ "learning_rate": 3.2680413578980623e-06,
+ "loss": 0.0007,
+ "step": 233
+ },
+ {
+ "epoch": 3.206422018348624,
+ "grad_norm": 0.15154607594013214,
+ "learning_rate": 3.245494777616664e-06,
+ "loss": 0.0015,
+ "step": 234
+ },
+ {
+ "epoch": 3.220183486238532,
+ "grad_norm": 0.15627366304397583,
+ "learning_rate": 3.2228814452337587e-06,
+ "loss": 0.001,
+ "step": 235
+ },
+ {
+ "epoch": 3.2339449541284404,
+ "grad_norm": 0.0780797079205513,
+ "learning_rate": 3.2002033855622683e-06,
+ "loss": 0.0005,
+ "step": 236
+ },
+ {
+ "epoch": 3.2477064220183487,
+ "grad_norm": 0.7883831858634949,
+ "learning_rate": 3.177462629210838e-06,
+ "loss": 0.0005,
+ "step": 237
+ },
+ {
+ "epoch": 3.261467889908257,
+ "grad_norm": 0.18375582993030548,
+ "learning_rate": 3.154661212402017e-06,
+ "loss": 0.0006,
+ "step": 238
+ },
+ {
+ "epoch": 3.2752293577981653,
+ "grad_norm": 0.09809675812721252,
+ "learning_rate": 3.131801176789934e-06,
+ "loss": 0.001,
+ "step": 239
+ },
+ {
+ "epoch": 3.2889908256880735,
+ "grad_norm": 0.04473511874675751,
+ "learning_rate": 3.1088845692774798e-06,
+ "loss": 0.0002,
+ "step": 240
+ },
+ {
+ "epoch": 3.302752293577982,
+ "grad_norm": 0.07583656907081604,
+ "learning_rate": 3.0859134418330373e-06,
+ "loss": 0.0007,
+ "step": 241
+ },
+ {
+ "epoch": 3.31651376146789,
+ "grad_norm": 0.06534383445978165,
+ "learning_rate": 3.0628898513067357e-06,
+ "loss": 0.0003,
+ "step": 242
+ },
+ {
+ "epoch": 3.330275229357798,
+ "grad_norm": 0.09651501476764679,
+ "learning_rate": 3.0398158592462847e-06,
+ "loss": 0.0012,
+ "step": 243
+ },
+ {
+ "epoch": 3.3440366972477062,
+ "grad_norm": 0.07052983343601227,
+ "learning_rate": 3.0166935317123824e-06,
+ "loss": 0.0007,
+ "step": 244
+ },
+ {
+ "epoch": 3.3577981651376145,
+ "grad_norm": 0.09956210106611252,
+ "learning_rate": 2.9935249390937184e-06,
+ "loss": 0.0009,
+ "step": 245
+ },
+ {
+ "epoch": 3.371559633027523,
+ "grad_norm": 0.051604535430669785,
+ "learning_rate": 2.970312155921585e-06,
+ "loss": 0.0005,
+ "step": 246
+ },
+ {
+ "epoch": 3.385321100917431,
+ "grad_norm": 0.16182328760623932,
+ "learning_rate": 2.9470572606841295e-06,
+ "loss": 0.0006,
+ "step": 247
+ },
+ {
+ "epoch": 3.3990825688073394,
+ "grad_norm": 0.07653603702783585,
+ "learning_rate": 2.9237623356402423e-06,
+ "loss": 0.0005,
+ "step": 248
+ },
+ {
+ "epoch": 3.4128440366972477,
+ "grad_norm": 0.0773971900343895,
+ "learning_rate": 2.900429466633107e-06,
+ "loss": 0.0009,
+ "step": 249
+ },
+ {
+ "epoch": 3.426605504587156,
+ "grad_norm": 0.1629229336977005,
+ "learning_rate": 2.8770607429034352e-06,
+ "loss": 0.0013,
+ "step": 250
+ },
+ {
+ "epoch": 3.4403669724770642,
+ "grad_norm": 0.1734458953142166,
+ "learning_rate": 2.8536582569023964e-06,
+ "loss": 0.0009,
+ "step": 251
+ },
+ {
+ "epoch": 3.4541284403669725,
+ "grad_norm": 0.08963964134454727,
+ "learning_rate": 2.8302241041042564e-06,
+ "loss": 0.0004,
+ "step": 252
+ },
+ {
+ "epoch": 3.467889908256881,
+ "grad_norm": 0.037656184285879135,
+ "learning_rate": 2.8067603828187446e-06,
+ "loss": 0.0002,
+ "step": 253
+ },
+ {
+ "epoch": 3.481651376146789,
+ "grad_norm": 0.07996565848588943,
+ "learning_rate": 2.7832691940031755e-06,
+ "loss": 0.0006,
+ "step": 254
+ },
+ {
+ "epoch": 3.4954128440366974,
+ "grad_norm": 0.39782819151878357,
+ "learning_rate": 2.759752641074322e-06,
+ "loss": 0.0004,
+ "step": 255
+ },
+ {
+ "epoch": 3.5091743119266052,
+ "grad_norm": 0.056630104780197144,
+ "learning_rate": 2.7362128297200784e-06,
+ "loss": 0.0004,
+ "step": 256
+ },
+ {
+ "epoch": 3.522935779816514,
+ "grad_norm": 0.18943996727466583,
+ "learning_rate": 2.712651867710914e-06,
+ "loss": 0.0017,
+ "step": 257
+ },
+ {
+ "epoch": 3.536697247706422,
+ "grad_norm": 0.04645173251628876,
+ "learning_rate": 2.6890718647111424e-06,
+ "loss": 0.0004,
+ "step": 258
+ },
+ {
+ "epoch": 3.5504587155963305,
+ "grad_norm": 0.07644187659025192,
+ "learning_rate": 2.665474932090017e-06,
+ "loss": 0.0008,
+ "step": 259
+ },
+ {
+ "epoch": 3.5642201834862384,
+ "grad_norm": 0.04974009841680527,
+ "learning_rate": 2.6418631827326857e-06,
+ "loss": 0.0005,
+ "step": 260
+ },
+ {
+ "epoch": 3.5779816513761467,
+ "grad_norm": 0.06213025003671646,
+ "learning_rate": 2.6182387308509927e-06,
+ "loss": 0.0005,
+ "step": 261
+ },
+ {
+ "epoch": 3.591743119266055,
+ "grad_norm": 0.09061244130134583,
+ "learning_rate": 2.5946036917941765e-06,
+ "loss": 0.0006,
+ "step": 262
+ },
+ {
+ "epoch": 3.6055045871559632,
+ "grad_norm": 0.0796905905008316,
+ "learning_rate": 2.570960181859458e-06,
+ "loss": 0.0006,
+ "step": 263
+ },
+ {
+ "epoch": 3.6192660550458715,
+ "grad_norm": 0.19685125350952148,
+ "learning_rate": 2.547310318102548e-06,
+ "loss": 0.0006,
+ "step": 264
+ },
+ {
+ "epoch": 3.63302752293578,
+ "grad_norm": 0.030696067959070206,
+ "learning_rate": 2.5236562181480794e-06,
+ "loss": 0.0001,
+ "step": 265
+ },
+ {
+ "epoch": 3.646788990825688,
+ "grad_norm": 0.04516645520925522,
+ "learning_rate": 2.5e-06,
+ "loss": 0.0004,
+ "step": 266
+ },
+ {
+ "epoch": 3.6605504587155964,
+ "grad_norm": 0.09353721141815186,
+ "learning_rate": 2.4763437818519205e-06,
+ "loss": 0.0003,
+ "step": 267
+ },
+ {
+ "epoch": 3.6743119266055047,
+ "grad_norm": 0.05623761937022209,
+ "learning_rate": 2.4526896818974534e-06,
+ "loss": 0.0003,
+ "step": 268
+ },
+ {
+ "epoch": 3.688073394495413,
+ "grad_norm": 0.11353174597024918,
+ "learning_rate": 2.429039818140543e-06,
+ "loss": 0.0002,
+ "step": 269
+ },
+ {
+ "epoch": 3.7018348623853212,
+ "grad_norm": 0.03043302521109581,
+ "learning_rate": 2.405396308205825e-06,
+ "loss": 0.0002,
+ "step": 270
+ },
+ {
+ "epoch": 3.7155963302752295,
+ "grad_norm": 0.05028878524899483,
+ "learning_rate": 2.381761269149009e-06,
+ "loss": 0.0002,
+ "step": 271
+ },
+ {
+ "epoch": 3.729357798165138,
+ "grad_norm": 0.03009975329041481,
+ "learning_rate": 2.358136817267315e-06,
+ "loss": 0.0002,
+ "step": 272
+ },
+ {
+ "epoch": 3.7431192660550456,
+ "grad_norm": 0.08547350764274597,
+ "learning_rate": 2.334525067909983e-06,
+ "loss": 0.0005,
+ "step": 273
+ },
+ {
+ "epoch": 3.7568807339449544,
+ "grad_norm": 0.03611677512526512,
+ "learning_rate": 2.3109281352888593e-06,
+ "loss": 0.0002,
+ "step": 274
+ },
+ {
+ "epoch": 3.770642201834862,
+ "grad_norm": 0.03364509344100952,
+ "learning_rate": 2.2873481322890866e-06,
+ "loss": 0.0001,
+ "step": 275
+ },
+ {
+ "epoch": 3.7844036697247705,
+ "grad_norm": 0.04283633828163147,
+ "learning_rate": 2.263787170279922e-06,
+ "loss": 0.0003,
+ "step": 276
+ },
+ {
+ "epoch": 3.7981651376146788,
+ "grad_norm": 0.057849906384944916,
+ "learning_rate": 2.2402473589256793e-06,
+ "loss": 0.0002,
+ "step": 277
+ },
+ {
+ "epoch": 3.811926605504587,
+ "grad_norm": 0.12497337907552719,
+ "learning_rate": 2.2167308059968258e-06,
+ "loss": 0.0002,
+ "step": 278
+ },
+ {
+ "epoch": 3.8256880733944953,
+ "grad_norm": 0.044824711978435516,
+ "learning_rate": 2.193239617181256e-06,
+ "loss": 0.0003,
+ "step": 279
+ },
+ {
+ "epoch": 3.8394495412844036,
+ "grad_norm": 0.06536471843719482,
+ "learning_rate": 2.169775895895745e-06,
+ "loss": 0.0002,
+ "step": 280
+ },
+ {
+ "epoch": 3.853211009174312,
+ "grad_norm": 0.07215467095375061,
+ "learning_rate": 2.146341743097604e-06,
+ "loss": 0.0002,
+ "step": 281
+ },
+ {
+ "epoch": 3.86697247706422,
+ "grad_norm": 0.01871681585907936,
+ "learning_rate": 2.1229392570965656e-06,
+ "loss": 0.0001,
+ "step": 282
+ },
+ {
+ "epoch": 3.8807339449541285,
+ "grad_norm": 0.043813955038785934,
+ "learning_rate": 2.0995705333668948e-06,
+ "loss": 0.0002,
+ "step": 283
+ },
+ {
+ "epoch": 3.8944954128440368,
+ "grad_norm": 0.046582091599702835,
+ "learning_rate": 2.0762376643597586e-06,
+ "loss": 0.0003,
+ "step": 284
+ },
+ {
+ "epoch": 3.908256880733945,
+ "grad_norm": 0.06702767312526703,
+ "learning_rate": 2.0529427393158704e-06,
+ "loss": 0.0004,
+ "step": 285
+ },
+ {
+ "epoch": 3.9220183486238533,
+ "grad_norm": 0.053124528378248215,
+ "learning_rate": 2.0296878440784164e-06,
+ "loss": 0.0005,
+ "step": 286
+ },
+ {
+ "epoch": 3.9357798165137616,
+ "grad_norm": 0.029215684160590172,
+ "learning_rate": 2.006475060906283e-06,
+ "loss": 0.0002,
+ "step": 287
+ },
+ {
+ "epoch": 3.9495412844036695,
+ "grad_norm": 0.03736970201134682,
+ "learning_rate": 1.9833064682876175e-06,
+ "loss": 0.0001,
+ "step": 288
+ }
+ ],
+ "logging_steps": 1,
+ "max_steps": 432,
+ "num_input_tokens_seen": 0,
+ "num_train_epochs": 6,
+ "save_steps": 72,
+ "stateful_callbacks": {
+ "TrainerControl": {
+ "args": {
+ "should_epoch_stop": false,
+ "should_evaluate": false,
+ "should_log": false,
+ "should_save": true,
+ "should_training_stop": false
+ },
+ "attributes": {}
+ }
+ },
+ "total_flos": 1.8454300991357452e+19,
+ "train_batch_size": 4,
+ "trial_name": null,
+ "trial_params": null
+}
diff --git a/checkpoint-288/training_args.bin b/checkpoint-288/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..6b48f4b52f5bfff81ec8534f6510460a8014f336
--- /dev/null
+++ b/checkpoint-288/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:cbafef0713d4b794ca3a92a04d378baaf3fa7647c9af95948bfb2ef7c0e02eda
+size 7928
diff --git a/checkpoint-288/zero_to_fp32.py b/checkpoint-288/zero_to_fp32.py
new file mode 100644
index 0000000000000000000000000000000000000000..24cc342e78d1a006c782b3a4cd68d9ce786d8fd8
--- /dev/null
+++ b/checkpoint-288/zero_to_fp32.py
@@ -0,0 +1,604 @@
+#!/usr/bin/env python
+
+# Copyright (c) Microsoft Corporation.
+# SPDX-License-Identifier: Apache-2.0
+
+# DeepSpeed Team
+
+# This script extracts fp32 consolidated weights from a zero 1, 2 and 3 DeepSpeed checkpoints. It gets
+# copied into the top level checkpoint dir, so the user can easily do the conversion at any point in
+# the future. Once extracted, the weights don't require DeepSpeed and can be used in any
+# application.
+#
+# example: python zero_to_fp32.py . pytorch_model.bin
+
+import argparse
+import torch
+import glob
+import math
+import os
+import re
+from collections import OrderedDict
+from dataclasses import dataclass
+
+# while this script doesn't use deepspeed to recover data, since the checkpoints are pickled with
+# DeepSpeed data structures it has to be available in the current python environment.
+from deepspeed.utils import logger
+from deepspeed.checkpoint.constants import (DS_VERSION, OPTIMIZER_STATE_DICT, SINGLE_PARTITION_OF_FP32_GROUPS,
+ FP32_FLAT_GROUPS, ZERO_STAGE, PARTITION_COUNT, PARAM_SHAPES, BUFFER_NAMES,
+ FROZEN_PARAM_SHAPES, FROZEN_PARAM_FRAGMENTS)
+
+
+@dataclass
+class zero_model_state:
+ buffers: dict()
+ param_shapes: dict()
+ shared_params: list
+ ds_version: int
+ frozen_param_shapes: dict()
+ frozen_param_fragments: dict()
+
+
+debug = 0
+
+# load to cpu
+device = torch.device('cpu')
+
+
+def atoi(text):
+ return int(text) if text.isdigit() else text
+
+
+def natural_keys(text):
+ '''
+ alist.sort(key=natural_keys) sorts in human order
+ http://nedbatchelder.com/blog/200712/human_sorting.html
+ (See Toothy's implementation in the comments)
+ '''
+ return [atoi(c) for c in re.split(r'(\d+)', text)]
+
+
+def get_model_state_file(checkpoint_dir, zero_stage):
+ if not os.path.isdir(checkpoint_dir):
+ raise FileNotFoundError(f"Directory '{checkpoint_dir}' doesn't exist")
+
+ # there should be only one file
+ if zero_stage <= 2:
+ file = os.path.join(checkpoint_dir, "mp_rank_00_model_states.pt")
+ elif zero_stage == 3:
+ file = os.path.join(checkpoint_dir, "zero_pp_rank_0_mp_rank_00_model_states.pt")
+
+ if not os.path.exists(file):
+ raise FileNotFoundError(f"can't find model states file at '{file}'")
+
+ return file
+
+
+def get_checkpoint_files(checkpoint_dir, glob_pattern):
+ # XXX: need to test that this simple glob rule works for multi-node setup too
+ ckpt_files = sorted(glob.glob(os.path.join(checkpoint_dir, glob_pattern)), key=natural_keys)
+
+ if len(ckpt_files) == 0:
+ raise FileNotFoundError(f"can't find {glob_pattern} files in directory '{checkpoint_dir}'")
+
+ return ckpt_files
+
+
+def get_optim_files(checkpoint_dir):
+ return get_checkpoint_files(checkpoint_dir, "*_optim_states.pt")
+
+
+def get_model_state_files(checkpoint_dir):
+ return get_checkpoint_files(checkpoint_dir, "*_model_states.pt")
+
+
+def parse_model_states(files):
+ zero_model_states = []
+ for file in files:
+ state_dict = torch.load(file, map_location=device)
+
+ if BUFFER_NAMES not in state_dict:
+ raise ValueError(f"{file} is not a model state checkpoint")
+ buffer_names = state_dict[BUFFER_NAMES]
+ if debug:
+ print("Found buffers:", buffer_names)
+
+ # recover just the buffers while restoring them to fp32 if they were saved in fp16
+ buffers = {k: v.float() for k, v in state_dict["module"].items() if k in buffer_names}
+ param_shapes = state_dict[PARAM_SHAPES]
+
+ # collect parameters that are included in param_shapes
+ param_names = []
+ for s in param_shapes:
+ for name in s.keys():
+ param_names.append(name)
+
+ # update with frozen parameters
+ frozen_param_shapes = state_dict.get(FROZEN_PARAM_SHAPES, None)
+ if frozen_param_shapes is not None:
+ if debug:
+ print(f"Found frozen_param_shapes: {frozen_param_shapes}")
+ param_names += list(frozen_param_shapes.keys())
+
+ # handle shared params
+ shared_params = [[k, v] for k, v in state_dict["shared_params"].items()]
+
+ ds_version = state_dict.get(DS_VERSION, None)
+
+ frozen_param_fragments = state_dict.get(FROZEN_PARAM_FRAGMENTS, None)
+
+ z_model_state = zero_model_state(buffers=buffers,
+ param_shapes=param_shapes,
+ shared_params=shared_params,
+ ds_version=ds_version,
+ frozen_param_shapes=frozen_param_shapes,
+ frozen_param_fragments=frozen_param_fragments)
+ zero_model_states.append(z_model_state)
+
+ return zero_model_states
+
+
+def parse_optim_states(files, ds_checkpoint_dir):
+
+ total_files = len(files)
+ state_dicts = []
+ for f in files:
+ state_dict = torch.load(f, map_location=device)
+ # immediately discard the potentially huge 2 optimizer states as we only care for fp32 master weights
+ # and also handle the case where it was already removed by another helper script
+ state_dict["optimizer_state_dict"].pop("optimizer_state_dict", None)
+ state_dicts.append(state_dict)
+
+ if not ZERO_STAGE in state_dicts[0][OPTIMIZER_STATE_DICT]:
+ raise ValueError(f"{files[0]} is not a zero checkpoint")
+ zero_stage = state_dicts[0][OPTIMIZER_STATE_DICT][ZERO_STAGE]
+ world_size = state_dicts[0][OPTIMIZER_STATE_DICT][PARTITION_COUNT]
+
+ # For ZeRO-2 each param group can have different partition_count as data parallelism for expert
+ # parameters can be different from data parallelism for non-expert parameters. So we can just
+ # use the max of the partition_count to get the dp world_size.
+
+ if type(world_size) is list:
+ world_size = max(world_size)
+
+ if world_size != total_files:
+ raise ValueError(
+ f"Expected {world_size} of '*_optim_states.pt' under '{ds_checkpoint_dir}' but found {total_files} files. "
+ "Possibly due to an overwrite of an old checkpoint, or a checkpoint didn't get saved by one or more processes."
+ )
+
+ # the groups are named differently in each stage
+ if zero_stage <= 2:
+ fp32_groups_key = SINGLE_PARTITION_OF_FP32_GROUPS
+ elif zero_stage == 3:
+ fp32_groups_key = FP32_FLAT_GROUPS
+ else:
+ raise ValueError(f"unknown zero stage {zero_stage}")
+
+ if zero_stage <= 2:
+ fp32_flat_groups = [state_dicts[i][OPTIMIZER_STATE_DICT][fp32_groups_key] for i in range(len(state_dicts))]
+ elif zero_stage == 3:
+ # if there is more than one param group, there will be multiple flattened tensors - one
+ # flattened tensor per group - for simplicity merge them into a single tensor
+ #
+ # XXX: could make the script more memory efficient for when there are multiple groups - it
+ # will require matching the sub-lists of param_shapes for each param group flattened tensor
+
+ fp32_flat_groups = [
+ torch.cat(state_dicts[i][OPTIMIZER_STATE_DICT][fp32_groups_key], 0) for i in range(len(state_dicts))
+ ]
+
+ return zero_stage, world_size, fp32_flat_groups
+
+
+def _get_fp32_state_dict_from_zero_checkpoint(ds_checkpoint_dir, exclude_frozen_parameters):
+ """
+ Returns fp32 state_dict reconstructed from ds checkpoint
+
+ Args:
+ - ``ds_checkpoint_dir``: path to the deepspeed checkpoint folder (where the optimizer files are)
+
+ """
+ print(f"Processing zero checkpoint '{ds_checkpoint_dir}'")
+
+ optim_files = get_optim_files(ds_checkpoint_dir)
+ zero_stage, world_size, fp32_flat_groups = parse_optim_states(optim_files, ds_checkpoint_dir)
+ print(f"Detected checkpoint of type zero stage {zero_stage}, world_size: {world_size}")
+
+ model_files = get_model_state_files(ds_checkpoint_dir)
+
+ zero_model_states = parse_model_states(model_files)
+ print(f'Parsing checkpoint created by deepspeed=={zero_model_states[0].ds_version}')
+
+ if zero_stage <= 2:
+ return _get_fp32_state_dict_from_zero2_checkpoint(world_size, fp32_flat_groups, zero_model_states,
+ exclude_frozen_parameters)
+ elif zero_stage == 3:
+ return _get_fp32_state_dict_from_zero3_checkpoint(world_size, fp32_flat_groups, zero_model_states,
+ exclude_frozen_parameters)
+
+
+def _zero2_merge_frozen_params(state_dict, zero_model_states):
+ if zero_model_states[0].frozen_param_shapes is None or len(zero_model_states[0].frozen_param_shapes) == 0:
+ return
+
+ frozen_param_shapes = zero_model_states[0].frozen_param_shapes
+ frozen_param_fragments = zero_model_states[0].frozen_param_fragments
+
+ if debug:
+ num_elem = sum(s.numel() for s in frozen_param_shapes.values())
+ print(f'rank 0: {FROZEN_PARAM_SHAPES}.numel = {num_elem}')
+
+ wanted_params = len(frozen_param_shapes)
+ wanted_numel = sum(s.numel() for s in frozen_param_shapes.values())
+ avail_numel = sum([p.numel() for p in frozen_param_fragments.values()])
+ print(f'Frozen params: Have {avail_numel} numels to process.')
+ print(f'Frozen params: Need {wanted_numel} numels in {wanted_params} params')
+
+ total_params = 0
+ total_numel = 0
+ for name, shape in frozen_param_shapes.items():
+ total_params += 1
+ unpartitioned_numel = shape.numel()
+ total_numel += unpartitioned_numel
+
+ state_dict[name] = frozen_param_fragments[name]
+
+ if debug:
+ print(f"{name} full shape: {shape} unpartitioned numel {unpartitioned_numel} ")
+
+ print(f"Reconstructed Frozen fp32 state dict with {total_params} params {total_numel} elements")
+
+
+def _has_callable(obj, fn):
+ attr = getattr(obj, fn, None)
+ return callable(attr)
+
+
+def _zero2_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states):
+ param_shapes = zero_model_states[0].param_shapes
+
+ # Reconstruction protocol:
+ #
+ # XXX: document this
+
+ if debug:
+ for i in range(world_size):
+ for j in range(len(fp32_flat_groups[0])):
+ print(f"{FP32_FLAT_GROUPS}[{i}][{j}].shape={fp32_flat_groups[i][j].shape}")
+
+ # XXX: memory usage doubles here (zero2)
+ num_param_groups = len(fp32_flat_groups[0])
+ merged_single_partition_of_fp32_groups = []
+ for i in range(num_param_groups):
+ merged_partitions = [sd[i] for sd in fp32_flat_groups]
+ full_single_fp32_vector = torch.cat(merged_partitions, 0)
+ merged_single_partition_of_fp32_groups.append(full_single_fp32_vector)
+ avail_numel = sum(
+ [full_single_fp32_vector.numel() for full_single_fp32_vector in merged_single_partition_of_fp32_groups])
+
+ if debug:
+ wanted_params = sum([len(shapes) for shapes in param_shapes])
+ wanted_numel = sum([sum(shape.numel() for shape in shapes.values()) for shapes in param_shapes])
+ # not asserting if there is a mismatch due to possible padding
+ print(f"Have {avail_numel} numels to process.")
+ print(f"Need {wanted_numel} numels in {wanted_params} params.")
+
+ # params
+ # XXX: for huge models that can't fit into the host's RAM we will have to recode this to support
+ # out-of-core computing solution
+ total_numel = 0
+ total_params = 0
+ for shapes, full_single_fp32_vector in zip(param_shapes, merged_single_partition_of_fp32_groups):
+ offset = 0
+ avail_numel = full_single_fp32_vector.numel()
+ for name, shape in shapes.items():
+
+ unpartitioned_numel = shape.numel() if _has_callable(shape, 'numel') else math.prod(shape)
+ total_numel += unpartitioned_numel
+ total_params += 1
+
+ if debug:
+ print(f"{name} full shape: {shape} unpartitioned numel {unpartitioned_numel} ")
+ state_dict[name] = full_single_fp32_vector.narrow(0, offset, unpartitioned_numel).view(shape)
+ offset += unpartitioned_numel
+
+ # Z2 started to align to 2*world_size to improve nccl performance. Therefore both offset and
+ # avail_numel can differ by anywhere between 0..2*world_size. Due to two unrelated complex
+ # paddings performed in the code it's almost impossible to predict the exact numbers w/o the
+ # live optimizer object, so we are checking that the numbers are within the right range
+ align_to = 2 * world_size
+
+ def zero2_align(x):
+ return align_to * math.ceil(x / align_to)
+
+ if debug:
+ print(f"original offset={offset}, avail_numel={avail_numel}")
+
+ offset = zero2_align(offset)
+ avail_numel = zero2_align(avail_numel)
+
+ if debug:
+ print(f"aligned offset={offset}, avail_numel={avail_numel}")
+
+ # Sanity check
+ if offset != avail_numel:
+ raise ValueError(f"consumed {offset} numels out of {avail_numel} - something is wrong")
+
+ print(f"Reconstructed fp32 state dict with {total_params} params {total_numel} elements")
+
+
+def _get_fp32_state_dict_from_zero2_checkpoint(world_size, fp32_flat_groups, zero_model_states,
+ exclude_frozen_parameters):
+ state_dict = OrderedDict()
+
+ # buffers
+ buffers = zero_model_states[0].buffers
+ state_dict.update(buffers)
+ if debug:
+ print(f"added {len(buffers)} buffers")
+
+ if not exclude_frozen_parameters:
+ _zero2_merge_frozen_params(state_dict, zero_model_states)
+
+ _zero2_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states)
+
+ # recover shared parameters
+ for pair in zero_model_states[0].shared_params:
+ if pair[1] in state_dict:
+ state_dict[pair[0]] = state_dict[pair[1]]
+
+ return state_dict
+
+
+def zero3_partitioned_param_info(unpartitioned_numel, world_size):
+ remainder = unpartitioned_numel % world_size
+ padding_numel = (world_size - remainder) if remainder else 0
+ partitioned_numel = math.ceil(unpartitioned_numel / world_size)
+ return partitioned_numel, padding_numel
+
+
+def _zero3_merge_frozen_params(state_dict, world_size, zero_model_states):
+ if zero_model_states[0].frozen_param_shapes is None or len(zero_model_states[0].frozen_param_shapes) == 0:
+ return
+
+ if debug:
+ for i in range(world_size):
+ num_elem = sum(s.numel() for s in zero_model_states[i].frozen_param_fragments.values())
+ print(f'rank {i}: {FROZEN_PARAM_SHAPES}.numel = {num_elem}')
+
+ frozen_param_shapes = zero_model_states[0].frozen_param_shapes
+ wanted_params = len(frozen_param_shapes)
+ wanted_numel = sum(s.numel() for s in frozen_param_shapes.values())
+ avail_numel = sum([p.numel() for p in zero_model_states[0].frozen_param_fragments.values()]) * world_size
+ print(f'Frozen params: Have {avail_numel} numels to process.')
+ print(f'Frozen params: Need {wanted_numel} numels in {wanted_params} params')
+
+ total_params = 0
+ total_numel = 0
+ for name, shape in zero_model_states[0].frozen_param_shapes.items():
+ total_params += 1
+ unpartitioned_numel = shape.numel()
+ total_numel += unpartitioned_numel
+
+ param_frags = tuple(model_state.frozen_param_fragments[name] for model_state in zero_model_states)
+ state_dict[name] = torch.cat(param_frags, 0).narrow(0, 0, unpartitioned_numel).view(shape)
+
+ partitioned_numel, partitioned_padding_numel = zero3_partitioned_param_info(unpartitioned_numel, world_size)
+
+ if debug:
+ print(
+ f"Frozen params: {total_params} {name} full shape: {shape} partition0 numel={partitioned_numel} partitioned_padding_numel={partitioned_padding_numel}"
+ )
+
+ print(f"Reconstructed Frozen fp32 state dict with {total_params} params {total_numel} elements")
+
+
+def _zero3_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states):
+ param_shapes = zero_model_states[0].param_shapes
+ avail_numel = fp32_flat_groups[0].numel() * world_size
+ # Reconstruction protocol: For zero3 we need to zip the partitions together at boundary of each
+ # param, re-consolidating each param, while dealing with padding if any
+
+ # merge list of dicts, preserving order
+ param_shapes = {k: v for d in param_shapes for k, v in d.items()}
+
+ if debug:
+ for i in range(world_size):
+ print(f"{FP32_FLAT_GROUPS}[{i}].shape={fp32_flat_groups[i].shape}")
+
+ wanted_params = len(param_shapes)
+ wanted_numel = sum(shape.numel() for shape in param_shapes.values())
+ # not asserting if there is a mismatch due to possible padding
+ avail_numel = fp32_flat_groups[0].numel() * world_size
+ print(f"Trainable params: Have {avail_numel} numels to process.")
+ print(f"Trainable params: Need {wanted_numel} numels in {wanted_params} params.")
+
+ # params
+ # XXX: for huge models that can't fit into the host's RAM we will have to recode this to support
+ # out-of-core computing solution
+ offset = 0
+ total_numel = 0
+ total_params = 0
+ for name, shape in param_shapes.items():
+
+ unpartitioned_numel = shape.numel()
+ total_numel += unpartitioned_numel
+ total_params += 1
+
+ partitioned_numel, partitioned_padding_numel = zero3_partitioned_param_info(unpartitioned_numel, world_size)
+
+ if debug:
+ print(
+ f"Trainable params: {total_params} {name} full shape: {shape} partition0 numel={partitioned_numel} partitioned_padding_numel={partitioned_padding_numel}"
+ )
+
+ # XXX: memory usage doubles here
+ state_dict[name] = torch.cat(
+ tuple(fp32_flat_groups[i].narrow(0, offset, partitioned_numel) for i in range(world_size)),
+ 0).narrow(0, 0, unpartitioned_numel).view(shape)
+ offset += partitioned_numel
+
+ offset *= world_size
+
+ # Sanity check
+ if offset != avail_numel:
+ raise ValueError(f"consumed {offset} numels out of {avail_numel} - something is wrong")
+
+ print(f"Reconstructed Trainable fp32 state dict with {total_params} params {total_numel} elements")
+
+
+def _get_fp32_state_dict_from_zero3_checkpoint(world_size, fp32_flat_groups, zero_model_states,
+ exclude_frozen_parameters):
+ state_dict = OrderedDict()
+
+ # buffers
+ buffers = zero_model_states[0].buffers
+ state_dict.update(buffers)
+ if debug:
+ print(f"added {len(buffers)} buffers")
+
+ if not exclude_frozen_parameters:
+ _zero3_merge_frozen_params(state_dict, world_size, zero_model_states)
+
+ _zero3_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states)
+
+ # recover shared parameters
+ for pair in zero_model_states[0].shared_params:
+ if pair[1] in state_dict:
+ state_dict[pair[0]] = state_dict[pair[1]]
+
+ return state_dict
+
+
+def get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag=None, exclude_frozen_parameters=False):
+ """
+ Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated state_dict that can be loaded with
+ ``load_state_dict()`` and used for training without DeepSpeed or shared with others, for example
+ via a model hub.
+
+ Args:
+ - ``checkpoint_dir``: path to the desired checkpoint folder
+ - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in 'latest' file. e.g., ``global_step14``
+ - ``exclude_frozen_parameters``: exclude frozen parameters
+
+ Returns:
+ - pytorch ``state_dict``
+
+ Note: this approach may not work if your application doesn't have sufficient free CPU memory and
+ you may need to use the offline approach using the ``zero_to_fp32.py`` script that is saved with
+ the checkpoint.
+
+ A typical usage might be ::
+
+ from deepspeed.utils.zero_to_fp32 import get_fp32_state_dict_from_zero_checkpoint
+ # do the training and checkpoint saving
+ state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir) # already on cpu
+ model = model.cpu() # move to cpu
+ model.load_state_dict(state_dict)
+ # submit to model hub or save the model to share with others
+
+ In this example the ``model`` will no longer be usable in the deepspeed context of the same
+ application. i.e. you will need to re-initialize the deepspeed engine, since
+ ``model.load_state_dict(state_dict)`` will remove all the deepspeed magic from it.
+
+ If you want it all done for you, use ``load_state_dict_from_zero_checkpoint`` instead.
+
+ """
+ if tag is None:
+ latest_path = os.path.join(checkpoint_dir, 'latest')
+ if os.path.isfile(latest_path):
+ with open(latest_path, 'r') as fd:
+ tag = fd.read().strip()
+ else:
+ raise ValueError(f"Unable to find 'latest' file at {latest_path}")
+
+ ds_checkpoint_dir = os.path.join(checkpoint_dir, tag)
+
+ if not os.path.isdir(ds_checkpoint_dir):
+ raise FileNotFoundError(f"Directory '{ds_checkpoint_dir}' doesn't exist")
+
+ return _get_fp32_state_dict_from_zero_checkpoint(ds_checkpoint_dir, exclude_frozen_parameters)
+
+
+def convert_zero_checkpoint_to_fp32_state_dict(checkpoint_dir, output_file, tag=None, exclude_frozen_parameters=False):
+ """
+ Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated ``state_dict`` file that can be
+ loaded with ``torch.load(file)`` + ``load_state_dict()`` and used for training without DeepSpeed.
+
+ Args:
+ - ``checkpoint_dir``: path to the desired checkpoint folder. (one that contains the tag-folder, like ``global_step14``)
+ - ``output_file``: path to the pytorch fp32 state_dict output file (e.g. path/pytorch_model.bin)
+ - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in the file named ``latest`` in the checkpoint folder, e.g., ``global_step14``
+ - ``exclude_frozen_parameters``: exclude frozen parameters
+ """
+
+ state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag, exclude_frozen_parameters)
+ print(f"Saving fp32 state dict to {output_file}")
+ torch.save(state_dict, output_file)
+
+
+def load_state_dict_from_zero_checkpoint(model, checkpoint_dir, tag=None):
+ """
+ 1. Put the provided model to cpu
+ 2. Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated ``state_dict``
+ 3. Load it into the provided model
+
+ Args:
+ - ``model``: the model object to update
+ - ``checkpoint_dir``: path to the desired checkpoint folder. (one that contains the tag-folder, like ``global_step14``)
+ - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in the file named ``latest`` in the checkpoint folder, e.g., ``global_step14``
+
+ Returns:
+ - ``model`: modified model
+
+ Make sure you have plenty of CPU memory available before you call this function. If you don't
+ have enough use the ``zero_to_fp32.py`` utility to do the conversion. You will find it
+ conveniently placed for you in the checkpoint folder.
+
+ A typical usage might be ::
+
+ from deepspeed.utils.zero_to_fp32 import load_state_dict_from_zero_checkpoint
+ model = load_state_dict_from_zero_checkpoint(trainer.model, checkpoint_dir)
+ # submit to model hub or save the model to share with others
+
+ Note, that once this was run, the ``model`` will no longer be usable in the deepspeed context
+ of the same application. i.e. you will need to re-initialize the deepspeed engine, since
+ ``model.load_state_dict(state_dict)`` will remove all the deepspeed magic from it.
+
+ """
+ logger.info(f"Extracting fp32 weights")
+ state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag)
+
+ logger.info(f"Overwriting model with fp32 weights")
+ model = model.cpu()
+ model.load_state_dict(state_dict, strict=False)
+
+ return model
+
+
+if __name__ == "__main__":
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument("checkpoint_dir",
+ type=str,
+ help="path to the desired checkpoint folder, e.g., path/checkpoint-12")
+ parser.add_argument(
+ "output_file",
+ type=str,
+ help="path to the pytorch fp32 state_dict output file (e.g. path/checkpoint-12/pytorch_model.bin)")
+ parser.add_argument("-t",
+ "--tag",
+ type=str,
+ default=None,
+ help="checkpoint tag used as a unique identifier for checkpoint. e.g., global_step1")
+ parser.add_argument("--exclude_frozen_parameters", action='store_true', help="exclude frozen parameters")
+ parser.add_argument("-d", "--debug", action='store_true', help="enable debug")
+ args = parser.parse_args()
+
+ debug = args.debug
+
+ convert_zero_checkpoint_to_fp32_state_dict(args.checkpoint_dir,
+ args.output_file,
+ tag=args.tag,
+ exclude_frozen_parameters=args.exclude_frozen_parameters)
diff --git a/checkpoint-360/README.md b/checkpoint-360/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..1b184114a0c28ed3e4c082c18486736dc818166d
--- /dev/null
+++ b/checkpoint-360/README.md
@@ -0,0 +1,202 @@
+---
+base_model: meta-llama/Llama-3.3-70B-Instruct
+library_name: peft
+---
+
+# Model Card for Model ID
+
+
+
+
+
+## Model Details
+
+### Model Description
+
+
+
+
+
+- **Developed by:** [More Information Needed]
+- **Funded by [optional]:** [More Information Needed]
+- **Shared by [optional]:** [More Information Needed]
+- **Model type:** [More Information Needed]
+- **Language(s) (NLP):** [More Information Needed]
+- **License:** [More Information Needed]
+- **Finetuned from model [optional]:** [More Information Needed]
+
+### Model Sources [optional]
+
+
+
+- **Repository:** [More Information Needed]
+- **Paper [optional]:** [More Information Needed]
+- **Demo [optional]:** [More Information Needed]
+
+## Uses
+
+
+
+### Direct Use
+
+
+
+[More Information Needed]
+
+### Downstream Use [optional]
+
+
+
+[More Information Needed]
+
+### Out-of-Scope Use
+
+
+
+[More Information Needed]
+
+## Bias, Risks, and Limitations
+
+
+
+[More Information Needed]
+
+### Recommendations
+
+
+
+Users (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations.
+
+## How to Get Started with the Model
+
+Use the code below to get started with the model.
+
+[More Information Needed]
+
+## Training Details
+
+### Training Data
+
+
+
+[More Information Needed]
+
+### Training Procedure
+
+
+
+#### Preprocessing [optional]
+
+[More Information Needed]
+
+
+#### Training Hyperparameters
+
+- **Training regime:** [More Information Needed]
+
+#### Speeds, Sizes, Times [optional]
+
+
+
+[More Information Needed]
+
+## Evaluation
+
+
+
+### Testing Data, Factors & Metrics
+
+#### Testing Data
+
+
+
+[More Information Needed]
+
+#### Factors
+
+
+
+[More Information Needed]
+
+#### Metrics
+
+
+
+[More Information Needed]
+
+### Results
+
+[More Information Needed]
+
+#### Summary
+
+
+
+## Model Examination [optional]
+
+
+
+[More Information Needed]
+
+## Environmental Impact
+
+
+
+Carbon emissions can be estimated using the [Machine Learning Impact calculator](https://mlco2.github.io/impact#compute) presented in [Lacoste et al. (2019)](https://arxiv.org/abs/1910.09700).
+
+- **Hardware Type:** [More Information Needed]
+- **Hours used:** [More Information Needed]
+- **Cloud Provider:** [More Information Needed]
+- **Compute Region:** [More Information Needed]
+- **Carbon Emitted:** [More Information Needed]
+
+## Technical Specifications [optional]
+
+### Model Architecture and Objective
+
+[More Information Needed]
+
+### Compute Infrastructure
+
+[More Information Needed]
+
+#### Hardware
+
+[More Information Needed]
+
+#### Software
+
+[More Information Needed]
+
+## Citation [optional]
+
+
+
+**BibTeX:**
+
+[More Information Needed]
+
+**APA:**
+
+[More Information Needed]
+
+## Glossary [optional]
+
+
+
+[More Information Needed]
+
+## More Information [optional]
+
+[More Information Needed]
+
+## Model Card Authors [optional]
+
+[More Information Needed]
+
+## Model Card Contact
+
+[More Information Needed]
+### Framework versions
+
+- PEFT 0.15.0
\ No newline at end of file
diff --git a/checkpoint-360/adapter_config.json b/checkpoint-360/adapter_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..538b369b5129fb972c782e6ccfd589054540e1af
--- /dev/null
+++ b/checkpoint-360/adapter_config.json
@@ -0,0 +1,42 @@
+{
+ "alpha_pattern": {},
+ "auto_mapping": null,
+ "base_model_name_or_path": "meta-llama/Llama-3.3-70B-Instruct",
+ "bias": "none",
+ "corda_config": null,
+ "eva_config": null,
+ "exclude_modules": null,
+ "fan_in_fan_out": null,
+ "inference_mode": true,
+ "init_lora_weights": true,
+ "layer_replication": null,
+ "layers_pattern": null,
+ "layers_to_transform": null,
+ "loftq_config": {},
+ "lora_alpha": 512,
+ "lora_bias": false,
+ "lora_dropout": 0.05,
+ "megatron_config": null,
+ "megatron_core": "megatron.core",
+ "modules_to_save": [
+ "embed_tokens",
+ "lm_head"
+ ],
+ "peft_type": "LORA",
+ "r": 256,
+ "rank_pattern": {},
+ "revision": null,
+ "target_modules": [
+ "o_proj",
+ "q_proj",
+ "v_proj",
+ "k_proj",
+ "up_proj",
+ "down_proj",
+ "gate_proj"
+ ],
+ "task_type": "CAUSAL_LM",
+ "trainable_token_indices": null,
+ "use_dora": false,
+ "use_rslora": false
+}
\ No newline at end of file
diff --git a/checkpoint-360/adapter_model.safetensors b/checkpoint-360/adapter_model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..1239e0ee0f1b353d42fd0fc19d3ab8793d1c0bc6
--- /dev/null
+++ b/checkpoint-360/adapter_model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:34fe17cfa3a28141a368fe740ab99e7ffa2bc8f8f522c01938fe5e6b12430430
+size 10829849744
diff --git a/checkpoint-360/global_step358/bf16_zero_pp_rank_0_mp_rank_00_optim_states.pt b/checkpoint-360/global_step358/bf16_zero_pp_rank_0_mp_rank_00_optim_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..5ca1e85762eb386fc949de65a66dbb1fb6017af2
--- /dev/null
+++ b/checkpoint-360/global_step358/bf16_zero_pp_rank_0_mp_rank_00_optim_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:046721f6db9b5442dd45a68915fda8974f2c5ee3d2e13a7c5c9890f0ddf56d5f
+size 21659418140
diff --git a/checkpoint-360/global_step358/bf16_zero_pp_rank_1_mp_rank_00_optim_states.pt b/checkpoint-360/global_step358/bf16_zero_pp_rank_1_mp_rank_00_optim_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..30ac1ea5b2b4fd6cc4425b4ef04eb31594a2bb7a
--- /dev/null
+++ b/checkpoint-360/global_step358/bf16_zero_pp_rank_1_mp_rank_00_optim_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ce8c0d36adba966931d549e396a2bc5c4df67cf796482a55c631274fa0b94b17
+size 21659457372
diff --git a/checkpoint-360/global_step358/bf16_zero_pp_rank_2_mp_rank_00_optim_states.pt b/checkpoint-360/global_step358/bf16_zero_pp_rank_2_mp_rank_00_optim_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..f4f1e84c5b20d543e93f7ecdcfbe4e29b071a732
--- /dev/null
+++ b/checkpoint-360/global_step358/bf16_zero_pp_rank_2_mp_rank_00_optim_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f218052957d522e59818dfc6d817b253d2ee021b6b951f84116db1dbd851efc3
+size 21659417820
diff --git a/checkpoint-360/global_step358/mp_rank_00_model_states.pt b/checkpoint-360/global_step358/mp_rank_00_model_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..97d32f4023dd3f7457e5df060997ade356e50028
--- /dev/null
+++ b/checkpoint-360/global_step358/mp_rank_00_model_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ab850764a4333a33af5380aeebd0c9eee8b0ab79fc3005c8438cac6e236633be
+size 11918643933
diff --git a/checkpoint-360/latest b/checkpoint-360/latest
new file mode 100644
index 0000000000000000000000000000000000000000..4b13c3d680741404e7037fa9bda4ee0813cbc95a
--- /dev/null
+++ b/checkpoint-360/latest
@@ -0,0 +1 @@
+global_step358
\ No newline at end of file
diff --git a/checkpoint-360/rng_state_0.pth b/checkpoint-360/rng_state_0.pth
new file mode 100644
index 0000000000000000000000000000000000000000..a9fa51bd3ff52e34fc0a6c92934f69ffe450edf5
--- /dev/null
+++ b/checkpoint-360/rng_state_0.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:71fb5ae5e74f749d05855787f34ab8cadca32bae0db9f6986f36dc49e51ab480
+size 14768
diff --git a/checkpoint-360/rng_state_1.pth b/checkpoint-360/rng_state_1.pth
new file mode 100644
index 0000000000000000000000000000000000000000..27ef76d99f7b2269fbf819b67bbe5d24037d5605
--- /dev/null
+++ b/checkpoint-360/rng_state_1.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:aad9dfac1d80c9f2275fe633e92cf3b80770873aa55f832188fcbb959aaaac63
+size 14768
diff --git a/checkpoint-360/rng_state_2.pth b/checkpoint-360/rng_state_2.pth
new file mode 100644
index 0000000000000000000000000000000000000000..2095932a3ece56b00b80902f9b4723975f4beac1
--- /dev/null
+++ b/checkpoint-360/rng_state_2.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8e4611b6e380a15837e3c2fc583d10e14f6e8ff0e43ae3f28ccd6ed9685d54d4
+size 14768
diff --git a/checkpoint-360/scheduler.pt b/checkpoint-360/scheduler.pt
new file mode 100644
index 0000000000000000000000000000000000000000..a9bea00971e15c82f3fef418b45f5772efc6e041
--- /dev/null
+++ b/checkpoint-360/scheduler.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f12fb5515df7dcf4cc1a3ecda848613d64db62cd311a9066dc53c01e4e1c6a83
+size 1064
diff --git a/checkpoint-360/special_tokens_map.json b/checkpoint-360/special_tokens_map.json
new file mode 100644
index 0000000000000000000000000000000000000000..278b7f0f84be865c4687700ee7b3c63d89a51e18
--- /dev/null
+++ b/checkpoint-360/special_tokens_map.json
@@ -0,0 +1,23 @@
+{
+ "bos_token": {
+ "content": "<|begin_of_text|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "<|eot_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": {
+ "content": "<|end_of_text|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+}
diff --git a/checkpoint-360/tokenizer.json b/checkpoint-360/tokenizer.json
new file mode 100644
index 0000000000000000000000000000000000000000..1c1d8d5c9024994f1d3b00f9662b8dd89ca13cf2
--- /dev/null
+++ b/checkpoint-360/tokenizer.json
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6b9e4e7fb171f92fd137b777cc2714bf87d11576700a1dcd7a399e7bbe39537b
+size 17209920
diff --git a/checkpoint-360/tokenizer_config.json b/checkpoint-360/tokenizer_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..ca91a2ef55f4239a7af81d7c9abb05f53621a07b
--- /dev/null
+++ b/checkpoint-360/tokenizer_config.json
@@ -0,0 +1,2064 @@
+{
+ "added_tokens_decoder": {
+ "128000": {
+ "content": "<|begin_of_text|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128001": {
+ "content": "<|end_of_text|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128002": {
+ "content": "<|reserved_special_token_0|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128003": {
+ "content": "<|reserved_special_token_1|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128004": {
+ "content": "<|finetune_right_pad_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128005": {
+ "content": "<|reserved_special_token_2|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128006": {
+ "content": "<|start_header_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128007": {
+ "content": "<|end_header_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128008": {
+ "content": "<|eom_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128009": {
+ "content": "<|eot_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128010": {
+ "content": "<|python_tag|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128011": {
+ "content": "<|reserved_special_token_3|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128012": {
+ "content": "<|reserved_special_token_4|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128013": {
+ "content": "<|reserved_special_token_5|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128014": {
+ "content": "<|reserved_special_token_6|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128015": {
+ "content": "<|reserved_special_token_7|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128016": {
+ "content": "<|reserved_special_token_8|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128017": {
+ "content": "<|reserved_special_token_9|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128018": {
+ "content": "<|reserved_special_token_10|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128019": {
+ "content": "<|reserved_special_token_11|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128020": {
+ "content": "<|reserved_special_token_12|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128021": {
+ "content": "<|reserved_special_token_13|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128022": {
+ "content": "<|reserved_special_token_14|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128023": {
+ "content": "<|reserved_special_token_15|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128024": {
+ "content": "<|reserved_special_token_16|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128025": {
+ "content": "<|reserved_special_token_17|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128026": {
+ "content": "<|reserved_special_token_18|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128027": {
+ "content": "<|reserved_special_token_19|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128028": {
+ "content": "<|reserved_special_token_20|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128029": {
+ "content": "<|reserved_special_token_21|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128030": {
+ "content": "<|reserved_special_token_22|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128031": {
+ "content": "<|reserved_special_token_23|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128032": {
+ "content": "<|reserved_special_token_24|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128033": {
+ "content": "<|reserved_special_token_25|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128034": {
+ "content": "<|reserved_special_token_26|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128035": {
+ "content": "<|reserved_special_token_27|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128036": {
+ "content": "<|reserved_special_token_28|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128037": {
+ "content": "<|reserved_special_token_29|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128038": {
+ "content": "<|reserved_special_token_30|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128039": {
+ "content": "<|reserved_special_token_31|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128040": {
+ "content": "<|reserved_special_token_32|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128041": {
+ "content": "<|reserved_special_token_33|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128042": {
+ "content": "<|reserved_special_token_34|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128043": {
+ "content": "<|reserved_special_token_35|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128044": {
+ "content": "<|reserved_special_token_36|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128045": {
+ "content": "<|reserved_special_token_37|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128046": {
+ "content": "<|reserved_special_token_38|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128047": {
+ "content": "<|reserved_special_token_39|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128048": {
+ "content": "<|reserved_special_token_40|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128049": {
+ "content": "<|reserved_special_token_41|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128050": {
+ "content": "<|reserved_special_token_42|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128051": {
+ "content": "<|reserved_special_token_43|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128052": {
+ "content": "<|reserved_special_token_44|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128053": {
+ "content": "<|reserved_special_token_45|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128054": {
+ "content": "<|reserved_special_token_46|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128055": {
+ "content": "<|reserved_special_token_47|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128056": {
+ "content": "<|reserved_special_token_48|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128057": {
+ "content": "<|reserved_special_token_49|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128058": {
+ "content": "<|reserved_special_token_50|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128059": {
+ "content": "<|reserved_special_token_51|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128060": {
+ "content": "<|reserved_special_token_52|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128061": {
+ "content": "<|reserved_special_token_53|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128062": {
+ "content": "<|reserved_special_token_54|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128063": {
+ "content": "<|reserved_special_token_55|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128064": {
+ "content": "<|reserved_special_token_56|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128065": {
+ "content": "<|reserved_special_token_57|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128066": {
+ "content": "<|reserved_special_token_58|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128067": {
+ "content": "<|reserved_special_token_59|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128068": {
+ "content": "<|reserved_special_token_60|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128069": {
+ "content": "<|reserved_special_token_61|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128070": {
+ "content": "<|reserved_special_token_62|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128071": {
+ "content": "<|reserved_special_token_63|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128072": {
+ "content": "<|reserved_special_token_64|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128073": {
+ "content": "<|reserved_special_token_65|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128074": {
+ "content": "<|reserved_special_token_66|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128075": {
+ "content": "<|reserved_special_token_67|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128076": {
+ "content": "<|reserved_special_token_68|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128077": {
+ "content": "<|reserved_special_token_69|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128078": {
+ "content": "<|reserved_special_token_70|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128079": {
+ "content": "<|reserved_special_token_71|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128080": {
+ "content": "<|reserved_special_token_72|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128081": {
+ "content": "<|reserved_special_token_73|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128082": {
+ "content": "<|reserved_special_token_74|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128083": {
+ "content": "<|reserved_special_token_75|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128084": {
+ "content": "<|reserved_special_token_76|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128085": {
+ "content": "<|reserved_special_token_77|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128086": {
+ "content": "<|reserved_special_token_78|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128087": {
+ "content": "<|reserved_special_token_79|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128088": {
+ "content": "<|reserved_special_token_80|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128089": {
+ "content": "<|reserved_special_token_81|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128090": {
+ "content": "<|reserved_special_token_82|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128091": {
+ "content": "<|reserved_special_token_83|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128092": {
+ "content": "<|reserved_special_token_84|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128093": {
+ "content": "<|reserved_special_token_85|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128094": {
+ "content": "<|reserved_special_token_86|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128095": {
+ "content": "<|reserved_special_token_87|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128096": {
+ "content": "<|reserved_special_token_88|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128097": {
+ "content": "<|reserved_special_token_89|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128098": {
+ "content": "<|reserved_special_token_90|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128099": {
+ "content": "<|reserved_special_token_91|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128100": {
+ "content": "<|reserved_special_token_92|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128101": {
+ "content": "<|reserved_special_token_93|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128102": {
+ "content": "<|reserved_special_token_94|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128103": {
+ "content": "<|reserved_special_token_95|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128104": {
+ "content": "<|reserved_special_token_96|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128105": {
+ "content": "<|reserved_special_token_97|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128106": {
+ "content": "<|reserved_special_token_98|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128107": {
+ "content": "<|reserved_special_token_99|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128108": {
+ "content": "<|reserved_special_token_100|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128109": {
+ "content": "<|reserved_special_token_101|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128110": {
+ "content": "<|reserved_special_token_102|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128111": {
+ "content": "<|reserved_special_token_103|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128112": {
+ "content": "<|reserved_special_token_104|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128113": {
+ "content": "<|reserved_special_token_105|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128114": {
+ "content": "<|reserved_special_token_106|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128115": {
+ "content": "<|reserved_special_token_107|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128116": {
+ "content": "<|reserved_special_token_108|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128117": {
+ "content": "<|reserved_special_token_109|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128118": {
+ "content": "<|reserved_special_token_110|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128119": {
+ "content": "<|reserved_special_token_111|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128120": {
+ "content": "<|reserved_special_token_112|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128121": {
+ "content": "<|reserved_special_token_113|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128122": {
+ "content": "<|reserved_special_token_114|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128123": {
+ "content": "<|reserved_special_token_115|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128124": {
+ "content": "<|reserved_special_token_116|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128125": {
+ "content": "<|reserved_special_token_117|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128126": {
+ "content": "<|reserved_special_token_118|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128127": {
+ "content": "<|reserved_special_token_119|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128128": {
+ "content": "<|reserved_special_token_120|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128129": {
+ "content": "<|reserved_special_token_121|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128130": {
+ "content": "<|reserved_special_token_122|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128131": {
+ "content": "<|reserved_special_token_123|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128132": {
+ "content": "<|reserved_special_token_124|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128133": {
+ "content": "<|reserved_special_token_125|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128134": {
+ "content": "<|reserved_special_token_126|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128135": {
+ "content": "<|reserved_special_token_127|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128136": {
+ "content": "<|reserved_special_token_128|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128137": {
+ "content": "<|reserved_special_token_129|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128138": {
+ "content": "<|reserved_special_token_130|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128139": {
+ "content": "<|reserved_special_token_131|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128140": {
+ "content": "<|reserved_special_token_132|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128141": {
+ "content": "<|reserved_special_token_133|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128142": {
+ "content": "<|reserved_special_token_134|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128143": {
+ "content": "<|reserved_special_token_135|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128144": {
+ "content": "<|reserved_special_token_136|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128145": {
+ "content": "<|reserved_special_token_137|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128146": {
+ "content": "<|reserved_special_token_138|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128147": {
+ "content": "<|reserved_special_token_139|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128148": {
+ "content": "<|reserved_special_token_140|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128149": {
+ "content": "<|reserved_special_token_141|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128150": {
+ "content": "<|reserved_special_token_142|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128151": {
+ "content": "<|reserved_special_token_143|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128152": {
+ "content": "<|reserved_special_token_144|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128153": {
+ "content": "<|reserved_special_token_145|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128154": {
+ "content": "<|reserved_special_token_146|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128155": {
+ "content": "<|reserved_special_token_147|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128156": {
+ "content": "<|reserved_special_token_148|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128157": {
+ "content": "<|reserved_special_token_149|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128158": {
+ "content": "<|reserved_special_token_150|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128159": {
+ "content": "<|reserved_special_token_151|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128160": {
+ "content": "<|reserved_special_token_152|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128161": {
+ "content": "<|reserved_special_token_153|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128162": {
+ "content": "<|reserved_special_token_154|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128163": {
+ "content": "<|reserved_special_token_155|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128164": {
+ "content": "<|reserved_special_token_156|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128165": {
+ "content": "<|reserved_special_token_157|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128166": {
+ "content": "<|reserved_special_token_158|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128167": {
+ "content": "<|reserved_special_token_159|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128168": {
+ "content": "<|reserved_special_token_160|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128169": {
+ "content": "<|reserved_special_token_161|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128170": {
+ "content": "<|reserved_special_token_162|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128171": {
+ "content": "<|reserved_special_token_163|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128172": {
+ "content": "<|reserved_special_token_164|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128173": {
+ "content": "<|reserved_special_token_165|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128174": {
+ "content": "<|reserved_special_token_166|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128175": {
+ "content": "<|reserved_special_token_167|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128176": {
+ "content": "<|reserved_special_token_168|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128177": {
+ "content": "<|reserved_special_token_169|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128178": {
+ "content": "<|reserved_special_token_170|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128179": {
+ "content": "<|reserved_special_token_171|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128180": {
+ "content": "<|reserved_special_token_172|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128181": {
+ "content": "<|reserved_special_token_173|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128182": {
+ "content": "<|reserved_special_token_174|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128183": {
+ "content": "<|reserved_special_token_175|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128184": {
+ "content": "<|reserved_special_token_176|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128185": {
+ "content": "<|reserved_special_token_177|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128186": {
+ "content": "<|reserved_special_token_178|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128187": {
+ "content": "<|reserved_special_token_179|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128188": {
+ "content": "<|reserved_special_token_180|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128189": {
+ "content": "<|reserved_special_token_181|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128190": {
+ "content": "<|reserved_special_token_182|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128191": {
+ "content": "<|reserved_special_token_183|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128192": {
+ "content": "<|reserved_special_token_184|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128193": {
+ "content": "<|reserved_special_token_185|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128194": {
+ "content": "<|reserved_special_token_186|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128195": {
+ "content": "<|reserved_special_token_187|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128196": {
+ "content": "<|reserved_special_token_188|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128197": {
+ "content": "<|reserved_special_token_189|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128198": {
+ "content": "<|reserved_special_token_190|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128199": {
+ "content": "<|reserved_special_token_191|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128200": {
+ "content": "<|reserved_special_token_192|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128201": {
+ "content": "<|reserved_special_token_193|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128202": {
+ "content": "<|reserved_special_token_194|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128203": {
+ "content": "<|reserved_special_token_195|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128204": {
+ "content": "<|reserved_special_token_196|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128205": {
+ "content": "<|reserved_special_token_197|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128206": {
+ "content": "<|reserved_special_token_198|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128207": {
+ "content": "<|reserved_special_token_199|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128208": {
+ "content": "<|reserved_special_token_200|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128209": {
+ "content": "<|reserved_special_token_201|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128210": {
+ "content": "<|reserved_special_token_202|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128211": {
+ "content": "<|reserved_special_token_203|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128212": {
+ "content": "<|reserved_special_token_204|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128213": {
+ "content": "<|reserved_special_token_205|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128214": {
+ "content": "<|reserved_special_token_206|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128215": {
+ "content": "<|reserved_special_token_207|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128216": {
+ "content": "<|reserved_special_token_208|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128217": {
+ "content": "<|reserved_special_token_209|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128218": {
+ "content": "<|reserved_special_token_210|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128219": {
+ "content": "<|reserved_special_token_211|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128220": {
+ "content": "<|reserved_special_token_212|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128221": {
+ "content": "<|reserved_special_token_213|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128222": {
+ "content": "<|reserved_special_token_214|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128223": {
+ "content": "<|reserved_special_token_215|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128224": {
+ "content": "<|reserved_special_token_216|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128225": {
+ "content": "<|reserved_special_token_217|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128226": {
+ "content": "<|reserved_special_token_218|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128227": {
+ "content": "<|reserved_special_token_219|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128228": {
+ "content": "<|reserved_special_token_220|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128229": {
+ "content": "<|reserved_special_token_221|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128230": {
+ "content": "<|reserved_special_token_222|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128231": {
+ "content": "<|reserved_special_token_223|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128232": {
+ "content": "<|reserved_special_token_224|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128233": {
+ "content": "<|reserved_special_token_225|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128234": {
+ "content": "<|reserved_special_token_226|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128235": {
+ "content": "<|reserved_special_token_227|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128236": {
+ "content": "<|reserved_special_token_228|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128237": {
+ "content": "<|reserved_special_token_229|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128238": {
+ "content": "<|reserved_special_token_230|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128239": {
+ "content": "<|reserved_special_token_231|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128240": {
+ "content": "<|reserved_special_token_232|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128241": {
+ "content": "<|reserved_special_token_233|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128242": {
+ "content": "<|reserved_special_token_234|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128243": {
+ "content": "<|reserved_special_token_235|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128244": {
+ "content": "<|reserved_special_token_236|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128245": {
+ "content": "<|reserved_special_token_237|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128246": {
+ "content": "<|reserved_special_token_238|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128247": {
+ "content": "<|reserved_special_token_239|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128248": {
+ "content": "<|reserved_special_token_240|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128249": {
+ "content": "<|reserved_special_token_241|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128250": {
+ "content": "<|reserved_special_token_242|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128251": {
+ "content": "<|reserved_special_token_243|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128252": {
+ "content": "<|reserved_special_token_244|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128253": {
+ "content": "<|reserved_special_token_245|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128254": {
+ "content": "<|reserved_special_token_246|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128255": {
+ "content": "<|reserved_special_token_247|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ }
+ },
+ "bos_token": "<|begin_of_text|>",
+ "chat_template": "{{- bos_token }}\n{%- if custom_tools is defined %}\n {%- set tools = custom_tools %}\n{%- endif %}\n{%- if not tools_in_user_message is defined %}\n {%- set tools_in_user_message = true %}\n{%- endif %}\n{%- if not date_string is defined %}\n {%- set date_string = \"26 Jul 2024\" %}\n{%- endif %}\n{%- if not tools is defined %}\n {%- set tools = none %}\n{%- endif %}\n\n{#- This block extracts the system message, so we can slot it into the right place. #}\n{%- if messages[0]['role'] == 'system' %}\n {%- set system_message = messages[0]['content']|trim %}\n {%- set messages = messages[1:] %}\n{%- else %}\n {%- set system_message = \"\" %}\n{%- endif %}\n\n{#- System message + builtin tools #}\n{{- \"<|start_header_id|>system<|end_header_id|>\\n\\n\" }}\n{%- if builtin_tools is defined or tools is not none %}\n {{- \"Environment: ipython\\n\" }}\n{%- endif %}\n{%- if builtin_tools is defined %}\n {{- \"Tools: \" + builtin_tools | reject('equalto', 'code_interpreter') | join(\", \") + \"\\n\\n\"}}\n{%- endif %}\n{{- \"Cutting Knowledge Date: December 2023\\n\" }}\n{{- \"Today Date: \" + date_string + \"\\n\\n\" }}\n{%- if tools is not none and not tools_in_user_message %}\n {{- \"You have access to the following functions. To call a function, please respond with JSON for a function call.\" }}\n {{- 'Respond in the format {\"name\": function name, \"parameters\": dictionary of argument name and its value}.' }}\n {{- \"Do not use variables.\\n\\n\" }}\n {%- for t in tools %}\n {{- t | tojson(indent=4) }}\n {{- \"\\n\\n\" }}\n {%- endfor %}\n{%- endif %}\n{{- system_message }}\n{{- \"<|eot_id|>\" }}\n\n{#- Custom tools are passed in a user message with some extra guidance #}\n{%- if tools_in_user_message and not tools is none %}\n {#- Extract the first user message so we can plug it in here #}\n {%- if messages | length != 0 %}\n {%- set first_user_message = messages[0]['content']|trim %}\n {%- set messages = messages[1:] %}\n {%- else %}\n {{- raise_exception(\"Cannot put tools in the first user message when there's no first user message!\") }}\n{%- endif %}\n {{- '<|start_header_id|>user<|end_header_id|>\\n\\n' -}}\n {{- \"Given the following functions, please respond with a JSON for a function call \" }}\n {{- \"with its proper arguments that best answers the given prompt.\\n\\n\" }}\n {{- 'Respond in the format {\"name\": function name, \"parameters\": dictionary of argument name and its value}.' }}\n {{- \"Do not use variables.\\n\\n\" }}\n {%- for t in tools %}\n {{- t | tojson(indent=4) }}\n {{- \"\\n\\n\" }}\n {%- endfor %}\n {{- first_user_message + \"<|eot_id|>\"}}\n{%- endif %}\n\n{%- for message in messages %}\n {%- if not (message.role == 'ipython' or message.role == 'tool' or 'tool_calls' in message) %}\n {{- '<|start_header_id|>' + message['role'] + '<|end_header_id|>\\n\\n'+ message['content'] | trim + '<|eot_id|>' }}\n {%- elif 'tool_calls' in message %}\n {%- if not message.tool_calls|length == 1 %}\n {{- raise_exception(\"This model only supports single tool-calls at once!\") }}\n {%- endif %}\n {%- set tool_call = message.tool_calls[0].function %}\n {%- if builtin_tools is defined and tool_call.name in builtin_tools %}\n {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' -}}\n {{- \"<|python_tag|>\" + tool_call.name + \".call(\" }}\n {%- for arg_name, arg_val in tool_call.arguments | items %}\n {{- arg_name + '=\"' + arg_val + '\"' }}\n {%- if not loop.last %}\n {{- \", \" }}\n {%- endif %}\n {%- endfor %}\n {{- \")\" }}\n {%- else %}\n {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' -}}\n {{- '{\"name\": \"' + tool_call.name + '\", ' }}\n {{- '\"parameters\": ' }}\n {{- tool_call.arguments | tojson }}\n {{- \"}\" }}\n {%- endif %}\n {%- if builtin_tools is defined %}\n {#- This means we're in ipython mode #}\n {{- \"<|eom_id|>\" }}\n {%- else %}\n {{- \"<|eot_id|>\" }}\n {%- endif %}\n {%- elif message.role == \"tool\" or message.role == \"ipython\" %}\n {{- \"<|start_header_id|>ipython<|end_header_id|>\\n\\n\" }}\n {%- if message.content is mapping or message.content is iterable %}\n {{- message.content | tojson }}\n {%- else %}\n {{- message.content }}\n {%- endif %}\n {{- \"<|eot_id|>\" }}\n {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' }}\n{%- endif %}\n",
+ "clean_up_tokenization_spaces": true,
+ "eos_token": "<|eot_id|>",
+ "extra_special_tokens": {},
+ "model_input_names": [
+ "input_ids",
+ "attention_mask"
+ ],
+ "model_max_length": 131072,
+ "pad_token": "<|end_of_text|>",
+ "tokenizer_class": "PreTrainedTokenizer"
+}
diff --git a/checkpoint-360/trainer_state.json b/checkpoint-360/trainer_state.json
new file mode 100644
index 0000000000000000000000000000000000000000..0abf5dd143e1a85b007563d7b1a84647b4c00b3c
--- /dev/null
+++ b/checkpoint-360/trainer_state.json
@@ -0,0 +1,2553 @@
+{
+ "best_metric": null,
+ "best_model_checkpoint": null,
+ "epoch": 4.935779816513762,
+ "eval_steps": 500,
+ "global_step": 360,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ {
+ "epoch": 0.013761467889908258,
+ "grad_norm": 38.02450942993164,
+ "learning_rate": 5.0000000000000004e-08,
+ "loss": 3.125,
+ "step": 1
+ },
+ {
+ "epoch": 0.027522935779816515,
+ "grad_norm": 37.864768981933594,
+ "learning_rate": 1.0000000000000001e-07,
+ "loss": 3.0998,
+ "step": 2
+ },
+ {
+ "epoch": 0.04128440366972477,
+ "grad_norm": 38.34700012207031,
+ "learning_rate": 1.5000000000000002e-07,
+ "loss": 3.1533,
+ "step": 3
+ },
+ {
+ "epoch": 0.05504587155963303,
+ "grad_norm": 38.33641815185547,
+ "learning_rate": 2.0000000000000002e-07,
+ "loss": 3.1542,
+ "step": 4
+ },
+ {
+ "epoch": 0.06880733944954129,
+ "grad_norm": 38.064449310302734,
+ "learning_rate": 2.5000000000000004e-07,
+ "loss": 3.1153,
+ "step": 5
+ },
+ {
+ "epoch": 0.08256880733944955,
+ "grad_norm": 37.92089080810547,
+ "learning_rate": 3.0000000000000004e-07,
+ "loss": 3.0867,
+ "step": 6
+ },
+ {
+ "epoch": 0.0963302752293578,
+ "grad_norm": 38.120323181152344,
+ "learning_rate": 3.5000000000000004e-07,
+ "loss": 3.093,
+ "step": 7
+ },
+ {
+ "epoch": 0.11009174311926606,
+ "grad_norm": 38.47222900390625,
+ "learning_rate": 4.0000000000000003e-07,
+ "loss": 3.1056,
+ "step": 8
+ },
+ {
+ "epoch": 0.12385321100917432,
+ "grad_norm": 38.013702392578125,
+ "learning_rate": 4.5000000000000003e-07,
+ "loss": 3.0474,
+ "step": 9
+ },
+ {
+ "epoch": 0.13761467889908258,
+ "grad_norm": 38.17593002319336,
+ "learning_rate": 5.000000000000001e-07,
+ "loss": 3.0264,
+ "step": 10
+ },
+ {
+ "epoch": 0.15137614678899083,
+ "grad_norm": 38.60066604614258,
+ "learning_rate": 5.5e-07,
+ "loss": 2.9404,
+ "step": 11
+ },
+ {
+ "epoch": 0.1651376146788991,
+ "grad_norm": 38.83498764038086,
+ "learning_rate": 6.000000000000001e-07,
+ "loss": 2.9571,
+ "step": 12
+ },
+ {
+ "epoch": 0.17889908256880735,
+ "grad_norm": 38.942047119140625,
+ "learning_rate": 6.5e-07,
+ "loss": 2.8849,
+ "step": 13
+ },
+ {
+ "epoch": 0.1926605504587156,
+ "grad_norm": 38.0286865234375,
+ "learning_rate": 7.000000000000001e-07,
+ "loss": 2.7486,
+ "step": 14
+ },
+ {
+ "epoch": 0.20642201834862386,
+ "grad_norm": 38.31856155395508,
+ "learning_rate": 7.5e-07,
+ "loss": 2.6876,
+ "step": 15
+ },
+ {
+ "epoch": 0.22018348623853212,
+ "grad_norm": 38.124759674072266,
+ "learning_rate": 8.000000000000001e-07,
+ "loss": 2.5992,
+ "step": 16
+ },
+ {
+ "epoch": 0.23394495412844038,
+ "grad_norm": 36.59762191772461,
+ "learning_rate": 8.500000000000001e-07,
+ "loss": 2.4063,
+ "step": 17
+ },
+ {
+ "epoch": 0.24770642201834864,
+ "grad_norm": 36.63874435424805,
+ "learning_rate": 9.000000000000001e-07,
+ "loss": 2.3109,
+ "step": 18
+ },
+ {
+ "epoch": 0.26146788990825687,
+ "grad_norm": 36.768577575683594,
+ "learning_rate": 9.500000000000001e-07,
+ "loss": 2.1677,
+ "step": 19
+ },
+ {
+ "epoch": 0.27522935779816515,
+ "grad_norm": 36.187137603759766,
+ "learning_rate": 1.0000000000000002e-06,
+ "loss": 1.9551,
+ "step": 20
+ },
+ {
+ "epoch": 0.2889908256880734,
+ "grad_norm": 35.55617141723633,
+ "learning_rate": 1.0500000000000001e-06,
+ "loss": 1.8053,
+ "step": 21
+ },
+ {
+ "epoch": 0.30275229357798167,
+ "grad_norm": 34.60952377319336,
+ "learning_rate": 1.1e-06,
+ "loss": 1.5654,
+ "step": 22
+ },
+ {
+ "epoch": 0.3165137614678899,
+ "grad_norm": 33.69620895385742,
+ "learning_rate": 1.1500000000000002e-06,
+ "loss": 1.3454,
+ "step": 23
+ },
+ {
+ "epoch": 0.3302752293577982,
+ "grad_norm": 34.33642578125,
+ "learning_rate": 1.2000000000000002e-06,
+ "loss": 1.2417,
+ "step": 24
+ },
+ {
+ "epoch": 0.3440366972477064,
+ "grad_norm": 31.23066520690918,
+ "learning_rate": 1.25e-06,
+ "loss": 0.9839,
+ "step": 25
+ },
+ {
+ "epoch": 0.3577981651376147,
+ "grad_norm": 25.810237884521484,
+ "learning_rate": 1.3e-06,
+ "loss": 0.793,
+ "step": 26
+ },
+ {
+ "epoch": 0.37155963302752293,
+ "grad_norm": 23.06692886352539,
+ "learning_rate": 1.3500000000000002e-06,
+ "loss": 0.6082,
+ "step": 27
+ },
+ {
+ "epoch": 0.3853211009174312,
+ "grad_norm": 19.828439712524414,
+ "learning_rate": 1.4000000000000001e-06,
+ "loss": 0.4845,
+ "step": 28
+ },
+ {
+ "epoch": 0.39908256880733944,
+ "grad_norm": 14.150300025939941,
+ "learning_rate": 1.45e-06,
+ "loss": 0.348,
+ "step": 29
+ },
+ {
+ "epoch": 0.41284403669724773,
+ "grad_norm": 9.044266700744629,
+ "learning_rate": 1.5e-06,
+ "loss": 0.2516,
+ "step": 30
+ },
+ {
+ "epoch": 0.42660550458715596,
+ "grad_norm": 5.704404354095459,
+ "learning_rate": 1.5500000000000002e-06,
+ "loss": 0.177,
+ "step": 31
+ },
+ {
+ "epoch": 0.44036697247706424,
+ "grad_norm": 3.2953503131866455,
+ "learning_rate": 1.6000000000000001e-06,
+ "loss": 0.1391,
+ "step": 32
+ },
+ {
+ "epoch": 0.4541284403669725,
+ "grad_norm": 2.453219413757324,
+ "learning_rate": 1.6500000000000003e-06,
+ "loss": 0.0982,
+ "step": 33
+ },
+ {
+ "epoch": 0.46788990825688076,
+ "grad_norm": 2.0325512886047363,
+ "learning_rate": 1.7000000000000002e-06,
+ "loss": 0.0807,
+ "step": 34
+ },
+ {
+ "epoch": 0.481651376146789,
+ "grad_norm": 1.6322681903839111,
+ "learning_rate": 1.75e-06,
+ "loss": 0.0725,
+ "step": 35
+ },
+ {
+ "epoch": 0.4954128440366973,
+ "grad_norm": 0.9713364839553833,
+ "learning_rate": 1.8000000000000001e-06,
+ "loss": 0.067,
+ "step": 36
+ },
+ {
+ "epoch": 0.5091743119266054,
+ "grad_norm": 0.7980225682258606,
+ "learning_rate": 1.85e-06,
+ "loss": 0.0582,
+ "step": 37
+ },
+ {
+ "epoch": 0.5229357798165137,
+ "grad_norm": 1.0616590976715088,
+ "learning_rate": 1.9000000000000002e-06,
+ "loss": 0.0562,
+ "step": 38
+ },
+ {
+ "epoch": 0.536697247706422,
+ "grad_norm": 1.053462028503418,
+ "learning_rate": 1.9500000000000004e-06,
+ "loss": 0.0537,
+ "step": 39
+ },
+ {
+ "epoch": 0.5504587155963303,
+ "grad_norm": 0.9452660083770752,
+ "learning_rate": 2.0000000000000003e-06,
+ "loss": 0.0602,
+ "step": 40
+ },
+ {
+ "epoch": 0.5642201834862385,
+ "grad_norm": 0.830368161201477,
+ "learning_rate": 2.05e-06,
+ "loss": 0.0549,
+ "step": 41
+ },
+ {
+ "epoch": 0.5779816513761468,
+ "grad_norm": 0.5791187882423401,
+ "learning_rate": 2.1000000000000002e-06,
+ "loss": 0.0479,
+ "step": 42
+ },
+ {
+ "epoch": 0.591743119266055,
+ "grad_norm": 0.44175243377685547,
+ "learning_rate": 2.15e-06,
+ "loss": 0.0461,
+ "step": 43
+ },
+ {
+ "epoch": 0.6055045871559633,
+ "grad_norm": 0.37655699253082275,
+ "learning_rate": 2.2e-06,
+ "loss": 0.043,
+ "step": 44
+ },
+ {
+ "epoch": 0.6192660550458715,
+ "grad_norm": 0.34382495284080505,
+ "learning_rate": 2.25e-06,
+ "loss": 0.0454,
+ "step": 45
+ },
+ {
+ "epoch": 0.6330275229357798,
+ "grad_norm": 0.5047216415405273,
+ "learning_rate": 2.3000000000000004e-06,
+ "loss": 0.0437,
+ "step": 46
+ },
+ {
+ "epoch": 0.6467889908256881,
+ "grad_norm": 0.6318779587745667,
+ "learning_rate": 2.35e-06,
+ "loss": 0.0468,
+ "step": 47
+ },
+ {
+ "epoch": 0.6605504587155964,
+ "grad_norm": 0.5135455131530762,
+ "learning_rate": 2.4000000000000003e-06,
+ "loss": 0.0494,
+ "step": 48
+ },
+ {
+ "epoch": 0.6743119266055045,
+ "grad_norm": 0.4802612066268921,
+ "learning_rate": 2.4500000000000003e-06,
+ "loss": 0.0441,
+ "step": 49
+ },
+ {
+ "epoch": 0.6880733944954128,
+ "grad_norm": 0.6157718300819397,
+ "learning_rate": 2.5e-06,
+ "loss": 0.0398,
+ "step": 50
+ },
+ {
+ "epoch": 0.7018348623853211,
+ "grad_norm": 0.4327130913734436,
+ "learning_rate": 2.55e-06,
+ "loss": 0.0438,
+ "step": 51
+ },
+ {
+ "epoch": 0.7155963302752294,
+ "grad_norm": 0.46133658289909363,
+ "learning_rate": 2.6e-06,
+ "loss": 0.041,
+ "step": 52
+ },
+ {
+ "epoch": 0.7293577981651376,
+ "grad_norm": 0.5729146003723145,
+ "learning_rate": 2.6500000000000005e-06,
+ "loss": 0.0406,
+ "step": 53
+ },
+ {
+ "epoch": 0.7431192660550459,
+ "grad_norm": 0.32373812794685364,
+ "learning_rate": 2.7000000000000004e-06,
+ "loss": 0.0419,
+ "step": 54
+ },
+ {
+ "epoch": 0.7568807339449541,
+ "grad_norm": 0.29006752371788025,
+ "learning_rate": 2.7500000000000004e-06,
+ "loss": 0.0415,
+ "step": 55
+ },
+ {
+ "epoch": 0.7706422018348624,
+ "grad_norm": 0.31038960814476013,
+ "learning_rate": 2.8000000000000003e-06,
+ "loss": 0.0344,
+ "step": 56
+ },
+ {
+ "epoch": 0.7844036697247706,
+ "grad_norm": 0.2324836701154709,
+ "learning_rate": 2.85e-06,
+ "loss": 0.0374,
+ "step": 57
+ },
+ {
+ "epoch": 0.7981651376146789,
+ "grad_norm": 0.5083625912666321,
+ "learning_rate": 2.9e-06,
+ "loss": 0.0324,
+ "step": 58
+ },
+ {
+ "epoch": 0.8119266055045872,
+ "grad_norm": 0.2873130142688751,
+ "learning_rate": 2.95e-06,
+ "loss": 0.0403,
+ "step": 59
+ },
+ {
+ "epoch": 0.8256880733944955,
+ "grad_norm": 0.437663197517395,
+ "learning_rate": 3e-06,
+ "loss": 0.0368,
+ "step": 60
+ },
+ {
+ "epoch": 0.8394495412844036,
+ "grad_norm": 0.5645247101783752,
+ "learning_rate": 3.05e-06,
+ "loss": 0.0386,
+ "step": 61
+ },
+ {
+ "epoch": 0.8532110091743119,
+ "grad_norm": 0.40374210476875305,
+ "learning_rate": 3.1000000000000004e-06,
+ "loss": 0.0425,
+ "step": 62
+ },
+ {
+ "epoch": 0.8669724770642202,
+ "grad_norm": 0.46468955278396606,
+ "learning_rate": 3.1500000000000003e-06,
+ "loss": 0.0323,
+ "step": 63
+ },
+ {
+ "epoch": 0.8807339449541285,
+ "grad_norm": 0.29952895641326904,
+ "learning_rate": 3.2000000000000003e-06,
+ "loss": 0.0325,
+ "step": 64
+ },
+ {
+ "epoch": 0.8944954128440367,
+ "grad_norm": 0.3678436279296875,
+ "learning_rate": 3.2500000000000002e-06,
+ "loss": 0.036,
+ "step": 65
+ },
+ {
+ "epoch": 0.908256880733945,
+ "grad_norm": 0.5068934559822083,
+ "learning_rate": 3.3000000000000006e-06,
+ "loss": 0.0357,
+ "step": 66
+ },
+ {
+ "epoch": 0.9220183486238532,
+ "grad_norm": 0.2723177671432495,
+ "learning_rate": 3.3500000000000005e-06,
+ "loss": 0.0333,
+ "step": 67
+ },
+ {
+ "epoch": 0.9357798165137615,
+ "grad_norm": 0.41696834564208984,
+ "learning_rate": 3.4000000000000005e-06,
+ "loss": 0.0347,
+ "step": 68
+ },
+ {
+ "epoch": 0.9495412844036697,
+ "grad_norm": 0.2582981288433075,
+ "learning_rate": 3.45e-06,
+ "loss": 0.0283,
+ "step": 69
+ },
+ {
+ "epoch": 0.963302752293578,
+ "grad_norm": 0.40648311376571655,
+ "learning_rate": 3.5e-06,
+ "loss": 0.0293,
+ "step": 70
+ },
+ {
+ "epoch": 0.9770642201834863,
+ "grad_norm": 0.4149394631385803,
+ "learning_rate": 3.5500000000000003e-06,
+ "loss": 0.0311,
+ "step": 71
+ },
+ {
+ "epoch": 0.9908256880733946,
+ "grad_norm": 0.3800952136516571,
+ "learning_rate": 3.6000000000000003e-06,
+ "loss": 0.0346,
+ "step": 72
+ },
+ {
+ "epoch": 1.0,
+ "grad_norm": 0.3800952136516571,
+ "learning_rate": 3.65e-06,
+ "loss": 0.0291,
+ "step": 73
+ },
+ {
+ "epoch": 1.0137614678899083,
+ "grad_norm": 0.5016496181488037,
+ "learning_rate": 3.7e-06,
+ "loss": 0.0286,
+ "step": 74
+ },
+ {
+ "epoch": 1.0275229357798166,
+ "grad_norm": 0.3533766567707062,
+ "learning_rate": 3.7500000000000005e-06,
+ "loss": 0.0321,
+ "step": 75
+ },
+ {
+ "epoch": 1.0412844036697249,
+ "grad_norm": 0.2785470485687256,
+ "learning_rate": 3.8000000000000005e-06,
+ "loss": 0.0277,
+ "step": 76
+ },
+ {
+ "epoch": 1.0550458715596331,
+ "grad_norm": 0.4530641436576843,
+ "learning_rate": 3.85e-06,
+ "loss": 0.0294,
+ "step": 77
+ },
+ {
+ "epoch": 1.0688073394495412,
+ "grad_norm": 0.3170749843120575,
+ "learning_rate": 3.900000000000001e-06,
+ "loss": 0.0274,
+ "step": 78
+ },
+ {
+ "epoch": 1.0825688073394495,
+ "grad_norm": 0.26502758264541626,
+ "learning_rate": 3.95e-06,
+ "loss": 0.0284,
+ "step": 79
+ },
+ {
+ "epoch": 1.0963302752293578,
+ "grad_norm": 0.5486436486244202,
+ "learning_rate": 4.000000000000001e-06,
+ "loss": 0.0285,
+ "step": 80
+ },
+ {
+ "epoch": 1.110091743119266,
+ "grad_norm": 0.24868083000183105,
+ "learning_rate": 4.05e-06,
+ "loss": 0.0301,
+ "step": 81
+ },
+ {
+ "epoch": 1.1238532110091743,
+ "grad_norm": 0.3448987305164337,
+ "learning_rate": 4.1e-06,
+ "loss": 0.0261,
+ "step": 82
+ },
+ {
+ "epoch": 1.1376146788990826,
+ "grad_norm": 0.3330553472042084,
+ "learning_rate": 4.15e-06,
+ "loss": 0.03,
+ "step": 83
+ },
+ {
+ "epoch": 1.151376146788991,
+ "grad_norm": 0.3379852771759033,
+ "learning_rate": 4.2000000000000004e-06,
+ "loss": 0.0286,
+ "step": 84
+ },
+ {
+ "epoch": 1.165137614678899,
+ "grad_norm": 0.23678433895111084,
+ "learning_rate": 4.25e-06,
+ "loss": 0.0245,
+ "step": 85
+ },
+ {
+ "epoch": 1.1788990825688073,
+ "grad_norm": 0.24502314627170563,
+ "learning_rate": 4.3e-06,
+ "loss": 0.0267,
+ "step": 86
+ },
+ {
+ "epoch": 1.1926605504587156,
+ "grad_norm": 0.34288597106933594,
+ "learning_rate": 4.350000000000001e-06,
+ "loss": 0.0259,
+ "step": 87
+ },
+ {
+ "epoch": 1.2064220183486238,
+ "grad_norm": 0.20595045387744904,
+ "learning_rate": 4.4e-06,
+ "loss": 0.0241,
+ "step": 88
+ },
+ {
+ "epoch": 1.2201834862385321,
+ "grad_norm": 0.28399360179901123,
+ "learning_rate": 4.450000000000001e-06,
+ "loss": 0.0277,
+ "step": 89
+ },
+ {
+ "epoch": 1.2339449541284404,
+ "grad_norm": 0.273929238319397,
+ "learning_rate": 4.5e-06,
+ "loss": 0.0261,
+ "step": 90
+ },
+ {
+ "epoch": 1.2477064220183487,
+ "grad_norm": 0.24288330972194672,
+ "learning_rate": 4.5500000000000005e-06,
+ "loss": 0.0267,
+ "step": 91
+ },
+ {
+ "epoch": 1.261467889908257,
+ "grad_norm": 0.42502400279045105,
+ "learning_rate": 4.600000000000001e-06,
+ "loss": 0.0252,
+ "step": 92
+ },
+ {
+ "epoch": 1.2752293577981653,
+ "grad_norm": 0.17670072615146637,
+ "learning_rate": 4.65e-06,
+ "loss": 0.0231,
+ "step": 93
+ },
+ {
+ "epoch": 1.2889908256880733,
+ "grad_norm": 0.23585423827171326,
+ "learning_rate": 4.7e-06,
+ "loss": 0.0213,
+ "step": 94
+ },
+ {
+ "epoch": 1.3027522935779816,
+ "grad_norm": 0.32558879256248474,
+ "learning_rate": 4.75e-06,
+ "loss": 0.0226,
+ "step": 95
+ },
+ {
+ "epoch": 1.31651376146789,
+ "grad_norm": 0.2908780872821808,
+ "learning_rate": 4.800000000000001e-06,
+ "loss": 0.0274,
+ "step": 96
+ },
+ {
+ "epoch": 1.3302752293577982,
+ "grad_norm": 0.3725607991218567,
+ "learning_rate": 4.85e-06,
+ "loss": 0.0241,
+ "step": 97
+ },
+ {
+ "epoch": 1.3440366972477065,
+ "grad_norm": 0.3833301067352295,
+ "learning_rate": 4.9000000000000005e-06,
+ "loss": 0.0252,
+ "step": 98
+ },
+ {
+ "epoch": 1.3577981651376148,
+ "grad_norm": 0.32000771164894104,
+ "learning_rate": 4.95e-06,
+ "loss": 0.0236,
+ "step": 99
+ },
+ {
+ "epoch": 1.3715596330275228,
+ "grad_norm": 0.3203510344028473,
+ "learning_rate": 5e-06,
+ "loss": 0.0235,
+ "step": 100
+ },
+ {
+ "epoch": 1.385321100917431,
+ "grad_norm": 0.20016217231750488,
+ "learning_rate": 4.999888074163108e-06,
+ "loss": 0.0218,
+ "step": 101
+ },
+ {
+ "epoch": 1.3990825688073394,
+ "grad_norm": 0.3012026250362396,
+ "learning_rate": 4.999552306674345e-06,
+ "loss": 0.0234,
+ "step": 102
+ },
+ {
+ "epoch": 1.4128440366972477,
+ "grad_norm": 0.22818222641944885,
+ "learning_rate": 4.998992727598557e-06,
+ "loss": 0.0212,
+ "step": 103
+ },
+ {
+ "epoch": 1.426605504587156,
+ "grad_norm": 0.24128392338752747,
+ "learning_rate": 4.998209387040829e-06,
+ "loss": 0.0207,
+ "step": 104
+ },
+ {
+ "epoch": 1.4403669724770642,
+ "grad_norm": 0.3475555181503296,
+ "learning_rate": 4.9972023551419995e-06,
+ "loss": 0.0246,
+ "step": 105
+ },
+ {
+ "epoch": 1.4541284403669725,
+ "grad_norm": 0.21223627030849457,
+ "learning_rate": 4.995971722072379e-06,
+ "loss": 0.0237,
+ "step": 106
+ },
+ {
+ "epoch": 1.4678899082568808,
+ "grad_norm": 0.3776336908340454,
+ "learning_rate": 4.9945175980236745e-06,
+ "loss": 0.0218,
+ "step": 107
+ },
+ {
+ "epoch": 1.481651376146789,
+ "grad_norm": 0.25027793645858765,
+ "learning_rate": 4.992840113199131e-06,
+ "loss": 0.0214,
+ "step": 108
+ },
+ {
+ "epoch": 1.4954128440366974,
+ "grad_norm": 0.2559281885623932,
+ "learning_rate": 4.990939417801859e-06,
+ "loss": 0.0213,
+ "step": 109
+ },
+ {
+ "epoch": 1.5091743119266054,
+ "grad_norm": 0.33694687485694885,
+ "learning_rate": 4.988815682021398e-06,
+ "loss": 0.0182,
+ "step": 110
+ },
+ {
+ "epoch": 1.5229357798165137,
+ "grad_norm": 0.3175147473812103,
+ "learning_rate": 4.986469096018472e-06,
+ "loss": 0.0213,
+ "step": 111
+ },
+ {
+ "epoch": 1.536697247706422,
+ "grad_norm": 0.48777177929878235,
+ "learning_rate": 4.983899869907963e-06,
+ "loss": 0.0186,
+ "step": 112
+ },
+ {
+ "epoch": 1.5504587155963303,
+ "grad_norm": 0.34633246064186096,
+ "learning_rate": 4.981108233740096e-06,
+ "loss": 0.0224,
+ "step": 113
+ },
+ {
+ "epoch": 1.5642201834862384,
+ "grad_norm": 0.17302758991718292,
+ "learning_rate": 4.978094437479843e-06,
+ "loss": 0.0189,
+ "step": 114
+ },
+ {
+ "epoch": 1.5779816513761467,
+ "grad_norm": 0.24110931158065796,
+ "learning_rate": 4.97485875098454e-06,
+ "loss": 0.0174,
+ "step": 115
+ },
+ {
+ "epoch": 1.591743119266055,
+ "grad_norm": 0.2965283989906311,
+ "learning_rate": 4.971401463979722e-06,
+ "loss": 0.0174,
+ "step": 116
+ },
+ {
+ "epoch": 1.6055045871559632,
+ "grad_norm": 0.34184500575065613,
+ "learning_rate": 4.967722886033181e-06,
+ "loss": 0.0162,
+ "step": 117
+ },
+ {
+ "epoch": 1.6192660550458715,
+ "grad_norm": 0.3942873179912567,
+ "learning_rate": 4.963823346527249e-06,
+ "loss": 0.0176,
+ "step": 118
+ },
+ {
+ "epoch": 1.6330275229357798,
+ "grad_norm": 0.21840929985046387,
+ "learning_rate": 4.959703194629304e-06,
+ "loss": 0.018,
+ "step": 119
+ },
+ {
+ "epoch": 1.646788990825688,
+ "grad_norm": 0.23672759532928467,
+ "learning_rate": 4.955362799260507e-06,
+ "loss": 0.0204,
+ "step": 120
+ },
+ {
+ "epoch": 1.6605504587155964,
+ "grad_norm": 0.18776445090770721,
+ "learning_rate": 4.950802549062764e-06,
+ "loss": 0.0173,
+ "step": 121
+ },
+ {
+ "epoch": 1.6743119266055047,
+ "grad_norm": 0.279297411441803,
+ "learning_rate": 4.946022852363932e-06,
+ "loss": 0.0177,
+ "step": 122
+ },
+ {
+ "epoch": 1.688073394495413,
+ "grad_norm": 0.20893588662147522,
+ "learning_rate": 4.9410241371412525e-06,
+ "loss": 0.0189,
+ "step": 123
+ },
+ {
+ "epoch": 1.7018348623853212,
+ "grad_norm": 0.26409876346588135,
+ "learning_rate": 4.935806850983034e-06,
+ "loss": 0.0195,
+ "step": 124
+ },
+ {
+ "epoch": 1.7155963302752295,
+ "grad_norm": 0.23863324522972107,
+ "learning_rate": 4.9303714610485705e-06,
+ "loss": 0.0151,
+ "step": 125
+ },
+ {
+ "epoch": 1.7293577981651376,
+ "grad_norm": 0.25934213399887085,
+ "learning_rate": 4.924718454026318e-06,
+ "loss": 0.0157,
+ "step": 126
+ },
+ {
+ "epoch": 1.7431192660550459,
+ "grad_norm": 0.2923693358898163,
+ "learning_rate": 4.918848336090309e-06,
+ "loss": 0.0155,
+ "step": 127
+ },
+ {
+ "epoch": 1.7568807339449541,
+ "grad_norm": 0.16973069310188293,
+ "learning_rate": 4.912761632854834e-06,
+ "loss": 0.0156,
+ "step": 128
+ },
+ {
+ "epoch": 1.7706422018348624,
+ "grad_norm": 0.25908610224723816,
+ "learning_rate": 4.906458889327375e-06,
+ "loss": 0.0159,
+ "step": 129
+ },
+ {
+ "epoch": 1.7844036697247705,
+ "grad_norm": 0.27444136142730713,
+ "learning_rate": 4.899940669859807e-06,
+ "loss": 0.0159,
+ "step": 130
+ },
+ {
+ "epoch": 1.7981651376146788,
+ "grad_norm": 0.25167539715766907,
+ "learning_rate": 4.893207558097867e-06,
+ "loss": 0.0147,
+ "step": 131
+ },
+ {
+ "epoch": 1.811926605504587,
+ "grad_norm": 0.25777608156204224,
+ "learning_rate": 4.8862601569288885e-06,
+ "loss": 0.016,
+ "step": 132
+ },
+ {
+ "epoch": 1.8256880733944953,
+ "grad_norm": 0.24190428853034973,
+ "learning_rate": 4.879099088427824e-06,
+ "loss": 0.0128,
+ "step": 133
+ },
+ {
+ "epoch": 1.8394495412844036,
+ "grad_norm": 0.17028410732746124,
+ "learning_rate": 4.871724993801541e-06,
+ "loss": 0.0145,
+ "step": 134
+ },
+ {
+ "epoch": 1.853211009174312,
+ "grad_norm": 0.25141075253486633,
+ "learning_rate": 4.864138533331411e-06,
+ "loss": 0.0144,
+ "step": 135
+ },
+ {
+ "epoch": 1.8669724770642202,
+ "grad_norm": 0.6400424838066101,
+ "learning_rate": 4.8563403863141825e-06,
+ "loss": 0.0128,
+ "step": 136
+ },
+ {
+ "epoch": 1.8807339449541285,
+ "grad_norm": 0.22450514137744904,
+ "learning_rate": 4.84833125100116e-06,
+ "loss": 0.0167,
+ "step": 137
+ },
+ {
+ "epoch": 1.8944954128440368,
+ "grad_norm": 0.19940154254436493,
+ "learning_rate": 4.840111844535682e-06,
+ "loss": 0.0154,
+ "step": 138
+ },
+ {
+ "epoch": 1.908256880733945,
+ "grad_norm": 0.2946206033229828,
+ "learning_rate": 4.8316829028889076e-06,
+ "loss": 0.0158,
+ "step": 139
+ },
+ {
+ "epoch": 1.9220183486238533,
+ "grad_norm": 0.4694023132324219,
+ "learning_rate": 4.823045180793914e-06,
+ "loss": 0.0153,
+ "step": 140
+ },
+ {
+ "epoch": 1.9357798165137616,
+ "grad_norm": 0.23655226826667786,
+ "learning_rate": 4.8141994516781196e-06,
+ "loss": 0.0109,
+ "step": 141
+ },
+ {
+ "epoch": 1.9495412844036697,
+ "grad_norm": 0.23846553266048431,
+ "learning_rate": 4.805146507594034e-06,
+ "loss": 0.0129,
+ "step": 142
+ },
+ {
+ "epoch": 1.963302752293578,
+ "grad_norm": 0.2763686180114746,
+ "learning_rate": 4.7958871591483305e-06,
+ "loss": 0.0124,
+ "step": 143
+ },
+ {
+ "epoch": 1.9770642201834863,
+ "grad_norm": 0.23522883653640747,
+ "learning_rate": 4.786422235429269e-06,
+ "loss": 0.0125,
+ "step": 144
+ },
+ {
+ "epoch": 1.9908256880733946,
+ "grad_norm": 0.27468597888946533,
+ "learning_rate": 4.776752583932455e-06,
+ "loss": 0.0137,
+ "step": 145
+ },
+ {
+ "epoch": 2.0,
+ "grad_norm": 0.26614058017730713,
+ "learning_rate": 4.766879070484957e-06,
+ "loss": 0.0113,
+ "step": 146
+ },
+ {
+ "epoch": 2.0137614678899083,
+ "grad_norm": 0.2862620949745178,
+ "learning_rate": 4.756802579167781e-06,
+ "loss": 0.0104,
+ "step": 147
+ },
+ {
+ "epoch": 2.0275229357798166,
+ "grad_norm": 0.1770494133234024,
+ "learning_rate": 4.746524012236706e-06,
+ "loss": 0.0102,
+ "step": 148
+ },
+ {
+ "epoch": 2.041284403669725,
+ "grad_norm": 0.1850830465555191,
+ "learning_rate": 4.736044290041496e-06,
+ "loss": 0.0125,
+ "step": 149
+ },
+ {
+ "epoch": 2.055045871559633,
+ "grad_norm": 0.20552438497543335,
+ "learning_rate": 4.725364350943492e-06,
+ "loss": 0.0091,
+ "step": 150
+ },
+ {
+ "epoch": 2.0688073394495414,
+ "grad_norm": 0.12979158759117126,
+ "learning_rate": 4.714485151231593e-06,
+ "loss": 0.0113,
+ "step": 151
+ },
+ {
+ "epoch": 2.0825688073394497,
+ "grad_norm": 0.16992178559303284,
+ "learning_rate": 4.703407665036622e-06,
+ "loss": 0.0075,
+ "step": 152
+ },
+ {
+ "epoch": 2.096330275229358,
+ "grad_norm": 0.1473352611064911,
+ "learning_rate": 4.692132884244113e-06,
+ "loss": 0.0091,
+ "step": 153
+ },
+ {
+ "epoch": 2.1100917431192663,
+ "grad_norm": 0.24198868870735168,
+ "learning_rate": 4.680661818405485e-06,
+ "loss": 0.0073,
+ "step": 154
+ },
+ {
+ "epoch": 2.123853211009174,
+ "grad_norm": 0.25488734245300293,
+ "learning_rate": 4.668995494647653e-06,
+ "loss": 0.0098,
+ "step": 155
+ },
+ {
+ "epoch": 2.1376146788990824,
+ "grad_norm": 0.17986920475959778,
+ "learning_rate": 4.657134957581057e-06,
+ "loss": 0.0081,
+ "step": 156
+ },
+ {
+ "epoch": 2.1513761467889907,
+ "grad_norm": 0.22465726733207703,
+ "learning_rate": 4.645081269206128e-06,
+ "loss": 0.0074,
+ "step": 157
+ },
+ {
+ "epoch": 2.165137614678899,
+ "grad_norm": 0.19391046464443207,
+ "learning_rate": 4.632835508818192e-06,
+ "loss": 0.0077,
+ "step": 158
+ },
+ {
+ "epoch": 2.1788990825688073,
+ "grad_norm": 0.25997886061668396,
+ "learning_rate": 4.620398772910833e-06,
+ "loss": 0.0074,
+ "step": 159
+ },
+ {
+ "epoch": 2.1926605504587156,
+ "grad_norm": 0.2422141134738922,
+ "learning_rate": 4.607772175077712e-06,
+ "loss": 0.0103,
+ "step": 160
+ },
+ {
+ "epoch": 2.206422018348624,
+ "grad_norm": 0.2785587012767792,
+ "learning_rate": 4.59495684591285e-06,
+ "loss": 0.0079,
+ "step": 161
+ },
+ {
+ "epoch": 2.220183486238532,
+ "grad_norm": 0.19141560792922974,
+ "learning_rate": 4.581953932909403e-06,
+ "loss": 0.0068,
+ "step": 162
+ },
+ {
+ "epoch": 2.2339449541284404,
+ "grad_norm": 0.152049720287323,
+ "learning_rate": 4.5687646003569055e-06,
+ "loss": 0.0063,
+ "step": 163
+ },
+ {
+ "epoch": 2.2477064220183487,
+ "grad_norm": 0.19313585758209229,
+ "learning_rate": 4.555390029237026e-06,
+ "loss": 0.0079,
+ "step": 164
+ },
+ {
+ "epoch": 2.261467889908257,
+ "grad_norm": 0.19979022443294525,
+ "learning_rate": 4.541831417117815e-06,
+ "loss": 0.0085,
+ "step": 165
+ },
+ {
+ "epoch": 2.2752293577981653,
+ "grad_norm": 0.2902522683143616,
+ "learning_rate": 4.528089978046481e-06,
+ "loss": 0.0082,
+ "step": 166
+ },
+ {
+ "epoch": 2.2889908256880735,
+ "grad_norm": 0.5130491256713867,
+ "learning_rate": 4.514166942440679e-06,
+ "loss": 0.0067,
+ "step": 167
+ },
+ {
+ "epoch": 2.302752293577982,
+ "grad_norm": 0.15980036556720734,
+ "learning_rate": 4.5000635569783365e-06,
+ "loss": 0.0093,
+ "step": 168
+ },
+ {
+ "epoch": 2.31651376146789,
+ "grad_norm": 0.19247184693813324,
+ "learning_rate": 4.4857810844860325e-06,
+ "loss": 0.0101,
+ "step": 169
+ },
+ {
+ "epoch": 2.330275229357798,
+ "grad_norm": 0.19173133373260498,
+ "learning_rate": 4.471320803825915e-06,
+ "loss": 0.0065,
+ "step": 170
+ },
+ {
+ "epoch": 2.3440366972477062,
+ "grad_norm": 0.1779325157403946,
+ "learning_rate": 4.4566840097811956e-06,
+ "loss": 0.0059,
+ "step": 171
+ },
+ {
+ "epoch": 2.3577981651376145,
+ "grad_norm": 0.1639624983072281,
+ "learning_rate": 4.4418720129402145e-06,
+ "loss": 0.0052,
+ "step": 172
+ },
+ {
+ "epoch": 2.371559633027523,
+ "grad_norm": 0.2355070263147354,
+ "learning_rate": 4.426886139579083e-06,
+ "loss": 0.0046,
+ "step": 173
+ },
+ {
+ "epoch": 2.385321100917431,
+ "grad_norm": 0.20461603999137878,
+ "learning_rate": 4.411727731542937e-06,
+ "loss": 0.0071,
+ "step": 174
+ },
+ {
+ "epoch": 2.3990825688073394,
+ "grad_norm": 0.20251843333244324,
+ "learning_rate": 4.39639814612578e-06,
+ "loss": 0.0054,
+ "step": 175
+ },
+ {
+ "epoch": 2.4128440366972477,
+ "grad_norm": 0.17055197060108185,
+ "learning_rate": 4.3808987559489536e-06,
+ "loss": 0.0052,
+ "step": 176
+ },
+ {
+ "epoch": 2.426605504587156,
+ "grad_norm": 0.2508833706378937,
+ "learning_rate": 4.365230948838232e-06,
+ "loss": 0.0071,
+ "step": 177
+ },
+ {
+ "epoch": 2.4403669724770642,
+ "grad_norm": 0.21865351498126984,
+ "learning_rate": 4.349396127699552e-06,
+ "loss": 0.0068,
+ "step": 178
+ },
+ {
+ "epoch": 2.4541284403669725,
+ "grad_norm": 0.225867360830307,
+ "learning_rate": 4.3333957103934025e-06,
+ "loss": 0.0072,
+ "step": 179
+ },
+ {
+ "epoch": 2.467889908256881,
+ "grad_norm": 0.48035845160484314,
+ "learning_rate": 4.317231129607859e-06,
+ "loss": 0.006,
+ "step": 180
+ },
+ {
+ "epoch": 2.481651376146789,
+ "grad_norm": 0.17027413845062256,
+ "learning_rate": 4.30090383273031e-06,
+ "loss": 0.0043,
+ "step": 181
+ },
+ {
+ "epoch": 2.4954128440366974,
+ "grad_norm": 0.2660333216190338,
+ "learning_rate": 4.2844152817178476e-06,
+ "loss": 0.0082,
+ "step": 182
+ },
+ {
+ "epoch": 2.5091743119266052,
+ "grad_norm": 0.2485552728176117,
+ "learning_rate": 4.267766952966369e-06,
+ "loss": 0.0059,
+ "step": 183
+ },
+ {
+ "epoch": 2.522935779816514,
+ "grad_norm": 0.2048122137784958,
+ "learning_rate": 4.2509603371783776e-06,
+ "loss": 0.0071,
+ "step": 184
+ },
+ {
+ "epoch": 2.536697247706422,
+ "grad_norm": 0.1725567877292633,
+ "learning_rate": 4.233996939229502e-06,
+ "loss": 0.0053,
+ "step": 185
+ },
+ {
+ "epoch": 2.5504587155963305,
+ "grad_norm": 0.14592835307121277,
+ "learning_rate": 4.216878278033753e-06,
+ "loss": 0.0044,
+ "step": 186
+ },
+ {
+ "epoch": 2.5642201834862384,
+ "grad_norm": 0.2868310213088989,
+ "learning_rate": 4.199605886407515e-06,
+ "loss": 0.0057,
+ "step": 187
+ },
+ {
+ "epoch": 2.5779816513761467,
+ "grad_norm": 0.5002567172050476,
+ "learning_rate": 4.1821813109322975e-06,
+ "loss": 0.0072,
+ "step": 188
+ },
+ {
+ "epoch": 2.591743119266055,
+ "grad_norm": 0.199919655919075,
+ "learning_rate": 4.164606111816256e-06,
+ "loss": 0.0043,
+ "step": 189
+ },
+ {
+ "epoch": 2.6055045871559632,
+ "grad_norm": 0.17280228435993195,
+ "learning_rate": 4.146881862754485e-06,
+ "loss": 0.0048,
+ "step": 190
+ },
+ {
+ "epoch": 2.6192660550458715,
+ "grad_norm": 0.17505577206611633,
+ "learning_rate": 4.129010150788112e-06,
+ "loss": 0.0037,
+ "step": 191
+ },
+ {
+ "epoch": 2.63302752293578,
+ "grad_norm": 0.21955189108848572,
+ "learning_rate": 4.110992576162193e-06,
+ "loss": 0.0044,
+ "step": 192
+ },
+ {
+ "epoch": 2.646788990825688,
+ "grad_norm": 0.22081787884235382,
+ "learning_rate": 4.092830752182423e-06,
+ "loss": 0.0036,
+ "step": 193
+ },
+ {
+ "epoch": 2.6605504587155964,
+ "grad_norm": 0.26361310482025146,
+ "learning_rate": 4.074526305070679e-06,
+ "loss": 0.0062,
+ "step": 194
+ },
+ {
+ "epoch": 2.6743119266055047,
+ "grad_norm": 0.15528841316699982,
+ "learning_rate": 4.056080873819412e-06,
+ "loss": 0.0029,
+ "step": 195
+ },
+ {
+ "epoch": 2.688073394495413,
+ "grad_norm": 6.002184867858887,
+ "learning_rate": 4.037496110044885e-06,
+ "loss": 0.0062,
+ "step": 196
+ },
+ {
+ "epoch": 2.7018348623853212,
+ "grad_norm": 0.12827160954475403,
+ "learning_rate": 4.018773677839289e-06,
+ "loss": 0.0025,
+ "step": 197
+ },
+ {
+ "epoch": 2.7155963302752295,
+ "grad_norm": 0.21857935190200806,
+ "learning_rate": 3.999915253621739e-06,
+ "loss": 0.004,
+ "step": 198
+ },
+ {
+ "epoch": 2.729357798165138,
+ "grad_norm": 0.231048583984375,
+ "learning_rate": 3.980922525988167e-06,
+ "loss": 0.0034,
+ "step": 199
+ },
+ {
+ "epoch": 2.7431192660550456,
+ "grad_norm": 0.16589054465293884,
+ "learning_rate": 3.961797195560118e-06,
+ "loss": 0.0045,
+ "step": 200
+ },
+ {
+ "epoch": 2.7568807339449544,
+ "grad_norm": 0.2721656858921051,
+ "learning_rate": 3.942540974832486e-06,
+ "loss": 0.0056,
+ "step": 201
+ },
+ {
+ "epoch": 2.770642201834862,
+ "grad_norm": 0.12930598855018616,
+ "learning_rate": 3.9231555880201655e-06,
+ "loss": 0.0037,
+ "step": 202
+ },
+ {
+ "epoch": 2.7844036697247705,
+ "grad_norm": 0.299055814743042,
+ "learning_rate": 3.903642770903671e-06,
+ "loss": 0.0036,
+ "step": 203
+ },
+ {
+ "epoch": 2.7981651376146788,
+ "grad_norm": 0.15592966973781586,
+ "learning_rate": 3.884004270673711e-06,
+ "loss": 0.0023,
+ "step": 204
+ },
+ {
+ "epoch": 2.811926605504587,
+ "grad_norm": 0.10496195405721664,
+ "learning_rate": 3.864241845774746e-06,
+ "loss": 0.0029,
+ "step": 205
+ },
+ {
+ "epoch": 2.8256880733944953,
+ "grad_norm": 0.1604638695716858,
+ "learning_rate": 3.844357265747531e-06,
+ "loss": 0.0032,
+ "step": 206
+ },
+ {
+ "epoch": 2.8394495412844036,
+ "grad_norm": 0.1436438113451004,
+ "learning_rate": 3.8243523110706736e-06,
+ "loss": 0.0035,
+ "step": 207
+ },
+ {
+ "epoch": 2.853211009174312,
+ "grad_norm": 0.176204651594162,
+ "learning_rate": 3.8042287730012117e-06,
+ "loss": 0.0022,
+ "step": 208
+ },
+ {
+ "epoch": 2.86697247706422,
+ "grad_norm": 0.3500923216342926,
+ "learning_rate": 3.7839884534142157e-06,
+ "loss": 0.0022,
+ "step": 209
+ },
+ {
+ "epoch": 2.8807339449541285,
+ "grad_norm": 0.2319999784231186,
+ "learning_rate": 3.7636331646414524e-06,
+ "loss": 0.003,
+ "step": 210
+ },
+ {
+ "epoch": 2.8944954128440368,
+ "grad_norm": 0.1358226090669632,
+ "learning_rate": 3.7431647293091076e-06,
+ "loss": 0.0023,
+ "step": 211
+ },
+ {
+ "epoch": 2.908256880733945,
+ "grad_norm": 0.1323612779378891,
+ "learning_rate": 3.7225849801745835e-06,
+ "loss": 0.0021,
+ "step": 212
+ },
+ {
+ "epoch": 2.9220183486238533,
+ "grad_norm": 0.10968377441167831,
+ "learning_rate": 3.701895759962397e-06,
+ "loss": 0.002,
+ "step": 213
+ },
+ {
+ "epoch": 2.9357798165137616,
+ "grad_norm": 0.11163649708032608,
+ "learning_rate": 3.6810989211991777e-06,
+ "loss": 0.0015,
+ "step": 214
+ },
+ {
+ "epoch": 2.9495412844036695,
+ "grad_norm": 0.6103344559669495,
+ "learning_rate": 3.6601963260477923e-06,
+ "loss": 0.0051,
+ "step": 215
+ },
+ {
+ "epoch": 2.963302752293578,
+ "grad_norm": 0.1300484985113144,
+ "learning_rate": 3.6391898461406045e-06,
+ "loss": 0.0018,
+ "step": 216
+ },
+ {
+ "epoch": 2.977064220183486,
+ "grad_norm": 0.11599847674369812,
+ "learning_rate": 3.6180813624118898e-06,
+ "loss": 0.0021,
+ "step": 217
+ },
+ {
+ "epoch": 2.9908256880733948,
+ "grad_norm": 0.14168186485767365,
+ "learning_rate": 3.5968727649294134e-06,
+ "loss": 0.0018,
+ "step": 218
+ },
+ {
+ "epoch": 3.0,
+ "grad_norm": 0.16039852797985077,
+ "learning_rate": 3.575565952725193e-06,
+ "loss": 0.0014,
+ "step": 219
+ },
+ {
+ "epoch": 3.0137614678899083,
+ "grad_norm": 0.08175123482942581,
+ "learning_rate": 3.55416283362546e-06,
+ "loss": 0.0008,
+ "step": 220
+ },
+ {
+ "epoch": 3.0275229357798166,
+ "grad_norm": 0.07637064158916473,
+ "learning_rate": 3.5326653240798283e-06,
+ "loss": 0.0007,
+ "step": 221
+ },
+ {
+ "epoch": 3.041284403669725,
+ "grad_norm": 0.061755988746881485,
+ "learning_rate": 3.5110753489896924e-06,
+ "loss": 0.0007,
+ "step": 222
+ },
+ {
+ "epoch": 3.055045871559633,
+ "grad_norm": 0.05573924630880356,
+ "learning_rate": 3.4893948415358803e-06,
+ "loss": 0.0008,
+ "step": 223
+ },
+ {
+ "epoch": 3.0688073394495414,
+ "grad_norm": 0.18670693039894104,
+ "learning_rate": 3.4676257430055438e-06,
+ "loss": 0.0007,
+ "step": 224
+ },
+ {
+ "epoch": 3.0825688073394497,
+ "grad_norm": 0.05674147605895996,
+ "learning_rate": 3.4457700026183378e-06,
+ "loss": 0.0008,
+ "step": 225
+ },
+ {
+ "epoch": 3.096330275229358,
+ "grad_norm": 0.08101407438516617,
+ "learning_rate": 3.4238295773518924e-06,
+ "loss": 0.0006,
+ "step": 226
+ },
+ {
+ "epoch": 3.1100917431192663,
+ "grad_norm": 0.09028138220310211,
+ "learning_rate": 3.4018064317665745e-06,
+ "loss": 0.0011,
+ "step": 227
+ },
+ {
+ "epoch": 3.123853211009174,
+ "grad_norm": 0.0955042913556099,
+ "learning_rate": 3.3797025378295826e-06,
+ "loss": 0.0008,
+ "step": 228
+ },
+ {
+ "epoch": 3.1376146788990824,
+ "grad_norm": 0.09355667978525162,
+ "learning_rate": 3.357519874738382e-06,
+ "loss": 0.0011,
+ "step": 229
+ },
+ {
+ "epoch": 3.1513761467889907,
+ "grad_norm": 0.06623287498950958,
+ "learning_rate": 3.3352604287434752e-06,
+ "loss": 0.0006,
+ "step": 230
+ },
+ {
+ "epoch": 3.165137614678899,
+ "grad_norm": 0.07880504429340363,
+ "learning_rate": 3.31292619297056e-06,
+ "loss": 0.0007,
+ "step": 231
+ },
+ {
+ "epoch": 3.1788990825688073,
+ "grad_norm": 0.06904889643192291,
+ "learning_rate": 3.29051916724206e-06,
+ "loss": 0.0005,
+ "step": 232
+ },
+ {
+ "epoch": 3.1926605504587156,
+ "grad_norm": 0.23911181092262268,
+ "learning_rate": 3.2680413578980623e-06,
+ "loss": 0.0007,
+ "step": 233
+ },
+ {
+ "epoch": 3.206422018348624,
+ "grad_norm": 0.15154607594013214,
+ "learning_rate": 3.245494777616664e-06,
+ "loss": 0.0015,
+ "step": 234
+ },
+ {
+ "epoch": 3.220183486238532,
+ "grad_norm": 0.15627366304397583,
+ "learning_rate": 3.2228814452337587e-06,
+ "loss": 0.001,
+ "step": 235
+ },
+ {
+ "epoch": 3.2339449541284404,
+ "grad_norm": 0.0780797079205513,
+ "learning_rate": 3.2002033855622683e-06,
+ "loss": 0.0005,
+ "step": 236
+ },
+ {
+ "epoch": 3.2477064220183487,
+ "grad_norm": 0.7883831858634949,
+ "learning_rate": 3.177462629210838e-06,
+ "loss": 0.0005,
+ "step": 237
+ },
+ {
+ "epoch": 3.261467889908257,
+ "grad_norm": 0.18375582993030548,
+ "learning_rate": 3.154661212402017e-06,
+ "loss": 0.0006,
+ "step": 238
+ },
+ {
+ "epoch": 3.2752293577981653,
+ "grad_norm": 0.09809675812721252,
+ "learning_rate": 3.131801176789934e-06,
+ "loss": 0.001,
+ "step": 239
+ },
+ {
+ "epoch": 3.2889908256880735,
+ "grad_norm": 0.04473511874675751,
+ "learning_rate": 3.1088845692774798e-06,
+ "loss": 0.0002,
+ "step": 240
+ },
+ {
+ "epoch": 3.302752293577982,
+ "grad_norm": 0.07583656907081604,
+ "learning_rate": 3.0859134418330373e-06,
+ "loss": 0.0007,
+ "step": 241
+ },
+ {
+ "epoch": 3.31651376146789,
+ "grad_norm": 0.06534383445978165,
+ "learning_rate": 3.0628898513067357e-06,
+ "loss": 0.0003,
+ "step": 242
+ },
+ {
+ "epoch": 3.330275229357798,
+ "grad_norm": 0.09651501476764679,
+ "learning_rate": 3.0398158592462847e-06,
+ "loss": 0.0012,
+ "step": 243
+ },
+ {
+ "epoch": 3.3440366972477062,
+ "grad_norm": 0.07052983343601227,
+ "learning_rate": 3.0166935317123824e-06,
+ "loss": 0.0007,
+ "step": 244
+ },
+ {
+ "epoch": 3.3577981651376145,
+ "grad_norm": 0.09956210106611252,
+ "learning_rate": 2.9935249390937184e-06,
+ "loss": 0.0009,
+ "step": 245
+ },
+ {
+ "epoch": 3.371559633027523,
+ "grad_norm": 0.051604535430669785,
+ "learning_rate": 2.970312155921585e-06,
+ "loss": 0.0005,
+ "step": 246
+ },
+ {
+ "epoch": 3.385321100917431,
+ "grad_norm": 0.16182328760623932,
+ "learning_rate": 2.9470572606841295e-06,
+ "loss": 0.0006,
+ "step": 247
+ },
+ {
+ "epoch": 3.3990825688073394,
+ "grad_norm": 0.07653603702783585,
+ "learning_rate": 2.9237623356402423e-06,
+ "loss": 0.0005,
+ "step": 248
+ },
+ {
+ "epoch": 3.4128440366972477,
+ "grad_norm": 0.0773971900343895,
+ "learning_rate": 2.900429466633107e-06,
+ "loss": 0.0009,
+ "step": 249
+ },
+ {
+ "epoch": 3.426605504587156,
+ "grad_norm": 0.1629229336977005,
+ "learning_rate": 2.8770607429034352e-06,
+ "loss": 0.0013,
+ "step": 250
+ },
+ {
+ "epoch": 3.4403669724770642,
+ "grad_norm": 0.1734458953142166,
+ "learning_rate": 2.8536582569023964e-06,
+ "loss": 0.0009,
+ "step": 251
+ },
+ {
+ "epoch": 3.4541284403669725,
+ "grad_norm": 0.08963964134454727,
+ "learning_rate": 2.8302241041042564e-06,
+ "loss": 0.0004,
+ "step": 252
+ },
+ {
+ "epoch": 3.467889908256881,
+ "grad_norm": 0.037656184285879135,
+ "learning_rate": 2.8067603828187446e-06,
+ "loss": 0.0002,
+ "step": 253
+ },
+ {
+ "epoch": 3.481651376146789,
+ "grad_norm": 0.07996565848588943,
+ "learning_rate": 2.7832691940031755e-06,
+ "loss": 0.0006,
+ "step": 254
+ },
+ {
+ "epoch": 3.4954128440366974,
+ "grad_norm": 0.39782819151878357,
+ "learning_rate": 2.759752641074322e-06,
+ "loss": 0.0004,
+ "step": 255
+ },
+ {
+ "epoch": 3.5091743119266052,
+ "grad_norm": 0.056630104780197144,
+ "learning_rate": 2.7362128297200784e-06,
+ "loss": 0.0004,
+ "step": 256
+ },
+ {
+ "epoch": 3.522935779816514,
+ "grad_norm": 0.18943996727466583,
+ "learning_rate": 2.712651867710914e-06,
+ "loss": 0.0017,
+ "step": 257
+ },
+ {
+ "epoch": 3.536697247706422,
+ "grad_norm": 0.04645173251628876,
+ "learning_rate": 2.6890718647111424e-06,
+ "loss": 0.0004,
+ "step": 258
+ },
+ {
+ "epoch": 3.5504587155963305,
+ "grad_norm": 0.07644187659025192,
+ "learning_rate": 2.665474932090017e-06,
+ "loss": 0.0008,
+ "step": 259
+ },
+ {
+ "epoch": 3.5642201834862384,
+ "grad_norm": 0.04974009841680527,
+ "learning_rate": 2.6418631827326857e-06,
+ "loss": 0.0005,
+ "step": 260
+ },
+ {
+ "epoch": 3.5779816513761467,
+ "grad_norm": 0.06213025003671646,
+ "learning_rate": 2.6182387308509927e-06,
+ "loss": 0.0005,
+ "step": 261
+ },
+ {
+ "epoch": 3.591743119266055,
+ "grad_norm": 0.09061244130134583,
+ "learning_rate": 2.5946036917941765e-06,
+ "loss": 0.0006,
+ "step": 262
+ },
+ {
+ "epoch": 3.6055045871559632,
+ "grad_norm": 0.0796905905008316,
+ "learning_rate": 2.570960181859458e-06,
+ "loss": 0.0006,
+ "step": 263
+ },
+ {
+ "epoch": 3.6192660550458715,
+ "grad_norm": 0.19685125350952148,
+ "learning_rate": 2.547310318102548e-06,
+ "loss": 0.0006,
+ "step": 264
+ },
+ {
+ "epoch": 3.63302752293578,
+ "grad_norm": 0.030696067959070206,
+ "learning_rate": 2.5236562181480794e-06,
+ "loss": 0.0001,
+ "step": 265
+ },
+ {
+ "epoch": 3.646788990825688,
+ "grad_norm": 0.04516645520925522,
+ "learning_rate": 2.5e-06,
+ "loss": 0.0004,
+ "step": 266
+ },
+ {
+ "epoch": 3.6605504587155964,
+ "grad_norm": 0.09353721141815186,
+ "learning_rate": 2.4763437818519205e-06,
+ "loss": 0.0003,
+ "step": 267
+ },
+ {
+ "epoch": 3.6743119266055047,
+ "grad_norm": 0.05623761937022209,
+ "learning_rate": 2.4526896818974534e-06,
+ "loss": 0.0003,
+ "step": 268
+ },
+ {
+ "epoch": 3.688073394495413,
+ "grad_norm": 0.11353174597024918,
+ "learning_rate": 2.429039818140543e-06,
+ "loss": 0.0002,
+ "step": 269
+ },
+ {
+ "epoch": 3.7018348623853212,
+ "grad_norm": 0.03043302521109581,
+ "learning_rate": 2.405396308205825e-06,
+ "loss": 0.0002,
+ "step": 270
+ },
+ {
+ "epoch": 3.7155963302752295,
+ "grad_norm": 0.05028878524899483,
+ "learning_rate": 2.381761269149009e-06,
+ "loss": 0.0002,
+ "step": 271
+ },
+ {
+ "epoch": 3.729357798165138,
+ "grad_norm": 0.03009975329041481,
+ "learning_rate": 2.358136817267315e-06,
+ "loss": 0.0002,
+ "step": 272
+ },
+ {
+ "epoch": 3.7431192660550456,
+ "grad_norm": 0.08547350764274597,
+ "learning_rate": 2.334525067909983e-06,
+ "loss": 0.0005,
+ "step": 273
+ },
+ {
+ "epoch": 3.7568807339449544,
+ "grad_norm": 0.03611677512526512,
+ "learning_rate": 2.3109281352888593e-06,
+ "loss": 0.0002,
+ "step": 274
+ },
+ {
+ "epoch": 3.770642201834862,
+ "grad_norm": 0.03364509344100952,
+ "learning_rate": 2.2873481322890866e-06,
+ "loss": 0.0001,
+ "step": 275
+ },
+ {
+ "epoch": 3.7844036697247705,
+ "grad_norm": 0.04283633828163147,
+ "learning_rate": 2.263787170279922e-06,
+ "loss": 0.0003,
+ "step": 276
+ },
+ {
+ "epoch": 3.7981651376146788,
+ "grad_norm": 0.057849906384944916,
+ "learning_rate": 2.2402473589256793e-06,
+ "loss": 0.0002,
+ "step": 277
+ },
+ {
+ "epoch": 3.811926605504587,
+ "grad_norm": 0.12497337907552719,
+ "learning_rate": 2.2167308059968258e-06,
+ "loss": 0.0002,
+ "step": 278
+ },
+ {
+ "epoch": 3.8256880733944953,
+ "grad_norm": 0.044824711978435516,
+ "learning_rate": 2.193239617181256e-06,
+ "loss": 0.0003,
+ "step": 279
+ },
+ {
+ "epoch": 3.8394495412844036,
+ "grad_norm": 0.06536471843719482,
+ "learning_rate": 2.169775895895745e-06,
+ "loss": 0.0002,
+ "step": 280
+ },
+ {
+ "epoch": 3.853211009174312,
+ "grad_norm": 0.07215467095375061,
+ "learning_rate": 2.146341743097604e-06,
+ "loss": 0.0002,
+ "step": 281
+ },
+ {
+ "epoch": 3.86697247706422,
+ "grad_norm": 0.01871681585907936,
+ "learning_rate": 2.1229392570965656e-06,
+ "loss": 0.0001,
+ "step": 282
+ },
+ {
+ "epoch": 3.8807339449541285,
+ "grad_norm": 0.043813955038785934,
+ "learning_rate": 2.0995705333668948e-06,
+ "loss": 0.0002,
+ "step": 283
+ },
+ {
+ "epoch": 3.8944954128440368,
+ "grad_norm": 0.046582091599702835,
+ "learning_rate": 2.0762376643597586e-06,
+ "loss": 0.0003,
+ "step": 284
+ },
+ {
+ "epoch": 3.908256880733945,
+ "grad_norm": 0.06702767312526703,
+ "learning_rate": 2.0529427393158704e-06,
+ "loss": 0.0004,
+ "step": 285
+ },
+ {
+ "epoch": 3.9220183486238533,
+ "grad_norm": 0.053124528378248215,
+ "learning_rate": 2.0296878440784164e-06,
+ "loss": 0.0005,
+ "step": 286
+ },
+ {
+ "epoch": 3.9357798165137616,
+ "grad_norm": 0.029215684160590172,
+ "learning_rate": 2.006475060906283e-06,
+ "loss": 0.0002,
+ "step": 287
+ },
+ {
+ "epoch": 3.9495412844036695,
+ "grad_norm": 0.03736970201134682,
+ "learning_rate": 1.9833064682876175e-06,
+ "loss": 0.0001,
+ "step": 288
+ },
+ {
+ "epoch": 3.963302752293578,
+ "grad_norm": 0.0413820743560791,
+ "learning_rate": 1.9601841407537157e-06,
+ "loss": 0.0002,
+ "step": 289
+ },
+ {
+ "epoch": 3.977064220183486,
+ "grad_norm": 0.07081856578588486,
+ "learning_rate": 1.937110148693265e-06,
+ "loss": 0.0004,
+ "step": 290
+ },
+ {
+ "epoch": 3.9908256880733948,
+ "grad_norm": 0.0220099538564682,
+ "learning_rate": 1.9140865581669627e-06,
+ "loss": 0.0001,
+ "step": 291
+ },
+ {
+ "epoch": 4.0,
+ "grad_norm": 0.0220099538564682,
+ "learning_rate": 1.8911154307225204e-06,
+ "loss": 0.0001,
+ "step": 292
+ },
+ {
+ "epoch": 4.013761467889908,
+ "grad_norm": 0.0368737168610096,
+ "learning_rate": 1.8681988232100674e-06,
+ "loss": 0.0002,
+ "step": 293
+ },
+ {
+ "epoch": 4.027522935779817,
+ "grad_norm": 0.024728944525122643,
+ "learning_rate": 1.8453387875979834e-06,
+ "loss": 0.0001,
+ "step": 294
+ },
+ {
+ "epoch": 4.041284403669724,
+ "grad_norm": 0.027091216295957565,
+ "learning_rate": 1.822537370789163e-06,
+ "loss": 0.0002,
+ "step": 295
+ },
+ {
+ "epoch": 4.055045871559633,
+ "grad_norm": 0.05742163583636284,
+ "learning_rate": 1.7997966144377328e-06,
+ "loss": 0.0001,
+ "step": 296
+ },
+ {
+ "epoch": 4.068807339449541,
+ "grad_norm": 0.011909076012670994,
+ "learning_rate": 1.7771185547662417e-06,
+ "loss": 0.0,
+ "step": 297
+ },
+ {
+ "epoch": 4.08256880733945,
+ "grad_norm": 0.005773312412202358,
+ "learning_rate": 1.754505222383337e-06,
+ "loss": 0.0,
+ "step": 298
+ },
+ {
+ "epoch": 4.0963302752293576,
+ "grad_norm": 0.012064033187925816,
+ "learning_rate": 1.7319586421019383e-06,
+ "loss": 0.0,
+ "step": 299
+ },
+ {
+ "epoch": 4.110091743119266,
+ "grad_norm": 0.00871270801872015,
+ "learning_rate": 1.7094808327579401e-06,
+ "loss": 0.0001,
+ "step": 300
+ },
+ {
+ "epoch": 4.123853211009174,
+ "grad_norm": 0.015500242821872234,
+ "learning_rate": 1.6870738070294412e-06,
+ "loss": 0.0001,
+ "step": 301
+ },
+ {
+ "epoch": 4.137614678899083,
+ "grad_norm": 0.010978137142956257,
+ "learning_rate": 1.6647395712565256e-06,
+ "loss": 0.0,
+ "step": 302
+ },
+ {
+ "epoch": 4.151376146788991,
+ "grad_norm": 0.011058066971600056,
+ "learning_rate": 1.6424801252616186e-06,
+ "loss": 0.0001,
+ "step": 303
+ },
+ {
+ "epoch": 4.165137614678899,
+ "grad_norm": 0.029183728620409966,
+ "learning_rate": 1.6202974621704176e-06,
+ "loss": 0.0001,
+ "step": 304
+ },
+ {
+ "epoch": 4.178899082568807,
+ "grad_norm": 0.016758723184466362,
+ "learning_rate": 1.5981935682334266e-06,
+ "loss": 0.0002,
+ "step": 305
+ },
+ {
+ "epoch": 4.192660550458716,
+ "grad_norm": 0.02776522748172283,
+ "learning_rate": 1.5761704226481078e-06,
+ "loss": 0.0001,
+ "step": 306
+ },
+ {
+ "epoch": 4.206422018348624,
+ "grad_norm": 0.008353229612112045,
+ "learning_rate": 1.5542299973816626e-06,
+ "loss": 0.0001,
+ "step": 307
+ },
+ {
+ "epoch": 4.220183486238533,
+ "grad_norm": 0.019200339913368225,
+ "learning_rate": 1.5323742569944573e-06,
+ "loss": 0.0001,
+ "step": 308
+ },
+ {
+ "epoch": 4.23394495412844,
+ "grad_norm": 0.023347314447164536,
+ "learning_rate": 1.5106051584641208e-06,
+ "loss": 0.0001,
+ "step": 309
+ },
+ {
+ "epoch": 4.247706422018348,
+ "grad_norm": 0.035208187997341156,
+ "learning_rate": 1.4889246510103078e-06,
+ "loss": 0.0001,
+ "step": 310
+ },
+ {
+ "epoch": 4.261467889908257,
+ "grad_norm": 0.0710497498512268,
+ "learning_rate": 1.4673346759201728e-06,
+ "loss": 0.0,
+ "step": 311
+ },
+ {
+ "epoch": 4.275229357798165,
+ "grad_norm": 0.018748018890619278,
+ "learning_rate": 1.44583716637454e-06,
+ "loss": 0.0001,
+ "step": 312
+ },
+ {
+ "epoch": 4.2889908256880735,
+ "grad_norm": 0.054301682859659195,
+ "learning_rate": 1.4244340472748076e-06,
+ "loss": 0.0001,
+ "step": 313
+ },
+ {
+ "epoch": 4.302752293577981,
+ "grad_norm": 0.020265033468604088,
+ "learning_rate": 1.403127235070587e-06,
+ "loss": 0.0001,
+ "step": 314
+ },
+ {
+ "epoch": 4.31651376146789,
+ "grad_norm": 0.008297888562083244,
+ "learning_rate": 1.381918637588112e-06,
+ "loss": 0.0,
+ "step": 315
+ },
+ {
+ "epoch": 4.330275229357798,
+ "grad_norm": 0.018647175282239914,
+ "learning_rate": 1.3608101538593965e-06,
+ "loss": 0.0001,
+ "step": 316
+ },
+ {
+ "epoch": 4.344036697247707,
+ "grad_norm": 0.02466970682144165,
+ "learning_rate": 1.3398036739522088e-06,
+ "loss": 0.0002,
+ "step": 317
+ },
+ {
+ "epoch": 4.3577981651376145,
+ "grad_norm": 0.04142339527606964,
+ "learning_rate": 1.3189010788008234e-06,
+ "loss": 0.0001,
+ "step": 318
+ },
+ {
+ "epoch": 4.371559633027523,
+ "grad_norm": 0.025369996204972267,
+ "learning_rate": 1.2981042400376032e-06,
+ "loss": 0.0001,
+ "step": 319
+ },
+ {
+ "epoch": 4.385321100917431,
+ "grad_norm": 0.009671038947999477,
+ "learning_rate": 1.277415019825417e-06,
+ "loss": 0.0,
+ "step": 320
+ },
+ {
+ "epoch": 4.39908256880734,
+ "grad_norm": 0.012685295194387436,
+ "learning_rate": 1.2568352706908937e-06,
+ "loss": 0.0001,
+ "step": 321
+ },
+ {
+ "epoch": 4.412844036697248,
+ "grad_norm": 0.05089607089757919,
+ "learning_rate": 1.2363668353585486e-06,
+ "loss": 0.0001,
+ "step": 322
+ },
+ {
+ "epoch": 4.426605504587156,
+ "grad_norm": 0.024581842124462128,
+ "learning_rate": 1.216011546585785e-06,
+ "loss": 0.0001,
+ "step": 323
+ },
+ {
+ "epoch": 4.440366972477064,
+ "grad_norm": 0.012231200002133846,
+ "learning_rate": 1.195771226998789e-06,
+ "loss": 0.0001,
+ "step": 324
+ },
+ {
+ "epoch": 4.454128440366972,
+ "grad_norm": 0.02831755019724369,
+ "learning_rate": 1.1756476889293269e-06,
+ "loss": 0.0001,
+ "step": 325
+ },
+ {
+ "epoch": 4.467889908256881,
+ "grad_norm": 0.05837830901145935,
+ "learning_rate": 1.1556427342524698e-06,
+ "loss": 0.0001,
+ "step": 326
+ },
+ {
+ "epoch": 4.481651376146789,
+ "grad_norm": 0.015133843757212162,
+ "learning_rate": 1.1357581542252555e-06,
+ "loss": 0.0,
+ "step": 327
+ },
+ {
+ "epoch": 4.495412844036697,
+ "grad_norm": 0.004880247637629509,
+ "learning_rate": 1.1159957293262888e-06,
+ "loss": 0.0,
+ "step": 328
+ },
+ {
+ "epoch": 4.509174311926605,
+ "grad_norm": 0.017852261662483215,
+ "learning_rate": 1.0963572290963298e-06,
+ "loss": 0.0001,
+ "step": 329
+ },
+ {
+ "epoch": 4.522935779816514,
+ "grad_norm": 0.007527775596827269,
+ "learning_rate": 1.0768444119798357e-06,
+ "loss": 0.0001,
+ "step": 330
+ },
+ {
+ "epoch": 4.536697247706422,
+ "grad_norm": 0.00978136993944645,
+ "learning_rate": 1.0574590251675145e-06,
+ "loss": 0.0,
+ "step": 331
+ },
+ {
+ "epoch": 4.5504587155963305,
+ "grad_norm": 0.011405237950384617,
+ "learning_rate": 1.0382028044398823e-06,
+ "loss": 0.0,
+ "step": 332
+ },
+ {
+ "epoch": 4.564220183486238,
+ "grad_norm": 0.004018036648631096,
+ "learning_rate": 1.0190774740118343e-06,
+ "loss": 0.0,
+ "step": 333
+ },
+ {
+ "epoch": 4.577981651376147,
+ "grad_norm": 0.0168945100158453,
+ "learning_rate": 1.0000847463782615e-06,
+ "loss": 0.0001,
+ "step": 334
+ },
+ {
+ "epoch": 4.591743119266055,
+ "grad_norm": 0.008481111377477646,
+ "learning_rate": 9.812263221607114e-07,
+ "loss": 0.0,
+ "step": 335
+ },
+ {
+ "epoch": 4.605504587155964,
+ "grad_norm": 0.014810923486948013,
+ "learning_rate": 9.625038899551162e-07,
+ "loss": 0.0,
+ "step": 336
+ },
+ {
+ "epoch": 4.6192660550458715,
+ "grad_norm": 0.03142401948571205,
+ "learning_rate": 9.439191261805894e-07,
+ "loss": 0.0001,
+ "step": 337
+ },
+ {
+ "epoch": 4.63302752293578,
+ "grad_norm": 0.025308527052402496,
+ "learning_rate": 9.254736949293216e-07,
+ "loss": 0.0,
+ "step": 338
+ },
+ {
+ "epoch": 4.646788990825688,
+ "grad_norm": 0.021518364548683167,
+ "learning_rate": 9.07169247817579e-07,
+ "loss": 0.0001,
+ "step": 339
+ },
+ {
+ "epoch": 4.660550458715596,
+ "grad_norm": 0.007280074991285801,
+ "learning_rate": 8.890074238378074e-07,
+ "loss": 0.0,
+ "step": 340
+ },
+ {
+ "epoch": 4.674311926605505,
+ "grad_norm": 0.007103382609784603,
+ "learning_rate": 8.709898492118885e-07,
+ "loss": 0.0,
+ "step": 341
+ },
+ {
+ "epoch": 4.6880733944954125,
+ "grad_norm": 0.0127399992197752,
+ "learning_rate": 8.531181372455161e-07,
+ "loss": 0.0001,
+ "step": 342
+ },
+ {
+ "epoch": 4.701834862385321,
+ "grad_norm": 0.01236600149422884,
+ "learning_rate": 8.353938881837445e-07,
+ "loss": 0.0001,
+ "step": 343
+ },
+ {
+ "epoch": 4.715596330275229,
+ "grad_norm": 0.01739378273487091,
+ "learning_rate": 8.178186890677029e-07,
+ "loss": 0.0001,
+ "step": 344
+ },
+ {
+ "epoch": 4.729357798165138,
+ "grad_norm": 0.02507130056619644,
+ "learning_rate": 8.003941135924859e-07,
+ "loss": 0.0001,
+ "step": 345
+ },
+ {
+ "epoch": 4.743119266055046,
+ "grad_norm": 0.04195310175418854,
+ "learning_rate": 7.83121721966248e-07,
+ "loss": 0.0,
+ "step": 346
+ },
+ {
+ "epoch": 4.756880733944954,
+ "grad_norm": 0.04957769811153412,
+ "learning_rate": 7.66003060770498e-07,
+ "loss": 0.0002,
+ "step": 347
+ },
+ {
+ "epoch": 4.770642201834862,
+ "grad_norm": 0.015677858144044876,
+ "learning_rate": 7.490396628216237e-07,
+ "loss": 0.0,
+ "step": 348
+ },
+ {
+ "epoch": 4.784403669724771,
+ "grad_norm": 0.009202621877193451,
+ "learning_rate": 7.322330470336314e-07,
+ "loss": 0.0001,
+ "step": 349
+ },
+ {
+ "epoch": 4.798165137614679,
+ "grad_norm": 0.0277025755494833,
+ "learning_rate": 7.155847182821524e-07,
+ "loss": 0.0001,
+ "step": 350
+ },
+ {
+ "epoch": 4.8119266055045875,
+ "grad_norm": 0.022610262036323547,
+ "learning_rate": 6.990961672696908e-07,
+ "loss": 0.0001,
+ "step": 351
+ },
+ {
+ "epoch": 4.825688073394495,
+ "grad_norm": 0.008024164475500584,
+ "learning_rate": 6.827688703921407e-07,
+ "loss": 0.0001,
+ "step": 352
+ },
+ {
+ "epoch": 4.839449541284404,
+ "grad_norm": 0.025318168103694916,
+ "learning_rate": 6.666042896065983e-07,
+ "loss": 0.0001,
+ "step": 353
+ },
+ {
+ "epoch": 4.853211009174312,
+ "grad_norm": 0.022787343710660934,
+ "learning_rate": 6.506038723004484e-07,
+ "loss": 0.0001,
+ "step": 354
+ },
+ {
+ "epoch": 4.86697247706422,
+ "grad_norm": 0.04321616515517235,
+ "learning_rate": 6.347690511617693e-07,
+ "loss": 0.0002,
+ "step": 355
+ },
+ {
+ "epoch": 4.8807339449541285,
+ "grad_norm": 0.03710443153977394,
+ "learning_rate": 6.191012440510469e-07,
+ "loss": 0.0,
+ "step": 356
+ },
+ {
+ "epoch": 4.894495412844036,
+ "grad_norm": 0.012855797074735165,
+ "learning_rate": 6.036018538742208e-07,
+ "loss": 0.0,
+ "step": 357
+ },
+ {
+ "epoch": 4.908256880733945,
+ "grad_norm": 0.013949613086879253,
+ "learning_rate": 5.882722684570638e-07,
+ "loss": 0.0001,
+ "step": 358
+ },
+ {
+ "epoch": 4.922018348623853,
+ "grad_norm": 0.03625642880797386,
+ "learning_rate": 5.731138604209169e-07,
+ "loss": 0.0,
+ "step": 359
+ },
+ {
+ "epoch": 4.935779816513762,
+ "grad_norm": 0.006596633233129978,
+ "learning_rate": 5.581279870597866e-07,
+ "loss": 0.0,
+ "step": 360
+ }
+ ],
+ "logging_steps": 1,
+ "max_steps": 432,
+ "num_input_tokens_seen": 0,
+ "num_train_epochs": 6,
+ "save_steps": 72,
+ "stateful_callbacks": {
+ "TrainerControl": {
+ "args": {
+ "should_epoch_stop": false,
+ "should_evaluate": false,
+ "should_log": false,
+ "should_save": true,
+ "should_training_stop": false
+ },
+ "attributes": {}
+ }
+ },
+ "total_flos": 2.3061175005735813e+19,
+ "train_batch_size": 4,
+ "trial_name": null,
+ "trial_params": null
+}
diff --git a/checkpoint-360/training_args.bin b/checkpoint-360/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..6b48f4b52f5bfff81ec8534f6510460a8014f336
--- /dev/null
+++ b/checkpoint-360/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:cbafef0713d4b794ca3a92a04d378baaf3fa7647c9af95948bfb2ef7c0e02eda
+size 7928
diff --git a/checkpoint-360/zero_to_fp32.py b/checkpoint-360/zero_to_fp32.py
new file mode 100644
index 0000000000000000000000000000000000000000..24cc342e78d1a006c782b3a4cd68d9ce786d8fd8
--- /dev/null
+++ b/checkpoint-360/zero_to_fp32.py
@@ -0,0 +1,604 @@
+#!/usr/bin/env python
+
+# Copyright (c) Microsoft Corporation.
+# SPDX-License-Identifier: Apache-2.0
+
+# DeepSpeed Team
+
+# This script extracts fp32 consolidated weights from a zero 1, 2 and 3 DeepSpeed checkpoints. It gets
+# copied into the top level checkpoint dir, so the user can easily do the conversion at any point in
+# the future. Once extracted, the weights don't require DeepSpeed and can be used in any
+# application.
+#
+# example: python zero_to_fp32.py . pytorch_model.bin
+
+import argparse
+import torch
+import glob
+import math
+import os
+import re
+from collections import OrderedDict
+from dataclasses import dataclass
+
+# while this script doesn't use deepspeed to recover data, since the checkpoints are pickled with
+# DeepSpeed data structures it has to be available in the current python environment.
+from deepspeed.utils import logger
+from deepspeed.checkpoint.constants import (DS_VERSION, OPTIMIZER_STATE_DICT, SINGLE_PARTITION_OF_FP32_GROUPS,
+ FP32_FLAT_GROUPS, ZERO_STAGE, PARTITION_COUNT, PARAM_SHAPES, BUFFER_NAMES,
+ FROZEN_PARAM_SHAPES, FROZEN_PARAM_FRAGMENTS)
+
+
@dataclass
class zero_model_state:
    # Per-rank model-state pieces recovered from a DeepSpeed *_model_states.pt file.
    buffers: dict  # name -> fp32 tensor for each module buffer
    param_shapes: dict  # per param group: name -> shape of each trainable parameter
    shared_params: list  # [alias_name, source_name] pairs of tied parameters
    ds_version: int  # NOTE(review): filled from state_dict.get(DS_VERSION); may be a version string or None — confirm
    frozen_param_shapes: dict  # name -> shape of each frozen parameter, or None
    frozen_param_fragments: dict  # name -> this rank's fragment of each frozen parameter, or None
+
+
# Verbosity toggle for the diagnostic prints sprinkled through this script
# (0 = quiet, nonzero = chatty).
debug = 0

# load to cpu — checkpoints are materialized in host RAM so no GPU is required
device = torch.device('cpu')
+
+
def atoi(text):
    """Return int(text) when text consists only of digits, otherwise text unchanged."""
    if text.isdigit():
        return int(text)
    return text
+
+
def natural_keys(text):
    '''
    Sort key that orders strings the way a human would: embedded digit runs
    compare numerically, so "step2" sorts before "step10".
    Usage: alist.sort(key=natural_keys)
    Based on http://nedbatchelder.com/blog/200712/human_sorting.html
    (See Toothy's implementation in the comments)
    '''
    chunks = re.split(r'(\d+)', text)
    return [int(chunk) if chunk.isdigit() else chunk for chunk in chunks]
+
+
def get_model_state_file(checkpoint_dir, zero_stage):
    """Return the path of the rank-0 model-states file for the given ZeRO stage.

    Args:
        checkpoint_dir: directory holding the DeepSpeed *_model_states.pt files
        zero_stage: ZeRO optimization stage the checkpoint was saved with (1, 2 or 3)

    Returns:
        absolute/relative path of the expected model states file

    Raises:
        FileNotFoundError: if the directory or the expected file is missing
        ValueError: if zero_stage is not a recognized stage
    """
    if not os.path.isdir(checkpoint_dir):
        raise FileNotFoundError(f"Directory '{checkpoint_dir}' doesn't exist")

    # there should be only one file; its name depends on the ZeRO stage
    if zero_stage <= 2:
        file = os.path.join(checkpoint_dir, "mp_rank_00_model_states.pt")
    elif zero_stage == 3:
        file = os.path.join(checkpoint_dir, "zero_pp_rank_0_mp_rank_00_model_states.pt")
    else:
        # previously fell through and crashed with an opaque UnboundLocalError
        raise ValueError(f"unknown zero stage {zero_stage}")

    if not os.path.exists(file):
        raise FileNotFoundError(f"can't find model states file at '{file}'")

    return file
+
+
def get_checkpoint_files(checkpoint_dir, glob_pattern):
    """Return every file under checkpoint_dir matching glob_pattern, naturally sorted.

    Raises FileNotFoundError when nothing matches.
    """
    # XXX: need to test that this simple glob rule works for multi-node setup too
    pattern = os.path.join(checkpoint_dir, glob_pattern)
    ckpt_files = sorted(glob.glob(pattern), key=natural_keys)

    if not ckpt_files:
        raise FileNotFoundError(f"can't find {glob_pattern} files in directory '{checkpoint_dir}'")

    return ckpt_files
+
+
def get_optim_files(checkpoint_dir):
    """Return the naturally-sorted per-rank optimizer state files in checkpoint_dir."""
    optim_glob = "*_optim_states.pt"
    return get_checkpoint_files(checkpoint_dir, optim_glob)
+
+
def get_model_state_files(checkpoint_dir):
    """Return the naturally-sorted per-rank model state files in checkpoint_dir."""
    model_glob = "*_model_states.pt"
    return get_checkpoint_files(checkpoint_dir, model_glob)
+
+
def parse_model_states(files):
    """Load DeepSpeed *_model_states.pt files (one per rank) and distill each
    into a ``zero_model_state`` record.

    Args:
        files: paths to the model-state checkpoint files

    Returns:
        list of ``zero_model_state``, one per input file, in the same order

    Raises:
        ValueError: if a file lacks the BUFFER_NAMES key, i.e. is not a
            DeepSpeed model state checkpoint
    """
    zero_model_states = []
    for file in files:
        state_dict = torch.load(file, map_location=device)

        if BUFFER_NAMES not in state_dict:
            raise ValueError(f"{file} is not a model state checkpoint")
        buffer_names = state_dict[BUFFER_NAMES]
        if debug:
            print("Found buffers:", buffer_names)

        # recover just the buffers while restoring them to fp32 if they were saved in fp16
        buffers = {k: v.float() for k, v in state_dict["module"].items() if k in buffer_names}
        param_shapes = state_dict[PARAM_SHAPES]

        # collect parameters that are included in param_shapes
        param_names = []
        for s in param_shapes:
            for name in s.keys():
                param_names.append(name)

        # update with frozen parameters
        frozen_param_shapes = state_dict.get(FROZEN_PARAM_SHAPES, None)
        if frozen_param_shapes is not None:
            if debug:
                print(f"Found frozen_param_shapes: {frozen_param_shapes}")
            param_names += list(frozen_param_shapes.keys())

        # handle shared params
        shared_params = [[k, v] for k, v in state_dict["shared_params"].items()]

        # NOTE(review): may be None for checkpoints predating the DS_VERSION key
        ds_version = state_dict.get(DS_VERSION, None)

        frozen_param_fragments = state_dict.get(FROZEN_PARAM_FRAGMENTS, None)

        z_model_state = zero_model_state(buffers=buffers,
                                         param_shapes=param_shapes,
                                         shared_params=shared_params,
                                         ds_version=ds_version,
                                         frozen_param_shapes=frozen_param_shapes,
                                         frozen_param_fragments=frozen_param_fragments)
        zero_model_states.append(z_model_state)

    return zero_model_states
+
+
def parse_optim_states(files, ds_checkpoint_dir):
    """Load the per-rank *_optim_states.pt files and extract what is needed to
    rebuild the fp32 weights.

    Args:
        files: paths to the per-rank optimizer state files
        ds_checkpoint_dir: checkpoint folder (used only in error messages)

    Returns:
        (zero_stage, world_size, fp32_flat_groups) — fp32_flat_groups holds
        the flat fp32 master-weight partitions, one entry per rank

    Raises:
        ValueError: if the files are not zero checkpoints, the stage is
            unknown, or the file count does not match world_size
    """

    total_files = len(files)
    state_dicts = []
    for f in files:
        state_dict = torch.load(f, map_location=device)
        # immediately discard the potentially huge 2 optimizer states as we only care for fp32 master weights
        # and also handle the case where it was already removed by another helper script
        state_dict["optimizer_state_dict"].pop("optimizer_state_dict", None)
        state_dicts.append(state_dict)

    if not ZERO_STAGE in state_dicts[0][OPTIMIZER_STATE_DICT]:
        raise ValueError(f"{files[0]} is not a zero checkpoint")
    zero_stage = state_dicts[0][OPTIMIZER_STATE_DICT][ZERO_STAGE]
    world_size = state_dicts[0][OPTIMIZER_STATE_DICT][PARTITION_COUNT]

    # For ZeRO-2 each param group can have different partition_count as data parallelism for expert
    # parameters can be different from data parallelism for non-expert parameters. So we can just
    # use the max of the partition_count to get the dp world_size.

    if type(world_size) is list:
        world_size = max(world_size)

    if world_size != total_files:
        raise ValueError(
            f"Expected {world_size} of '*_optim_states.pt' under '{ds_checkpoint_dir}' but found {total_files} files. "
            "Possibly due to an overwrite of an old checkpoint, or a checkpoint didn't get saved by one or more processes."
        )

    # the groups are named differently in each stage
    if zero_stage <= 2:
        fp32_groups_key = SINGLE_PARTITION_OF_FP32_GROUPS
    elif zero_stage == 3:
        fp32_groups_key = FP32_FLAT_GROUPS
    else:
        raise ValueError(f"unknown zero stage {zero_stage}")

    if zero_stage <= 2:
        fp32_flat_groups = [state_dicts[i][OPTIMIZER_STATE_DICT][fp32_groups_key] for i in range(len(state_dicts))]
    elif zero_stage == 3:
        # if there is more than one param group, there will be multiple flattened tensors - one
        # flattened tensor per group - for simplicity merge them into a single tensor
        #
        # XXX: could make the script more memory efficient for when there are multiple groups - it
        # will require matching the sub-lists of param_shapes for each param group flattened tensor

        fp32_flat_groups = [
            torch.cat(state_dicts[i][OPTIMIZER_STATE_DICT][fp32_groups_key], 0) for i in range(len(state_dicts))
        ]

    return zero_stage, world_size, fp32_flat_groups
+
+
def _get_fp32_state_dict_from_zero_checkpoint(ds_checkpoint_dir, exclude_frozen_parameters):
    """
    Reconstruct the fp32 state_dict from the DeepSpeed shard files found in
    ``ds_checkpoint_dir`` (the folder containing the optimizer files),
    dispatching on the detected ZeRO stage.
    """
    print(f"Processing zero checkpoint '{ds_checkpoint_dir}'")

    optim_files = get_optim_files(ds_checkpoint_dir)
    zero_stage, world_size, fp32_flat_groups = parse_optim_states(optim_files, ds_checkpoint_dir)
    print(f"Detected checkpoint of type zero stage {zero_stage}, world_size: {world_size}")

    zero_model_states = parse_model_states(get_model_state_files(ds_checkpoint_dir))
    print(f'Parsing checkpoint created by deepspeed=={zero_model_states[0].ds_version}')

    # stages 1/2 and stage 3 lay out the flat fp32 partitions differently
    if zero_stage <= 2:
        return _get_fp32_state_dict_from_zero2_checkpoint(world_size, fp32_flat_groups, zero_model_states,
                                                          exclude_frozen_parameters)
    elif zero_stage == 3:
        return _get_fp32_state_dict_from_zero3_checkpoint(world_size, fp32_flat_groups, zero_model_states,
                                                          exclude_frozen_parameters)
+
+
def _zero2_merge_frozen_params(state_dict, zero_model_states):
    """Copy frozen (untrained) parameters into ``state_dict``.

    No-op when the checkpoint has no frozen params. Rank 0's fragments are
    used as-is (no concatenation), i.e. they appear to hold the complete
    tensors under ZeRO-1/2.
    """
    if zero_model_states[0].frozen_param_shapes is None or len(zero_model_states[0].frozen_param_shapes) == 0:
        return

    frozen_param_shapes = zero_model_states[0].frozen_param_shapes
    frozen_param_fragments = zero_model_states[0].frozen_param_fragments

    if debug:
        num_elem = sum(s.numel() for s in frozen_param_shapes.values())
        print(f'rank 0: {FROZEN_PARAM_SHAPES}.numel = {num_elem}')

    wanted_params = len(frozen_param_shapes)
    wanted_numel = sum(s.numel() for s in frozen_param_shapes.values())
    avail_numel = sum([p.numel() for p in frozen_param_fragments.values()])
    print(f'Frozen params: Have {avail_numel} numels to process.')
    print(f'Frozen params: Need {wanted_numel} numels in {wanted_params} params')

    total_params = 0
    total_numel = 0
    for name, shape in frozen_param_shapes.items():
        total_params += 1
        unpartitioned_numel = shape.numel()
        total_numel += unpartitioned_numel

        # the fragment already carries the full tensor — store it directly
        state_dict[name] = frozen_param_fragments[name]

        if debug:
            print(f"{name} full shape: {shape} unpartitioned numel {unpartitioned_numel} ")

    print(f"Reconstructed Frozen fp32 state dict with {total_params} params {total_numel} elements")
+
+
+def _has_callable(obj, fn):
+ attr = getattr(obj, fn, None)
+ return callable(attr)
+
+
def _zero2_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states):
    """Slice the flat fp32 master-weight partitions back into individual
    trainable parameters and store them in ``state_dict``.

    For each param group, the per-rank flat partitions are concatenated into
    one flat vector which is then consumed sequentially in ``param_shapes``
    order; a final sanity check verifies — up to nccl alignment padding —
    that the whole vector was consumed.
    """
    param_shapes = zero_model_states[0].param_shapes

    # Reconstruction protocol:
    #
    # XXX: document this

    if debug:
        for i in range(world_size):
            for j in range(len(fp32_flat_groups[0])):
                print(f"{FP32_FLAT_GROUPS}[{i}][{j}].shape={fp32_flat_groups[i][j].shape}")

    # XXX: memory usage doubles here (zero2)
    num_param_groups = len(fp32_flat_groups[0])
    merged_single_partition_of_fp32_groups = []
    for i in range(num_param_groups):
        merged_partitions = [sd[i] for sd in fp32_flat_groups]
        full_single_fp32_vector = torch.cat(merged_partitions, 0)
        merged_single_partition_of_fp32_groups.append(full_single_fp32_vector)
    avail_numel = sum(
        [full_single_fp32_vector.numel() for full_single_fp32_vector in merged_single_partition_of_fp32_groups])

    if debug:
        wanted_params = sum([len(shapes) for shapes in param_shapes])
        wanted_numel = sum([sum(shape.numel() for shape in shapes.values()) for shapes in param_shapes])
        # not asserting if there is a mismatch due to possible padding
        print(f"Have {avail_numel} numels to process.")
        print(f"Need {wanted_numel} numels in {wanted_params} params.")

    # params
    # XXX: for huge models that can't fit into the host's RAM we will have to recode this to support
    # out-of-core computing solution
    total_numel = 0
    total_params = 0
    for shapes, full_single_fp32_vector in zip(param_shapes, merged_single_partition_of_fp32_groups):
        offset = 0
        avail_numel = full_single_fp32_vector.numel()
        for name, shape in shapes.items():

            # shape may be a torch.Size (has .numel) or a plain sequence of dims
            unpartitioned_numel = shape.numel() if _has_callable(shape, 'numel') else math.prod(shape)
            total_numel += unpartitioned_numel
            total_params += 1

            if debug:
                print(f"{name} full shape: {shape} unpartitioned numel {unpartitioned_numel} ")
            state_dict[name] = full_single_fp32_vector.narrow(0, offset, unpartitioned_numel).view(shape)
            offset += unpartitioned_numel

        # Z2 started to align to 2*world_size to improve nccl performance. Therefore both offset and
        # avail_numel can differ by anywhere between 0..2*world_size. Due to two unrelated complex
        # paddings performed in the code it's almost impossible to predict the exact numbers w/o the
        # live optimizer object, so we are checking that the numbers are within the right range
        align_to = 2 * world_size

        def zero2_align(x):
            return align_to * math.ceil(x / align_to)

        if debug:
            print(f"original offset={offset}, avail_numel={avail_numel}")

        offset = zero2_align(offset)
        avail_numel = zero2_align(avail_numel)

        if debug:
            print(f"aligned offset={offset}, avail_numel={avail_numel}")

        # Sanity check
        if offset != avail_numel:
            raise ValueError(f"consumed {offset} numels out of {avail_numel} - something is wrong")

    print(f"Reconstructed fp32 state dict with {total_params} params {total_numel} elements")
+
+
def _get_fp32_state_dict_from_zero2_checkpoint(world_size, fp32_flat_groups, zero_model_states,
                                               exclude_frozen_parameters):
    """Assemble the consolidated fp32 state_dict for a ZeRO-1/2 checkpoint."""
    state_dict = OrderedDict()

    # buffers go in first — rank 0 holds them whole
    buffers = zero_model_states[0].buffers
    state_dict.update(buffers)
    if debug:
        print(f"added {len(buffers)} buffers")

    if not exclude_frozen_parameters:
        _zero2_merge_frozen_params(state_dict, zero_model_states)

    _zero2_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states)

    # re-alias tied parameters so each shared name resolves to the same tensor
    for alias, source in zero_model_states[0].shared_params:
        if source in state_dict:
            state_dict[alias] = state_dict[source]

    return state_dict
+
+
def zero3_partitioned_param_info(unpartitioned_numel, world_size):
    """Return ``(partitioned_numel, padding_numel)`` for a parameter of
    ``unpartitioned_numel`` elements sharded evenly across ``world_size`` ranks."""
    leftover = unpartitioned_numel % world_size
    padding_numel = world_size - leftover if leftover else 0
    partitioned_numel = math.ceil(unpartitioned_numel / world_size)
    return partitioned_numel, padding_numel
+
+
def _zero3_merge_frozen_params(state_dict, world_size, zero_model_states):
    """Reassemble frozen (untrained) parameters for a ZeRO-3 checkpoint.

    Under ZeRO-3 frozen params are partitioned too: each param is rebuilt by
    concatenating every rank's fragment and trimming the alignment padding.
    No-op when the checkpoint has no frozen params.
    """
    if zero_model_states[0].frozen_param_shapes is None or len(zero_model_states[0].frozen_param_shapes) == 0:
        return

    if debug:
        for i in range(world_size):
            num_elem = sum(s.numel() for s in zero_model_states[i].frozen_param_fragments.values())
            print(f'rank {i}: {FROZEN_PARAM_SHAPES}.numel = {num_elem}')

    frozen_param_shapes = zero_model_states[0].frozen_param_shapes
    wanted_params = len(frozen_param_shapes)
    wanted_numel = sum(s.numel() for s in frozen_param_shapes.values())
    avail_numel = sum([p.numel() for p in zero_model_states[0].frozen_param_fragments.values()]) * world_size
    print(f'Frozen params: Have {avail_numel} numels to process.')
    print(f'Frozen params: Need {wanted_numel} numels in {wanted_params} params')

    total_params = 0
    total_numel = 0
    for name, shape in zero_model_states[0].frozen_param_shapes.items():
        total_params += 1
        unpartitioned_numel = shape.numel()
        total_numel += unpartitioned_numel

        # stitch this param back together from every rank's fragment, then drop padding
        param_frags = tuple(model_state.frozen_param_fragments[name] for model_state in zero_model_states)
        state_dict[name] = torch.cat(param_frags, 0).narrow(0, 0, unpartitioned_numel).view(shape)

        partitioned_numel, partitioned_padding_numel = zero3_partitioned_param_info(unpartitioned_numel, world_size)

        if debug:
            print(
                f"Frozen params: {total_params} {name} full shape: {shape} partition0 numel={partitioned_numel} partitioned_padding_numel={partitioned_padding_numel}"
            )

    print(f"Reconstructed Frozen fp32 state dict with {total_params} params {total_numel} elements")
+
+
def _zero3_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states):
    """Reassemble trainable parameters from the per-rank ZeRO-3 flat fp32 tensors.

    Every rank holds one flat fp32 tensor; each param is reconstructed by
    taking the same offset-window from every rank's tensor, concatenating the
    pieces, trimming alignment padding, and reshaping to the recorded shape.
    """
    param_shapes = zero_model_states[0].param_shapes
    # NOTE(review): avail_numel is recomputed identically below — this first
    # assignment is redundant but kept as-is
    avail_numel = fp32_flat_groups[0].numel() * world_size
    # Reconstruction protocol: For zero3 we need to zip the partitions together at boundary of each
    # param, re-consolidating each param, while dealing with padding if any

    # merge list of dicts, preserving order
    param_shapes = {k: v for d in param_shapes for k, v in d.items()}

    if debug:
        for i in range(world_size):
            print(f"{FP32_FLAT_GROUPS}[{i}].shape={fp32_flat_groups[i].shape}")

    wanted_params = len(param_shapes)
    wanted_numel = sum(shape.numel() for shape in param_shapes.values())
    # not asserting if there is a mismatch due to possible padding
    avail_numel = fp32_flat_groups[0].numel() * world_size
    print(f"Trainable params: Have {avail_numel} numels to process.")
    print(f"Trainable params: Need {wanted_numel} numels in {wanted_params} params.")

    # params
    # XXX: for huge models that can't fit into the host's RAM we will have to recode this to support
    # out-of-core computing solution
    offset = 0
    total_numel = 0
    total_params = 0
    for name, shape in param_shapes.items():

        unpartitioned_numel = shape.numel()
        total_numel += unpartitioned_numel
        total_params += 1

        partitioned_numel, partitioned_padding_numel = zero3_partitioned_param_info(unpartitioned_numel, world_size)

        if debug:
            print(
                f"Trainable params: {total_params} {name} full shape: {shape} partition0 numel={partitioned_numel} partitioned_padding_numel={partitioned_padding_numel}"
            )

        # XXX: memory usage doubles here
        state_dict[name] = torch.cat(
            tuple(fp32_flat_groups[i].narrow(0, offset, partitioned_numel) for i in range(world_size)),
            0).narrow(0, 0, unpartitioned_numel).view(shape)
        offset += partitioned_numel

    # offset advanced by the per-rank window; scale back to total elements consumed
    offset *= world_size

    # Sanity check
    if offset != avail_numel:
        raise ValueError(f"consumed {offset} numels out of {avail_numel} - something is wrong")

    print(f"Reconstructed Trainable fp32 state dict with {total_params} params {total_numel} elements")
+
+
def _get_fp32_state_dict_from_zero3_checkpoint(world_size, fp32_flat_groups, zero_model_states,
                                               exclude_frozen_parameters):
    """Assemble the consolidated fp32 state_dict for a ZeRO-3 checkpoint."""
    state_dict = OrderedDict()

    # buffers go in first — rank 0 holds them whole
    buffers = zero_model_states[0].buffers
    state_dict.update(buffers)
    if debug:
        print(f"added {len(buffers)} buffers")

    if not exclude_frozen_parameters:
        _zero3_merge_frozen_params(state_dict, world_size, zero_model_states)

    _zero3_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states)

    # re-alias tied parameters so each shared name resolves to the same tensor
    for alias, source in zero_model_states[0].shared_params:
        if source in state_dict:
            state_dict[alias] = state_dict[source]

    return state_dict
+
+
def get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag=None, exclude_frozen_parameters=False):
    """
    Convert a ZeRO 2 or 3 checkpoint into a single fp32 consolidated state_dict
    that can be loaded with ``load_state_dict()`` and used for training without
    DeepSpeed, or shared with others — for example via a model hub.

    Args:
        - ``checkpoint_dir``: path to the desired checkpoint folder
        - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in 'latest' file. e.g., ``global_step14``
        - ``exclude_frozen_parameters``: exclude frozen parameters

    Returns:
        - pytorch ``state_dict``

    Note: the conversion happens entirely in CPU memory; if the host does not
    have enough free RAM, use the offline approach via the ``zero_to_fp32.py``
    script that is saved with the checkpoint instead.

    A typical usage might be ::

        from deepspeed.utils.zero_to_fp32 import get_fp32_state_dict_from_zero_checkpoint
        # do the training and checkpoint saving
        state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir) # already on cpu
        model = model.cpu() # move to cpu
        model.load_state_dict(state_dict)
        # submit to model hub or save the model to share with others

    Afterwards the ``model`` is no longer usable in the deepspeed context of
    the same application: ``model.load_state_dict(state_dict)`` removes all the
    deepspeed magic from it, so the deepspeed engine would have to be
    re-initialized.

    If you want it all done for you, use ``load_state_dict_from_zero_checkpoint`` instead.

    """
    if tag is None:
        # resolve the tag from the 'latest' pointer file that DeepSpeed writes
        latest_path = os.path.join(checkpoint_dir, 'latest')
        if not os.path.isfile(latest_path):
            raise ValueError(f"Unable to find 'latest' file at {latest_path}")
        with open(latest_path, 'r') as fd:
            tag = fd.read().strip()

    ds_checkpoint_dir = os.path.join(checkpoint_dir, tag)
    if not os.path.isdir(ds_checkpoint_dir):
        raise FileNotFoundError(f"Directory '{ds_checkpoint_dir}' doesn't exist")

    return _get_fp32_state_dict_from_zero_checkpoint(ds_checkpoint_dir, exclude_frozen_parameters)
+
+
+def convert_zero_checkpoint_to_fp32_state_dict(checkpoint_dir, output_file, tag=None, exclude_frozen_parameters=False):
+ """
+ Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated ``state_dict`` file that can be
+ loaded with ``torch.load(file)`` + ``load_state_dict()`` and used for training without DeepSpeed.
+
+ Args:
+ - ``checkpoint_dir``: path to the desired checkpoint folder. (one that contains the tag-folder, like ``global_step14``)
+ - ``output_file``: path to the pytorch fp32 state_dict output file (e.g. path/pytorch_model.bin)
+ - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in the file named ``latest`` in the checkpoint folder, e.g., ``global_step14``
+ - ``exclude_frozen_parameters``: exclude frozen parameters
+ """
+
+ state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag, exclude_frozen_parameters)
+ print(f"Saving fp32 state dict to {output_file}")
+ torch.save(state_dict, output_file)
+
+
+def load_state_dict_from_zero_checkpoint(model, checkpoint_dir, tag=None):
+ """
+ 1. Put the provided model to cpu
+ 2. Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated ``state_dict``
+ 3. Load it into the provided model
+
+ Args:
+ - ``model``: the model object to update
+ - ``checkpoint_dir``: path to the desired checkpoint folder. (one that contains the tag-folder, like ``global_step14``)
+ - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in the file named ``latest`` in the checkpoint folder, e.g., ``global_step14``
+
+ Returns:
+ - ``model``: modified model
+
+ Make sure you have plenty of CPU memory available before you call this function. If you don't
+ have enough use the ``zero_to_fp32.py`` utility to do the conversion. You will find it
+ conveniently placed for you in the checkpoint folder.
+
+ A typical usage might be ::
+
+ from deepspeed.utils.zero_to_fp32 import load_state_dict_from_zero_checkpoint
+ model = load_state_dict_from_zero_checkpoint(trainer.model, checkpoint_dir)
+ # submit to model hub or save the model to share with others
+
+ Note, that once this was run, the ``model`` will no longer be usable in the deepspeed context
+ of the same application. i.e. you will need to re-initialize the deepspeed engine, since
+ ``model.load_state_dict(state_dict)`` will remove all the deepspeed magic from it.
+
+ """
+ logger.info(f"Extracting fp32 weights")
+ state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag)
+
+ logger.info(f"Overwriting model with fp32 weights")
+ model = model.cpu()
+ model.load_state_dict(state_dict, strict=False)
+
+ return model
+
+
+if __name__ == "__main__":
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument("checkpoint_dir",
+ type=str,
+ help="path to the desired checkpoint folder, e.g., path/checkpoint-12")
+ parser.add_argument(
+ "output_file",
+ type=str,
+ help="path to the pytorch fp32 state_dict output file (e.g. path/checkpoint-12/pytorch_model.bin)")
+ parser.add_argument("-t",
+ "--tag",
+ type=str,
+ default=None,
+ help="checkpoint tag used as a unique identifier for checkpoint. e.g., global_step1")
+ parser.add_argument("--exclude_frozen_parameters", action='store_true', help="exclude frozen parameters")
+ parser.add_argument("-d", "--debug", action='store_true', help="enable debug")
+ args = parser.parse_args()
+
+ debug = args.debug
+
+ convert_zero_checkpoint_to_fp32_state_dict(args.checkpoint_dir,
+ args.output_file,
+ tag=args.tag,
+ exclude_frozen_parameters=args.exclude_frozen_parameters)
diff --git a/checkpoint-432/README.md b/checkpoint-432/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..1b184114a0c28ed3e4c082c18486736dc818166d
--- /dev/null
+++ b/checkpoint-432/README.md
@@ -0,0 +1,202 @@
+---
+base_model: meta-llama/Llama-3.3-70B-Instruct
+library_name: peft
+---
+
+# Model Card for Model ID
+
+
+
+
+
+## Model Details
+
+### Model Description
+
+
+
+
+
+- **Developed by:** [More Information Needed]
+- **Funded by [optional]:** [More Information Needed]
+- **Shared by [optional]:** [More Information Needed]
+- **Model type:** [More Information Needed]
+- **Language(s) (NLP):** [More Information Needed]
+- **License:** [More Information Needed]
+- **Finetuned from model [optional]:** [More Information Needed]
+
+### Model Sources [optional]
+
+
+
+- **Repository:** [More Information Needed]
+- **Paper [optional]:** [More Information Needed]
+- **Demo [optional]:** [More Information Needed]
+
+## Uses
+
+
+
+### Direct Use
+
+
+
+[More Information Needed]
+
+### Downstream Use [optional]
+
+
+
+[More Information Needed]
+
+### Out-of-Scope Use
+
+
+
+[More Information Needed]
+
+## Bias, Risks, and Limitations
+
+
+
+[More Information Needed]
+
+### Recommendations
+
+
+
+Users (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations.
+
+## How to Get Started with the Model
+
+Use the code below to get started with the model.
+
+[More Information Needed]
+
+## Training Details
+
+### Training Data
+
+
+
+[More Information Needed]
+
+### Training Procedure
+
+
+
+#### Preprocessing [optional]
+
+[More Information Needed]
+
+
+#### Training Hyperparameters
+
+- **Training regime:** [More Information Needed]
+
+#### Speeds, Sizes, Times [optional]
+
+
+
+[More Information Needed]
+
+## Evaluation
+
+
+
+### Testing Data, Factors & Metrics
+
+#### Testing Data
+
+
+
+[More Information Needed]
+
+#### Factors
+
+
+
+[More Information Needed]
+
+#### Metrics
+
+
+
+[More Information Needed]
+
+### Results
+
+[More Information Needed]
+
+#### Summary
+
+
+
+## Model Examination [optional]
+
+
+
+[More Information Needed]
+
+## Environmental Impact
+
+
+
+Carbon emissions can be estimated using the [Machine Learning Impact calculator](https://mlco2.github.io/impact#compute) presented in [Lacoste et al. (2019)](https://arxiv.org/abs/1910.09700).
+
+- **Hardware Type:** [More Information Needed]
+- **Hours used:** [More Information Needed]
+- **Cloud Provider:** [More Information Needed]
+- **Compute Region:** [More Information Needed]
+- **Carbon Emitted:** [More Information Needed]
+
+## Technical Specifications [optional]
+
+### Model Architecture and Objective
+
+[More Information Needed]
+
+### Compute Infrastructure
+
+[More Information Needed]
+
+#### Hardware
+
+[More Information Needed]
+
+#### Software
+
+[More Information Needed]
+
+## Citation [optional]
+
+
+
+**BibTeX:**
+
+[More Information Needed]
+
+**APA:**
+
+[More Information Needed]
+
+## Glossary [optional]
+
+
+
+[More Information Needed]
+
+## More Information [optional]
+
+[More Information Needed]
+
+## Model Card Authors [optional]
+
+[More Information Needed]
+
+## Model Card Contact
+
+[More Information Needed]
+### Framework versions
+
+- PEFT 0.15.0
\ No newline at end of file
diff --git a/checkpoint-432/adapter_config.json b/checkpoint-432/adapter_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..538b369b5129fb972c782e6ccfd589054540e1af
--- /dev/null
+++ b/checkpoint-432/adapter_config.json
@@ -0,0 +1,42 @@
+{
+ "alpha_pattern": {},
+ "auto_mapping": null,
+ "base_model_name_or_path": "meta-llama/Llama-3.3-70B-Instruct",
+ "bias": "none",
+ "corda_config": null,
+ "eva_config": null,
+ "exclude_modules": null,
+ "fan_in_fan_out": null,
+ "inference_mode": true,
+ "init_lora_weights": true,
+ "layer_replication": null,
+ "layers_pattern": null,
+ "layers_to_transform": null,
+ "loftq_config": {},
+ "lora_alpha": 512,
+ "lora_bias": false,
+ "lora_dropout": 0.05,
+ "megatron_config": null,
+ "megatron_core": "megatron.core",
+ "modules_to_save": [
+ "embed_tokens",
+ "lm_head"
+ ],
+ "peft_type": "LORA",
+ "r": 256,
+ "rank_pattern": {},
+ "revision": null,
+ "target_modules": [
+ "o_proj",
+ "q_proj",
+ "v_proj",
+ "k_proj",
+ "up_proj",
+ "down_proj",
+ "gate_proj"
+ ],
+ "task_type": "CAUSAL_LM",
+ "trainable_token_indices": null,
+ "use_dora": false,
+ "use_rslora": false
+}
\ No newline at end of file
diff --git a/checkpoint-432/adapter_model.safetensors b/checkpoint-432/adapter_model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..d1b4b5e90cad3f475a12c76299351098820ed1c8
--- /dev/null
+++ b/checkpoint-432/adapter_model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:88e1a2c48f01ab33b45cd092a26df5e1d3846df9491f36c1d0732fad8129233e
+size 10829849744
diff --git a/checkpoint-432/global_step430/bf16_zero_pp_rank_0_mp_rank_00_optim_states.pt b/checkpoint-432/global_step430/bf16_zero_pp_rank_0_mp_rank_00_optim_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..d6c6f9030f5c40ae81c5d70f3b64130dd01edc15
--- /dev/null
+++ b/checkpoint-432/global_step430/bf16_zero_pp_rank_0_mp_rank_00_optim_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3d36271382ca300c43470675febd38dfe5eaf66796dd1dbecb0917c67b249e65
+size 21659418140
diff --git a/checkpoint-432/global_step430/bf16_zero_pp_rank_1_mp_rank_00_optim_states.pt b/checkpoint-432/global_step430/bf16_zero_pp_rank_1_mp_rank_00_optim_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..27f480f873e4485ac5948da1df111b811f385d04
--- /dev/null
+++ b/checkpoint-432/global_step430/bf16_zero_pp_rank_1_mp_rank_00_optim_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3c39c34a2302f4efb1c60173076a2a6d7f78ae152509bcc8efc3cefabd492bf4
+size 21659457372
diff --git a/checkpoint-432/global_step430/bf16_zero_pp_rank_2_mp_rank_00_optim_states.pt b/checkpoint-432/global_step430/bf16_zero_pp_rank_2_mp_rank_00_optim_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..fe0c5f4555fb4f0ddf30585284e28612e5dc8728
--- /dev/null
+++ b/checkpoint-432/global_step430/bf16_zero_pp_rank_2_mp_rank_00_optim_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:352ae7a3722814752164abf69051743990eff566c48baae70009e91fee6c2f1b
+size 21659417820
diff --git a/checkpoint-432/global_step430/mp_rank_00_model_states.pt b/checkpoint-432/global_step430/mp_rank_00_model_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..1c9a1faf22012a85c5e4450c950b534ceb2625c1
--- /dev/null
+++ b/checkpoint-432/global_step430/mp_rank_00_model_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1ed8b2e902f8bb55e73fc1e697f8e35a6b7871da10e08a02a6a534956103d06c
+size 11918643933
diff --git a/checkpoint-432/latest b/checkpoint-432/latest
new file mode 100644
index 0000000000000000000000000000000000000000..663101ca7341464e37e1c0589abf9fa0d9da76d2
--- /dev/null
+++ b/checkpoint-432/latest
@@ -0,0 +1 @@
+global_step430
\ No newline at end of file
diff --git a/checkpoint-432/rng_state_0.pth b/checkpoint-432/rng_state_0.pth
new file mode 100644
index 0000000000000000000000000000000000000000..f89c17717082a10a358616ef1684ddc42c422584
--- /dev/null
+++ b/checkpoint-432/rng_state_0.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d05a8e3632311ff06ed037477f2aabf50a6c974e4088d5aa40f4be80fcaf90d2
+size 14768
diff --git a/checkpoint-432/rng_state_1.pth b/checkpoint-432/rng_state_1.pth
new file mode 100644
index 0000000000000000000000000000000000000000..b5dd6385d77dd5fab0c03e47ac5cc562ec38b390
--- /dev/null
+++ b/checkpoint-432/rng_state_1.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9c69dfe7cf59edfdedcd9f777f634c6c46ddff72d4aba9ecea270a59914d17ec
+size 14768
diff --git a/checkpoint-432/rng_state_2.pth b/checkpoint-432/rng_state_2.pth
new file mode 100644
index 0000000000000000000000000000000000000000..cb914187808e140e44e8934c88fb9fc9a015b18e
--- /dev/null
+++ b/checkpoint-432/rng_state_2.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:461988e7574561d3751de56d2564f596ac3d1e4312dbceab6481f89a574c5d8b
+size 14768
diff --git a/checkpoint-432/scheduler.pt b/checkpoint-432/scheduler.pt
new file mode 100644
index 0000000000000000000000000000000000000000..de62fc323d19a098639523b3d978824d194046f7
--- /dev/null
+++ b/checkpoint-432/scheduler.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:111ca90245be96ef90c53bd1dd14d193f8b0438ebf2268676c1e7ceb5e4eb4c1
+size 1064
diff --git a/checkpoint-432/special_tokens_map.json b/checkpoint-432/special_tokens_map.json
new file mode 100644
index 0000000000000000000000000000000000000000..278b7f0f84be865c4687700ee7b3c63d89a51e18
--- /dev/null
+++ b/checkpoint-432/special_tokens_map.json
@@ -0,0 +1,23 @@
+{
+ "bos_token": {
+ "content": "<|begin_of_text|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "<|eot_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": {
+ "content": "<|end_of_text|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+}
diff --git a/checkpoint-432/tokenizer.json b/checkpoint-432/tokenizer.json
new file mode 100644
index 0000000000000000000000000000000000000000..1c1d8d5c9024994f1d3b00f9662b8dd89ca13cf2
--- /dev/null
+++ b/checkpoint-432/tokenizer.json
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6b9e4e7fb171f92fd137b777cc2714bf87d11576700a1dcd7a399e7bbe39537b
+size 17209920
diff --git a/checkpoint-432/tokenizer_config.json b/checkpoint-432/tokenizer_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..ca91a2ef55f4239a7af81d7c9abb05f53621a07b
--- /dev/null
+++ b/checkpoint-432/tokenizer_config.json
@@ -0,0 +1,2064 @@
+{
+ "added_tokens_decoder": {
+ "128000": {
+ "content": "<|begin_of_text|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128001": {
+ "content": "<|end_of_text|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128002": {
+ "content": "<|reserved_special_token_0|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128003": {
+ "content": "<|reserved_special_token_1|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128004": {
+ "content": "<|finetune_right_pad_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128005": {
+ "content": "<|reserved_special_token_2|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128006": {
+ "content": "<|start_header_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128007": {
+ "content": "<|end_header_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128008": {
+ "content": "<|eom_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128009": {
+ "content": "<|eot_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128010": {
+ "content": "<|python_tag|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128011": {
+ "content": "<|reserved_special_token_3|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128012": {
+ "content": "<|reserved_special_token_4|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128013": {
+ "content": "<|reserved_special_token_5|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128014": {
+ "content": "<|reserved_special_token_6|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128015": {
+ "content": "<|reserved_special_token_7|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128016": {
+ "content": "<|reserved_special_token_8|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128017": {
+ "content": "<|reserved_special_token_9|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128018": {
+ "content": "<|reserved_special_token_10|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128019": {
+ "content": "<|reserved_special_token_11|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128020": {
+ "content": "<|reserved_special_token_12|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128021": {
+ "content": "<|reserved_special_token_13|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128022": {
+ "content": "<|reserved_special_token_14|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128023": {
+ "content": "<|reserved_special_token_15|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128024": {
+ "content": "<|reserved_special_token_16|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128025": {
+ "content": "<|reserved_special_token_17|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128026": {
+ "content": "<|reserved_special_token_18|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128027": {
+ "content": "<|reserved_special_token_19|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128028": {
+ "content": "<|reserved_special_token_20|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128029": {
+ "content": "<|reserved_special_token_21|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128030": {
+ "content": "<|reserved_special_token_22|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128031": {
+ "content": "<|reserved_special_token_23|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128032": {
+ "content": "<|reserved_special_token_24|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128033": {
+ "content": "<|reserved_special_token_25|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128034": {
+ "content": "<|reserved_special_token_26|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128035": {
+ "content": "<|reserved_special_token_27|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128036": {
+ "content": "<|reserved_special_token_28|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128037": {
+ "content": "<|reserved_special_token_29|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128038": {
+ "content": "<|reserved_special_token_30|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128039": {
+ "content": "<|reserved_special_token_31|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128040": {
+ "content": "<|reserved_special_token_32|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128041": {
+ "content": "<|reserved_special_token_33|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128042": {
+ "content": "<|reserved_special_token_34|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128043": {
+ "content": "<|reserved_special_token_35|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128044": {
+ "content": "<|reserved_special_token_36|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128045": {
+ "content": "<|reserved_special_token_37|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128046": {
+ "content": "<|reserved_special_token_38|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128047": {
+ "content": "<|reserved_special_token_39|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128048": {
+ "content": "<|reserved_special_token_40|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128049": {
+ "content": "<|reserved_special_token_41|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128050": {
+ "content": "<|reserved_special_token_42|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128051": {
+ "content": "<|reserved_special_token_43|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128052": {
+ "content": "<|reserved_special_token_44|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128053": {
+ "content": "<|reserved_special_token_45|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128054": {
+ "content": "<|reserved_special_token_46|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128055": {
+ "content": "<|reserved_special_token_47|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128056": {
+ "content": "<|reserved_special_token_48|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128057": {
+ "content": "<|reserved_special_token_49|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128058": {
+ "content": "<|reserved_special_token_50|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128059": {
+ "content": "<|reserved_special_token_51|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128060": {
+ "content": "<|reserved_special_token_52|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128061": {
+ "content": "<|reserved_special_token_53|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128062": {
+ "content": "<|reserved_special_token_54|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128063": {
+ "content": "<|reserved_special_token_55|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128064": {
+ "content": "<|reserved_special_token_56|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128065": {
+ "content": "<|reserved_special_token_57|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128066": {
+ "content": "<|reserved_special_token_58|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128067": {
+ "content": "<|reserved_special_token_59|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128068": {
+ "content": "<|reserved_special_token_60|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128069": {
+ "content": "<|reserved_special_token_61|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128070": {
+ "content": "<|reserved_special_token_62|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128071": {
+ "content": "<|reserved_special_token_63|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128072": {
+ "content": "<|reserved_special_token_64|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128073": {
+ "content": "<|reserved_special_token_65|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128074": {
+ "content": "<|reserved_special_token_66|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128075": {
+ "content": "<|reserved_special_token_67|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128076": {
+ "content": "<|reserved_special_token_68|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128077": {
+ "content": "<|reserved_special_token_69|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128078": {
+ "content": "<|reserved_special_token_70|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128079": {
+ "content": "<|reserved_special_token_71|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128080": {
+ "content": "<|reserved_special_token_72|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128081": {
+ "content": "<|reserved_special_token_73|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128082": {
+ "content": "<|reserved_special_token_74|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128083": {
+ "content": "<|reserved_special_token_75|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128084": {
+ "content": "<|reserved_special_token_76|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128085": {
+ "content": "<|reserved_special_token_77|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128086": {
+ "content": "<|reserved_special_token_78|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128087": {
+ "content": "<|reserved_special_token_79|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128088": {
+ "content": "<|reserved_special_token_80|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128089": {
+ "content": "<|reserved_special_token_81|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128090": {
+ "content": "<|reserved_special_token_82|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128091": {
+ "content": "<|reserved_special_token_83|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128092": {
+ "content": "<|reserved_special_token_84|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128093": {
+ "content": "<|reserved_special_token_85|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128094": {
+ "content": "<|reserved_special_token_86|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128095": {
+ "content": "<|reserved_special_token_87|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128096": {
+ "content": "<|reserved_special_token_88|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128097": {
+ "content": "<|reserved_special_token_89|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128098": {
+ "content": "<|reserved_special_token_90|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128099": {
+ "content": "<|reserved_special_token_91|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128100": {
+ "content": "<|reserved_special_token_92|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128101": {
+ "content": "<|reserved_special_token_93|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128102": {
+ "content": "<|reserved_special_token_94|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128103": {
+ "content": "<|reserved_special_token_95|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128104": {
+ "content": "<|reserved_special_token_96|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128105": {
+ "content": "<|reserved_special_token_97|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128106": {
+ "content": "<|reserved_special_token_98|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128107": {
+ "content": "<|reserved_special_token_99|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128108": {
+ "content": "<|reserved_special_token_100|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128109": {
+ "content": "<|reserved_special_token_101|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128110": {
+ "content": "<|reserved_special_token_102|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128111": {
+ "content": "<|reserved_special_token_103|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128112": {
+ "content": "<|reserved_special_token_104|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128113": {
+ "content": "<|reserved_special_token_105|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128114": {
+ "content": "<|reserved_special_token_106|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128115": {
+ "content": "<|reserved_special_token_107|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128116": {
+ "content": "<|reserved_special_token_108|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128117": {
+ "content": "<|reserved_special_token_109|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128118": {
+ "content": "<|reserved_special_token_110|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128119": {
+ "content": "<|reserved_special_token_111|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128120": {
+ "content": "<|reserved_special_token_112|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128121": {
+ "content": "<|reserved_special_token_113|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128122": {
+ "content": "<|reserved_special_token_114|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128123": {
+ "content": "<|reserved_special_token_115|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128124": {
+ "content": "<|reserved_special_token_116|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128125": {
+ "content": "<|reserved_special_token_117|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128126": {
+ "content": "<|reserved_special_token_118|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128127": {
+ "content": "<|reserved_special_token_119|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128128": {
+ "content": "<|reserved_special_token_120|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128129": {
+ "content": "<|reserved_special_token_121|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128130": {
+ "content": "<|reserved_special_token_122|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128131": {
+ "content": "<|reserved_special_token_123|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128132": {
+ "content": "<|reserved_special_token_124|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128133": {
+ "content": "<|reserved_special_token_125|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128134": {
+ "content": "<|reserved_special_token_126|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128135": {
+ "content": "<|reserved_special_token_127|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128136": {
+ "content": "<|reserved_special_token_128|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128137": {
+ "content": "<|reserved_special_token_129|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128138": {
+ "content": "<|reserved_special_token_130|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128139": {
+ "content": "<|reserved_special_token_131|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128140": {
+ "content": "<|reserved_special_token_132|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128141": {
+ "content": "<|reserved_special_token_133|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128142": {
+ "content": "<|reserved_special_token_134|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128143": {
+ "content": "<|reserved_special_token_135|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128144": {
+ "content": "<|reserved_special_token_136|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128145": {
+ "content": "<|reserved_special_token_137|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128146": {
+ "content": "<|reserved_special_token_138|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128147": {
+ "content": "<|reserved_special_token_139|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128148": {
+ "content": "<|reserved_special_token_140|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128149": {
+ "content": "<|reserved_special_token_141|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128150": {
+ "content": "<|reserved_special_token_142|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128151": {
+ "content": "<|reserved_special_token_143|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128152": {
+ "content": "<|reserved_special_token_144|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128153": {
+ "content": "<|reserved_special_token_145|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128154": {
+ "content": "<|reserved_special_token_146|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128155": {
+ "content": "<|reserved_special_token_147|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128156": {
+ "content": "<|reserved_special_token_148|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128157": {
+ "content": "<|reserved_special_token_149|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128158": {
+ "content": "<|reserved_special_token_150|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128159": {
+ "content": "<|reserved_special_token_151|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128160": {
+ "content": "<|reserved_special_token_152|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128161": {
+ "content": "<|reserved_special_token_153|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128162": {
+ "content": "<|reserved_special_token_154|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128163": {
+ "content": "<|reserved_special_token_155|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128164": {
+ "content": "<|reserved_special_token_156|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128165": {
+ "content": "<|reserved_special_token_157|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128166": {
+ "content": "<|reserved_special_token_158|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128167": {
+ "content": "<|reserved_special_token_159|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128168": {
+ "content": "<|reserved_special_token_160|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128169": {
+ "content": "<|reserved_special_token_161|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128170": {
+ "content": "<|reserved_special_token_162|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128171": {
+ "content": "<|reserved_special_token_163|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128172": {
+ "content": "<|reserved_special_token_164|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128173": {
+ "content": "<|reserved_special_token_165|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128174": {
+ "content": "<|reserved_special_token_166|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128175": {
+ "content": "<|reserved_special_token_167|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128176": {
+ "content": "<|reserved_special_token_168|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128177": {
+ "content": "<|reserved_special_token_169|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128178": {
+ "content": "<|reserved_special_token_170|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128179": {
+ "content": "<|reserved_special_token_171|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128180": {
+ "content": "<|reserved_special_token_172|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128181": {
+ "content": "<|reserved_special_token_173|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128182": {
+ "content": "<|reserved_special_token_174|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128183": {
+ "content": "<|reserved_special_token_175|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128184": {
+ "content": "<|reserved_special_token_176|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128185": {
+ "content": "<|reserved_special_token_177|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128186": {
+ "content": "<|reserved_special_token_178|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128187": {
+ "content": "<|reserved_special_token_179|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128188": {
+ "content": "<|reserved_special_token_180|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128189": {
+ "content": "<|reserved_special_token_181|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128190": {
+ "content": "<|reserved_special_token_182|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128191": {
+ "content": "<|reserved_special_token_183|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128192": {
+ "content": "<|reserved_special_token_184|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128193": {
+ "content": "<|reserved_special_token_185|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128194": {
+ "content": "<|reserved_special_token_186|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128195": {
+ "content": "<|reserved_special_token_187|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128196": {
+ "content": "<|reserved_special_token_188|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128197": {
+ "content": "<|reserved_special_token_189|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128198": {
+ "content": "<|reserved_special_token_190|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128199": {
+ "content": "<|reserved_special_token_191|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128200": {
+ "content": "<|reserved_special_token_192|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128201": {
+ "content": "<|reserved_special_token_193|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128202": {
+ "content": "<|reserved_special_token_194|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128203": {
+ "content": "<|reserved_special_token_195|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128204": {
+ "content": "<|reserved_special_token_196|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128205": {
+ "content": "<|reserved_special_token_197|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128206": {
+ "content": "<|reserved_special_token_198|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128207": {
+ "content": "<|reserved_special_token_199|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128208": {
+ "content": "<|reserved_special_token_200|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128209": {
+ "content": "<|reserved_special_token_201|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128210": {
+ "content": "<|reserved_special_token_202|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128211": {
+ "content": "<|reserved_special_token_203|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128212": {
+ "content": "<|reserved_special_token_204|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128213": {
+ "content": "<|reserved_special_token_205|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128214": {
+ "content": "<|reserved_special_token_206|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128215": {
+ "content": "<|reserved_special_token_207|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128216": {
+ "content": "<|reserved_special_token_208|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128217": {
+ "content": "<|reserved_special_token_209|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128218": {
+ "content": "<|reserved_special_token_210|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128219": {
+ "content": "<|reserved_special_token_211|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128220": {
+ "content": "<|reserved_special_token_212|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128221": {
+ "content": "<|reserved_special_token_213|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128222": {
+ "content": "<|reserved_special_token_214|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128223": {
+ "content": "<|reserved_special_token_215|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128224": {
+ "content": "<|reserved_special_token_216|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128225": {
+ "content": "<|reserved_special_token_217|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128226": {
+ "content": "<|reserved_special_token_218|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128227": {
+ "content": "<|reserved_special_token_219|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128228": {
+ "content": "<|reserved_special_token_220|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128229": {
+ "content": "<|reserved_special_token_221|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128230": {
+ "content": "<|reserved_special_token_222|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128231": {
+ "content": "<|reserved_special_token_223|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128232": {
+ "content": "<|reserved_special_token_224|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128233": {
+ "content": "<|reserved_special_token_225|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128234": {
+ "content": "<|reserved_special_token_226|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128235": {
+ "content": "<|reserved_special_token_227|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128236": {
+ "content": "<|reserved_special_token_228|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128237": {
+ "content": "<|reserved_special_token_229|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128238": {
+ "content": "<|reserved_special_token_230|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128239": {
+ "content": "<|reserved_special_token_231|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128240": {
+ "content": "<|reserved_special_token_232|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128241": {
+ "content": "<|reserved_special_token_233|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128242": {
+ "content": "<|reserved_special_token_234|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128243": {
+ "content": "<|reserved_special_token_235|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128244": {
+ "content": "<|reserved_special_token_236|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128245": {
+ "content": "<|reserved_special_token_237|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128246": {
+ "content": "<|reserved_special_token_238|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128247": {
+ "content": "<|reserved_special_token_239|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128248": {
+ "content": "<|reserved_special_token_240|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128249": {
+ "content": "<|reserved_special_token_241|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128250": {
+ "content": "<|reserved_special_token_242|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128251": {
+ "content": "<|reserved_special_token_243|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128252": {
+ "content": "<|reserved_special_token_244|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128253": {
+ "content": "<|reserved_special_token_245|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128254": {
+ "content": "<|reserved_special_token_246|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128255": {
+ "content": "<|reserved_special_token_247|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ }
+ },
+ "bos_token": "<|begin_of_text|>",
+ "chat_template": "{{- bos_token }}\n{%- if custom_tools is defined %}\n {%- set tools = custom_tools %}\n{%- endif %}\n{%- if not tools_in_user_message is defined %}\n {%- set tools_in_user_message = true %}\n{%- endif %}\n{%- if not date_string is defined %}\n {%- set date_string = \"26 Jul 2024\" %}\n{%- endif %}\n{%- if not tools is defined %}\n {%- set tools = none %}\n{%- endif %}\n\n{#- This block extracts the system message, so we can slot it into the right place. #}\n{%- if messages[0]['role'] == 'system' %}\n {%- set system_message = messages[0]['content']|trim %}\n {%- set messages = messages[1:] %}\n{%- else %}\n {%- set system_message = \"\" %}\n{%- endif %}\n\n{#- System message + builtin tools #}\n{{- \"<|start_header_id|>system<|end_header_id|>\\n\\n\" }}\n{%- if builtin_tools is defined or tools is not none %}\n {{- \"Environment: ipython\\n\" }}\n{%- endif %}\n{%- if builtin_tools is defined %}\n {{- \"Tools: \" + builtin_tools | reject('equalto', 'code_interpreter') | join(\", \") + \"\\n\\n\"}}\n{%- endif %}\n{{- \"Cutting Knowledge Date: December 2023\\n\" }}\n{{- \"Today Date: \" + date_string + \"\\n\\n\" }}\n{%- if tools is not none and not tools_in_user_message %}\n {{- \"You have access to the following functions. To call a function, please respond with JSON for a function call.\" }}\n {{- 'Respond in the format {\"name\": function name, \"parameters\": dictionary of argument name and its value}.' 
}}\n {{- \"Do not use variables.\\n\\n\" }}\n {%- for t in tools %}\n {{- t | tojson(indent=4) }}\n {{- \"\\n\\n\" }}\n {%- endfor %}\n{%- endif %}\n{{- system_message }}\n{{- \"<|eot_id|>\" }}\n\n{#- Custom tools are passed in a user message with some extra guidance #}\n{%- if tools_in_user_message and not tools is none %}\n {#- Extract the first user message so we can plug it in here #}\n {%- if messages | length != 0 %}\n {%- set first_user_message = messages[0]['content']|trim %}\n {%- set messages = messages[1:] %}\n {%- else %}\n {{- raise_exception(\"Cannot put tools in the first user message when there's no first user message!\") }}\n{%- endif %}\n {{- '<|start_header_id|>user<|end_header_id|>\\n\\n' -}}\n {{- \"Given the following functions, please respond with a JSON for a function call \" }}\n {{- \"with its proper arguments that best answers the given prompt.\\n\\n\" }}\n {{- 'Respond in the format {\"name\": function name, \"parameters\": dictionary of argument name and its value}.' 
}}\n {{- \"Do not use variables.\\n\\n\" }}\n {%- for t in tools %}\n {{- t | tojson(indent=4) }}\n {{- \"\\n\\n\" }}\n {%- endfor %}\n {{- first_user_message + \"<|eot_id|>\"}}\n{%- endif %}\n\n{%- for message in messages %}\n {%- if not (message.role == 'ipython' or message.role == 'tool' or 'tool_calls' in message) %}\n {{- '<|start_header_id|>' + message['role'] + '<|end_header_id|>\\n\\n'+ message['content'] | trim + '<|eot_id|>' }}\n {%- elif 'tool_calls' in message %}\n {%- if not message.tool_calls|length == 1 %}\n {{- raise_exception(\"This model only supports single tool-calls at once!\") }}\n {%- endif %}\n {%- set tool_call = message.tool_calls[0].function %}\n {%- if builtin_tools is defined and tool_call.name in builtin_tools %}\n {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' -}}\n {{- \"<|python_tag|>\" + tool_call.name + \".call(\" }}\n {%- for arg_name, arg_val in tool_call.arguments | items %}\n {{- arg_name + '=\"' + arg_val + '\"' }}\n {%- if not loop.last %}\n {{- \", \" }}\n {%- endif %}\n {%- endfor %}\n {{- \")\" }}\n {%- else %}\n {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' -}}\n {{- '{\"name\": \"' + tool_call.name + '\", ' }}\n {{- '\"parameters\": ' }}\n {{- tool_call.arguments | tojson }}\n {{- \"}\" }}\n {%- endif %}\n {%- if builtin_tools is defined %}\n {#- This means we're in ipython mode #}\n {{- \"<|eom_id|>\" }}\n {%- else %}\n {{- \"<|eot_id|>\" }}\n {%- endif %}\n {%- elif message.role == \"tool\" or message.role == \"ipython\" %}\n {{- \"<|start_header_id|>ipython<|end_header_id|>\\n\\n\" }}\n {%- if message.content is mapping or message.content is iterable %}\n {{- message.content | tojson }}\n {%- else %}\n {{- message.content }}\n {%- endif %}\n {{- \"<|eot_id|>\" }}\n {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' }}\n{%- endif %}\n",
+ "clean_up_tokenization_spaces": true,
+ "eos_token": "<|eot_id|>",
+ "extra_special_tokens": {},
+ "model_input_names": [
+ "input_ids",
+ "attention_mask"
+ ],
+ "model_max_length": 131072,
+ "pad_token": "<|end_of_text|>",
+ "tokenizer_class": "PreTrainedTokenizer"
+}
diff --git a/checkpoint-432/trainer_state.json b/checkpoint-432/trainer_state.json
new file mode 100644
index 0000000000000000000000000000000000000000..e0e6b772c89930bf7f7b56410d943951e54edc85
--- /dev/null
+++ b/checkpoint-432/trainer_state.json
@@ -0,0 +1,3057 @@
+{
+ "best_metric": null,
+ "best_model_checkpoint": null,
+ "epoch": 5.922018348623853,
+ "eval_steps": 500,
+ "global_step": 432,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ {
+ "epoch": 0.013761467889908258,
+ "grad_norm": 38.02450942993164,
+ "learning_rate": 5.0000000000000004e-08,
+ "loss": 3.125,
+ "step": 1
+ },
+ {
+ "epoch": 0.027522935779816515,
+ "grad_norm": 37.864768981933594,
+ "learning_rate": 1.0000000000000001e-07,
+ "loss": 3.0998,
+ "step": 2
+ },
+ {
+ "epoch": 0.04128440366972477,
+ "grad_norm": 38.34700012207031,
+ "learning_rate": 1.5000000000000002e-07,
+ "loss": 3.1533,
+ "step": 3
+ },
+ {
+ "epoch": 0.05504587155963303,
+ "grad_norm": 38.33641815185547,
+ "learning_rate": 2.0000000000000002e-07,
+ "loss": 3.1542,
+ "step": 4
+ },
+ {
+ "epoch": 0.06880733944954129,
+ "grad_norm": 38.064449310302734,
+ "learning_rate": 2.5000000000000004e-07,
+ "loss": 3.1153,
+ "step": 5
+ },
+ {
+ "epoch": 0.08256880733944955,
+ "grad_norm": 37.92089080810547,
+ "learning_rate": 3.0000000000000004e-07,
+ "loss": 3.0867,
+ "step": 6
+ },
+ {
+ "epoch": 0.0963302752293578,
+ "grad_norm": 38.120323181152344,
+ "learning_rate": 3.5000000000000004e-07,
+ "loss": 3.093,
+ "step": 7
+ },
+ {
+ "epoch": 0.11009174311926606,
+ "grad_norm": 38.47222900390625,
+ "learning_rate": 4.0000000000000003e-07,
+ "loss": 3.1056,
+ "step": 8
+ },
+ {
+ "epoch": 0.12385321100917432,
+ "grad_norm": 38.013702392578125,
+ "learning_rate": 4.5000000000000003e-07,
+ "loss": 3.0474,
+ "step": 9
+ },
+ {
+ "epoch": 0.13761467889908258,
+ "grad_norm": 38.17593002319336,
+ "learning_rate": 5.000000000000001e-07,
+ "loss": 3.0264,
+ "step": 10
+ },
+ {
+ "epoch": 0.15137614678899083,
+ "grad_norm": 38.60066604614258,
+ "learning_rate": 5.5e-07,
+ "loss": 2.9404,
+ "step": 11
+ },
+ {
+ "epoch": 0.1651376146788991,
+ "grad_norm": 38.83498764038086,
+ "learning_rate": 6.000000000000001e-07,
+ "loss": 2.9571,
+ "step": 12
+ },
+ {
+ "epoch": 0.17889908256880735,
+ "grad_norm": 38.942047119140625,
+ "learning_rate": 6.5e-07,
+ "loss": 2.8849,
+ "step": 13
+ },
+ {
+ "epoch": 0.1926605504587156,
+ "grad_norm": 38.0286865234375,
+ "learning_rate": 7.000000000000001e-07,
+ "loss": 2.7486,
+ "step": 14
+ },
+ {
+ "epoch": 0.20642201834862386,
+ "grad_norm": 38.31856155395508,
+ "learning_rate": 7.5e-07,
+ "loss": 2.6876,
+ "step": 15
+ },
+ {
+ "epoch": 0.22018348623853212,
+ "grad_norm": 38.124759674072266,
+ "learning_rate": 8.000000000000001e-07,
+ "loss": 2.5992,
+ "step": 16
+ },
+ {
+ "epoch": 0.23394495412844038,
+ "grad_norm": 36.59762191772461,
+ "learning_rate": 8.500000000000001e-07,
+ "loss": 2.4063,
+ "step": 17
+ },
+ {
+ "epoch": 0.24770642201834864,
+ "grad_norm": 36.63874435424805,
+ "learning_rate": 9.000000000000001e-07,
+ "loss": 2.3109,
+ "step": 18
+ },
+ {
+ "epoch": 0.26146788990825687,
+ "grad_norm": 36.768577575683594,
+ "learning_rate": 9.500000000000001e-07,
+ "loss": 2.1677,
+ "step": 19
+ },
+ {
+ "epoch": 0.27522935779816515,
+ "grad_norm": 36.187137603759766,
+ "learning_rate": 1.0000000000000002e-06,
+ "loss": 1.9551,
+ "step": 20
+ },
+ {
+ "epoch": 0.2889908256880734,
+ "grad_norm": 35.55617141723633,
+ "learning_rate": 1.0500000000000001e-06,
+ "loss": 1.8053,
+ "step": 21
+ },
+ {
+ "epoch": 0.30275229357798167,
+ "grad_norm": 34.60952377319336,
+ "learning_rate": 1.1e-06,
+ "loss": 1.5654,
+ "step": 22
+ },
+ {
+ "epoch": 0.3165137614678899,
+ "grad_norm": 33.69620895385742,
+ "learning_rate": 1.1500000000000002e-06,
+ "loss": 1.3454,
+ "step": 23
+ },
+ {
+ "epoch": 0.3302752293577982,
+ "grad_norm": 34.33642578125,
+ "learning_rate": 1.2000000000000002e-06,
+ "loss": 1.2417,
+ "step": 24
+ },
+ {
+ "epoch": 0.3440366972477064,
+ "grad_norm": 31.23066520690918,
+ "learning_rate": 1.25e-06,
+ "loss": 0.9839,
+ "step": 25
+ },
+ {
+ "epoch": 0.3577981651376147,
+ "grad_norm": 25.810237884521484,
+ "learning_rate": 1.3e-06,
+ "loss": 0.793,
+ "step": 26
+ },
+ {
+ "epoch": 0.37155963302752293,
+ "grad_norm": 23.06692886352539,
+ "learning_rate": 1.3500000000000002e-06,
+ "loss": 0.6082,
+ "step": 27
+ },
+ {
+ "epoch": 0.3853211009174312,
+ "grad_norm": 19.828439712524414,
+ "learning_rate": 1.4000000000000001e-06,
+ "loss": 0.4845,
+ "step": 28
+ },
+ {
+ "epoch": 0.39908256880733944,
+ "grad_norm": 14.150300025939941,
+ "learning_rate": 1.45e-06,
+ "loss": 0.348,
+ "step": 29
+ },
+ {
+ "epoch": 0.41284403669724773,
+ "grad_norm": 9.044266700744629,
+ "learning_rate": 1.5e-06,
+ "loss": 0.2516,
+ "step": 30
+ },
+ {
+ "epoch": 0.42660550458715596,
+ "grad_norm": 5.704404354095459,
+ "learning_rate": 1.5500000000000002e-06,
+ "loss": 0.177,
+ "step": 31
+ },
+ {
+ "epoch": 0.44036697247706424,
+ "grad_norm": 3.2953503131866455,
+ "learning_rate": 1.6000000000000001e-06,
+ "loss": 0.1391,
+ "step": 32
+ },
+ {
+ "epoch": 0.4541284403669725,
+ "grad_norm": 2.453219413757324,
+ "learning_rate": 1.6500000000000003e-06,
+ "loss": 0.0982,
+ "step": 33
+ },
+ {
+ "epoch": 0.46788990825688076,
+ "grad_norm": 2.0325512886047363,
+ "learning_rate": 1.7000000000000002e-06,
+ "loss": 0.0807,
+ "step": 34
+ },
+ {
+ "epoch": 0.481651376146789,
+ "grad_norm": 1.6322681903839111,
+ "learning_rate": 1.75e-06,
+ "loss": 0.0725,
+ "step": 35
+ },
+ {
+ "epoch": 0.4954128440366973,
+ "grad_norm": 0.9713364839553833,
+ "learning_rate": 1.8000000000000001e-06,
+ "loss": 0.067,
+ "step": 36
+ },
+ {
+ "epoch": 0.5091743119266054,
+ "grad_norm": 0.7980225682258606,
+ "learning_rate": 1.85e-06,
+ "loss": 0.0582,
+ "step": 37
+ },
+ {
+ "epoch": 0.5229357798165137,
+ "grad_norm": 1.0616590976715088,
+ "learning_rate": 1.9000000000000002e-06,
+ "loss": 0.0562,
+ "step": 38
+ },
+ {
+ "epoch": 0.536697247706422,
+ "grad_norm": 1.053462028503418,
+ "learning_rate": 1.9500000000000004e-06,
+ "loss": 0.0537,
+ "step": 39
+ },
+ {
+ "epoch": 0.5504587155963303,
+ "grad_norm": 0.9452660083770752,
+ "learning_rate": 2.0000000000000003e-06,
+ "loss": 0.0602,
+ "step": 40
+ },
+ {
+ "epoch": 0.5642201834862385,
+ "grad_norm": 0.830368161201477,
+ "learning_rate": 2.05e-06,
+ "loss": 0.0549,
+ "step": 41
+ },
+ {
+ "epoch": 0.5779816513761468,
+ "grad_norm": 0.5791187882423401,
+ "learning_rate": 2.1000000000000002e-06,
+ "loss": 0.0479,
+ "step": 42
+ },
+ {
+ "epoch": 0.591743119266055,
+ "grad_norm": 0.44175243377685547,
+ "learning_rate": 2.15e-06,
+ "loss": 0.0461,
+ "step": 43
+ },
+ {
+ "epoch": 0.6055045871559633,
+ "grad_norm": 0.37655699253082275,
+ "learning_rate": 2.2e-06,
+ "loss": 0.043,
+ "step": 44
+ },
+ {
+ "epoch": 0.6192660550458715,
+ "grad_norm": 0.34382495284080505,
+ "learning_rate": 2.25e-06,
+ "loss": 0.0454,
+ "step": 45
+ },
+ {
+ "epoch": 0.6330275229357798,
+ "grad_norm": 0.5047216415405273,
+ "learning_rate": 2.3000000000000004e-06,
+ "loss": 0.0437,
+ "step": 46
+ },
+ {
+ "epoch": 0.6467889908256881,
+ "grad_norm": 0.6318779587745667,
+ "learning_rate": 2.35e-06,
+ "loss": 0.0468,
+ "step": 47
+ },
+ {
+ "epoch": 0.6605504587155964,
+ "grad_norm": 0.5135455131530762,
+ "learning_rate": 2.4000000000000003e-06,
+ "loss": 0.0494,
+ "step": 48
+ },
+ {
+ "epoch": 0.6743119266055045,
+ "grad_norm": 0.4802612066268921,
+ "learning_rate": 2.4500000000000003e-06,
+ "loss": 0.0441,
+ "step": 49
+ },
+ {
+ "epoch": 0.6880733944954128,
+ "grad_norm": 0.6157718300819397,
+ "learning_rate": 2.5e-06,
+ "loss": 0.0398,
+ "step": 50
+ },
+ {
+ "epoch": 0.7018348623853211,
+ "grad_norm": 0.4327130913734436,
+ "learning_rate": 2.55e-06,
+ "loss": 0.0438,
+ "step": 51
+ },
+ {
+ "epoch": 0.7155963302752294,
+ "grad_norm": 0.46133658289909363,
+ "learning_rate": 2.6e-06,
+ "loss": 0.041,
+ "step": 52
+ },
+ {
+ "epoch": 0.7293577981651376,
+ "grad_norm": 0.5729146003723145,
+ "learning_rate": 2.6500000000000005e-06,
+ "loss": 0.0406,
+ "step": 53
+ },
+ {
+ "epoch": 0.7431192660550459,
+ "grad_norm": 0.32373812794685364,
+ "learning_rate": 2.7000000000000004e-06,
+ "loss": 0.0419,
+ "step": 54
+ },
+ {
+ "epoch": 0.7568807339449541,
+ "grad_norm": 0.29006752371788025,
+ "learning_rate": 2.7500000000000004e-06,
+ "loss": 0.0415,
+ "step": 55
+ },
+ {
+ "epoch": 0.7706422018348624,
+ "grad_norm": 0.31038960814476013,
+ "learning_rate": 2.8000000000000003e-06,
+ "loss": 0.0344,
+ "step": 56
+ },
+ {
+ "epoch": 0.7844036697247706,
+ "grad_norm": 0.2324836701154709,
+ "learning_rate": 2.85e-06,
+ "loss": 0.0374,
+ "step": 57
+ },
+ {
+ "epoch": 0.7981651376146789,
+ "grad_norm": 0.5083625912666321,
+ "learning_rate": 2.9e-06,
+ "loss": 0.0324,
+ "step": 58
+ },
+ {
+ "epoch": 0.8119266055045872,
+ "grad_norm": 0.2873130142688751,
+ "learning_rate": 2.95e-06,
+ "loss": 0.0403,
+ "step": 59
+ },
+ {
+ "epoch": 0.8256880733944955,
+ "grad_norm": 0.437663197517395,
+ "learning_rate": 3e-06,
+ "loss": 0.0368,
+ "step": 60
+ },
+ {
+ "epoch": 0.8394495412844036,
+ "grad_norm": 0.5645247101783752,
+ "learning_rate": 3.05e-06,
+ "loss": 0.0386,
+ "step": 61
+ },
+ {
+ "epoch": 0.8532110091743119,
+ "grad_norm": 0.40374210476875305,
+ "learning_rate": 3.1000000000000004e-06,
+ "loss": 0.0425,
+ "step": 62
+ },
+ {
+ "epoch": 0.8669724770642202,
+ "grad_norm": 0.46468955278396606,
+ "learning_rate": 3.1500000000000003e-06,
+ "loss": 0.0323,
+ "step": 63
+ },
+ {
+ "epoch": 0.8807339449541285,
+ "grad_norm": 0.29952895641326904,
+ "learning_rate": 3.2000000000000003e-06,
+ "loss": 0.0325,
+ "step": 64
+ },
+ {
+ "epoch": 0.8944954128440367,
+ "grad_norm": 0.3678436279296875,
+ "learning_rate": 3.2500000000000002e-06,
+ "loss": 0.036,
+ "step": 65
+ },
+ {
+ "epoch": 0.908256880733945,
+ "grad_norm": 0.5068934559822083,
+ "learning_rate": 3.3000000000000006e-06,
+ "loss": 0.0357,
+ "step": 66
+ },
+ {
+ "epoch": 0.9220183486238532,
+ "grad_norm": 0.2723177671432495,
+ "learning_rate": 3.3500000000000005e-06,
+ "loss": 0.0333,
+ "step": 67
+ },
+ {
+ "epoch": 0.9357798165137615,
+ "grad_norm": 0.41696834564208984,
+ "learning_rate": 3.4000000000000005e-06,
+ "loss": 0.0347,
+ "step": 68
+ },
+ {
+ "epoch": 0.9495412844036697,
+ "grad_norm": 0.2582981288433075,
+ "learning_rate": 3.45e-06,
+ "loss": 0.0283,
+ "step": 69
+ },
+ {
+ "epoch": 0.963302752293578,
+ "grad_norm": 0.40648311376571655,
+ "learning_rate": 3.5e-06,
+ "loss": 0.0293,
+ "step": 70
+ },
+ {
+ "epoch": 0.9770642201834863,
+ "grad_norm": 0.4149394631385803,
+ "learning_rate": 3.5500000000000003e-06,
+ "loss": 0.0311,
+ "step": 71
+ },
+ {
+ "epoch": 0.9908256880733946,
+ "grad_norm": 0.3800952136516571,
+ "learning_rate": 3.6000000000000003e-06,
+ "loss": 0.0346,
+ "step": 72
+ },
+ {
+ "epoch": 1.0,
+ "grad_norm": 0.3800952136516571,
+ "learning_rate": 3.65e-06,
+ "loss": 0.0291,
+ "step": 73
+ },
+ {
+ "epoch": 1.0137614678899083,
+ "grad_norm": 0.5016496181488037,
+ "learning_rate": 3.7e-06,
+ "loss": 0.0286,
+ "step": 74
+ },
+ {
+ "epoch": 1.0275229357798166,
+ "grad_norm": 0.3533766567707062,
+ "learning_rate": 3.7500000000000005e-06,
+ "loss": 0.0321,
+ "step": 75
+ },
+ {
+ "epoch": 1.0412844036697249,
+ "grad_norm": 0.2785470485687256,
+ "learning_rate": 3.8000000000000005e-06,
+ "loss": 0.0277,
+ "step": 76
+ },
+ {
+ "epoch": 1.0550458715596331,
+ "grad_norm": 0.4530641436576843,
+ "learning_rate": 3.85e-06,
+ "loss": 0.0294,
+ "step": 77
+ },
+ {
+ "epoch": 1.0688073394495412,
+ "grad_norm": 0.3170749843120575,
+ "learning_rate": 3.900000000000001e-06,
+ "loss": 0.0274,
+ "step": 78
+ },
+ {
+ "epoch": 1.0825688073394495,
+ "grad_norm": 0.26502758264541626,
+ "learning_rate": 3.95e-06,
+ "loss": 0.0284,
+ "step": 79
+ },
+ {
+ "epoch": 1.0963302752293578,
+ "grad_norm": 0.5486436486244202,
+ "learning_rate": 4.000000000000001e-06,
+ "loss": 0.0285,
+ "step": 80
+ },
+ {
+ "epoch": 1.110091743119266,
+ "grad_norm": 0.24868083000183105,
+ "learning_rate": 4.05e-06,
+ "loss": 0.0301,
+ "step": 81
+ },
+ {
+ "epoch": 1.1238532110091743,
+ "grad_norm": 0.3448987305164337,
+ "learning_rate": 4.1e-06,
+ "loss": 0.0261,
+ "step": 82
+ },
+ {
+ "epoch": 1.1376146788990826,
+ "grad_norm": 0.3330553472042084,
+ "learning_rate": 4.15e-06,
+ "loss": 0.03,
+ "step": 83
+ },
+ {
+ "epoch": 1.151376146788991,
+ "grad_norm": 0.3379852771759033,
+ "learning_rate": 4.2000000000000004e-06,
+ "loss": 0.0286,
+ "step": 84
+ },
+ {
+ "epoch": 1.165137614678899,
+ "grad_norm": 0.23678433895111084,
+ "learning_rate": 4.25e-06,
+ "loss": 0.0245,
+ "step": 85
+ },
+ {
+ "epoch": 1.1788990825688073,
+ "grad_norm": 0.24502314627170563,
+ "learning_rate": 4.3e-06,
+ "loss": 0.0267,
+ "step": 86
+ },
+ {
+ "epoch": 1.1926605504587156,
+ "grad_norm": 0.34288597106933594,
+ "learning_rate": 4.350000000000001e-06,
+ "loss": 0.0259,
+ "step": 87
+ },
+ {
+ "epoch": 1.2064220183486238,
+ "grad_norm": 0.20595045387744904,
+ "learning_rate": 4.4e-06,
+ "loss": 0.0241,
+ "step": 88
+ },
+ {
+ "epoch": 1.2201834862385321,
+ "grad_norm": 0.28399360179901123,
+ "learning_rate": 4.450000000000001e-06,
+ "loss": 0.0277,
+ "step": 89
+ },
+ {
+ "epoch": 1.2339449541284404,
+ "grad_norm": 0.273929238319397,
+ "learning_rate": 4.5e-06,
+ "loss": 0.0261,
+ "step": 90
+ },
+ {
+ "epoch": 1.2477064220183487,
+ "grad_norm": 0.24288330972194672,
+ "learning_rate": 4.5500000000000005e-06,
+ "loss": 0.0267,
+ "step": 91
+ },
+ {
+ "epoch": 1.261467889908257,
+ "grad_norm": 0.42502400279045105,
+ "learning_rate": 4.600000000000001e-06,
+ "loss": 0.0252,
+ "step": 92
+ },
+ {
+ "epoch": 1.2752293577981653,
+ "grad_norm": 0.17670072615146637,
+ "learning_rate": 4.65e-06,
+ "loss": 0.0231,
+ "step": 93
+ },
+ {
+ "epoch": 1.2889908256880733,
+ "grad_norm": 0.23585423827171326,
+ "learning_rate": 4.7e-06,
+ "loss": 0.0213,
+ "step": 94
+ },
+ {
+ "epoch": 1.3027522935779816,
+ "grad_norm": 0.32558879256248474,
+ "learning_rate": 4.75e-06,
+ "loss": 0.0226,
+ "step": 95
+ },
+ {
+ "epoch": 1.31651376146789,
+ "grad_norm": 0.2908780872821808,
+ "learning_rate": 4.800000000000001e-06,
+ "loss": 0.0274,
+ "step": 96
+ },
+ {
+ "epoch": 1.3302752293577982,
+ "grad_norm": 0.3725607991218567,
+ "learning_rate": 4.85e-06,
+ "loss": 0.0241,
+ "step": 97
+ },
+ {
+ "epoch": 1.3440366972477065,
+ "grad_norm": 0.3833301067352295,
+ "learning_rate": 4.9000000000000005e-06,
+ "loss": 0.0252,
+ "step": 98
+ },
+ {
+ "epoch": 1.3577981651376148,
+ "grad_norm": 0.32000771164894104,
+ "learning_rate": 4.95e-06,
+ "loss": 0.0236,
+ "step": 99
+ },
+ {
+ "epoch": 1.3715596330275228,
+ "grad_norm": 0.3203510344028473,
+ "learning_rate": 5e-06,
+ "loss": 0.0235,
+ "step": 100
+ },
+ {
+ "epoch": 1.385321100917431,
+ "grad_norm": 0.20016217231750488,
+ "learning_rate": 4.999888074163108e-06,
+ "loss": 0.0218,
+ "step": 101
+ },
+ {
+ "epoch": 1.3990825688073394,
+ "grad_norm": 0.3012026250362396,
+ "learning_rate": 4.999552306674345e-06,
+ "loss": 0.0234,
+ "step": 102
+ },
+ {
+ "epoch": 1.4128440366972477,
+ "grad_norm": 0.22818222641944885,
+ "learning_rate": 4.998992727598557e-06,
+ "loss": 0.0212,
+ "step": 103
+ },
+ {
+ "epoch": 1.426605504587156,
+ "grad_norm": 0.24128392338752747,
+ "learning_rate": 4.998209387040829e-06,
+ "loss": 0.0207,
+ "step": 104
+ },
+ {
+ "epoch": 1.4403669724770642,
+ "grad_norm": 0.3475555181503296,
+ "learning_rate": 4.9972023551419995e-06,
+ "loss": 0.0246,
+ "step": 105
+ },
+ {
+ "epoch": 1.4541284403669725,
+ "grad_norm": 0.21223627030849457,
+ "learning_rate": 4.995971722072379e-06,
+ "loss": 0.0237,
+ "step": 106
+ },
+ {
+ "epoch": 1.4678899082568808,
+ "grad_norm": 0.3776336908340454,
+ "learning_rate": 4.9945175980236745e-06,
+ "loss": 0.0218,
+ "step": 107
+ },
+ {
+ "epoch": 1.481651376146789,
+ "grad_norm": 0.25027793645858765,
+ "learning_rate": 4.992840113199131e-06,
+ "loss": 0.0214,
+ "step": 108
+ },
+ {
+ "epoch": 1.4954128440366974,
+ "grad_norm": 0.2559281885623932,
+ "learning_rate": 4.990939417801859e-06,
+ "loss": 0.0213,
+ "step": 109
+ },
+ {
+ "epoch": 1.5091743119266054,
+ "grad_norm": 0.33694687485694885,
+ "learning_rate": 4.988815682021398e-06,
+ "loss": 0.0182,
+ "step": 110
+ },
+ {
+ "epoch": 1.5229357798165137,
+ "grad_norm": 0.3175147473812103,
+ "learning_rate": 4.986469096018472e-06,
+ "loss": 0.0213,
+ "step": 111
+ },
+ {
+ "epoch": 1.536697247706422,
+ "grad_norm": 0.48777177929878235,
+ "learning_rate": 4.983899869907963e-06,
+ "loss": 0.0186,
+ "step": 112
+ },
+ {
+ "epoch": 1.5504587155963303,
+ "grad_norm": 0.34633246064186096,
+ "learning_rate": 4.981108233740096e-06,
+ "loss": 0.0224,
+ "step": 113
+ },
+ {
+ "epoch": 1.5642201834862384,
+ "grad_norm": 0.17302758991718292,
+ "learning_rate": 4.978094437479843e-06,
+ "loss": 0.0189,
+ "step": 114
+ },
+ {
+ "epoch": 1.5779816513761467,
+ "grad_norm": 0.24110931158065796,
+ "learning_rate": 4.97485875098454e-06,
+ "loss": 0.0174,
+ "step": 115
+ },
+ {
+ "epoch": 1.591743119266055,
+ "grad_norm": 0.2965283989906311,
+ "learning_rate": 4.971401463979722e-06,
+ "loss": 0.0174,
+ "step": 116
+ },
+ {
+ "epoch": 1.6055045871559632,
+ "grad_norm": 0.34184500575065613,
+ "learning_rate": 4.967722886033181e-06,
+ "loss": 0.0162,
+ "step": 117
+ },
+ {
+ "epoch": 1.6192660550458715,
+ "grad_norm": 0.3942873179912567,
+ "learning_rate": 4.963823346527249e-06,
+ "loss": 0.0176,
+ "step": 118
+ },
+ {
+ "epoch": 1.6330275229357798,
+ "grad_norm": 0.21840929985046387,
+ "learning_rate": 4.959703194629304e-06,
+ "loss": 0.018,
+ "step": 119
+ },
+ {
+ "epoch": 1.646788990825688,
+ "grad_norm": 0.23672759532928467,
+ "learning_rate": 4.955362799260507e-06,
+ "loss": 0.0204,
+ "step": 120
+ },
+ {
+ "epoch": 1.6605504587155964,
+ "grad_norm": 0.18776445090770721,
+ "learning_rate": 4.950802549062764e-06,
+ "loss": 0.0173,
+ "step": 121
+ },
+ {
+ "epoch": 1.6743119266055047,
+ "grad_norm": 0.279297411441803,
+ "learning_rate": 4.946022852363932e-06,
+ "loss": 0.0177,
+ "step": 122
+ },
+ {
+ "epoch": 1.688073394495413,
+ "grad_norm": 0.20893588662147522,
+ "learning_rate": 4.9410241371412525e-06,
+ "loss": 0.0189,
+ "step": 123
+ },
+ {
+ "epoch": 1.7018348623853212,
+ "grad_norm": 0.26409876346588135,
+ "learning_rate": 4.935806850983034e-06,
+ "loss": 0.0195,
+ "step": 124
+ },
+ {
+ "epoch": 1.7155963302752295,
+ "grad_norm": 0.23863324522972107,
+ "learning_rate": 4.9303714610485705e-06,
+ "loss": 0.0151,
+ "step": 125
+ },
+ {
+ "epoch": 1.7293577981651376,
+ "grad_norm": 0.25934213399887085,
+ "learning_rate": 4.924718454026318e-06,
+ "loss": 0.0157,
+ "step": 126
+ },
+ {
+ "epoch": 1.7431192660550459,
+ "grad_norm": 0.2923693358898163,
+ "learning_rate": 4.918848336090309e-06,
+ "loss": 0.0155,
+ "step": 127
+ },
+ {
+ "epoch": 1.7568807339449541,
+ "grad_norm": 0.16973069310188293,
+ "learning_rate": 4.912761632854834e-06,
+ "loss": 0.0156,
+ "step": 128
+ },
+ {
+ "epoch": 1.7706422018348624,
+ "grad_norm": 0.25908610224723816,
+ "learning_rate": 4.906458889327375e-06,
+ "loss": 0.0159,
+ "step": 129
+ },
+ {
+ "epoch": 1.7844036697247705,
+ "grad_norm": 0.27444136142730713,
+ "learning_rate": 4.899940669859807e-06,
+ "loss": 0.0159,
+ "step": 130
+ },
+ {
+ "epoch": 1.7981651376146788,
+ "grad_norm": 0.25167539715766907,
+ "learning_rate": 4.893207558097867e-06,
+ "loss": 0.0147,
+ "step": 131
+ },
+ {
+ "epoch": 1.811926605504587,
+ "grad_norm": 0.25777608156204224,
+ "learning_rate": 4.8862601569288885e-06,
+ "loss": 0.016,
+ "step": 132
+ },
+ {
+ "epoch": 1.8256880733944953,
+ "grad_norm": 0.24190428853034973,
+ "learning_rate": 4.879099088427824e-06,
+ "loss": 0.0128,
+ "step": 133
+ },
+ {
+ "epoch": 1.8394495412844036,
+ "grad_norm": 0.17028410732746124,
+ "learning_rate": 4.871724993801541e-06,
+ "loss": 0.0145,
+ "step": 134
+ },
+ {
+ "epoch": 1.853211009174312,
+ "grad_norm": 0.25141075253486633,
+ "learning_rate": 4.864138533331411e-06,
+ "loss": 0.0144,
+ "step": 135
+ },
+ {
+ "epoch": 1.8669724770642202,
+ "grad_norm": 0.6400424838066101,
+ "learning_rate": 4.8563403863141825e-06,
+ "loss": 0.0128,
+ "step": 136
+ },
+ {
+ "epoch": 1.8807339449541285,
+ "grad_norm": 0.22450514137744904,
+ "learning_rate": 4.84833125100116e-06,
+ "loss": 0.0167,
+ "step": 137
+ },
+ {
+ "epoch": 1.8944954128440368,
+ "grad_norm": 0.19940154254436493,
+ "learning_rate": 4.840111844535682e-06,
+ "loss": 0.0154,
+ "step": 138
+ },
+ {
+ "epoch": 1.908256880733945,
+ "grad_norm": 0.2946206033229828,
+ "learning_rate": 4.8316829028889076e-06,
+ "loss": 0.0158,
+ "step": 139
+ },
+ {
+ "epoch": 1.9220183486238533,
+ "grad_norm": 0.4694023132324219,
+ "learning_rate": 4.823045180793914e-06,
+ "loss": 0.0153,
+ "step": 140
+ },
+ {
+ "epoch": 1.9357798165137616,
+ "grad_norm": 0.23655226826667786,
+ "learning_rate": 4.8141994516781196e-06,
+ "loss": 0.0109,
+ "step": 141
+ },
+ {
+ "epoch": 1.9495412844036697,
+ "grad_norm": 0.23846553266048431,
+ "learning_rate": 4.805146507594034e-06,
+ "loss": 0.0129,
+ "step": 142
+ },
+ {
+ "epoch": 1.963302752293578,
+ "grad_norm": 0.2763686180114746,
+ "learning_rate": 4.7958871591483305e-06,
+ "loss": 0.0124,
+ "step": 143
+ },
+ {
+ "epoch": 1.9770642201834863,
+ "grad_norm": 0.23522883653640747,
+ "learning_rate": 4.786422235429269e-06,
+ "loss": 0.0125,
+ "step": 144
+ },
+ {
+ "epoch": 1.9908256880733946,
+ "grad_norm": 0.27468597888946533,
+ "learning_rate": 4.776752583932455e-06,
+ "loss": 0.0137,
+ "step": 145
+ },
+ {
+ "epoch": 2.0,
+ "grad_norm": 0.26614058017730713,
+ "learning_rate": 4.766879070484957e-06,
+ "loss": 0.0113,
+ "step": 146
+ },
+ {
+ "epoch": 2.0137614678899083,
+ "grad_norm": 0.2862620949745178,
+ "learning_rate": 4.756802579167781e-06,
+ "loss": 0.0104,
+ "step": 147
+ },
+ {
+ "epoch": 2.0275229357798166,
+ "grad_norm": 0.1770494133234024,
+ "learning_rate": 4.746524012236706e-06,
+ "loss": 0.0102,
+ "step": 148
+ },
+ {
+ "epoch": 2.041284403669725,
+ "grad_norm": 0.1850830465555191,
+ "learning_rate": 4.736044290041496e-06,
+ "loss": 0.0125,
+ "step": 149
+ },
+ {
+ "epoch": 2.055045871559633,
+ "grad_norm": 0.20552438497543335,
+ "learning_rate": 4.725364350943492e-06,
+ "loss": 0.0091,
+ "step": 150
+ },
+ {
+ "epoch": 2.0688073394495414,
+ "grad_norm": 0.12979158759117126,
+ "learning_rate": 4.714485151231593e-06,
+ "loss": 0.0113,
+ "step": 151
+ },
+ {
+ "epoch": 2.0825688073394497,
+ "grad_norm": 0.16992178559303284,
+ "learning_rate": 4.703407665036622e-06,
+ "loss": 0.0075,
+ "step": 152
+ },
+ {
+ "epoch": 2.096330275229358,
+ "grad_norm": 0.1473352611064911,
+ "learning_rate": 4.692132884244113e-06,
+ "loss": 0.0091,
+ "step": 153
+ },
+ {
+ "epoch": 2.1100917431192663,
+ "grad_norm": 0.24198868870735168,
+ "learning_rate": 4.680661818405485e-06,
+ "loss": 0.0073,
+ "step": 154
+ },
+ {
+ "epoch": 2.123853211009174,
+ "grad_norm": 0.25488734245300293,
+ "learning_rate": 4.668995494647653e-06,
+ "loss": 0.0098,
+ "step": 155
+ },
+ {
+ "epoch": 2.1376146788990824,
+ "grad_norm": 0.17986920475959778,
+ "learning_rate": 4.657134957581057e-06,
+ "loss": 0.0081,
+ "step": 156
+ },
+ {
+ "epoch": 2.1513761467889907,
+ "grad_norm": 0.22465726733207703,
+ "learning_rate": 4.645081269206128e-06,
+ "loss": 0.0074,
+ "step": 157
+ },
+ {
+ "epoch": 2.165137614678899,
+ "grad_norm": 0.19391046464443207,
+ "learning_rate": 4.632835508818192e-06,
+ "loss": 0.0077,
+ "step": 158
+ },
+ {
+ "epoch": 2.1788990825688073,
+ "grad_norm": 0.25997886061668396,
+ "learning_rate": 4.620398772910833e-06,
+ "loss": 0.0074,
+ "step": 159
+ },
+ {
+ "epoch": 2.1926605504587156,
+ "grad_norm": 0.2422141134738922,
+ "learning_rate": 4.607772175077712e-06,
+ "loss": 0.0103,
+ "step": 160
+ },
+ {
+ "epoch": 2.206422018348624,
+ "grad_norm": 0.2785587012767792,
+ "learning_rate": 4.59495684591285e-06,
+ "loss": 0.0079,
+ "step": 161
+ },
+ {
+ "epoch": 2.220183486238532,
+ "grad_norm": 0.19141560792922974,
+ "learning_rate": 4.581953932909403e-06,
+ "loss": 0.0068,
+ "step": 162
+ },
+ {
+ "epoch": 2.2339449541284404,
+ "grad_norm": 0.152049720287323,
+ "learning_rate": 4.5687646003569055e-06,
+ "loss": 0.0063,
+ "step": 163
+ },
+ {
+ "epoch": 2.2477064220183487,
+ "grad_norm": 0.19313585758209229,
+ "learning_rate": 4.555390029237026e-06,
+ "loss": 0.0079,
+ "step": 164
+ },
+ {
+ "epoch": 2.261467889908257,
+ "grad_norm": 0.19979022443294525,
+ "learning_rate": 4.541831417117815e-06,
+ "loss": 0.0085,
+ "step": 165
+ },
+ {
+ "epoch": 2.2752293577981653,
+ "grad_norm": 0.2902522683143616,
+ "learning_rate": 4.528089978046481e-06,
+ "loss": 0.0082,
+ "step": 166
+ },
+ {
+ "epoch": 2.2889908256880735,
+ "grad_norm": 0.5130491256713867,
+ "learning_rate": 4.514166942440679e-06,
+ "loss": 0.0067,
+ "step": 167
+ },
+ {
+ "epoch": 2.302752293577982,
+ "grad_norm": 0.15980036556720734,
+ "learning_rate": 4.5000635569783365e-06,
+ "loss": 0.0093,
+ "step": 168
+ },
+ {
+ "epoch": 2.31651376146789,
+ "grad_norm": 0.19247184693813324,
+ "learning_rate": 4.4857810844860325e-06,
+ "loss": 0.0101,
+ "step": 169
+ },
+ {
+ "epoch": 2.330275229357798,
+ "grad_norm": 0.19173133373260498,
+ "learning_rate": 4.471320803825915e-06,
+ "loss": 0.0065,
+ "step": 170
+ },
+ {
+ "epoch": 2.3440366972477062,
+ "grad_norm": 0.1779325157403946,
+ "learning_rate": 4.4566840097811956e-06,
+ "loss": 0.0059,
+ "step": 171
+ },
+ {
+ "epoch": 2.3577981651376145,
+ "grad_norm": 0.1639624983072281,
+ "learning_rate": 4.4418720129402145e-06,
+ "loss": 0.0052,
+ "step": 172
+ },
+ {
+ "epoch": 2.371559633027523,
+ "grad_norm": 0.2355070263147354,
+ "learning_rate": 4.426886139579083e-06,
+ "loss": 0.0046,
+ "step": 173
+ },
+ {
+ "epoch": 2.385321100917431,
+ "grad_norm": 0.20461603999137878,
+ "learning_rate": 4.411727731542937e-06,
+ "loss": 0.0071,
+ "step": 174
+ },
+ {
+ "epoch": 2.3990825688073394,
+ "grad_norm": 0.20251843333244324,
+ "learning_rate": 4.39639814612578e-06,
+ "loss": 0.0054,
+ "step": 175
+ },
+ {
+ "epoch": 2.4128440366972477,
+ "grad_norm": 0.17055197060108185,
+ "learning_rate": 4.3808987559489536e-06,
+ "loss": 0.0052,
+ "step": 176
+ },
+ {
+ "epoch": 2.426605504587156,
+ "grad_norm": 0.2508833706378937,
+ "learning_rate": 4.365230948838232e-06,
+ "loss": 0.0071,
+ "step": 177
+ },
+ {
+ "epoch": 2.4403669724770642,
+ "grad_norm": 0.21865351498126984,
+ "learning_rate": 4.349396127699552e-06,
+ "loss": 0.0068,
+ "step": 178
+ },
+ {
+ "epoch": 2.4541284403669725,
+ "grad_norm": 0.225867360830307,
+ "learning_rate": 4.3333957103934025e-06,
+ "loss": 0.0072,
+ "step": 179
+ },
+ {
+ "epoch": 2.467889908256881,
+ "grad_norm": 0.48035845160484314,
+ "learning_rate": 4.317231129607859e-06,
+ "loss": 0.006,
+ "step": 180
+ },
+ {
+ "epoch": 2.481651376146789,
+ "grad_norm": 0.17027413845062256,
+ "learning_rate": 4.30090383273031e-06,
+ "loss": 0.0043,
+ "step": 181
+ },
+ {
+ "epoch": 2.4954128440366974,
+ "grad_norm": 0.2660333216190338,
+ "learning_rate": 4.2844152817178476e-06,
+ "loss": 0.0082,
+ "step": 182
+ },
+ {
+ "epoch": 2.5091743119266052,
+ "grad_norm": 0.2485552728176117,
+ "learning_rate": 4.267766952966369e-06,
+ "loss": 0.0059,
+ "step": 183
+ },
+ {
+ "epoch": 2.522935779816514,
+ "grad_norm": 0.2048122137784958,
+ "learning_rate": 4.2509603371783776e-06,
+ "loss": 0.0071,
+ "step": 184
+ },
+ {
+ "epoch": 2.536697247706422,
+ "grad_norm": 0.1725567877292633,
+ "learning_rate": 4.233996939229502e-06,
+ "loss": 0.0053,
+ "step": 185
+ },
+ {
+ "epoch": 2.5504587155963305,
+ "grad_norm": 0.14592835307121277,
+ "learning_rate": 4.216878278033753e-06,
+ "loss": 0.0044,
+ "step": 186
+ },
+ {
+ "epoch": 2.5642201834862384,
+ "grad_norm": 0.2868310213088989,
+ "learning_rate": 4.199605886407515e-06,
+ "loss": 0.0057,
+ "step": 187
+ },
+ {
+ "epoch": 2.5779816513761467,
+ "grad_norm": 0.5002567172050476,
+ "learning_rate": 4.1821813109322975e-06,
+ "loss": 0.0072,
+ "step": 188
+ },
+ {
+ "epoch": 2.591743119266055,
+ "grad_norm": 0.199919655919075,
+ "learning_rate": 4.164606111816256e-06,
+ "loss": 0.0043,
+ "step": 189
+ },
+ {
+ "epoch": 2.6055045871559632,
+ "grad_norm": 0.17280228435993195,
+ "learning_rate": 4.146881862754485e-06,
+ "loss": 0.0048,
+ "step": 190
+ },
+ {
+ "epoch": 2.6192660550458715,
+ "grad_norm": 0.17505577206611633,
+ "learning_rate": 4.129010150788112e-06,
+ "loss": 0.0037,
+ "step": 191
+ },
+ {
+ "epoch": 2.63302752293578,
+ "grad_norm": 0.21955189108848572,
+ "learning_rate": 4.110992576162193e-06,
+ "loss": 0.0044,
+ "step": 192
+ },
+ {
+ "epoch": 2.646788990825688,
+ "grad_norm": 0.22081787884235382,
+ "learning_rate": 4.092830752182423e-06,
+ "loss": 0.0036,
+ "step": 193
+ },
+ {
+ "epoch": 2.6605504587155964,
+ "grad_norm": 0.26361310482025146,
+ "learning_rate": 4.074526305070679e-06,
+ "loss": 0.0062,
+ "step": 194
+ },
+ {
+ "epoch": 2.6743119266055047,
+ "grad_norm": 0.15528841316699982,
+ "learning_rate": 4.056080873819412e-06,
+ "loss": 0.0029,
+ "step": 195
+ },
+ {
+ "epoch": 2.688073394495413,
+ "grad_norm": 6.002184867858887,
+ "learning_rate": 4.037496110044885e-06,
+ "loss": 0.0062,
+ "step": 196
+ },
+ {
+ "epoch": 2.7018348623853212,
+ "grad_norm": 0.12827160954475403,
+ "learning_rate": 4.018773677839289e-06,
+ "loss": 0.0025,
+ "step": 197
+ },
+ {
+ "epoch": 2.7155963302752295,
+ "grad_norm": 0.21857935190200806,
+ "learning_rate": 3.999915253621739e-06,
+ "loss": 0.004,
+ "step": 198
+ },
+ {
+ "epoch": 2.729357798165138,
+ "grad_norm": 0.231048583984375,
+ "learning_rate": 3.980922525988167e-06,
+ "loss": 0.0034,
+ "step": 199
+ },
+ {
+ "epoch": 2.7431192660550456,
+ "grad_norm": 0.16589054465293884,
+ "learning_rate": 3.961797195560118e-06,
+ "loss": 0.0045,
+ "step": 200
+ },
+ {
+ "epoch": 2.7568807339449544,
+ "grad_norm": 0.2721656858921051,
+ "learning_rate": 3.942540974832486e-06,
+ "loss": 0.0056,
+ "step": 201
+ },
+ {
+ "epoch": 2.770642201834862,
+ "grad_norm": 0.12930598855018616,
+ "learning_rate": 3.9231555880201655e-06,
+ "loss": 0.0037,
+ "step": 202
+ },
+ {
+ "epoch": 2.7844036697247705,
+ "grad_norm": 0.299055814743042,
+ "learning_rate": 3.903642770903671e-06,
+ "loss": 0.0036,
+ "step": 203
+ },
+ {
+ "epoch": 2.7981651376146788,
+ "grad_norm": 0.15592966973781586,
+ "learning_rate": 3.884004270673711e-06,
+ "loss": 0.0023,
+ "step": 204
+ },
+ {
+ "epoch": 2.811926605504587,
+ "grad_norm": 0.10496195405721664,
+ "learning_rate": 3.864241845774746e-06,
+ "loss": 0.0029,
+ "step": 205
+ },
+ {
+ "epoch": 2.8256880733944953,
+ "grad_norm": 0.1604638695716858,
+ "learning_rate": 3.844357265747531e-06,
+ "loss": 0.0032,
+ "step": 206
+ },
+ {
+ "epoch": 2.8394495412844036,
+ "grad_norm": 0.1436438113451004,
+ "learning_rate": 3.8243523110706736e-06,
+ "loss": 0.0035,
+ "step": 207
+ },
+ {
+ "epoch": 2.853211009174312,
+ "grad_norm": 0.176204651594162,
+ "learning_rate": 3.8042287730012117e-06,
+ "loss": 0.0022,
+ "step": 208
+ },
+ {
+ "epoch": 2.86697247706422,
+ "grad_norm": 0.3500923216342926,
+ "learning_rate": 3.7839884534142157e-06,
+ "loss": 0.0022,
+ "step": 209
+ },
+ {
+ "epoch": 2.8807339449541285,
+ "grad_norm": 0.2319999784231186,
+ "learning_rate": 3.7636331646414524e-06,
+ "loss": 0.003,
+ "step": 210
+ },
+ {
+ "epoch": 2.8944954128440368,
+ "grad_norm": 0.1358226090669632,
+ "learning_rate": 3.7431647293091076e-06,
+ "loss": 0.0023,
+ "step": 211
+ },
+ {
+ "epoch": 2.908256880733945,
+ "grad_norm": 0.1323612779378891,
+ "learning_rate": 3.7225849801745835e-06,
+ "loss": 0.0021,
+ "step": 212
+ },
+ {
+ "epoch": 2.9220183486238533,
+ "grad_norm": 0.10968377441167831,
+ "learning_rate": 3.701895759962397e-06,
+ "loss": 0.002,
+ "step": 213
+ },
+ {
+ "epoch": 2.9357798165137616,
+ "grad_norm": 0.11163649708032608,
+ "learning_rate": 3.6810989211991777e-06,
+ "loss": 0.0015,
+ "step": 214
+ },
+ {
+ "epoch": 2.9495412844036695,
+ "grad_norm": 0.6103344559669495,
+ "learning_rate": 3.6601963260477923e-06,
+ "loss": 0.0051,
+ "step": 215
+ },
+ {
+ "epoch": 2.963302752293578,
+ "grad_norm": 0.1300484985113144,
+ "learning_rate": 3.6391898461406045e-06,
+ "loss": 0.0018,
+ "step": 216
+ },
+ {
+ "epoch": 2.977064220183486,
+ "grad_norm": 0.11599847674369812,
+ "learning_rate": 3.6180813624118898e-06,
+ "loss": 0.0021,
+ "step": 217
+ },
+ {
+ "epoch": 2.9908256880733948,
+ "grad_norm": 0.14168186485767365,
+ "learning_rate": 3.5968727649294134e-06,
+ "loss": 0.0018,
+ "step": 218
+ },
+ {
+ "epoch": 3.0,
+ "grad_norm": 0.16039852797985077,
+ "learning_rate": 3.575565952725193e-06,
+ "loss": 0.0014,
+ "step": 219
+ },
+ {
+ "epoch": 3.0137614678899083,
+ "grad_norm": 0.08175123482942581,
+ "learning_rate": 3.55416283362546e-06,
+ "loss": 0.0008,
+ "step": 220
+ },
+ {
+ "epoch": 3.0275229357798166,
+ "grad_norm": 0.07637064158916473,
+ "learning_rate": 3.5326653240798283e-06,
+ "loss": 0.0007,
+ "step": 221
+ },
+ {
+ "epoch": 3.041284403669725,
+ "grad_norm": 0.061755988746881485,
+ "learning_rate": 3.5110753489896924e-06,
+ "loss": 0.0007,
+ "step": 222
+ },
+ {
+ "epoch": 3.055045871559633,
+ "grad_norm": 0.05573924630880356,
+ "learning_rate": 3.4893948415358803e-06,
+ "loss": 0.0008,
+ "step": 223
+ },
+ {
+ "epoch": 3.0688073394495414,
+ "grad_norm": 0.18670693039894104,
+ "learning_rate": 3.4676257430055438e-06,
+ "loss": 0.0007,
+ "step": 224
+ },
+ {
+ "epoch": 3.0825688073394497,
+ "grad_norm": 0.05674147605895996,
+ "learning_rate": 3.4457700026183378e-06,
+ "loss": 0.0008,
+ "step": 225
+ },
+ {
+ "epoch": 3.096330275229358,
+ "grad_norm": 0.08101407438516617,
+ "learning_rate": 3.4238295773518924e-06,
+ "loss": 0.0006,
+ "step": 226
+ },
+ {
+ "epoch": 3.1100917431192663,
+ "grad_norm": 0.09028138220310211,
+ "learning_rate": 3.4018064317665745e-06,
+ "loss": 0.0011,
+ "step": 227
+ },
+ {
+ "epoch": 3.123853211009174,
+ "grad_norm": 0.0955042913556099,
+ "learning_rate": 3.3797025378295826e-06,
+ "loss": 0.0008,
+ "step": 228
+ },
+ {
+ "epoch": 3.1376146788990824,
+ "grad_norm": 0.09355667978525162,
+ "learning_rate": 3.357519874738382e-06,
+ "loss": 0.0011,
+ "step": 229
+ },
+ {
+ "epoch": 3.1513761467889907,
+ "grad_norm": 0.06623287498950958,
+ "learning_rate": 3.3352604287434752e-06,
+ "loss": 0.0006,
+ "step": 230
+ },
+ {
+ "epoch": 3.165137614678899,
+ "grad_norm": 0.07880504429340363,
+ "learning_rate": 3.31292619297056e-06,
+ "loss": 0.0007,
+ "step": 231
+ },
+ {
+ "epoch": 3.1788990825688073,
+ "grad_norm": 0.06904889643192291,
+ "learning_rate": 3.29051916724206e-06,
+ "loss": 0.0005,
+ "step": 232
+ },
+ {
+ "epoch": 3.1926605504587156,
+ "grad_norm": 0.23911181092262268,
+ "learning_rate": 3.2680413578980623e-06,
+ "loss": 0.0007,
+ "step": 233
+ },
+ {
+ "epoch": 3.206422018348624,
+ "grad_norm": 0.15154607594013214,
+ "learning_rate": 3.245494777616664e-06,
+ "loss": 0.0015,
+ "step": 234
+ },
+ {
+ "epoch": 3.220183486238532,
+ "grad_norm": 0.15627366304397583,
+ "learning_rate": 3.2228814452337587e-06,
+ "loss": 0.001,
+ "step": 235
+ },
+ {
+ "epoch": 3.2339449541284404,
+ "grad_norm": 0.0780797079205513,
+ "learning_rate": 3.2002033855622683e-06,
+ "loss": 0.0005,
+ "step": 236
+ },
+ {
+ "epoch": 3.2477064220183487,
+ "grad_norm": 0.7883831858634949,
+ "learning_rate": 3.177462629210838e-06,
+ "loss": 0.0005,
+ "step": 237
+ },
+ {
+ "epoch": 3.261467889908257,
+ "grad_norm": 0.18375582993030548,
+ "learning_rate": 3.154661212402017e-06,
+ "loss": 0.0006,
+ "step": 238
+ },
+ {
+ "epoch": 3.2752293577981653,
+ "grad_norm": 0.09809675812721252,
+ "learning_rate": 3.131801176789934e-06,
+ "loss": 0.001,
+ "step": 239
+ },
+ {
+ "epoch": 3.2889908256880735,
+ "grad_norm": 0.04473511874675751,
+ "learning_rate": 3.1088845692774798e-06,
+ "loss": 0.0002,
+ "step": 240
+ },
+ {
+ "epoch": 3.302752293577982,
+ "grad_norm": 0.07583656907081604,
+ "learning_rate": 3.0859134418330373e-06,
+ "loss": 0.0007,
+ "step": 241
+ },
+ {
+ "epoch": 3.31651376146789,
+ "grad_norm": 0.06534383445978165,
+ "learning_rate": 3.0628898513067357e-06,
+ "loss": 0.0003,
+ "step": 242
+ },
+ {
+ "epoch": 3.330275229357798,
+ "grad_norm": 0.09651501476764679,
+ "learning_rate": 3.0398158592462847e-06,
+ "loss": 0.0012,
+ "step": 243
+ },
+ {
+ "epoch": 3.3440366972477062,
+ "grad_norm": 0.07052983343601227,
+ "learning_rate": 3.0166935317123824e-06,
+ "loss": 0.0007,
+ "step": 244
+ },
+ {
+ "epoch": 3.3577981651376145,
+ "grad_norm": 0.09956210106611252,
+ "learning_rate": 2.9935249390937184e-06,
+ "loss": 0.0009,
+ "step": 245
+ },
+ {
+ "epoch": 3.371559633027523,
+ "grad_norm": 0.051604535430669785,
+ "learning_rate": 2.970312155921585e-06,
+ "loss": 0.0005,
+ "step": 246
+ },
+ {
+ "epoch": 3.385321100917431,
+ "grad_norm": 0.16182328760623932,
+ "learning_rate": 2.9470572606841295e-06,
+ "loss": 0.0006,
+ "step": 247
+ },
+ {
+ "epoch": 3.3990825688073394,
+ "grad_norm": 0.07653603702783585,
+ "learning_rate": 2.9237623356402423e-06,
+ "loss": 0.0005,
+ "step": 248
+ },
+ {
+ "epoch": 3.4128440366972477,
+ "grad_norm": 0.0773971900343895,
+ "learning_rate": 2.900429466633107e-06,
+ "loss": 0.0009,
+ "step": 249
+ },
+ {
+ "epoch": 3.426605504587156,
+ "grad_norm": 0.1629229336977005,
+ "learning_rate": 2.8770607429034352e-06,
+ "loss": 0.0013,
+ "step": 250
+ },
+ {
+ "epoch": 3.4403669724770642,
+ "grad_norm": 0.1734458953142166,
+ "learning_rate": 2.8536582569023964e-06,
+ "loss": 0.0009,
+ "step": 251
+ },
+ {
+ "epoch": 3.4541284403669725,
+ "grad_norm": 0.08963964134454727,
+ "learning_rate": 2.8302241041042564e-06,
+ "loss": 0.0004,
+ "step": 252
+ },
+ {
+ "epoch": 3.467889908256881,
+ "grad_norm": 0.037656184285879135,
+ "learning_rate": 2.8067603828187446e-06,
+ "loss": 0.0002,
+ "step": 253
+ },
+ {
+ "epoch": 3.481651376146789,
+ "grad_norm": 0.07996565848588943,
+ "learning_rate": 2.7832691940031755e-06,
+ "loss": 0.0006,
+ "step": 254
+ },
+ {
+ "epoch": 3.4954128440366974,
+ "grad_norm": 0.39782819151878357,
+ "learning_rate": 2.759752641074322e-06,
+ "loss": 0.0004,
+ "step": 255
+ },
+ {
+ "epoch": 3.5091743119266052,
+ "grad_norm": 0.056630104780197144,
+ "learning_rate": 2.7362128297200784e-06,
+ "loss": 0.0004,
+ "step": 256
+ },
+ {
+ "epoch": 3.522935779816514,
+ "grad_norm": 0.18943996727466583,
+ "learning_rate": 2.712651867710914e-06,
+ "loss": 0.0017,
+ "step": 257
+ },
+ {
+ "epoch": 3.536697247706422,
+ "grad_norm": 0.04645173251628876,
+ "learning_rate": 2.6890718647111424e-06,
+ "loss": 0.0004,
+ "step": 258
+ },
+ {
+ "epoch": 3.5504587155963305,
+ "grad_norm": 0.07644187659025192,
+ "learning_rate": 2.665474932090017e-06,
+ "loss": 0.0008,
+ "step": 259
+ },
+ {
+ "epoch": 3.5642201834862384,
+ "grad_norm": 0.04974009841680527,
+ "learning_rate": 2.6418631827326857e-06,
+ "loss": 0.0005,
+ "step": 260
+ },
+ {
+ "epoch": 3.5779816513761467,
+ "grad_norm": 0.06213025003671646,
+ "learning_rate": 2.6182387308509927e-06,
+ "loss": 0.0005,
+ "step": 261
+ },
+ {
+ "epoch": 3.591743119266055,
+ "grad_norm": 0.09061244130134583,
+ "learning_rate": 2.5946036917941765e-06,
+ "loss": 0.0006,
+ "step": 262
+ },
+ {
+ "epoch": 3.6055045871559632,
+ "grad_norm": 0.0796905905008316,
+ "learning_rate": 2.570960181859458e-06,
+ "loss": 0.0006,
+ "step": 263
+ },
+ {
+ "epoch": 3.6192660550458715,
+ "grad_norm": 0.19685125350952148,
+ "learning_rate": 2.547310318102548e-06,
+ "loss": 0.0006,
+ "step": 264
+ },
+ {
+ "epoch": 3.63302752293578,
+ "grad_norm": 0.030696067959070206,
+ "learning_rate": 2.5236562181480794e-06,
+ "loss": 0.0001,
+ "step": 265
+ },
+ {
+ "epoch": 3.646788990825688,
+ "grad_norm": 0.04516645520925522,
+ "learning_rate": 2.5e-06,
+ "loss": 0.0004,
+ "step": 266
+ },
+ {
+ "epoch": 3.6605504587155964,
+ "grad_norm": 0.09353721141815186,
+ "learning_rate": 2.4763437818519205e-06,
+ "loss": 0.0003,
+ "step": 267
+ },
+ {
+ "epoch": 3.6743119266055047,
+ "grad_norm": 0.05623761937022209,
+ "learning_rate": 2.4526896818974534e-06,
+ "loss": 0.0003,
+ "step": 268
+ },
+ {
+ "epoch": 3.688073394495413,
+ "grad_norm": 0.11353174597024918,
+ "learning_rate": 2.429039818140543e-06,
+ "loss": 0.0002,
+ "step": 269
+ },
+ {
+ "epoch": 3.7018348623853212,
+ "grad_norm": 0.03043302521109581,
+ "learning_rate": 2.405396308205825e-06,
+ "loss": 0.0002,
+ "step": 270
+ },
+ {
+ "epoch": 3.7155963302752295,
+ "grad_norm": 0.05028878524899483,
+ "learning_rate": 2.381761269149009e-06,
+ "loss": 0.0002,
+ "step": 271
+ },
+ {
+ "epoch": 3.729357798165138,
+ "grad_norm": 0.03009975329041481,
+ "learning_rate": 2.358136817267315e-06,
+ "loss": 0.0002,
+ "step": 272
+ },
+ {
+ "epoch": 3.7431192660550456,
+ "grad_norm": 0.08547350764274597,
+ "learning_rate": 2.334525067909983e-06,
+ "loss": 0.0005,
+ "step": 273
+ },
+ {
+ "epoch": 3.7568807339449544,
+ "grad_norm": 0.03611677512526512,
+ "learning_rate": 2.3109281352888593e-06,
+ "loss": 0.0002,
+ "step": 274
+ },
+ {
+ "epoch": 3.770642201834862,
+ "grad_norm": 0.03364509344100952,
+ "learning_rate": 2.2873481322890866e-06,
+ "loss": 0.0001,
+ "step": 275
+ },
+ {
+ "epoch": 3.7844036697247705,
+ "grad_norm": 0.04283633828163147,
+ "learning_rate": 2.263787170279922e-06,
+ "loss": 0.0003,
+ "step": 276
+ },
+ {
+ "epoch": 3.7981651376146788,
+ "grad_norm": 0.057849906384944916,
+ "learning_rate": 2.2402473589256793e-06,
+ "loss": 0.0002,
+ "step": 277
+ },
+ {
+ "epoch": 3.811926605504587,
+ "grad_norm": 0.12497337907552719,
+ "learning_rate": 2.2167308059968258e-06,
+ "loss": 0.0002,
+ "step": 278
+ },
+ {
+ "epoch": 3.8256880733944953,
+ "grad_norm": 0.044824711978435516,
+ "learning_rate": 2.193239617181256e-06,
+ "loss": 0.0003,
+ "step": 279
+ },
+ {
+ "epoch": 3.8394495412844036,
+ "grad_norm": 0.06536471843719482,
+ "learning_rate": 2.169775895895745e-06,
+ "loss": 0.0002,
+ "step": 280
+ },
+ {
+ "epoch": 3.853211009174312,
+ "grad_norm": 0.07215467095375061,
+ "learning_rate": 2.146341743097604e-06,
+ "loss": 0.0002,
+ "step": 281
+ },
+ {
+ "epoch": 3.86697247706422,
+ "grad_norm": 0.01871681585907936,
+ "learning_rate": 2.1229392570965656e-06,
+ "loss": 0.0001,
+ "step": 282
+ },
+ {
+ "epoch": 3.8807339449541285,
+ "grad_norm": 0.043813955038785934,
+ "learning_rate": 2.0995705333668948e-06,
+ "loss": 0.0002,
+ "step": 283
+ },
+ {
+ "epoch": 3.8944954128440368,
+ "grad_norm": 0.046582091599702835,
+ "learning_rate": 2.0762376643597586e-06,
+ "loss": 0.0003,
+ "step": 284
+ },
+ {
+ "epoch": 3.908256880733945,
+ "grad_norm": 0.06702767312526703,
+ "learning_rate": 2.0529427393158704e-06,
+ "loss": 0.0004,
+ "step": 285
+ },
+ {
+ "epoch": 3.9220183486238533,
+ "grad_norm": 0.053124528378248215,
+ "learning_rate": 2.0296878440784164e-06,
+ "loss": 0.0005,
+ "step": 286
+ },
+ {
+ "epoch": 3.9357798165137616,
+ "grad_norm": 0.029215684160590172,
+ "learning_rate": 2.006475060906283e-06,
+ "loss": 0.0002,
+ "step": 287
+ },
+ {
+ "epoch": 3.9495412844036695,
+ "grad_norm": 0.03736970201134682,
+ "learning_rate": 1.9833064682876175e-06,
+ "loss": 0.0001,
+ "step": 288
+ },
+ {
+ "epoch": 3.963302752293578,
+ "grad_norm": 0.0413820743560791,
+ "learning_rate": 1.9601841407537157e-06,
+ "loss": 0.0002,
+ "step": 289
+ },
+ {
+ "epoch": 3.977064220183486,
+ "grad_norm": 0.07081856578588486,
+ "learning_rate": 1.937110148693265e-06,
+ "loss": 0.0004,
+ "step": 290
+ },
+ {
+ "epoch": 3.9908256880733948,
+ "grad_norm": 0.0220099538564682,
+ "learning_rate": 1.9140865581669627e-06,
+ "loss": 0.0001,
+ "step": 291
+ },
+ {
+ "epoch": 4.0,
+ "grad_norm": 0.0220099538564682,
+ "learning_rate": 1.8911154307225204e-06,
+ "loss": 0.0001,
+ "step": 292
+ },
+ {
+ "epoch": 4.013761467889908,
+ "grad_norm": 0.0368737168610096,
+ "learning_rate": 1.8681988232100674e-06,
+ "loss": 0.0002,
+ "step": 293
+ },
+ {
+ "epoch": 4.027522935779817,
+ "grad_norm": 0.024728944525122643,
+ "learning_rate": 1.8453387875979834e-06,
+ "loss": 0.0001,
+ "step": 294
+ },
+ {
+ "epoch": 4.041284403669724,
+ "grad_norm": 0.027091216295957565,
+ "learning_rate": 1.822537370789163e-06,
+ "loss": 0.0002,
+ "step": 295
+ },
+ {
+ "epoch": 4.055045871559633,
+ "grad_norm": 0.05742163583636284,
+ "learning_rate": 1.7997966144377328e-06,
+ "loss": 0.0001,
+ "step": 296
+ },
+ {
+ "epoch": 4.068807339449541,
+ "grad_norm": 0.011909076012670994,
+ "learning_rate": 1.7771185547662417e-06,
+ "loss": 0.0,
+ "step": 297
+ },
+ {
+ "epoch": 4.08256880733945,
+ "grad_norm": 0.005773312412202358,
+ "learning_rate": 1.754505222383337e-06,
+ "loss": 0.0,
+ "step": 298
+ },
+ {
+ "epoch": 4.0963302752293576,
+ "grad_norm": 0.012064033187925816,
+ "learning_rate": 1.7319586421019383e-06,
+ "loss": 0.0,
+ "step": 299
+ },
+ {
+ "epoch": 4.110091743119266,
+ "grad_norm": 0.00871270801872015,
+ "learning_rate": 1.7094808327579401e-06,
+ "loss": 0.0001,
+ "step": 300
+ },
+ {
+ "epoch": 4.123853211009174,
+ "grad_norm": 0.015500242821872234,
+ "learning_rate": 1.6870738070294412e-06,
+ "loss": 0.0001,
+ "step": 301
+ },
+ {
+ "epoch": 4.137614678899083,
+ "grad_norm": 0.010978137142956257,
+ "learning_rate": 1.6647395712565256e-06,
+ "loss": 0.0,
+ "step": 302
+ },
+ {
+ "epoch": 4.151376146788991,
+ "grad_norm": 0.011058066971600056,
+ "learning_rate": 1.6424801252616186e-06,
+ "loss": 0.0001,
+ "step": 303
+ },
+ {
+ "epoch": 4.165137614678899,
+ "grad_norm": 0.029183728620409966,
+ "learning_rate": 1.6202974621704176e-06,
+ "loss": 0.0001,
+ "step": 304
+ },
+ {
+ "epoch": 4.178899082568807,
+ "grad_norm": 0.016758723184466362,
+ "learning_rate": 1.5981935682334266e-06,
+ "loss": 0.0002,
+ "step": 305
+ },
+ {
+ "epoch": 4.192660550458716,
+ "grad_norm": 0.02776522748172283,
+ "learning_rate": 1.5761704226481078e-06,
+ "loss": 0.0001,
+ "step": 306
+ },
+ {
+ "epoch": 4.206422018348624,
+ "grad_norm": 0.008353229612112045,
+ "learning_rate": 1.5542299973816626e-06,
+ "loss": 0.0001,
+ "step": 307
+ },
+ {
+ "epoch": 4.220183486238533,
+ "grad_norm": 0.019200339913368225,
+ "learning_rate": 1.5323742569944573e-06,
+ "loss": 0.0001,
+ "step": 308
+ },
+ {
+ "epoch": 4.23394495412844,
+ "grad_norm": 0.023347314447164536,
+ "learning_rate": 1.5106051584641208e-06,
+ "loss": 0.0001,
+ "step": 309
+ },
+ {
+ "epoch": 4.247706422018348,
+ "grad_norm": 0.035208187997341156,
+ "learning_rate": 1.4889246510103078e-06,
+ "loss": 0.0001,
+ "step": 310
+ },
+ {
+ "epoch": 4.261467889908257,
+ "grad_norm": 0.0710497498512268,
+ "learning_rate": 1.4673346759201728e-06,
+ "loss": 0.0,
+ "step": 311
+ },
+ {
+ "epoch": 4.275229357798165,
+ "grad_norm": 0.018748018890619278,
+ "learning_rate": 1.44583716637454e-06,
+ "loss": 0.0001,
+ "step": 312
+ },
+ {
+ "epoch": 4.2889908256880735,
+ "grad_norm": 0.054301682859659195,
+ "learning_rate": 1.4244340472748076e-06,
+ "loss": 0.0001,
+ "step": 313
+ },
+ {
+ "epoch": 4.302752293577981,
+ "grad_norm": 0.020265033468604088,
+ "learning_rate": 1.403127235070587e-06,
+ "loss": 0.0001,
+ "step": 314
+ },
+ {
+ "epoch": 4.31651376146789,
+ "grad_norm": 0.008297888562083244,
+ "learning_rate": 1.381918637588112e-06,
+ "loss": 0.0,
+ "step": 315
+ },
+ {
+ "epoch": 4.330275229357798,
+ "grad_norm": 0.018647175282239914,
+ "learning_rate": 1.3608101538593965e-06,
+ "loss": 0.0001,
+ "step": 316
+ },
+ {
+ "epoch": 4.344036697247707,
+ "grad_norm": 0.02466970682144165,
+ "learning_rate": 1.3398036739522088e-06,
+ "loss": 0.0002,
+ "step": 317
+ },
+ {
+ "epoch": 4.3577981651376145,
+ "grad_norm": 0.04142339527606964,
+ "learning_rate": 1.3189010788008234e-06,
+ "loss": 0.0001,
+ "step": 318
+ },
+ {
+ "epoch": 4.371559633027523,
+ "grad_norm": 0.025369996204972267,
+ "learning_rate": 1.2981042400376032e-06,
+ "loss": 0.0001,
+ "step": 319
+ },
+ {
+ "epoch": 4.385321100917431,
+ "grad_norm": 0.009671038947999477,
+ "learning_rate": 1.277415019825417e-06,
+ "loss": 0.0,
+ "step": 320
+ },
+ {
+ "epoch": 4.39908256880734,
+ "grad_norm": 0.012685295194387436,
+ "learning_rate": 1.2568352706908937e-06,
+ "loss": 0.0001,
+ "step": 321
+ },
+ {
+ "epoch": 4.412844036697248,
+ "grad_norm": 0.05089607089757919,
+ "learning_rate": 1.2363668353585486e-06,
+ "loss": 0.0001,
+ "step": 322
+ },
+ {
+ "epoch": 4.426605504587156,
+ "grad_norm": 0.024581842124462128,
+ "learning_rate": 1.216011546585785e-06,
+ "loss": 0.0001,
+ "step": 323
+ },
+ {
+ "epoch": 4.440366972477064,
+ "grad_norm": 0.012231200002133846,
+ "learning_rate": 1.195771226998789e-06,
+ "loss": 0.0001,
+ "step": 324
+ },
+ {
+ "epoch": 4.454128440366972,
+ "grad_norm": 0.02831755019724369,
+ "learning_rate": 1.1756476889293269e-06,
+ "loss": 0.0001,
+ "step": 325
+ },
+ {
+ "epoch": 4.467889908256881,
+ "grad_norm": 0.05837830901145935,
+ "learning_rate": 1.1556427342524698e-06,
+ "loss": 0.0001,
+ "step": 326
+ },
+ {
+ "epoch": 4.481651376146789,
+ "grad_norm": 0.015133843757212162,
+ "learning_rate": 1.1357581542252555e-06,
+ "loss": 0.0,
+ "step": 327
+ },
+ {
+ "epoch": 4.495412844036697,
+ "grad_norm": 0.004880247637629509,
+ "learning_rate": 1.1159957293262888e-06,
+ "loss": 0.0,
+ "step": 328
+ },
+ {
+ "epoch": 4.509174311926605,
+ "grad_norm": 0.017852261662483215,
+ "learning_rate": 1.0963572290963298e-06,
+ "loss": 0.0001,
+ "step": 329
+ },
+ {
+ "epoch": 4.522935779816514,
+ "grad_norm": 0.007527775596827269,
+ "learning_rate": 1.0768444119798357e-06,
+ "loss": 0.0001,
+ "step": 330
+ },
+ {
+ "epoch": 4.536697247706422,
+ "grad_norm": 0.00978136993944645,
+ "learning_rate": 1.0574590251675145e-06,
+ "loss": 0.0,
+ "step": 331
+ },
+ {
+ "epoch": 4.5504587155963305,
+ "grad_norm": 0.011405237950384617,
+ "learning_rate": 1.0382028044398823e-06,
+ "loss": 0.0,
+ "step": 332
+ },
+ {
+ "epoch": 4.564220183486238,
+ "grad_norm": 0.004018036648631096,
+ "learning_rate": 1.0190774740118343e-06,
+ "loss": 0.0,
+ "step": 333
+ },
+ {
+ "epoch": 4.577981651376147,
+ "grad_norm": 0.0168945100158453,
+ "learning_rate": 1.0000847463782615e-06,
+ "loss": 0.0001,
+ "step": 334
+ },
+ {
+ "epoch": 4.591743119266055,
+ "grad_norm": 0.008481111377477646,
+ "learning_rate": 9.812263221607114e-07,
+ "loss": 0.0,
+ "step": 335
+ },
+ {
+ "epoch": 4.605504587155964,
+ "grad_norm": 0.014810923486948013,
+ "learning_rate": 9.625038899551162e-07,
+ "loss": 0.0,
+ "step": 336
+ },
+ {
+ "epoch": 4.6192660550458715,
+ "grad_norm": 0.03142401948571205,
+ "learning_rate": 9.439191261805894e-07,
+ "loss": 0.0001,
+ "step": 337
+ },
+ {
+ "epoch": 4.63302752293578,
+ "grad_norm": 0.025308527052402496,
+ "learning_rate": 9.254736949293216e-07,
+ "loss": 0.0,
+ "step": 338
+ },
+ {
+ "epoch": 4.646788990825688,
+ "grad_norm": 0.021518364548683167,
+ "learning_rate": 9.07169247817579e-07,
+ "loss": 0.0001,
+ "step": 339
+ },
+ {
+ "epoch": 4.660550458715596,
+ "grad_norm": 0.007280074991285801,
+ "learning_rate": 8.890074238378074e-07,
+ "loss": 0.0,
+ "step": 340
+ },
+ {
+ "epoch": 4.674311926605505,
+ "grad_norm": 0.007103382609784603,
+ "learning_rate": 8.709898492118885e-07,
+ "loss": 0.0,
+ "step": 341
+ },
+ {
+ "epoch": 4.6880733944954125,
+ "grad_norm": 0.0127399992197752,
+ "learning_rate": 8.531181372455161e-07,
+ "loss": 0.0001,
+ "step": 342
+ },
+ {
+ "epoch": 4.701834862385321,
+ "grad_norm": 0.01236600149422884,
+ "learning_rate": 8.353938881837445e-07,
+ "loss": 0.0001,
+ "step": 343
+ },
+ {
+ "epoch": 4.715596330275229,
+ "grad_norm": 0.01739378273487091,
+ "learning_rate": 8.178186890677029e-07,
+ "loss": 0.0001,
+ "step": 344
+ },
+ {
+ "epoch": 4.729357798165138,
+ "grad_norm": 0.02507130056619644,
+ "learning_rate": 8.003941135924859e-07,
+ "loss": 0.0001,
+ "step": 345
+ },
+ {
+ "epoch": 4.743119266055046,
+ "grad_norm": 0.04195310175418854,
+ "learning_rate": 7.83121721966248e-07,
+ "loss": 0.0,
+ "step": 346
+ },
+ {
+ "epoch": 4.756880733944954,
+ "grad_norm": 0.04957769811153412,
+ "learning_rate": 7.66003060770498e-07,
+ "loss": 0.0002,
+ "step": 347
+ },
+ {
+ "epoch": 4.770642201834862,
+ "grad_norm": 0.015677858144044876,
+ "learning_rate": 7.490396628216237e-07,
+ "loss": 0.0,
+ "step": 348
+ },
+ {
+ "epoch": 4.784403669724771,
+ "grad_norm": 0.009202621877193451,
+ "learning_rate": 7.322330470336314e-07,
+ "loss": 0.0001,
+ "step": 349
+ },
+ {
+ "epoch": 4.798165137614679,
+ "grad_norm": 0.0277025755494833,
+ "learning_rate": 7.155847182821524e-07,
+ "loss": 0.0001,
+ "step": 350
+ },
+ {
+ "epoch": 4.8119266055045875,
+ "grad_norm": 0.022610262036323547,
+ "learning_rate": 6.990961672696908e-07,
+ "loss": 0.0001,
+ "step": 351
+ },
+ {
+ "epoch": 4.825688073394495,
+ "grad_norm": 0.008024164475500584,
+ "learning_rate": 6.827688703921407e-07,
+ "loss": 0.0001,
+ "step": 352
+ },
+ {
+ "epoch": 4.839449541284404,
+ "grad_norm": 0.025318168103694916,
+ "learning_rate": 6.666042896065983e-07,
+ "loss": 0.0001,
+ "step": 353
+ },
+ {
+ "epoch": 4.853211009174312,
+ "grad_norm": 0.022787343710660934,
+ "learning_rate": 6.506038723004484e-07,
+ "loss": 0.0001,
+ "step": 354
+ },
+ {
+ "epoch": 4.86697247706422,
+ "grad_norm": 0.04321616515517235,
+ "learning_rate": 6.347690511617693e-07,
+ "loss": 0.0002,
+ "step": 355
+ },
+ {
+ "epoch": 4.8807339449541285,
+ "grad_norm": 0.03710443153977394,
+ "learning_rate": 6.191012440510469e-07,
+ "loss": 0.0,
+ "step": 356
+ },
+ {
+ "epoch": 4.894495412844036,
+ "grad_norm": 0.012855797074735165,
+ "learning_rate": 6.036018538742208e-07,
+ "loss": 0.0,
+ "step": 357
+ },
+ {
+ "epoch": 4.908256880733945,
+ "grad_norm": 0.013949613086879253,
+ "learning_rate": 5.882722684570638e-07,
+ "loss": 0.0001,
+ "step": 358
+ },
+ {
+ "epoch": 4.922018348623853,
+ "grad_norm": 0.03625642880797386,
+ "learning_rate": 5.731138604209169e-07,
+ "loss": 0.0,
+ "step": 359
+ },
+ {
+ "epoch": 4.935779816513762,
+ "grad_norm": 0.006596633233129978,
+ "learning_rate": 5.581279870597866e-07,
+ "loss": 0.0,
+ "step": 360
+ },
+ {
+ "epoch": 4.9495412844036695,
+ "grad_norm": 0.012400100938975811,
+ "learning_rate": 5.433159902188043e-07,
+ "loss": 0.0001,
+ "step": 361
+ },
+ {
+ "epoch": 4.963302752293578,
+ "grad_norm": 0.008633424527943134,
+ "learning_rate": 5.286791961740855e-07,
+ "loss": 0.0,
+ "step": 362
+ },
+ {
+ "epoch": 4.977064220183486,
+ "grad_norm": 0.011777007952332497,
+ "learning_rate": 5.142189155139685e-07,
+ "loss": 0.0001,
+ "step": 363
+ },
+ {
+ "epoch": 4.990825688073395,
+ "grad_norm": 0.00882638804614544,
+ "learning_rate": 4.999364430216639e-07,
+ "loss": 0.0,
+ "step": 364
+ },
+ {
+ "epoch": 5.0,
+ "grad_norm": 0.03437481075525284,
+ "learning_rate": 4.85833057559322e-07,
+ "loss": 0.0,
+ "step": 365
+ },
+ {
+ "epoch": 5.013761467889908,
+ "grad_norm": 0.008769993670284748,
+ "learning_rate": 4.719100219535194e-07,
+ "loss": 0.0,
+ "step": 366
+ },
+ {
+ "epoch": 5.027522935779817,
+ "grad_norm": 0.019461363554000854,
+ "learning_rate": 4.581685828821858e-07,
+ "loss": 0.0,
+ "step": 367
+ },
+ {
+ "epoch": 5.041284403669724,
+ "grad_norm": 0.005676521919667721,
+ "learning_rate": 4.4460997076297504e-07,
+ "loss": 0.0,
+ "step": 368
+ },
+ {
+ "epoch": 5.055045871559633,
+ "grad_norm": 0.007226955145597458,
+ "learning_rate": 4.3123539964309486e-07,
+ "loss": 0.0001,
+ "step": 369
+ },
+ {
+ "epoch": 5.068807339449541,
+ "grad_norm": 0.01488519087433815,
+ "learning_rate": 4.180460670905978e-07,
+ "loss": 0.0,
+ "step": 370
+ },
+ {
+ "epoch": 5.08256880733945,
+ "grad_norm": 0.007269134745001793,
+ "learning_rate": 4.0504315408714993e-07,
+ "loss": 0.0,
+ "step": 371
+ },
+ {
+ "epoch": 5.0963302752293576,
+ "grad_norm": 0.014093892648816109,
+ "learning_rate": 3.922278249222894e-07,
+ "loss": 0.0,
+ "step": 372
+ },
+ {
+ "epoch": 5.110091743119266,
+ "grad_norm": 0.0031274943612515926,
+ "learning_rate": 3.796012270891672e-07,
+ "loss": 0.0,
+ "step": 373
+ },
+ {
+ "epoch": 5.123853211009174,
+ "grad_norm": 0.026903197169303894,
+ "learning_rate": 3.671644911818084e-07,
+ "loss": 0.0,
+ "step": 374
+ },
+ {
+ "epoch": 5.137614678899083,
+ "grad_norm": 0.008945458568632603,
+ "learning_rate": 3.549187307938726e-07,
+ "loss": 0.0001,
+ "step": 375
+ },
+ {
+ "epoch": 5.151376146788991,
+ "grad_norm": 0.0053525660187006,
+ "learning_rate": 3.4286504241894283e-07,
+ "loss": 0.0,
+ "step": 376
+ },
+ {
+ "epoch": 5.165137614678899,
+ "grad_norm": 0.013901080936193466,
+ "learning_rate": 3.310045053523475e-07,
+ "loss": 0.0001,
+ "step": 377
+ },
+ {
+ "epoch": 5.178899082568807,
+ "grad_norm": 0.009782910346984863,
+ "learning_rate": 3.1933818159451566e-07,
+ "loss": 0.0,
+ "step": 378
+ },
+ {
+ "epoch": 5.192660550458716,
+ "grad_norm": 0.013893576338887215,
+ "learning_rate": 3.078671157558877e-07,
+ "loss": 0.0001,
+ "step": 379
+ },
+ {
+ "epoch": 5.206422018348624,
+ "grad_norm": 0.007590492255985737,
+ "learning_rate": 2.965923349633779e-07,
+ "loss": 0.0,
+ "step": 380
+ },
+ {
+ "epoch": 5.220183486238533,
+ "grad_norm": 0.013813167810440063,
+ "learning_rate": 2.8551484876840815e-07,
+ "loss": 0.0,
+ "step": 381
+ },
+ {
+ "epoch": 5.23394495412844,
+ "grad_norm": 0.005057603120803833,
+ "learning_rate": 2.7463564905650855e-07,
+ "loss": 0.0,
+ "step": 382
+ },
+ {
+ "epoch": 5.247706422018348,
+ "grad_norm": 0.0055967653170228004,
+ "learning_rate": 2.639557099585047e-07,
+ "loss": 0.0,
+ "step": 383
+ },
+ {
+ "epoch": 5.261467889908257,
+ "grad_norm": 0.007865616120398045,
+ "learning_rate": 2.53475987763295e-07,
+ "loss": 0.0001,
+ "step": 384
+ },
+ {
+ "epoch": 5.275229357798165,
+ "grad_norm": 0.019759003072977066,
+ "learning_rate": 2.431974208322191e-07,
+ "loss": 0.0,
+ "step": 385
+ },
+ {
+ "epoch": 5.2889908256880735,
+ "grad_norm": 0.012759661301970482,
+ "learning_rate": 2.3312092951504357e-07,
+ "loss": 0.0,
+ "step": 386
+ },
+ {
+ "epoch": 5.302752293577981,
+ "grad_norm": 0.04186076670885086,
+ "learning_rate": 2.2324741606754629e-07,
+ "loss": 0.0001,
+ "step": 387
+ },
+ {
+ "epoch": 5.31651376146789,
+ "grad_norm": 0.0069040898233652115,
+ "learning_rate": 2.135777645707318e-07,
+ "loss": 0.0,
+ "step": 388
+ },
+ {
+ "epoch": 5.330275229357798,
+ "grad_norm": 0.007877347990870476,
+ "learning_rate": 2.041128408516696e-07,
+ "loss": 0.0,
+ "step": 389
+ },
+ {
+ "epoch": 5.344036697247707,
+ "grad_norm": 0.006059460341930389,
+ "learning_rate": 1.9485349240596613e-07,
+ "loss": 0.0,
+ "step": 390
+ },
+ {
+ "epoch": 5.3577981651376145,
+ "grad_norm": 0.004952798131853342,
+ "learning_rate": 1.8580054832188055e-07,
+ "loss": 0.0,
+ "step": 391
+ },
+ {
+ "epoch": 5.371559633027523,
+ "grad_norm": 0.008702976629137993,
+ "learning_rate": 1.7695481920608716e-07,
+ "loss": 0.0,
+ "step": 392
+ },
+ {
+ "epoch": 5.385321100917431,
+ "grad_norm": 0.004972483497112989,
+ "learning_rate": 1.683170971110934e-07,
+ "loss": 0.0,
+ "step": 393
+ },
+ {
+ "epoch": 5.39908256880734,
+ "grad_norm": 0.00900796614587307,
+ "learning_rate": 1.5988815546431807e-07,
+ "loss": 0.0,
+ "step": 394
+ },
+ {
+ "epoch": 5.412844036697248,
+ "grad_norm": 0.004692474380135536,
+ "learning_rate": 1.5166874899884054e-07,
+ "loss": 0.0,
+ "step": 395
+ },
+ {
+ "epoch": 5.426605504587156,
+ "grad_norm": 0.004459747113287449,
+ "learning_rate": 1.4365961368581844e-07,
+ "loss": 0.0,
+ "step": 396
+ },
+ {
+ "epoch": 5.440366972477064,
+ "grad_norm": 0.008168648928403854,
+ "learning_rate": 1.3586146666858923e-07,
+ "loss": 0.0,
+ "step": 397
+ },
+ {
+ "epoch": 5.454128440366972,
+ "grad_norm": 0.005387315060943365,
+ "learning_rate": 1.2827500619845918e-07,
+ "loss": 0.0,
+ "step": 398
+ },
+ {
+ "epoch": 5.467889908256881,
+ "grad_norm": 0.01771816611289978,
+ "learning_rate": 1.2090091157217653e-07,
+ "loss": 0.0001,
+ "step": 399
+ },
+ {
+ "epoch": 5.481651376146789,
+ "grad_norm": 0.014040575362741947,
+ "learning_rate": 1.137398430711123e-07,
+ "loss": 0.0,
+ "step": 400
+ },
+ {
+ "epoch": 5.495412844036697,
+ "grad_norm": 0.008113729767501354,
+ "learning_rate": 1.0679244190213378e-07,
+ "loss": 0.0,
+ "step": 401
+ },
+ {
+ "epoch": 5.509174311926605,
+ "grad_norm": 0.03784846141934395,
+ "learning_rate": 1.0005933014019309e-07,
+ "loss": 0.0,
+ "step": 402
+ },
+ {
+ "epoch": 5.522935779816514,
+ "grad_norm": 0.0066595966927707195,
+ "learning_rate": 9.354111067262584e-08,
+ "loss": 0.0,
+ "step": 403
+ },
+ {
+ "epoch": 5.536697247706422,
+ "grad_norm": 0.008088004775345325,
+ "learning_rate": 8.723836714516681e-08,
+ "loss": 0.0,
+ "step": 404
+ },
+ {
+ "epoch": 5.5504587155963305,
+ "grad_norm": 0.00842777919024229,
+ "learning_rate": 8.115166390969126e-08,
+ "loss": 0.0,
+ "step": 405
+ },
+ {
+ "epoch": 5.564220183486238,
+ "grad_norm": 0.006161174736917019,
+ "learning_rate": 7.528154597368192e-08,
+ "loss": 0.0,
+ "step": 406
+ },
+ {
+ "epoch": 5.577981651376147,
+ "grad_norm": 0.0038669563364237547,
+ "learning_rate": 6.962853895142924e-08,
+ "loss": 0.0,
+ "step": 407
+ },
+ {
+ "epoch": 5.591743119266055,
+ "grad_norm": 0.009395565837621689,
+ "learning_rate": 6.419314901696671e-08,
+ "loss": 0.0001,
+ "step": 408
+ },
+ {
+ "epoch": 5.605504587155964,
+ "grad_norm": 0.01081535778939724,
+ "learning_rate": 5.897586285874751e-08,
+ "loss": 0.0,
+ "step": 409
+ },
+ {
+ "epoch": 5.6192660550458715,
+ "grad_norm": 0.010825342498719692,
+ "learning_rate": 5.3977147636068425e-08,
+ "loss": 0.0,
+ "step": 410
+ },
+ {
+ "epoch": 5.63302752293578,
+ "grad_norm": 0.014875766821205616,
+ "learning_rate": 4.919745093723588e-08,
+ "loss": 0.0001,
+ "step": 411
+ },
+ {
+ "epoch": 5.646788990825688,
+ "grad_norm": 0.0037493661511689425,
+ "learning_rate": 4.4637200739493514e-08,
+ "loss": 0.0,
+ "step": 412
+ },
+ {
+ "epoch": 5.660550458715596,
+ "grad_norm": 0.010110544972121716,
+ "learning_rate": 4.0296805370696466e-08,
+ "loss": 0.0001,
+ "step": 413
+ },
+ {
+ "epoch": 5.674311926605505,
+ "grad_norm": 0.011753179132938385,
+ "learning_rate": 3.617665347275201e-08,
+ "loss": 0.0,
+ "step": 414
+ },
+ {
+ "epoch": 5.6880733944954125,
+ "grad_norm": 0.005677060689777136,
+ "learning_rate": 3.227711396682015e-08,
+ "loss": 0.0,
+ "step": 415
+ },
+ {
+ "epoch": 5.701834862385321,
+ "grad_norm": 0.008238662034273148,
+ "learning_rate": 2.8598536020278678e-08,
+ "loss": 0.0,
+ "step": 416
+ },
+ {
+ "epoch": 5.715596330275229,
+ "grad_norm": 0.0038856768514961004,
+ "learning_rate": 2.5141249015459833e-08,
+ "loss": 0.0,
+ "step": 417
+ },
+ {
+ "epoch": 5.729357798165138,
+ "grad_norm": 0.03202905133366585,
+ "learning_rate": 2.1905562520156686e-08,
+ "loss": 0.0001,
+ "step": 418
+ },
+ {
+ "epoch": 5.743119266055046,
+ "grad_norm": 0.014867526479065418,
+ "learning_rate": 1.8891766259904188e-08,
+ "loss": 0.0,
+ "step": 419
+ },
+ {
+ "epoch": 5.756880733944954,
+ "grad_norm": 0.00256799696944654,
+ "learning_rate": 1.6100130092037704e-08,
+ "loss": 0.0001,
+ "step": 420
+ },
+ {
+ "epoch": 5.770642201834862,
+ "grad_norm": 0.030730484053492546,
+ "learning_rate": 1.3530903981528454e-08,
+ "loss": 0.0,
+ "step": 421
+ },
+ {
+ "epoch": 5.784403669724771,
+ "grad_norm": 0.0040238359943032265,
+ "learning_rate": 1.118431797860281e-08,
+ "loss": 0.0,
+ "step": 422
+ },
+ {
+ "epoch": 5.798165137614679,
+ "grad_norm": 0.00670271972194314,
+ "learning_rate": 9.060582198141822e-09,
+ "loss": 0.0001,
+ "step": 423
+ },
+ {
+ "epoch": 5.8119266055045875,
+ "grad_norm": 0.012652000412344933,
+ "learning_rate": 7.159886800869875e-09,
+ "loss": 0.0,
+ "step": 424
+ },
+ {
+ "epoch": 5.825688073394495,
+ "grad_norm": 0.011578808538615704,
+ "learning_rate": 5.482401976325269e-09,
+ "loss": 0.0,
+ "step": 425
+ },
+ {
+ "epoch": 5.839449541284404,
+ "grad_norm": 0.007577781565487385,
+ "learning_rate": 4.028277927621838e-09,
+ "loss": 0.0,
+ "step": 426
+ },
+ {
+ "epoch": 5.853211009174312,
+ "grad_norm": 0.01178122777491808,
+ "learning_rate": 2.7976448580008252e-09,
+ "loss": 0.0,
+ "step": 427
+ },
+ {
+ "epoch": 5.86697247706422,
+ "grad_norm": 0.01295078918337822,
+ "learning_rate": 1.7906129591713228e-09,
+ "loss": 0.0,
+ "step": 428
+ },
+ {
+ "epoch": 5.8807339449541285,
+ "grad_norm": 0.0027716464828699827,
+ "learning_rate": 1.0072724014437153e-09,
+ "loss": 0.0,
+ "step": 429
+ },
+ {
+ "epoch": 5.894495412844036,
+ "grad_norm": 0.014433865435421467,
+ "learning_rate": 4.476933256555849e-10,
+ "loss": 0.0001,
+ "step": 430
+ },
+ {
+ "epoch": 5.908256880733945,
+ "grad_norm": 0.013054312206804752,
+ "learning_rate": 1.1192583689256797e-10,
+ "loss": 0.0,
+ "step": 431
+ },
+ {
+ "epoch": 5.922018348623853,
+ "grad_norm": 0.007434073835611343,
+ "learning_rate": 0.0,
+ "loss": 0.0,
+ "step": 432
+ }
+ ],
+ "logging_steps": 1,
+ "max_steps": 432,
+ "num_input_tokens_seen": 0,
+ "num_train_epochs": 6,
+ "save_steps": 72,
+ "stateful_callbacks": {
+ "TrainerControl": {
+ "args": {
+ "should_epoch_stop": false,
+ "should_evaluate": false,
+ "should_log": false,
+ "should_save": true,
+ "should_training_stop": true
+ },
+ "attributes": {}
+ }
+ },
+ "total_flos": 2.7668049020114174e+19,
+ "train_batch_size": 4,
+ "trial_name": null,
+ "trial_params": null
+}
diff --git a/checkpoint-432/training_args.bin b/checkpoint-432/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..6b48f4b52f5bfff81ec8534f6510460a8014f336
--- /dev/null
+++ b/checkpoint-432/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:cbafef0713d4b794ca3a92a04d378baaf3fa7647c9af95948bfb2ef7c0e02eda
+size 7928
diff --git a/checkpoint-432/zero_to_fp32.py b/checkpoint-432/zero_to_fp32.py
new file mode 100644
index 0000000000000000000000000000000000000000..24cc342e78d1a006c782b3a4cd68d9ce786d8fd8
--- /dev/null
+++ b/checkpoint-432/zero_to_fp32.py
@@ -0,0 +1,604 @@
+#!/usr/bin/env python
+
+# Copyright (c) Microsoft Corporation.
+# SPDX-License-Identifier: Apache-2.0
+
+# DeepSpeed Team
+
+# This script extracts fp32 consolidated weights from ZeRO stage 1, 2 and 3 DeepSpeed checkpoints. It gets
+# copied into the top level checkpoint dir, so the user can easily do the conversion at any point in
+# the future. Once extracted, the weights don't require DeepSpeed and can be used in any
+# application.
+#
+# example: python zero_to_fp32.py . pytorch_model.bin
+
+import argparse
+import torch
+import glob
+import math
+import os
+import re
+from collections import OrderedDict
+from dataclasses import dataclass
+
+# while this script doesn't use deepspeed to recover data, since the checkpoints are pickled with
+# DeepSpeed data structures it has to be available in the current python environment.
+from deepspeed.utils import logger
+from deepspeed.checkpoint.constants import (DS_VERSION, OPTIMIZER_STATE_DICT, SINGLE_PARTITION_OF_FP32_GROUPS,
+ FP32_FLAT_GROUPS, ZERO_STAGE, PARTITION_COUNT, PARAM_SHAPES, BUFFER_NAMES,
+ FROZEN_PARAM_SHAPES, FROZEN_PARAM_FRAGMENTS)
+
+
+@dataclass
+class zero_model_state:
+ buffers: dict()
+ param_shapes: dict()
+ shared_params: list
+ ds_version: int
+ frozen_param_shapes: dict()
+ frozen_param_fragments: dict()
+
+
+debug = 0
+
+# load to cpu
+device = torch.device('cpu')
+
+
+def atoi(text):
+ return int(text) if text.isdigit() else text
+
+
+def natural_keys(text):
+ '''
+ alist.sort(key=natural_keys) sorts in human order
+ http://nedbatchelder.com/blog/200712/human_sorting.html
+ (See Toothy's implementation in the comments)
+ '''
+ return [atoi(c) for c in re.split(r'(\d+)', text)]
+
+
+def get_model_state_file(checkpoint_dir, zero_stage):
+ if not os.path.isdir(checkpoint_dir):
+ raise FileNotFoundError(f"Directory '{checkpoint_dir}' doesn't exist")
+
+ # there should be only one file
+ if zero_stage <= 2:
+ file = os.path.join(checkpoint_dir, "mp_rank_00_model_states.pt")
+ elif zero_stage == 3:
+ file = os.path.join(checkpoint_dir, "zero_pp_rank_0_mp_rank_00_model_states.pt")
+
+ if not os.path.exists(file):
+ raise FileNotFoundError(f"can't find model states file at '{file}'")
+
+ return file
+
+
+def get_checkpoint_files(checkpoint_dir, glob_pattern):
+ # XXX: need to test that this simple glob rule works for multi-node setup too
+ ckpt_files = sorted(glob.glob(os.path.join(checkpoint_dir, glob_pattern)), key=natural_keys)
+
+ if len(ckpt_files) == 0:
+ raise FileNotFoundError(f"can't find {glob_pattern} files in directory '{checkpoint_dir}'")
+
+ return ckpt_files
+
+
+def get_optim_files(checkpoint_dir):
+ return get_checkpoint_files(checkpoint_dir, "*_optim_states.pt")
+
+
+def get_model_state_files(checkpoint_dir):
+ return get_checkpoint_files(checkpoint_dir, "*_model_states.pt")
+
+
+def parse_model_states(files):
+ zero_model_states = []
+ for file in files:
+ state_dict = torch.load(file, map_location=device)
+
+ if BUFFER_NAMES not in state_dict:
+ raise ValueError(f"{file} is not a model state checkpoint")
+ buffer_names = state_dict[BUFFER_NAMES]
+ if debug:
+ print("Found buffers:", buffer_names)
+
+ # recover just the buffers while restoring them to fp32 if they were saved in fp16
+ buffers = {k: v.float() for k, v in state_dict["module"].items() if k in buffer_names}
+ param_shapes = state_dict[PARAM_SHAPES]
+
+ # collect parameters that are included in param_shapes
+ param_names = []
+ for s in param_shapes:
+ for name in s.keys():
+ param_names.append(name)
+
+ # update with frozen parameters
+ frozen_param_shapes = state_dict.get(FROZEN_PARAM_SHAPES, None)
+ if frozen_param_shapes is not None:
+ if debug:
+ print(f"Found frozen_param_shapes: {frozen_param_shapes}")
+ param_names += list(frozen_param_shapes.keys())
+
+ # handle shared params
+ shared_params = [[k, v] for k, v in state_dict["shared_params"].items()]
+
+ ds_version = state_dict.get(DS_VERSION, None)
+
+ frozen_param_fragments = state_dict.get(FROZEN_PARAM_FRAGMENTS, None)
+
+ z_model_state = zero_model_state(buffers=buffers,
+ param_shapes=param_shapes,
+ shared_params=shared_params,
+ ds_version=ds_version,
+ frozen_param_shapes=frozen_param_shapes,
+ frozen_param_fragments=frozen_param_fragments)
+ zero_model_states.append(z_model_state)
+
+ return zero_model_states
+
+
+def parse_optim_states(files, ds_checkpoint_dir):
+
+ total_files = len(files)
+ state_dicts = []
+ for f in files:
+ state_dict = torch.load(f, map_location=device)
+        # immediately discard the two potentially huge optimizer states as we only care for fp32 master weights
+ # and also handle the case where it was already removed by another helper script
+ state_dict["optimizer_state_dict"].pop("optimizer_state_dict", None)
+ state_dicts.append(state_dict)
+
+ if not ZERO_STAGE in state_dicts[0][OPTIMIZER_STATE_DICT]:
+ raise ValueError(f"{files[0]} is not a zero checkpoint")
+ zero_stage = state_dicts[0][OPTIMIZER_STATE_DICT][ZERO_STAGE]
+ world_size = state_dicts[0][OPTIMIZER_STATE_DICT][PARTITION_COUNT]
+
+ # For ZeRO-2 each param group can have different partition_count as data parallelism for expert
+ # parameters can be different from data parallelism for non-expert parameters. So we can just
+ # use the max of the partition_count to get the dp world_size.
+
+ if type(world_size) is list:
+ world_size = max(world_size)
+
+ if world_size != total_files:
+ raise ValueError(
+ f"Expected {world_size} of '*_optim_states.pt' under '{ds_checkpoint_dir}' but found {total_files} files. "
+ "Possibly due to an overwrite of an old checkpoint, or a checkpoint didn't get saved by one or more processes."
+ )
+
+ # the groups are named differently in each stage
+ if zero_stage <= 2:
+ fp32_groups_key = SINGLE_PARTITION_OF_FP32_GROUPS
+ elif zero_stage == 3:
+ fp32_groups_key = FP32_FLAT_GROUPS
+ else:
+ raise ValueError(f"unknown zero stage {zero_stage}")
+
+ if zero_stage <= 2:
+ fp32_flat_groups = [state_dicts[i][OPTIMIZER_STATE_DICT][fp32_groups_key] for i in range(len(state_dicts))]
+ elif zero_stage == 3:
+ # if there is more than one param group, there will be multiple flattened tensors - one
+ # flattened tensor per group - for simplicity merge them into a single tensor
+ #
+ # XXX: could make the script more memory efficient for when there are multiple groups - it
+ # will require matching the sub-lists of param_shapes for each param group flattened tensor
+
+ fp32_flat_groups = [
+ torch.cat(state_dicts[i][OPTIMIZER_STATE_DICT][fp32_groups_key], 0) for i in range(len(state_dicts))
+ ]
+
+ return zero_stage, world_size, fp32_flat_groups
+
+
+def _get_fp32_state_dict_from_zero_checkpoint(ds_checkpoint_dir, exclude_frozen_parameters):
+ """
+ Returns fp32 state_dict reconstructed from ds checkpoint
+
+ Args:
+ - ``ds_checkpoint_dir``: path to the deepspeed checkpoint folder (where the optimizer files are)
+
+ """
+ print(f"Processing zero checkpoint '{ds_checkpoint_dir}'")
+
+ optim_files = get_optim_files(ds_checkpoint_dir)
+ zero_stage, world_size, fp32_flat_groups = parse_optim_states(optim_files, ds_checkpoint_dir)
+ print(f"Detected checkpoint of type zero stage {zero_stage}, world_size: {world_size}")
+
+ model_files = get_model_state_files(ds_checkpoint_dir)
+
+ zero_model_states = parse_model_states(model_files)
+ print(f'Parsing checkpoint created by deepspeed=={zero_model_states[0].ds_version}')
+
+ if zero_stage <= 2:
+ return _get_fp32_state_dict_from_zero2_checkpoint(world_size, fp32_flat_groups, zero_model_states,
+ exclude_frozen_parameters)
+ elif zero_stage == 3:
+ return _get_fp32_state_dict_from_zero3_checkpoint(world_size, fp32_flat_groups, zero_model_states,
+ exclude_frozen_parameters)
+
+
+def _zero2_merge_frozen_params(state_dict, zero_model_states):
+ if zero_model_states[0].frozen_param_shapes is None or len(zero_model_states[0].frozen_param_shapes) == 0:
+ return
+
+ frozen_param_shapes = zero_model_states[0].frozen_param_shapes
+ frozen_param_fragments = zero_model_states[0].frozen_param_fragments
+
+ if debug:
+ num_elem = sum(s.numel() for s in frozen_param_shapes.values())
+ print(f'rank 0: {FROZEN_PARAM_SHAPES}.numel = {num_elem}')
+
+ wanted_params = len(frozen_param_shapes)
+ wanted_numel = sum(s.numel() for s in frozen_param_shapes.values())
+ avail_numel = sum([p.numel() for p in frozen_param_fragments.values()])
+ print(f'Frozen params: Have {avail_numel} numels to process.')
+ print(f'Frozen params: Need {wanted_numel} numels in {wanted_params} params')
+
+ total_params = 0
+ total_numel = 0
+ for name, shape in frozen_param_shapes.items():
+ total_params += 1
+ unpartitioned_numel = shape.numel()
+ total_numel += unpartitioned_numel
+
+ state_dict[name] = frozen_param_fragments[name]
+
+ if debug:
+ print(f"{name} full shape: {shape} unpartitioned numel {unpartitioned_numel} ")
+
+ print(f"Reconstructed Frozen fp32 state dict with {total_params} params {total_numel} elements")
+
+
+def _has_callable(obj, fn):
+ attr = getattr(obj, fn, None)
+ return callable(attr)
+
+
+def _zero2_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states):
+ param_shapes = zero_model_states[0].param_shapes
+
+ # Reconstruction protocol:
+ #
+ # XXX: document this
+
+ if debug:
+ for i in range(world_size):
+ for j in range(len(fp32_flat_groups[0])):
+ print(f"{FP32_FLAT_GROUPS}[{i}][{j}].shape={fp32_flat_groups[i][j].shape}")
+
+ # XXX: memory usage doubles here (zero2)
+ num_param_groups = len(fp32_flat_groups[0])
+ merged_single_partition_of_fp32_groups = []
+ for i in range(num_param_groups):
+ merged_partitions = [sd[i] for sd in fp32_flat_groups]
+ full_single_fp32_vector = torch.cat(merged_partitions, 0)
+ merged_single_partition_of_fp32_groups.append(full_single_fp32_vector)
+ avail_numel = sum(
+ [full_single_fp32_vector.numel() for full_single_fp32_vector in merged_single_partition_of_fp32_groups])
+
+ if debug:
+ wanted_params = sum([len(shapes) for shapes in param_shapes])
+ wanted_numel = sum([sum(shape.numel() for shape in shapes.values()) for shapes in param_shapes])
+ # not asserting if there is a mismatch due to possible padding
+ print(f"Have {avail_numel} numels to process.")
+ print(f"Need {wanted_numel} numels in {wanted_params} params.")
+
+ # params
+ # XXX: for huge models that can't fit into the host's RAM we will have to recode this to support
+ # out-of-core computing solution
+ total_numel = 0
+ total_params = 0
+ for shapes, full_single_fp32_vector in zip(param_shapes, merged_single_partition_of_fp32_groups):
+ offset = 0
+ avail_numel = full_single_fp32_vector.numel()
+ for name, shape in shapes.items():
+
+ unpartitioned_numel = shape.numel() if _has_callable(shape, 'numel') else math.prod(shape)
+ total_numel += unpartitioned_numel
+ total_params += 1
+
+ if debug:
+ print(f"{name} full shape: {shape} unpartitioned numel {unpartitioned_numel} ")
+ state_dict[name] = full_single_fp32_vector.narrow(0, offset, unpartitioned_numel).view(shape)
+ offset += unpartitioned_numel
+
+ # Z2 started to align to 2*world_size to improve nccl performance. Therefore both offset and
+ # avail_numel can differ by anywhere between 0..2*world_size. Due to two unrelated complex
+ # paddings performed in the code it's almost impossible to predict the exact numbers w/o the
+ # live optimizer object, so we are checking that the numbers are within the right range
+ align_to = 2 * world_size
+
+ def zero2_align(x):
+ return align_to * math.ceil(x / align_to)
+
+ if debug:
+ print(f"original offset={offset}, avail_numel={avail_numel}")
+
+ offset = zero2_align(offset)
+ avail_numel = zero2_align(avail_numel)
+
+ if debug:
+ print(f"aligned offset={offset}, avail_numel={avail_numel}")
+
+ # Sanity check
+ if offset != avail_numel:
+ raise ValueError(f"consumed {offset} numels out of {avail_numel} - something is wrong")
+
+ print(f"Reconstructed fp32 state dict with {total_params} params {total_numel} elements")
+
+
+def _get_fp32_state_dict_from_zero2_checkpoint(world_size, fp32_flat_groups, zero_model_states,
+ exclude_frozen_parameters):
+ state_dict = OrderedDict()
+
+ # buffers
+ buffers = zero_model_states[0].buffers
+ state_dict.update(buffers)
+ if debug:
+ print(f"added {len(buffers)} buffers")
+
+ if not exclude_frozen_parameters:
+ _zero2_merge_frozen_params(state_dict, zero_model_states)
+
+ _zero2_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states)
+
+ # recover shared parameters
+ for pair in zero_model_states[0].shared_params:
+ if pair[1] in state_dict:
+ state_dict[pair[0]] = state_dict[pair[1]]
+
+ return state_dict
+
+
+def zero3_partitioned_param_info(unpartitioned_numel, world_size):
+ remainder = unpartitioned_numel % world_size
+ padding_numel = (world_size - remainder) if remainder else 0
+ partitioned_numel = math.ceil(unpartitioned_numel / world_size)
+ return partitioned_numel, padding_numel
+
+
+def _zero3_merge_frozen_params(state_dict, world_size, zero_model_states):
+ if zero_model_states[0].frozen_param_shapes is None or len(zero_model_states[0].frozen_param_shapes) == 0:
+ return
+
+ if debug:
+ for i in range(world_size):
+ num_elem = sum(s.numel() for s in zero_model_states[i].frozen_param_fragments.values())
+ print(f'rank {i}: {FROZEN_PARAM_SHAPES}.numel = {num_elem}')
+
+ frozen_param_shapes = zero_model_states[0].frozen_param_shapes
+ wanted_params = len(frozen_param_shapes)
+ wanted_numel = sum(s.numel() for s in frozen_param_shapes.values())
+ avail_numel = sum([p.numel() for p in zero_model_states[0].frozen_param_fragments.values()]) * world_size
+ print(f'Frozen params: Have {avail_numel} numels to process.')
+ print(f'Frozen params: Need {wanted_numel} numels in {wanted_params} params')
+
+ total_params = 0
+ total_numel = 0
+ for name, shape in zero_model_states[0].frozen_param_shapes.items():
+ total_params += 1
+ unpartitioned_numel = shape.numel()
+ total_numel += unpartitioned_numel
+
+ param_frags = tuple(model_state.frozen_param_fragments[name] for model_state in zero_model_states)
+ state_dict[name] = torch.cat(param_frags, 0).narrow(0, 0, unpartitioned_numel).view(shape)
+
+ partitioned_numel, partitioned_padding_numel = zero3_partitioned_param_info(unpartitioned_numel, world_size)
+
+ if debug:
+ print(
+ f"Frozen params: {total_params} {name} full shape: {shape} partition0 numel={partitioned_numel} partitioned_padding_numel={partitioned_padding_numel}"
+ )
+
+ print(f"Reconstructed Frozen fp32 state dict with {total_params} params {total_numel} elements")
+
+
+def _zero3_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states):
+ param_shapes = zero_model_states[0].param_shapes
+ avail_numel = fp32_flat_groups[0].numel() * world_size
+ # Reconstruction protocol: For zero3 we need to zip the partitions together at boundary of each
+ # param, re-consolidating each param, while dealing with padding if any
+
+ # merge list of dicts, preserving order
+ param_shapes = {k: v for d in param_shapes for k, v in d.items()}
+
+ if debug:
+ for i in range(world_size):
+ print(f"{FP32_FLAT_GROUPS}[{i}].shape={fp32_flat_groups[i].shape}")
+
+ wanted_params = len(param_shapes)
+ wanted_numel = sum(shape.numel() for shape in param_shapes.values())
+ # not asserting if there is a mismatch due to possible padding
+ avail_numel = fp32_flat_groups[0].numel() * world_size
+ print(f"Trainable params: Have {avail_numel} numels to process.")
+ print(f"Trainable params: Need {wanted_numel} numels in {wanted_params} params.")
+
+ # params
+ # XXX: for huge models that can't fit into the host's RAM we will have to recode this to support
+ # out-of-core computing solution
+ offset = 0
+ total_numel = 0
+ total_params = 0
+ for name, shape in param_shapes.items():
+
+ unpartitioned_numel = shape.numel()
+ total_numel += unpartitioned_numel
+ total_params += 1
+
+ partitioned_numel, partitioned_padding_numel = zero3_partitioned_param_info(unpartitioned_numel, world_size)
+
+ if debug:
+ print(
+ f"Trainable params: {total_params} {name} full shape: {shape} partition0 numel={partitioned_numel} partitioned_padding_numel={partitioned_padding_numel}"
+ )
+
+ # XXX: memory usage doubles here
+ state_dict[name] = torch.cat(
+ tuple(fp32_flat_groups[i].narrow(0, offset, partitioned_numel) for i in range(world_size)),
+ 0).narrow(0, 0, unpartitioned_numel).view(shape)
+ offset += partitioned_numel
+
+ offset *= world_size
+
+ # Sanity check
+ if offset != avail_numel:
+ raise ValueError(f"consumed {offset} numels out of {avail_numel} - something is wrong")
+
+ print(f"Reconstructed Trainable fp32 state dict with {total_params} params {total_numel} elements")
+
+
+def _get_fp32_state_dict_from_zero3_checkpoint(world_size, fp32_flat_groups, zero_model_states,
+ exclude_frozen_parameters):
+ state_dict = OrderedDict()
+
+ # buffers
+ buffers = zero_model_states[0].buffers
+ state_dict.update(buffers)
+ if debug:
+ print(f"added {len(buffers)} buffers")
+
+ if not exclude_frozen_parameters:
+ _zero3_merge_frozen_params(state_dict, world_size, zero_model_states)
+
+ _zero3_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states)
+
+ # recover shared parameters
+ for pair in zero_model_states[0].shared_params:
+ if pair[1] in state_dict:
+ state_dict[pair[0]] = state_dict[pair[1]]
+
+ return state_dict
+
+
+def get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag=None, exclude_frozen_parameters=False):
+ """
+ Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated state_dict that can be loaded with
+ ``load_state_dict()`` and used for training without DeepSpeed or shared with others, for example
+ via a model hub.
+
+ Args:
+ - ``checkpoint_dir``: path to the desired checkpoint folder
+ - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in 'latest' file. e.g., ``global_step14``
+ - ``exclude_frozen_parameters``: exclude frozen parameters
+
+ Returns:
+ - pytorch ``state_dict``
+
+ Note: this approach may not work if your application doesn't have sufficient free CPU memory and
+ you may need to use the offline approach using the ``zero_to_fp32.py`` script that is saved with
+ the checkpoint.
+
+ A typical usage might be ::
+
+ from deepspeed.utils.zero_to_fp32 import get_fp32_state_dict_from_zero_checkpoint
+ # do the training and checkpoint saving
+ state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir) # already on cpu
+ model = model.cpu() # move to cpu
+ model.load_state_dict(state_dict)
+ # submit to model hub or save the model to share with others
+
+ In this example the ``model`` will no longer be usable in the deepspeed context of the same
+ application. i.e. you will need to re-initialize the deepspeed engine, since
+ ``model.load_state_dict(state_dict)`` will remove all the deepspeed magic from it.
+
+ If you want it all done for you, use ``load_state_dict_from_zero_checkpoint`` instead.
+
+ """
+ if tag is None:
+ latest_path = os.path.join(checkpoint_dir, 'latest')
+ if os.path.isfile(latest_path):
+ with open(latest_path, 'r') as fd:
+ tag = fd.read().strip()
+ else:
+ raise ValueError(f"Unable to find 'latest' file at {latest_path}")
+
+ ds_checkpoint_dir = os.path.join(checkpoint_dir, tag)
+
+ if not os.path.isdir(ds_checkpoint_dir):
+ raise FileNotFoundError(f"Directory '{ds_checkpoint_dir}' doesn't exist")
+
+ return _get_fp32_state_dict_from_zero_checkpoint(ds_checkpoint_dir, exclude_frozen_parameters)
+
+
+def convert_zero_checkpoint_to_fp32_state_dict(checkpoint_dir, output_file, tag=None, exclude_frozen_parameters=False):
+ """
+ Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated ``state_dict`` file that can be
+ loaded with ``torch.load(file)`` + ``load_state_dict()`` and used for training without DeepSpeed.
+
+ Args:
+ - ``checkpoint_dir``: path to the desired checkpoint folder. (one that contains the tag-folder, like ``global_step14``)
+ - ``output_file``: path to the pytorch fp32 state_dict output file (e.g. path/pytorch_model.bin)
+ - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in the file named ``latest`` in the checkpoint folder, e.g., ``global_step14``
+ - ``exclude_frozen_parameters``: exclude frozen parameters
+ """
+
+ state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag, exclude_frozen_parameters)
+ print(f"Saving fp32 state dict to {output_file}")
+ torch.save(state_dict, output_file)
+
+
+def load_state_dict_from_zero_checkpoint(model, checkpoint_dir, tag=None):
+ """
+ 1. Put the provided model to cpu
+ 2. Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated ``state_dict``
+ 3. Load it into the provided model
+
+ Args:
+ - ``model``: the model object to update
+ - ``checkpoint_dir``: path to the desired checkpoint folder. (one that contains the tag-folder, like ``global_step14``)
+ - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in the file named ``latest`` in the checkpoint folder, e.g., ``global_step14``
+
+ Returns:
+        - ``model``: modified model
+
+ Make sure you have plenty of CPU memory available before you call this function. If you don't
+ have enough use the ``zero_to_fp32.py`` utility to do the conversion. You will find it
+ conveniently placed for you in the checkpoint folder.
+
+ A typical usage might be ::
+
+ from deepspeed.utils.zero_to_fp32 import load_state_dict_from_zero_checkpoint
+ model = load_state_dict_from_zero_checkpoint(trainer.model, checkpoint_dir)
+ # submit to model hub or save the model to share with others
+
+ Note, that once this was run, the ``model`` will no longer be usable in the deepspeed context
+ of the same application. i.e. you will need to re-initialize the deepspeed engine, since
+ ``model.load_state_dict(state_dict)`` will remove all the deepspeed magic from it.
+
+ """
+ logger.info(f"Extracting fp32 weights")
+ state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag)
+
+ logger.info(f"Overwriting model with fp32 weights")
+ model = model.cpu()
+ model.load_state_dict(state_dict, strict=False)
+
+ return model
+
+
+if __name__ == "__main__":
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument("checkpoint_dir",
+ type=str,
+ help="path to the desired checkpoint folder, e.g., path/checkpoint-12")
+ parser.add_argument(
+ "output_file",
+ type=str,
+ help="path to the pytorch fp32 state_dict output file (e.g. path/checkpoint-12/pytorch_model.bin)")
+ parser.add_argument("-t",
+ "--tag",
+ type=str,
+ default=None,
+ help="checkpoint tag used as a unique identifier for checkpoint. e.g., global_step1")
+ parser.add_argument("--exclude_frozen_parameters", action='store_true', help="exclude frozen parameters")
+ parser.add_argument("-d", "--debug", action='store_true', help="enable debug")
+ args = parser.parse_args()
+
+ debug = args.debug
+
+ convert_zero_checkpoint_to_fp32_state_dict(args.checkpoint_dir,
+ args.output_file,
+ tag=args.tag,
+ exclude_frozen_parameters=args.exclude_frozen_parameters)
diff --git a/checkpoint-72/README.md b/checkpoint-72/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..1b184114a0c28ed3e4c082c18486736dc818166d
--- /dev/null
+++ b/checkpoint-72/README.md
@@ -0,0 +1,202 @@
+---
+base_model: meta-llama/Llama-3.3-70B-Instruct
+library_name: peft
+---
+
+# Model Card for Model ID
+
+
+
+
+
+## Model Details
+
+### Model Description
+
+
+
+
+
+- **Developed by:** [More Information Needed]
+- **Funded by [optional]:** [More Information Needed]
+- **Shared by [optional]:** [More Information Needed]
+- **Model type:** [More Information Needed]
+- **Language(s) (NLP):** [More Information Needed]
+- **License:** [More Information Needed]
+- **Finetuned from model [optional]:** [More Information Needed]
+
+### Model Sources [optional]
+
+
+
+- **Repository:** [More Information Needed]
+- **Paper [optional]:** [More Information Needed]
+- **Demo [optional]:** [More Information Needed]
+
+## Uses
+
+
+
+### Direct Use
+
+
+
+[More Information Needed]
+
+### Downstream Use [optional]
+
+
+
+[More Information Needed]
+
+### Out-of-Scope Use
+
+
+
+[More Information Needed]
+
+## Bias, Risks, and Limitations
+
+
+
+[More Information Needed]
+
+### Recommendations
+
+
+
+Users (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations.
+
+## How to Get Started with the Model
+
+Use the code below to get started with the model.
+
+[More Information Needed]
+
+## Training Details
+
+### Training Data
+
+
+
+[More Information Needed]
+
+### Training Procedure
+
+
+
+#### Preprocessing [optional]
+
+[More Information Needed]
+
+
+#### Training Hyperparameters
+
+- **Training regime:** [More Information Needed]
+
+#### Speeds, Sizes, Times [optional]
+
+
+
+[More Information Needed]
+
+## Evaluation
+
+
+
+### Testing Data, Factors & Metrics
+
+#### Testing Data
+
+
+
+[More Information Needed]
+
+#### Factors
+
+
+
+[More Information Needed]
+
+#### Metrics
+
+
+
+[More Information Needed]
+
+### Results
+
+[More Information Needed]
+
+#### Summary
+
+
+
+## Model Examination [optional]
+
+
+
+[More Information Needed]
+
+## Environmental Impact
+
+
+
+Carbon emissions can be estimated using the [Machine Learning Impact calculator](https://mlco2.github.io/impact#compute) presented in [Lacoste et al. (2019)](https://arxiv.org/abs/1910.09700).
+
+- **Hardware Type:** [More Information Needed]
+- **Hours used:** [More Information Needed]
+- **Cloud Provider:** [More Information Needed]
+- **Compute Region:** [More Information Needed]
+- **Carbon Emitted:** [More Information Needed]
+
+## Technical Specifications [optional]
+
+### Model Architecture and Objective
+
+[More Information Needed]
+
+### Compute Infrastructure
+
+[More Information Needed]
+
+#### Hardware
+
+[More Information Needed]
+
+#### Software
+
+[More Information Needed]
+
+## Citation [optional]
+
+
+
+**BibTeX:**
+
+[More Information Needed]
+
+**APA:**
+
+[More Information Needed]
+
+## Glossary [optional]
+
+
+
+[More Information Needed]
+
+## More Information [optional]
+
+[More Information Needed]
+
+## Model Card Authors [optional]
+
+[More Information Needed]
+
+## Model Card Contact
+
+[More Information Needed]
+### Framework versions
+
+- PEFT 0.15.0
\ No newline at end of file
diff --git a/checkpoint-72/adapter_config.json b/checkpoint-72/adapter_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..538b369b5129fb972c782e6ccfd589054540e1af
--- /dev/null
+++ b/checkpoint-72/adapter_config.json
@@ -0,0 +1,42 @@
+{
+ "alpha_pattern": {},
+ "auto_mapping": null,
+ "base_model_name_or_path": "meta-llama/Llama-3.3-70B-Instruct",
+ "bias": "none",
+ "corda_config": null,
+ "eva_config": null,
+ "exclude_modules": null,
+ "fan_in_fan_out": null,
+ "inference_mode": true,
+ "init_lora_weights": true,
+ "layer_replication": null,
+ "layers_pattern": null,
+ "layers_to_transform": null,
+ "loftq_config": {},
+ "lora_alpha": 512,
+ "lora_bias": false,
+ "lora_dropout": 0.05,
+ "megatron_config": null,
+ "megatron_core": "megatron.core",
+ "modules_to_save": [
+ "embed_tokens",
+ "lm_head"
+ ],
+ "peft_type": "LORA",
+ "r": 256,
+ "rank_pattern": {},
+ "revision": null,
+ "target_modules": [
+ "o_proj",
+ "q_proj",
+ "v_proj",
+ "k_proj",
+ "up_proj",
+ "down_proj",
+ "gate_proj"
+ ],
+ "task_type": "CAUSAL_LM",
+ "trainable_token_indices": null,
+ "use_dora": false,
+ "use_rslora": false
+}
\ No newline at end of file
diff --git a/checkpoint-72/adapter_model.safetensors b/checkpoint-72/adapter_model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..2390d6443fca628f291b6a4c78ef3866f3f032d1
--- /dev/null
+++ b/checkpoint-72/adapter_model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0e59d756350d18e6dda83abde8c16e70204e69b42ca3d99c9a9eea031438ecee
+size 10829849744
diff --git a/checkpoint-72/global_step72/bf16_zero_pp_rank_0_mp_rank_00_optim_states.pt b/checkpoint-72/global_step72/bf16_zero_pp_rank_0_mp_rank_00_optim_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..761c64d732d7d1ddab58185c0a720ce2913a0c17
--- /dev/null
+++ b/checkpoint-72/global_step72/bf16_zero_pp_rank_0_mp_rank_00_optim_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d03f5e4d8885448d0db7cf56b9bb6bf3d939fb4ffb79ddfe4d57c969f84a38d7
+size 21659418140
diff --git a/checkpoint-72/global_step72/bf16_zero_pp_rank_1_mp_rank_00_optim_states.pt b/checkpoint-72/global_step72/bf16_zero_pp_rank_1_mp_rank_00_optim_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..41be91501bc07076e460f39c09b143c374b5de5d
--- /dev/null
+++ b/checkpoint-72/global_step72/bf16_zero_pp_rank_1_mp_rank_00_optim_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:857e9abdb5387f502112f418e03590c23b0f24d96366a5451a63363aee2263cb
+size 21659457372
diff --git a/checkpoint-72/global_step72/bf16_zero_pp_rank_2_mp_rank_00_optim_states.pt b/checkpoint-72/global_step72/bf16_zero_pp_rank_2_mp_rank_00_optim_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..0c5189bb8d9eb1eb248c4e778c31bf2e7947a81b
--- /dev/null
+++ b/checkpoint-72/global_step72/bf16_zero_pp_rank_2_mp_rank_00_optim_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e76f1f6bce935dd1e6e2820428ca82ac521917f5b3af1bd9d8c182eece078d79
+size 21659417820
diff --git a/checkpoint-72/global_step72/mp_rank_00_model_states.pt b/checkpoint-72/global_step72/mp_rank_00_model_states.pt
new file mode 100644
index 0000000000000000000000000000000000000000..2b87f19d9ffd7bacdbe749d19e9d8b226e41a335
--- /dev/null
+++ b/checkpoint-72/global_step72/mp_rank_00_model_states.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5e0028a41ee9843f674324299de3c30c03ea0fcc4304347d188a1365f9e49cf6
+size 11918643933
diff --git a/checkpoint-72/latest b/checkpoint-72/latest
new file mode 100644
index 0000000000000000000000000000000000000000..f3ff0f3ef57eac4f36c543b2d7ef78ca727041bd
--- /dev/null
+++ b/checkpoint-72/latest
@@ -0,0 +1 @@
+global_step72
\ No newline at end of file
diff --git a/checkpoint-72/rng_state_0.pth b/checkpoint-72/rng_state_0.pth
new file mode 100644
index 0000000000000000000000000000000000000000..6e03436dd77f0f742b73e3f601a58d05364ee48b
--- /dev/null
+++ b/checkpoint-72/rng_state_0.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:541a247a0499946942d469144d4609ab54f406a01327defecf24e55cce3eaaff
+size 14768
diff --git a/checkpoint-72/rng_state_1.pth b/checkpoint-72/rng_state_1.pth
new file mode 100644
index 0000000000000000000000000000000000000000..0d2065fa0d9a503d409eaed77bd3dafcec8c6e51
--- /dev/null
+++ b/checkpoint-72/rng_state_1.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:55f595d3bc4cf74ef1c4bf07834b2d3c1153e4c96ec66ee50cd533cd68d3f2be
+size 14768
diff --git a/checkpoint-72/rng_state_2.pth b/checkpoint-72/rng_state_2.pth
new file mode 100644
index 0000000000000000000000000000000000000000..b59ecfae7f98fa951c562700fd917c39af7c9ffe
--- /dev/null
+++ b/checkpoint-72/rng_state_2.pth
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a141ba5106d9cb0d6d4ea1db081a08d8a6182e2ca548def74038dc2ab25e5894
+size 14768
diff --git a/checkpoint-72/scheduler.pt b/checkpoint-72/scheduler.pt
new file mode 100644
index 0000000000000000000000000000000000000000..324d6e5ba59dc80aafdba02fe2ebc9eac737c54b
--- /dev/null
+++ b/checkpoint-72/scheduler.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:14d3ad851fc136efe822990f8b99840e98b2ff20804944bcf122f2cafb45ed1f
+size 1064
diff --git a/checkpoint-72/special_tokens_map.json b/checkpoint-72/special_tokens_map.json
new file mode 100644
index 0000000000000000000000000000000000000000..278b7f0f84be865c4687700ee7b3c63d89a51e18
--- /dev/null
+++ b/checkpoint-72/special_tokens_map.json
@@ -0,0 +1,23 @@
+{
+ "bos_token": {
+ "content": "<|begin_of_text|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "<|eot_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": {
+ "content": "<|end_of_text|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+}
diff --git a/checkpoint-72/tokenizer.json b/checkpoint-72/tokenizer.json
new file mode 100644
index 0000000000000000000000000000000000000000..1c1d8d5c9024994f1d3b00f9662b8dd89ca13cf2
--- /dev/null
+++ b/checkpoint-72/tokenizer.json
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6b9e4e7fb171f92fd137b777cc2714bf87d11576700a1dcd7a399e7bbe39537b
+size 17209920
diff --git a/checkpoint-72/tokenizer_config.json b/checkpoint-72/tokenizer_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..ca91a2ef55f4239a7af81d7c9abb05f53621a07b
--- /dev/null
+++ b/checkpoint-72/tokenizer_config.json
@@ -0,0 +1,2064 @@
+{
+ "added_tokens_decoder": {
+ "128000": {
+ "content": "<|begin_of_text|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128001": {
+ "content": "<|end_of_text|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128002": {
+ "content": "<|reserved_special_token_0|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128003": {
+ "content": "<|reserved_special_token_1|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128004": {
+ "content": "<|finetune_right_pad_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128005": {
+ "content": "<|reserved_special_token_2|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128006": {
+ "content": "<|start_header_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128007": {
+ "content": "<|end_header_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128008": {
+ "content": "<|eom_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128009": {
+ "content": "<|eot_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128010": {
+ "content": "<|python_tag|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128011": {
+ "content": "<|reserved_special_token_3|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128012": {
+ "content": "<|reserved_special_token_4|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128013": {
+ "content": "<|reserved_special_token_5|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128014": {
+ "content": "<|reserved_special_token_6|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128015": {
+ "content": "<|reserved_special_token_7|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128016": {
+ "content": "<|reserved_special_token_8|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128017": {
+ "content": "<|reserved_special_token_9|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128018": {
+ "content": "<|reserved_special_token_10|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128019": {
+ "content": "<|reserved_special_token_11|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128020": {
+ "content": "<|reserved_special_token_12|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128021": {
+ "content": "<|reserved_special_token_13|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128022": {
+ "content": "<|reserved_special_token_14|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128023": {
+ "content": "<|reserved_special_token_15|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128024": {
+ "content": "<|reserved_special_token_16|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128025": {
+ "content": "<|reserved_special_token_17|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128026": {
+ "content": "<|reserved_special_token_18|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128027": {
+ "content": "<|reserved_special_token_19|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128028": {
+ "content": "<|reserved_special_token_20|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128029": {
+ "content": "<|reserved_special_token_21|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128030": {
+ "content": "<|reserved_special_token_22|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128031": {
+ "content": "<|reserved_special_token_23|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128032": {
+ "content": "<|reserved_special_token_24|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128033": {
+ "content": "<|reserved_special_token_25|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128034": {
+ "content": "<|reserved_special_token_26|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128035": {
+ "content": "<|reserved_special_token_27|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128036": {
+ "content": "<|reserved_special_token_28|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128037": {
+ "content": "<|reserved_special_token_29|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128038": {
+ "content": "<|reserved_special_token_30|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128039": {
+ "content": "<|reserved_special_token_31|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128040": {
+ "content": "<|reserved_special_token_32|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128041": {
+ "content": "<|reserved_special_token_33|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128042": {
+ "content": "<|reserved_special_token_34|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128043": {
+ "content": "<|reserved_special_token_35|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128044": {
+ "content": "<|reserved_special_token_36|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128045": {
+ "content": "<|reserved_special_token_37|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128046": {
+ "content": "<|reserved_special_token_38|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128047": {
+ "content": "<|reserved_special_token_39|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128048": {
+ "content": "<|reserved_special_token_40|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128049": {
+ "content": "<|reserved_special_token_41|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128050": {
+ "content": "<|reserved_special_token_42|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128051": {
+ "content": "<|reserved_special_token_43|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128052": {
+ "content": "<|reserved_special_token_44|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128053": {
+ "content": "<|reserved_special_token_45|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128054": {
+ "content": "<|reserved_special_token_46|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128055": {
+ "content": "<|reserved_special_token_47|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128056": {
+ "content": "<|reserved_special_token_48|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128057": {
+ "content": "<|reserved_special_token_49|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128058": {
+ "content": "<|reserved_special_token_50|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128059": {
+ "content": "<|reserved_special_token_51|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128060": {
+ "content": "<|reserved_special_token_52|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128061": {
+ "content": "<|reserved_special_token_53|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128062": {
+ "content": "<|reserved_special_token_54|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128063": {
+ "content": "<|reserved_special_token_55|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128064": {
+ "content": "<|reserved_special_token_56|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128065": {
+ "content": "<|reserved_special_token_57|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128066": {
+ "content": "<|reserved_special_token_58|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128067": {
+ "content": "<|reserved_special_token_59|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128068": {
+ "content": "<|reserved_special_token_60|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128069": {
+ "content": "<|reserved_special_token_61|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128070": {
+ "content": "<|reserved_special_token_62|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128071": {
+ "content": "<|reserved_special_token_63|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128072": {
+ "content": "<|reserved_special_token_64|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128073": {
+ "content": "<|reserved_special_token_65|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128074": {
+ "content": "<|reserved_special_token_66|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128075": {
+ "content": "<|reserved_special_token_67|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128076": {
+ "content": "<|reserved_special_token_68|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128077": {
+ "content": "<|reserved_special_token_69|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128078": {
+ "content": "<|reserved_special_token_70|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128079": {
+ "content": "<|reserved_special_token_71|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128080": {
+ "content": "<|reserved_special_token_72|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128081": {
+ "content": "<|reserved_special_token_73|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128082": {
+ "content": "<|reserved_special_token_74|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128083": {
+ "content": "<|reserved_special_token_75|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128084": {
+ "content": "<|reserved_special_token_76|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128085": {
+ "content": "<|reserved_special_token_77|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128086": {
+ "content": "<|reserved_special_token_78|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128087": {
+ "content": "<|reserved_special_token_79|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128088": {
+ "content": "<|reserved_special_token_80|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128089": {
+ "content": "<|reserved_special_token_81|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128090": {
+ "content": "<|reserved_special_token_82|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128091": {
+ "content": "<|reserved_special_token_83|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128092": {
+ "content": "<|reserved_special_token_84|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128093": {
+ "content": "<|reserved_special_token_85|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128094": {
+ "content": "<|reserved_special_token_86|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128095": {
+ "content": "<|reserved_special_token_87|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128096": {
+ "content": "<|reserved_special_token_88|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128097": {
+ "content": "<|reserved_special_token_89|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128098": {
+ "content": "<|reserved_special_token_90|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128099": {
+ "content": "<|reserved_special_token_91|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128100": {
+ "content": "<|reserved_special_token_92|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128101": {
+ "content": "<|reserved_special_token_93|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128102": {
+ "content": "<|reserved_special_token_94|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128103": {
+ "content": "<|reserved_special_token_95|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128104": {
+ "content": "<|reserved_special_token_96|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128105": {
+ "content": "<|reserved_special_token_97|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128106": {
+ "content": "<|reserved_special_token_98|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128107": {
+ "content": "<|reserved_special_token_99|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128108": {
+ "content": "<|reserved_special_token_100|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128109": {
+ "content": "<|reserved_special_token_101|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128110": {
+ "content": "<|reserved_special_token_102|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128111": {
+ "content": "<|reserved_special_token_103|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128112": {
+ "content": "<|reserved_special_token_104|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128113": {
+ "content": "<|reserved_special_token_105|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128114": {
+ "content": "<|reserved_special_token_106|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128115": {
+ "content": "<|reserved_special_token_107|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128116": {
+ "content": "<|reserved_special_token_108|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128117": {
+ "content": "<|reserved_special_token_109|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128118": {
+ "content": "<|reserved_special_token_110|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128119": {
+ "content": "<|reserved_special_token_111|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128120": {
+ "content": "<|reserved_special_token_112|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128121": {
+ "content": "<|reserved_special_token_113|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128122": {
+ "content": "<|reserved_special_token_114|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128123": {
+ "content": "<|reserved_special_token_115|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128124": {
+ "content": "<|reserved_special_token_116|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128125": {
+ "content": "<|reserved_special_token_117|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128126": {
+ "content": "<|reserved_special_token_118|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128127": {
+ "content": "<|reserved_special_token_119|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128128": {
+ "content": "<|reserved_special_token_120|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128129": {
+ "content": "<|reserved_special_token_121|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128130": {
+ "content": "<|reserved_special_token_122|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128131": {
+ "content": "<|reserved_special_token_123|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128132": {
+ "content": "<|reserved_special_token_124|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128133": {
+ "content": "<|reserved_special_token_125|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128134": {
+ "content": "<|reserved_special_token_126|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128135": {
+ "content": "<|reserved_special_token_127|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128136": {
+ "content": "<|reserved_special_token_128|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128137": {
+ "content": "<|reserved_special_token_129|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128138": {
+ "content": "<|reserved_special_token_130|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128139": {
+ "content": "<|reserved_special_token_131|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128140": {
+ "content": "<|reserved_special_token_132|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128141": {
+ "content": "<|reserved_special_token_133|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128142": {
+ "content": "<|reserved_special_token_134|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128143": {
+ "content": "<|reserved_special_token_135|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128144": {
+ "content": "<|reserved_special_token_136|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128145": {
+ "content": "<|reserved_special_token_137|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128146": {
+ "content": "<|reserved_special_token_138|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128147": {
+ "content": "<|reserved_special_token_139|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128148": {
+ "content": "<|reserved_special_token_140|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128149": {
+ "content": "<|reserved_special_token_141|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128150": {
+ "content": "<|reserved_special_token_142|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128151": {
+ "content": "<|reserved_special_token_143|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128152": {
+ "content": "<|reserved_special_token_144|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128153": {
+ "content": "<|reserved_special_token_145|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128154": {
+ "content": "<|reserved_special_token_146|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128155": {
+ "content": "<|reserved_special_token_147|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128156": {
+ "content": "<|reserved_special_token_148|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128157": {
+ "content": "<|reserved_special_token_149|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128158": {
+ "content": "<|reserved_special_token_150|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128159": {
+ "content": "<|reserved_special_token_151|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128160": {
+ "content": "<|reserved_special_token_152|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128161": {
+ "content": "<|reserved_special_token_153|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128162": {
+ "content": "<|reserved_special_token_154|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128163": {
+ "content": "<|reserved_special_token_155|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128164": {
+ "content": "<|reserved_special_token_156|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128165": {
+ "content": "<|reserved_special_token_157|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128166": {
+ "content": "<|reserved_special_token_158|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128167": {
+ "content": "<|reserved_special_token_159|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128168": {
+ "content": "<|reserved_special_token_160|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128169": {
+ "content": "<|reserved_special_token_161|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128170": {
+ "content": "<|reserved_special_token_162|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128171": {
+ "content": "<|reserved_special_token_163|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128172": {
+ "content": "<|reserved_special_token_164|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128173": {
+ "content": "<|reserved_special_token_165|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128174": {
+ "content": "<|reserved_special_token_166|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128175": {
+ "content": "<|reserved_special_token_167|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128176": {
+ "content": "<|reserved_special_token_168|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128177": {
+ "content": "<|reserved_special_token_169|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128178": {
+ "content": "<|reserved_special_token_170|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128179": {
+ "content": "<|reserved_special_token_171|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128180": {
+ "content": "<|reserved_special_token_172|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128181": {
+ "content": "<|reserved_special_token_173|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128182": {
+ "content": "<|reserved_special_token_174|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128183": {
+ "content": "<|reserved_special_token_175|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128184": {
+ "content": "<|reserved_special_token_176|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128185": {
+ "content": "<|reserved_special_token_177|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128186": {
+ "content": "<|reserved_special_token_178|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128187": {
+ "content": "<|reserved_special_token_179|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128188": {
+ "content": "<|reserved_special_token_180|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128189": {
+ "content": "<|reserved_special_token_181|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128190": {
+ "content": "<|reserved_special_token_182|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128191": {
+ "content": "<|reserved_special_token_183|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128192": {
+ "content": "<|reserved_special_token_184|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128193": {
+ "content": "<|reserved_special_token_185|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128194": {
+ "content": "<|reserved_special_token_186|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128195": {
+ "content": "<|reserved_special_token_187|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128196": {
+ "content": "<|reserved_special_token_188|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128197": {
+ "content": "<|reserved_special_token_189|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128198": {
+ "content": "<|reserved_special_token_190|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128199": {
+ "content": "<|reserved_special_token_191|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128200": {
+ "content": "<|reserved_special_token_192|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128201": {
+ "content": "<|reserved_special_token_193|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128202": {
+ "content": "<|reserved_special_token_194|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128203": {
+ "content": "<|reserved_special_token_195|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128204": {
+ "content": "<|reserved_special_token_196|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128205": {
+ "content": "<|reserved_special_token_197|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128206": {
+ "content": "<|reserved_special_token_198|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128207": {
+ "content": "<|reserved_special_token_199|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128208": {
+ "content": "<|reserved_special_token_200|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128209": {
+ "content": "<|reserved_special_token_201|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128210": {
+ "content": "<|reserved_special_token_202|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128211": {
+ "content": "<|reserved_special_token_203|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128212": {
+ "content": "<|reserved_special_token_204|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128213": {
+ "content": "<|reserved_special_token_205|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128214": {
+ "content": "<|reserved_special_token_206|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128215": {
+ "content": "<|reserved_special_token_207|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128216": {
+ "content": "<|reserved_special_token_208|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128217": {
+ "content": "<|reserved_special_token_209|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128218": {
+ "content": "<|reserved_special_token_210|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128219": {
+ "content": "<|reserved_special_token_211|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128220": {
+ "content": "<|reserved_special_token_212|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128221": {
+ "content": "<|reserved_special_token_213|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128222": {
+ "content": "<|reserved_special_token_214|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128223": {
+ "content": "<|reserved_special_token_215|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128224": {
+ "content": "<|reserved_special_token_216|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128225": {
+ "content": "<|reserved_special_token_217|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128226": {
+ "content": "<|reserved_special_token_218|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128227": {
+ "content": "<|reserved_special_token_219|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128228": {
+ "content": "<|reserved_special_token_220|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128229": {
+ "content": "<|reserved_special_token_221|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128230": {
+ "content": "<|reserved_special_token_222|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128231": {
+ "content": "<|reserved_special_token_223|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128232": {
+ "content": "<|reserved_special_token_224|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128233": {
+ "content": "<|reserved_special_token_225|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128234": {
+ "content": "<|reserved_special_token_226|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128235": {
+ "content": "<|reserved_special_token_227|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128236": {
+ "content": "<|reserved_special_token_228|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128237": {
+ "content": "<|reserved_special_token_229|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128238": {
+ "content": "<|reserved_special_token_230|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128239": {
+ "content": "<|reserved_special_token_231|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128240": {
+ "content": "<|reserved_special_token_232|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128241": {
+ "content": "<|reserved_special_token_233|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128242": {
+ "content": "<|reserved_special_token_234|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128243": {
+ "content": "<|reserved_special_token_235|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128244": {
+ "content": "<|reserved_special_token_236|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128245": {
+ "content": "<|reserved_special_token_237|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128246": {
+ "content": "<|reserved_special_token_238|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128247": {
+ "content": "<|reserved_special_token_239|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128248": {
+ "content": "<|reserved_special_token_240|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128249": {
+ "content": "<|reserved_special_token_241|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128250": {
+ "content": "<|reserved_special_token_242|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128251": {
+ "content": "<|reserved_special_token_243|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128252": {
+ "content": "<|reserved_special_token_244|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128253": {
+ "content": "<|reserved_special_token_245|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128254": {
+ "content": "<|reserved_special_token_246|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128255": {
+ "content": "<|reserved_special_token_247|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ }
+ },
+ "bos_token": "<|begin_of_text|>",
+ "chat_template": "{{- bos_token }}\n{%- if custom_tools is defined %}\n {%- set tools = custom_tools %}\n{%- endif %}\n{%- if not tools_in_user_message is defined %}\n {%- set tools_in_user_message = true %}\n{%- endif %}\n{%- if not date_string is defined %}\n {%- set date_string = \"26 Jul 2024\" %}\n{%- endif %}\n{%- if not tools is defined %}\n {%- set tools = none %}\n{%- endif %}\n\n{#- This block extracts the system message, so we can slot it into the right place. #}\n{%- if messages[0]['role'] == 'system' %}\n {%- set system_message = messages[0]['content']|trim %}\n {%- set messages = messages[1:] %}\n{%- else %}\n {%- set system_message = \"\" %}\n{%- endif %}\n\n{#- System message + builtin tools #}\n{{- \"<|start_header_id|>system<|end_header_id|>\\n\\n\" }}\n{%- if builtin_tools is defined or tools is not none %}\n {{- \"Environment: ipython\\n\" }}\n{%- endif %}\n{%- if builtin_tools is defined %}\n {{- \"Tools: \" + builtin_tools | reject('equalto', 'code_interpreter') | join(\", \") + \"\\n\\n\"}}\n{%- endif %}\n{{- \"Cutting Knowledge Date: December 2023\\n\" }}\n{{- \"Today Date: \" + date_string + \"\\n\\n\" }}\n{%- if tools is not none and not tools_in_user_message %}\n {{- \"You have access to the following functions. To call a function, please respond with JSON for a function call.\" }}\n {{- 'Respond in the format {\"name\": function name, \"parameters\": dictionary of argument name and its value}.' 
}}\n {{- \"Do not use variables.\\n\\n\" }}\n {%- for t in tools %}\n {{- t | tojson(indent=4) }}\n {{- \"\\n\\n\" }}\n {%- endfor %}\n{%- endif %}\n{{- system_message }}\n{{- \"<|eot_id|>\" }}\n\n{#- Custom tools are passed in a user message with some extra guidance #}\n{%- if tools_in_user_message and not tools is none %}\n {#- Extract the first user message so we can plug it in here #}\n {%- if messages | length != 0 %}\n {%- set first_user_message = messages[0]['content']|trim %}\n {%- set messages = messages[1:] %}\n {%- else %}\n {{- raise_exception(\"Cannot put tools in the first user message when there's no first user message!\") }}\n{%- endif %}\n {{- '<|start_header_id|>user<|end_header_id|>\\n\\n' -}}\n {{- \"Given the following functions, please respond with a JSON for a function call \" }}\n {{- \"with its proper arguments that best answers the given prompt.\\n\\n\" }}\n {{- 'Respond in the format {\"name\": function name, \"parameters\": dictionary of argument name and its value}.' 
}}\n {{- \"Do not use variables.\\n\\n\" }}\n {%- for t in tools %}\n {{- t | tojson(indent=4) }}\n {{- \"\\n\\n\" }}\n {%- endfor %}\n {{- first_user_message + \"<|eot_id|>\"}}\n{%- endif %}\n\n{%- for message in messages %}\n {%- if not (message.role == 'ipython' or message.role == 'tool' or 'tool_calls' in message) %}\n {{- '<|start_header_id|>' + message['role'] + '<|end_header_id|>\\n\\n'+ message['content'] | trim + '<|eot_id|>' }}\n {%- elif 'tool_calls' in message %}\n {%- if not message.tool_calls|length == 1 %}\n {{- raise_exception(\"This model only supports single tool-calls at once!\") }}\n {%- endif %}\n {%- set tool_call = message.tool_calls[0].function %}\n {%- if builtin_tools is defined and tool_call.name in builtin_tools %}\n {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' -}}\n {{- \"<|python_tag|>\" + tool_call.name + \".call(\" }}\n {%- for arg_name, arg_val in tool_call.arguments | items %}\n {{- arg_name + '=\"' + arg_val + '\"' }}\n {%- if not loop.last %}\n {{- \", \" }}\n {%- endif %}\n {%- endfor %}\n {{- \")\" }}\n {%- else %}\n {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' -}}\n {{- '{\"name\": \"' + tool_call.name + '\", ' }}\n {{- '\"parameters\": ' }}\n {{- tool_call.arguments | tojson }}\n {{- \"}\" }}\n {%- endif %}\n {%- if builtin_tools is defined %}\n {#- This means we're in ipython mode #}\n {{- \"<|eom_id|>\" }}\n {%- else %}\n {{- \"<|eot_id|>\" }}\n {%- endif %}\n {%- elif message.role == \"tool\" or message.role == \"ipython\" %}\n {{- \"<|start_header_id|>ipython<|end_header_id|>\\n\\n\" }}\n {%- if message.content is mapping or message.content is iterable %}\n {{- message.content | tojson }}\n {%- else %}\n {{- message.content }}\n {%- endif %}\n {{- \"<|eot_id|>\" }}\n {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' }}\n{%- endif %}\n",
+ "clean_up_tokenization_spaces": true,
+ "eos_token": "<|eot_id|>",
+ "extra_special_tokens": {},
+ "model_input_names": [
+ "input_ids",
+ "attention_mask"
+ ],
+ "model_max_length": 131072,
+ "pad_token": "<|end_of_text|>",
+ "tokenizer_class": "PreTrainedTokenizer"
+}
diff --git a/checkpoint-72/trainer_state.json b/checkpoint-72/trainer_state.json
new file mode 100644
index 0000000000000000000000000000000000000000..0f346d65ba7a654dcfe040f3cccb898a1d3db5bf
--- /dev/null
+++ b/checkpoint-72/trainer_state.json
@@ -0,0 +1,537 @@
+{
+ "best_metric": null,
+ "best_model_checkpoint": null,
+ "epoch": 0.9908256880733946,
+ "eval_steps": 500,
+ "global_step": 72,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ {
+ "epoch": 0.013761467889908258,
+ "grad_norm": 38.02450942993164,
+ "learning_rate": 5.0000000000000004e-08,
+ "loss": 3.125,
+ "step": 1
+ },
+ {
+ "epoch": 0.027522935779816515,
+ "grad_norm": 37.864768981933594,
+ "learning_rate": 1.0000000000000001e-07,
+ "loss": 3.0998,
+ "step": 2
+ },
+ {
+ "epoch": 0.04128440366972477,
+ "grad_norm": 38.34700012207031,
+ "learning_rate": 1.5000000000000002e-07,
+ "loss": 3.1533,
+ "step": 3
+ },
+ {
+ "epoch": 0.05504587155963303,
+ "grad_norm": 38.33641815185547,
+ "learning_rate": 2.0000000000000002e-07,
+ "loss": 3.1542,
+ "step": 4
+ },
+ {
+ "epoch": 0.06880733944954129,
+ "grad_norm": 38.064449310302734,
+ "learning_rate": 2.5000000000000004e-07,
+ "loss": 3.1153,
+ "step": 5
+ },
+ {
+ "epoch": 0.08256880733944955,
+ "grad_norm": 37.92089080810547,
+ "learning_rate": 3.0000000000000004e-07,
+ "loss": 3.0867,
+ "step": 6
+ },
+ {
+ "epoch": 0.0963302752293578,
+ "grad_norm": 38.120323181152344,
+ "learning_rate": 3.5000000000000004e-07,
+ "loss": 3.093,
+ "step": 7
+ },
+ {
+ "epoch": 0.11009174311926606,
+ "grad_norm": 38.47222900390625,
+ "learning_rate": 4.0000000000000003e-07,
+ "loss": 3.1056,
+ "step": 8
+ },
+ {
+ "epoch": 0.12385321100917432,
+ "grad_norm": 38.013702392578125,
+ "learning_rate": 4.5000000000000003e-07,
+ "loss": 3.0474,
+ "step": 9
+ },
+ {
+ "epoch": 0.13761467889908258,
+ "grad_norm": 38.17593002319336,
+ "learning_rate": 5.000000000000001e-07,
+ "loss": 3.0264,
+ "step": 10
+ },
+ {
+ "epoch": 0.15137614678899083,
+ "grad_norm": 38.60066604614258,
+ "learning_rate": 5.5e-07,
+ "loss": 2.9404,
+ "step": 11
+ },
+ {
+ "epoch": 0.1651376146788991,
+ "grad_norm": 38.83498764038086,
+ "learning_rate": 6.000000000000001e-07,
+ "loss": 2.9571,
+ "step": 12
+ },
+ {
+ "epoch": 0.17889908256880735,
+ "grad_norm": 38.942047119140625,
+ "learning_rate": 6.5e-07,
+ "loss": 2.8849,
+ "step": 13
+ },
+ {
+ "epoch": 0.1926605504587156,
+ "grad_norm": 38.0286865234375,
+ "learning_rate": 7.000000000000001e-07,
+ "loss": 2.7486,
+ "step": 14
+ },
+ {
+ "epoch": 0.20642201834862386,
+ "grad_norm": 38.31856155395508,
+ "learning_rate": 7.5e-07,
+ "loss": 2.6876,
+ "step": 15
+ },
+ {
+ "epoch": 0.22018348623853212,
+ "grad_norm": 38.124759674072266,
+ "learning_rate": 8.000000000000001e-07,
+ "loss": 2.5992,
+ "step": 16
+ },
+ {
+ "epoch": 0.23394495412844038,
+ "grad_norm": 36.59762191772461,
+ "learning_rate": 8.500000000000001e-07,
+ "loss": 2.4063,
+ "step": 17
+ },
+ {
+ "epoch": 0.24770642201834864,
+ "grad_norm": 36.63874435424805,
+ "learning_rate": 9.000000000000001e-07,
+ "loss": 2.3109,
+ "step": 18
+ },
+ {
+ "epoch": 0.26146788990825687,
+ "grad_norm": 36.768577575683594,
+ "learning_rate": 9.500000000000001e-07,
+ "loss": 2.1677,
+ "step": 19
+ },
+ {
+ "epoch": 0.27522935779816515,
+ "grad_norm": 36.187137603759766,
+ "learning_rate": 1.0000000000000002e-06,
+ "loss": 1.9551,
+ "step": 20
+ },
+ {
+ "epoch": 0.2889908256880734,
+ "grad_norm": 35.55617141723633,
+ "learning_rate": 1.0500000000000001e-06,
+ "loss": 1.8053,
+ "step": 21
+ },
+ {
+ "epoch": 0.30275229357798167,
+ "grad_norm": 34.60952377319336,
+ "learning_rate": 1.1e-06,
+ "loss": 1.5654,
+ "step": 22
+ },
+ {
+ "epoch": 0.3165137614678899,
+ "grad_norm": 33.69620895385742,
+ "learning_rate": 1.1500000000000002e-06,
+ "loss": 1.3454,
+ "step": 23
+ },
+ {
+ "epoch": 0.3302752293577982,
+ "grad_norm": 34.33642578125,
+ "learning_rate": 1.2000000000000002e-06,
+ "loss": 1.2417,
+ "step": 24
+ },
+ {
+ "epoch": 0.3440366972477064,
+ "grad_norm": 31.23066520690918,
+ "learning_rate": 1.25e-06,
+ "loss": 0.9839,
+ "step": 25
+ },
+ {
+ "epoch": 0.3577981651376147,
+ "grad_norm": 25.810237884521484,
+ "learning_rate": 1.3e-06,
+ "loss": 0.793,
+ "step": 26
+ },
+ {
+ "epoch": 0.37155963302752293,
+ "grad_norm": 23.06692886352539,
+ "learning_rate": 1.3500000000000002e-06,
+ "loss": 0.6082,
+ "step": 27
+ },
+ {
+ "epoch": 0.3853211009174312,
+ "grad_norm": 19.828439712524414,
+ "learning_rate": 1.4000000000000001e-06,
+ "loss": 0.4845,
+ "step": 28
+ },
+ {
+ "epoch": 0.39908256880733944,
+ "grad_norm": 14.150300025939941,
+ "learning_rate": 1.45e-06,
+ "loss": 0.348,
+ "step": 29
+ },
+ {
+ "epoch": 0.41284403669724773,
+ "grad_norm": 9.044266700744629,
+ "learning_rate": 1.5e-06,
+ "loss": 0.2516,
+ "step": 30
+ },
+ {
+ "epoch": 0.42660550458715596,
+ "grad_norm": 5.704404354095459,
+ "learning_rate": 1.5500000000000002e-06,
+ "loss": 0.177,
+ "step": 31
+ },
+ {
+ "epoch": 0.44036697247706424,
+ "grad_norm": 3.2953503131866455,
+ "learning_rate": 1.6000000000000001e-06,
+ "loss": 0.1391,
+ "step": 32
+ },
+ {
+ "epoch": 0.4541284403669725,
+ "grad_norm": 2.453219413757324,
+ "learning_rate": 1.6500000000000003e-06,
+ "loss": 0.0982,
+ "step": 33
+ },
+ {
+ "epoch": 0.46788990825688076,
+ "grad_norm": 2.0325512886047363,
+ "learning_rate": 1.7000000000000002e-06,
+ "loss": 0.0807,
+ "step": 34
+ },
+ {
+ "epoch": 0.481651376146789,
+ "grad_norm": 1.6322681903839111,
+ "learning_rate": 1.75e-06,
+ "loss": 0.0725,
+ "step": 35
+ },
+ {
+ "epoch": 0.4954128440366973,
+ "grad_norm": 0.9713364839553833,
+ "learning_rate": 1.8000000000000001e-06,
+ "loss": 0.067,
+ "step": 36
+ },
+ {
+ "epoch": 0.5091743119266054,
+ "grad_norm": 0.7980225682258606,
+ "learning_rate": 1.85e-06,
+ "loss": 0.0582,
+ "step": 37
+ },
+ {
+ "epoch": 0.5229357798165137,
+ "grad_norm": 1.0616590976715088,
+ "learning_rate": 1.9000000000000002e-06,
+ "loss": 0.0562,
+ "step": 38
+ },
+ {
+ "epoch": 0.536697247706422,
+ "grad_norm": 1.053462028503418,
+ "learning_rate": 1.9500000000000004e-06,
+ "loss": 0.0537,
+ "step": 39
+ },
+ {
+ "epoch": 0.5504587155963303,
+ "grad_norm": 0.9452660083770752,
+ "learning_rate": 2.0000000000000003e-06,
+ "loss": 0.0602,
+ "step": 40
+ },
+ {
+ "epoch": 0.5642201834862385,
+ "grad_norm": 0.830368161201477,
+ "learning_rate": 2.05e-06,
+ "loss": 0.0549,
+ "step": 41
+ },
+ {
+ "epoch": 0.5779816513761468,
+ "grad_norm": 0.5791187882423401,
+ "learning_rate": 2.1000000000000002e-06,
+ "loss": 0.0479,
+ "step": 42
+ },
+ {
+ "epoch": 0.591743119266055,
+ "grad_norm": 0.44175243377685547,
+ "learning_rate": 2.15e-06,
+ "loss": 0.0461,
+ "step": 43
+ },
+ {
+ "epoch": 0.6055045871559633,
+ "grad_norm": 0.37655699253082275,
+ "learning_rate": 2.2e-06,
+ "loss": 0.043,
+ "step": 44
+ },
+ {
+ "epoch": 0.6192660550458715,
+ "grad_norm": 0.34382495284080505,
+ "learning_rate": 2.25e-06,
+ "loss": 0.0454,
+ "step": 45
+ },
+ {
+ "epoch": 0.6330275229357798,
+ "grad_norm": 0.5047216415405273,
+ "learning_rate": 2.3000000000000004e-06,
+ "loss": 0.0437,
+ "step": 46
+ },
+ {
+ "epoch": 0.6467889908256881,
+ "grad_norm": 0.6318779587745667,
+ "learning_rate": 2.35e-06,
+ "loss": 0.0468,
+ "step": 47
+ },
+ {
+ "epoch": 0.6605504587155964,
+ "grad_norm": 0.5135455131530762,
+ "learning_rate": 2.4000000000000003e-06,
+ "loss": 0.0494,
+ "step": 48
+ },
+ {
+ "epoch": 0.6743119266055045,
+ "grad_norm": 0.4802612066268921,
+ "learning_rate": 2.4500000000000003e-06,
+ "loss": 0.0441,
+ "step": 49
+ },
+ {
+ "epoch": 0.6880733944954128,
+ "grad_norm": 0.6157718300819397,
+ "learning_rate": 2.5e-06,
+ "loss": 0.0398,
+ "step": 50
+ },
+ {
+ "epoch": 0.7018348623853211,
+ "grad_norm": 0.4327130913734436,
+ "learning_rate": 2.55e-06,
+ "loss": 0.0438,
+ "step": 51
+ },
+ {
+ "epoch": 0.7155963302752294,
+ "grad_norm": 0.46133658289909363,
+ "learning_rate": 2.6e-06,
+ "loss": 0.041,
+ "step": 52
+ },
+ {
+ "epoch": 0.7293577981651376,
+ "grad_norm": 0.5729146003723145,
+ "learning_rate": 2.6500000000000005e-06,
+ "loss": 0.0406,
+ "step": 53
+ },
+ {
+ "epoch": 0.7431192660550459,
+ "grad_norm": 0.32373812794685364,
+ "learning_rate": 2.7000000000000004e-06,
+ "loss": 0.0419,
+ "step": 54
+ },
+ {
+ "epoch": 0.7568807339449541,
+ "grad_norm": 0.29006752371788025,
+ "learning_rate": 2.7500000000000004e-06,
+ "loss": 0.0415,
+ "step": 55
+ },
+ {
+ "epoch": 0.7706422018348624,
+ "grad_norm": 0.31038960814476013,
+ "learning_rate": 2.8000000000000003e-06,
+ "loss": 0.0344,
+ "step": 56
+ },
+ {
+ "epoch": 0.7844036697247706,
+ "grad_norm": 0.2324836701154709,
+ "learning_rate": 2.85e-06,
+ "loss": 0.0374,
+ "step": 57
+ },
+ {
+ "epoch": 0.7981651376146789,
+ "grad_norm": 0.5083625912666321,
+ "learning_rate": 2.9e-06,
+ "loss": 0.0324,
+ "step": 58
+ },
+ {
+ "epoch": 0.8119266055045872,
+ "grad_norm": 0.2873130142688751,
+ "learning_rate": 2.95e-06,
+ "loss": 0.0403,
+ "step": 59
+ },
+ {
+ "epoch": 0.8256880733944955,
+ "grad_norm": 0.437663197517395,
+ "learning_rate": 3e-06,
+ "loss": 0.0368,
+ "step": 60
+ },
+ {
+ "epoch": 0.8394495412844036,
+ "grad_norm": 0.5645247101783752,
+ "learning_rate": 3.05e-06,
+ "loss": 0.0386,
+ "step": 61
+ },
+ {
+ "epoch": 0.8532110091743119,
+ "grad_norm": 0.40374210476875305,
+ "learning_rate": 3.1000000000000004e-06,
+ "loss": 0.0425,
+ "step": 62
+ },
+ {
+ "epoch": 0.8669724770642202,
+ "grad_norm": 0.46468955278396606,
+ "learning_rate": 3.1500000000000003e-06,
+ "loss": 0.0323,
+ "step": 63
+ },
+ {
+ "epoch": 0.8807339449541285,
+ "grad_norm": 0.29952895641326904,
+ "learning_rate": 3.2000000000000003e-06,
+ "loss": 0.0325,
+ "step": 64
+ },
+ {
+ "epoch": 0.8944954128440367,
+ "grad_norm": 0.3678436279296875,
+ "learning_rate": 3.2500000000000002e-06,
+ "loss": 0.036,
+ "step": 65
+ },
+ {
+ "epoch": 0.908256880733945,
+ "grad_norm": 0.5068934559822083,
+ "learning_rate": 3.3000000000000006e-06,
+ "loss": 0.0357,
+ "step": 66
+ },
+ {
+ "epoch": 0.9220183486238532,
+ "grad_norm": 0.2723177671432495,
+ "learning_rate": 3.3500000000000005e-06,
+ "loss": 0.0333,
+ "step": 67
+ },
+ {
+ "epoch": 0.9357798165137615,
+ "grad_norm": 0.41696834564208984,
+ "learning_rate": 3.4000000000000005e-06,
+ "loss": 0.0347,
+ "step": 68
+ },
+ {
+ "epoch": 0.9495412844036697,
+ "grad_norm": 0.2582981288433075,
+ "learning_rate": 3.45e-06,
+ "loss": 0.0283,
+ "step": 69
+ },
+ {
+ "epoch": 0.963302752293578,
+ "grad_norm": 0.40648311376571655,
+ "learning_rate": 3.5e-06,
+ "loss": 0.0293,
+ "step": 70
+ },
+ {
+ "epoch": 0.9770642201834863,
+ "grad_norm": 0.4149394631385803,
+ "learning_rate": 3.5500000000000003e-06,
+ "loss": 0.0311,
+ "step": 71
+ },
+ {
+ "epoch": 0.9908256880733946,
+ "grad_norm": 0.3800952136516571,
+ "learning_rate": 3.6000000000000003e-06,
+ "loss": 0.0346,
+ "step": 72
+ }
+ ],
+ "logging_steps": 1,
+ "max_steps": 432,
+ "num_input_tokens_seen": 0,
+ "num_train_epochs": 6,
+ "save_steps": 72,
+ "stateful_callbacks": {
+ "TrainerControl": {
+ "args": {
+ "should_epoch_stop": false,
+ "should_evaluate": false,
+ "should_log": false,
+ "should_save": true,
+ "should_training_stop": false
+ },
+ "attributes": {}
+ }
+ },
+ "total_flos": 4.631891949769458e+18,
+ "train_batch_size": 4,
+ "trial_name": null,
+ "trial_params": null
+}
diff --git a/checkpoint-72/training_args.bin b/checkpoint-72/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..6b48f4b52f5bfff81ec8534f6510460a8014f336
--- /dev/null
+++ b/checkpoint-72/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:cbafef0713d4b794ca3a92a04d378baaf3fa7647c9af95948bfb2ef7c0e02eda
+size 7928
diff --git a/checkpoint-72/zero_to_fp32.py b/checkpoint-72/zero_to_fp32.py
new file mode 100644
index 0000000000000000000000000000000000000000..24cc342e78d1a006c782b3a4cd68d9ce786d8fd8
--- /dev/null
+++ b/checkpoint-72/zero_to_fp32.py
@@ -0,0 +1,604 @@
+#!/usr/bin/env python
+
+# Copyright (c) Microsoft Corporation.
+# SPDX-License-Identifier: Apache-2.0
+
+# DeepSpeed Team
+
+# This script extracts fp32 consolidated weights from a zero 1, 2 and 3 DeepSpeed checkpoints. It gets
+# copied into the top level checkpoint dir, so the user can easily do the conversion at any point in
+# the future. Once extracted, the weights don't require DeepSpeed and can be used in any
+# application.
+#
+# example: python zero_to_fp32.py . pytorch_model.bin
+
+import argparse
+import torch
+import glob
+import math
+import os
+import re
+from collections import OrderedDict
+from dataclasses import dataclass
+
+# while this script doesn't use deepspeed to recover data, since the checkpoints are pickled with
+# DeepSpeed data structures it has to be available in the current python environment.
+from deepspeed.utils import logger
+from deepspeed.checkpoint.constants import (DS_VERSION, OPTIMIZER_STATE_DICT, SINGLE_PARTITION_OF_FP32_GROUPS,
+ FP32_FLAT_GROUPS, ZERO_STAGE, PARTITION_COUNT, PARAM_SHAPES, BUFFER_NAMES,
+ FROZEN_PARAM_SHAPES, FROZEN_PARAM_FRAGMENTS)
+
+
+@dataclass
+class zero_model_state:
+ buffers: dict()
+ param_shapes: dict()
+ shared_params: list
+ ds_version: int
+ frozen_param_shapes: dict()
+ frozen_param_fragments: dict()
+
+
+debug = 0
+
+# load to cpu
+device = torch.device('cpu')
+
+
+def atoi(text):
+ return int(text) if text.isdigit() else text
+
+
+def natural_keys(text):
+ '''
+ alist.sort(key=natural_keys) sorts in human order
+ http://nedbatchelder.com/blog/200712/human_sorting.html
+ (See Toothy's implementation in the comments)
+ '''
+ return [atoi(c) for c in re.split(r'(\d+)', text)]
+
+
+def get_model_state_file(checkpoint_dir, zero_stage):
+ if not os.path.isdir(checkpoint_dir):
+ raise FileNotFoundError(f"Directory '{checkpoint_dir}' doesn't exist")
+
+ # there should be only one file
+ if zero_stage <= 2:
+ file = os.path.join(checkpoint_dir, "mp_rank_00_model_states.pt")
+ elif zero_stage == 3:
+ file = os.path.join(checkpoint_dir, "zero_pp_rank_0_mp_rank_00_model_states.pt")
+
+ if not os.path.exists(file):
+ raise FileNotFoundError(f"can't find model states file at '{file}'")
+
+ return file
+
+
+def get_checkpoint_files(checkpoint_dir, glob_pattern):
+ # XXX: need to test that this simple glob rule works for multi-node setup too
+ ckpt_files = sorted(glob.glob(os.path.join(checkpoint_dir, glob_pattern)), key=natural_keys)
+
+ if len(ckpt_files) == 0:
+ raise FileNotFoundError(f"can't find {glob_pattern} files in directory '{checkpoint_dir}'")
+
+ return ckpt_files
+
+
+def get_optim_files(checkpoint_dir):
+ return get_checkpoint_files(checkpoint_dir, "*_optim_states.pt")
+
+
+def get_model_state_files(checkpoint_dir):
+ return get_checkpoint_files(checkpoint_dir, "*_model_states.pt")
+
+
+def parse_model_states(files):
+ zero_model_states = []
+ for file in files:
+ state_dict = torch.load(file, map_location=device)
+
+ if BUFFER_NAMES not in state_dict:
+ raise ValueError(f"{file} is not a model state checkpoint")
+ buffer_names = state_dict[BUFFER_NAMES]
+ if debug:
+ print("Found buffers:", buffer_names)
+
+ # recover just the buffers while restoring them to fp32 if they were saved in fp16
+ buffers = {k: v.float() for k, v in state_dict["module"].items() if k in buffer_names}
+ param_shapes = state_dict[PARAM_SHAPES]
+
+ # collect parameters that are included in param_shapes
+ param_names = []
+ for s in param_shapes:
+ for name in s.keys():
+ param_names.append(name)
+
+ # update with frozen parameters
+ frozen_param_shapes = state_dict.get(FROZEN_PARAM_SHAPES, None)
+ if frozen_param_shapes is not None:
+ if debug:
+ print(f"Found frozen_param_shapes: {frozen_param_shapes}")
+ param_names += list(frozen_param_shapes.keys())
+
+ # handle shared params
+ shared_params = [[k, v] for k, v in state_dict["shared_params"].items()]
+
+ ds_version = state_dict.get(DS_VERSION, None)
+
+ frozen_param_fragments = state_dict.get(FROZEN_PARAM_FRAGMENTS, None)
+
+ z_model_state = zero_model_state(buffers=buffers,
+ param_shapes=param_shapes,
+ shared_params=shared_params,
+ ds_version=ds_version,
+ frozen_param_shapes=frozen_param_shapes,
+ frozen_param_fragments=frozen_param_fragments)
+ zero_model_states.append(z_model_state)
+
+ return zero_model_states
+
+
+def parse_optim_states(files, ds_checkpoint_dir):
+
+ total_files = len(files)
+ state_dicts = []
+ for f in files:
+ state_dict = torch.load(f, map_location=device)
+ # immediately discard the potentially huge 2 optimizer states as we only care for fp32 master weights
+ # and also handle the case where it was already removed by another helper script
+ state_dict["optimizer_state_dict"].pop("optimizer_state_dict", None)
+ state_dicts.append(state_dict)
+
+ if not ZERO_STAGE in state_dicts[0][OPTIMIZER_STATE_DICT]:
+ raise ValueError(f"{files[0]} is not a zero checkpoint")
+ zero_stage = state_dicts[0][OPTIMIZER_STATE_DICT][ZERO_STAGE]
+ world_size = state_dicts[0][OPTIMIZER_STATE_DICT][PARTITION_COUNT]
+
+ # For ZeRO-2 each param group can have different partition_count as data parallelism for expert
+ # parameters can be different from data parallelism for non-expert parameters. So we can just
+ # use the max of the partition_count to get the dp world_size.
+
+ if type(world_size) is list:
+ world_size = max(world_size)
+
+ if world_size != total_files:
+ raise ValueError(
+ f"Expected {world_size} of '*_optim_states.pt' under '{ds_checkpoint_dir}' but found {total_files} files. "
+ "Possibly due to an overwrite of an old checkpoint, or a checkpoint didn't get saved by one or more processes."
+ )
+
+ # the groups are named differently in each stage
+ if zero_stage <= 2:
+ fp32_groups_key = SINGLE_PARTITION_OF_FP32_GROUPS
+ elif zero_stage == 3:
+ fp32_groups_key = FP32_FLAT_GROUPS
+ else:
+ raise ValueError(f"unknown zero stage {zero_stage}")
+
+ if zero_stage <= 2:
+ fp32_flat_groups = [state_dicts[i][OPTIMIZER_STATE_DICT][fp32_groups_key] for i in range(len(state_dicts))]
+ elif zero_stage == 3:
+ # if there is more than one param group, there will be multiple flattened tensors - one
+ # flattened tensor per group - for simplicity merge them into a single tensor
+ #
+ # XXX: could make the script more memory efficient for when there are multiple groups - it
+ # will require matching the sub-lists of param_shapes for each param group flattened tensor
+
+ fp32_flat_groups = [
+ torch.cat(state_dicts[i][OPTIMIZER_STATE_DICT][fp32_groups_key], 0) for i in range(len(state_dicts))
+ ]
+
+ return zero_stage, world_size, fp32_flat_groups
+
+
+def _get_fp32_state_dict_from_zero_checkpoint(ds_checkpoint_dir, exclude_frozen_parameters):
+ """
+ Returns fp32 state_dict reconstructed from ds checkpoint
+
+ Args:
+ - ``ds_checkpoint_dir``: path to the deepspeed checkpoint folder (where the optimizer files are)
+
+ """
+ print(f"Processing zero checkpoint '{ds_checkpoint_dir}'")
+
+ optim_files = get_optim_files(ds_checkpoint_dir)
+ zero_stage, world_size, fp32_flat_groups = parse_optim_states(optim_files, ds_checkpoint_dir)
+ print(f"Detected checkpoint of type zero stage {zero_stage}, world_size: {world_size}")
+
+ model_files = get_model_state_files(ds_checkpoint_dir)
+
+ zero_model_states = parse_model_states(model_files)
+ print(f'Parsing checkpoint created by deepspeed=={zero_model_states[0].ds_version}')
+
+ if zero_stage <= 2:
+ return _get_fp32_state_dict_from_zero2_checkpoint(world_size, fp32_flat_groups, zero_model_states,
+ exclude_frozen_parameters)
+ elif zero_stage == 3:
+ return _get_fp32_state_dict_from_zero3_checkpoint(world_size, fp32_flat_groups, zero_model_states,
+ exclude_frozen_parameters)
+
+
+def _zero2_merge_frozen_params(state_dict, zero_model_states):
+ if zero_model_states[0].frozen_param_shapes is None or len(zero_model_states[0].frozen_param_shapes) == 0:
+ return
+
+ frozen_param_shapes = zero_model_states[0].frozen_param_shapes
+ frozen_param_fragments = zero_model_states[0].frozen_param_fragments
+
+ if debug:
+ num_elem = sum(s.numel() for s in frozen_param_shapes.values())
+ print(f'rank 0: {FROZEN_PARAM_SHAPES}.numel = {num_elem}')
+
+ wanted_params = len(frozen_param_shapes)
+ wanted_numel = sum(s.numel() for s in frozen_param_shapes.values())
+ avail_numel = sum([p.numel() for p in frozen_param_fragments.values()])
+ print(f'Frozen params: Have {avail_numel} numels to process.')
+ print(f'Frozen params: Need {wanted_numel} numels in {wanted_params} params')
+
+ total_params = 0
+ total_numel = 0
+ for name, shape in frozen_param_shapes.items():
+ total_params += 1
+ unpartitioned_numel = shape.numel()
+ total_numel += unpartitioned_numel
+
+ state_dict[name] = frozen_param_fragments[name]
+
+ if debug:
+ print(f"{name} full shape: {shape} unpartitioned numel {unpartitioned_numel} ")
+
+ print(f"Reconstructed Frozen fp32 state dict with {total_params} params {total_numel} elements")
+
+
+def _has_callable(obj, fn):
+ attr = getattr(obj, fn, None)
+ return callable(attr)
+
+
+def _zero2_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states):
+ param_shapes = zero_model_states[0].param_shapes
+
+ # Reconstruction protocol:
+ #
+ # XXX: document this
+
+ if debug:
+ for i in range(world_size):
+ for j in range(len(fp32_flat_groups[0])):
+ print(f"{FP32_FLAT_GROUPS}[{i}][{j}].shape={fp32_flat_groups[i][j].shape}")
+
+ # XXX: memory usage doubles here (zero2)
+ num_param_groups = len(fp32_flat_groups[0])
+ merged_single_partition_of_fp32_groups = []
+ for i in range(num_param_groups):
+ merged_partitions = [sd[i] for sd in fp32_flat_groups]
+ full_single_fp32_vector = torch.cat(merged_partitions, 0)
+ merged_single_partition_of_fp32_groups.append(full_single_fp32_vector)
+ avail_numel = sum(
+ [full_single_fp32_vector.numel() for full_single_fp32_vector in merged_single_partition_of_fp32_groups])
+
+ if debug:
+ wanted_params = sum([len(shapes) for shapes in param_shapes])
+ wanted_numel = sum([sum(shape.numel() for shape in shapes.values()) for shapes in param_shapes])
+ # not asserting if there is a mismatch due to possible padding
+ print(f"Have {avail_numel} numels to process.")
+ print(f"Need {wanted_numel} numels in {wanted_params} params.")
+
+ # params
+ # XXX: for huge models that can't fit into the host's RAM we will have to recode this to support
+ # out-of-core computing solution
+ total_numel = 0
+ total_params = 0
+ for shapes, full_single_fp32_vector in zip(param_shapes, merged_single_partition_of_fp32_groups):
+ offset = 0
+ avail_numel = full_single_fp32_vector.numel()
+ for name, shape in shapes.items():
+
+ unpartitioned_numel = shape.numel() if _has_callable(shape, 'numel') else math.prod(shape)
+ total_numel += unpartitioned_numel
+ total_params += 1
+
+ if debug:
+ print(f"{name} full shape: {shape} unpartitioned numel {unpartitioned_numel} ")
+ state_dict[name] = full_single_fp32_vector.narrow(0, offset, unpartitioned_numel).view(shape)
+ offset += unpartitioned_numel
+
+ # Z2 started to align to 2*world_size to improve nccl performance. Therefore both offset and
+ # avail_numel can differ by anywhere between 0..2*world_size. Due to two unrelated complex
+ # paddings performed in the code it's almost impossible to predict the exact numbers w/o the
+ # live optimizer object, so we are checking that the numbers are within the right range
+ align_to = 2 * world_size
+
+ def zero2_align(x):
+ return align_to * math.ceil(x / align_to)
+
+ if debug:
+ print(f"original offset={offset}, avail_numel={avail_numel}")
+
+ offset = zero2_align(offset)
+ avail_numel = zero2_align(avail_numel)
+
+ if debug:
+ print(f"aligned offset={offset}, avail_numel={avail_numel}")
+
+ # Sanity check
+ if offset != avail_numel:
+ raise ValueError(f"consumed {offset} numels out of {avail_numel} - something is wrong")
+
+ print(f"Reconstructed fp32 state dict with {total_params} params {total_numel} elements")
+
+
+def _get_fp32_state_dict_from_zero2_checkpoint(world_size, fp32_flat_groups, zero_model_states,
+ exclude_frozen_parameters):
+ state_dict = OrderedDict()
+
+ # buffers
+ buffers = zero_model_states[0].buffers
+ state_dict.update(buffers)
+ if debug:
+ print(f"added {len(buffers)} buffers")
+
+ if not exclude_frozen_parameters:
+ _zero2_merge_frozen_params(state_dict, zero_model_states)
+
+ _zero2_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states)
+
+ # recover shared parameters
+ for pair in zero_model_states[0].shared_params:
+ if pair[1] in state_dict:
+ state_dict[pair[0]] = state_dict[pair[1]]
+
+ return state_dict
+
+
+def zero3_partitioned_param_info(unpartitioned_numel, world_size):
+ remainder = unpartitioned_numel % world_size
+ padding_numel = (world_size - remainder) if remainder else 0
+ partitioned_numel = math.ceil(unpartitioned_numel / world_size)
+ return partitioned_numel, padding_numel
+
+
+def _zero3_merge_frozen_params(state_dict, world_size, zero_model_states):
+ if zero_model_states[0].frozen_param_shapes is None or len(zero_model_states[0].frozen_param_shapes) == 0:
+ return
+
+ if debug:
+ for i in range(world_size):
+ num_elem = sum(s.numel() for s in zero_model_states[i].frozen_param_fragments.values())
+ print(f'rank {i}: {FROZEN_PARAM_SHAPES}.numel = {num_elem}')
+
+ frozen_param_shapes = zero_model_states[0].frozen_param_shapes
+ wanted_params = len(frozen_param_shapes)
+ wanted_numel = sum(s.numel() for s in frozen_param_shapes.values())
+ avail_numel = sum([p.numel() for p in zero_model_states[0].frozen_param_fragments.values()]) * world_size
+ print(f'Frozen params: Have {avail_numel} numels to process.')
+ print(f'Frozen params: Need {wanted_numel} numels in {wanted_params} params')
+
+ total_params = 0
+ total_numel = 0
+ for name, shape in zero_model_states[0].frozen_param_shapes.items():
+ total_params += 1
+ unpartitioned_numel = shape.numel()
+ total_numel += unpartitioned_numel
+
+ param_frags = tuple(model_state.frozen_param_fragments[name] for model_state in zero_model_states)
+ state_dict[name] = torch.cat(param_frags, 0).narrow(0, 0, unpartitioned_numel).view(shape)
+
+ partitioned_numel, partitioned_padding_numel = zero3_partitioned_param_info(unpartitioned_numel, world_size)
+
+ if debug:
+ print(
+ f"Frozen params: {total_params} {name} full shape: {shape} partition0 numel={partitioned_numel} partitioned_padding_numel={partitioned_padding_numel}"
+ )
+
+ print(f"Reconstructed Frozen fp32 state dict with {total_params} params {total_numel} elements")
+
+
+def _zero3_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states):
+ param_shapes = zero_model_states[0].param_shapes
+ avail_numel = fp32_flat_groups[0].numel() * world_size
+ # Reconstruction protocol: For zero3 we need to zip the partitions together at boundary of each
+ # param, re-consolidating each param, while dealing with padding if any
+
+ # merge list of dicts, preserving order
+ param_shapes = {k: v for d in param_shapes for k, v in d.items()}
+
+ if debug:
+ for i in range(world_size):
+ print(f"{FP32_FLAT_GROUPS}[{i}].shape={fp32_flat_groups[i].shape}")
+
+ wanted_params = len(param_shapes)
+ wanted_numel = sum(shape.numel() for shape in param_shapes.values())
+ # not asserting if there is a mismatch due to possible padding
+ avail_numel = fp32_flat_groups[0].numel() * world_size
+ print(f"Trainable params: Have {avail_numel} numels to process.")
+ print(f"Trainable params: Need {wanted_numel} numels in {wanted_params} params.")
+
+ # params
+ # XXX: for huge models that can't fit into the host's RAM we will have to recode this to support
+ # out-of-core computing solution
+ offset = 0
+ total_numel = 0
+ total_params = 0
+ for name, shape in param_shapes.items():
+
+ unpartitioned_numel = shape.numel()
+ total_numel += unpartitioned_numel
+ total_params += 1
+
+ partitioned_numel, partitioned_padding_numel = zero3_partitioned_param_info(unpartitioned_numel, world_size)
+
+ if debug:
+ print(
+ f"Trainable params: {total_params} {name} full shape: {shape} partition0 numel={partitioned_numel} partitioned_padding_numel={partitioned_padding_numel}"
+ )
+
+ # XXX: memory usage doubles here
+ state_dict[name] = torch.cat(
+ tuple(fp32_flat_groups[i].narrow(0, offset, partitioned_numel) for i in range(world_size)),
+ 0).narrow(0, 0, unpartitioned_numel).view(shape)
+ offset += partitioned_numel
+
+ offset *= world_size
+
+ # Sanity check
+ if offset != avail_numel:
+ raise ValueError(f"consumed {offset} numels out of {avail_numel} - something is wrong")
+
+ print(f"Reconstructed Trainable fp32 state dict with {total_params} params {total_numel} elements")
+
+
+def _get_fp32_state_dict_from_zero3_checkpoint(world_size, fp32_flat_groups, zero_model_states,
+ exclude_frozen_parameters):
+ state_dict = OrderedDict()
+
+ # buffers
+ buffers = zero_model_states[0].buffers
+ state_dict.update(buffers)
+ if debug:
+ print(f"added {len(buffers)} buffers")
+
+ if not exclude_frozen_parameters:
+ _zero3_merge_frozen_params(state_dict, world_size, zero_model_states)
+
+ _zero3_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states)
+
+ # recover shared parameters
+ for pair in zero_model_states[0].shared_params:
+ if pair[1] in state_dict:
+ state_dict[pair[0]] = state_dict[pair[1]]
+
+ return state_dict
+
+
+def get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag=None, exclude_frozen_parameters=False):
+ """
+ Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated state_dict that can be loaded with
+ ``load_state_dict()`` and used for training without DeepSpeed or shared with others, for example
+ via a model hub.
+
+ Args:
+ - ``checkpoint_dir``: path to the desired checkpoint folder
+ - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in 'latest' file. e.g., ``global_step14``
+ - ``exclude_frozen_parameters``: exclude frozen parameters
+
+ Returns:
+ - pytorch ``state_dict``
+
+ Note: this approach may not work if your application doesn't have sufficient free CPU memory and
+ you may need to use the offline approach using the ``zero_to_fp32.py`` script that is saved with
+ the checkpoint.
+
+ A typical usage might be ::
+
+ from deepspeed.utils.zero_to_fp32 import get_fp32_state_dict_from_zero_checkpoint
+ # do the training and checkpoint saving
+ state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir) # already on cpu
+ model = model.cpu() # move to cpu
+ model.load_state_dict(state_dict)
+ # submit to model hub or save the model to share with others
+
+ In this example the ``model`` will no longer be usable in the deepspeed context of the same
+ application. i.e. you will need to re-initialize the deepspeed engine, since
+ ``model.load_state_dict(state_dict)`` will remove all the deepspeed magic from it.
+
+ If you want it all done for you, use ``load_state_dict_from_zero_checkpoint`` instead.
+
+ """
+ if tag is None:
+ latest_path = os.path.join(checkpoint_dir, 'latest')
+ if os.path.isfile(latest_path):
+ with open(latest_path, 'r') as fd:
+ tag = fd.read().strip()
+ else:
+ raise ValueError(f"Unable to find 'latest' file at {latest_path}")
+
+ ds_checkpoint_dir = os.path.join(checkpoint_dir, tag)
+
+ if not os.path.isdir(ds_checkpoint_dir):
+ raise FileNotFoundError(f"Directory '{ds_checkpoint_dir}' doesn't exist")
+
+ return _get_fp32_state_dict_from_zero_checkpoint(ds_checkpoint_dir, exclude_frozen_parameters)
+
+
+def convert_zero_checkpoint_to_fp32_state_dict(checkpoint_dir, output_file, tag=None, exclude_frozen_parameters=False):
+ """
+ Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated ``state_dict`` file that can be
+ loaded with ``torch.load(file)`` + ``load_state_dict()`` and used for training without DeepSpeed.
+
+ Args:
+ - ``checkpoint_dir``: path to the desired checkpoint folder. (one that contains the tag-folder, like ``global_step14``)
+ - ``output_file``: path to the pytorch fp32 state_dict output file (e.g. path/pytorch_model.bin)
+ - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in the file named ``latest`` in the checkpoint folder, e.g., ``global_step14``
+ - ``exclude_frozen_parameters``: exclude frozen parameters
+ """
+
+ state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag, exclude_frozen_parameters)
+ print(f"Saving fp32 state dict to {output_file}")
+ torch.save(state_dict, output_file)
+
+
+def load_state_dict_from_zero_checkpoint(model, checkpoint_dir, tag=None):
+ """
+ 1. Put the provided model to cpu
+ 2. Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated ``state_dict``
+ 3. Load it into the provided model
+
+ Args:
+ - ``model``: the model object to update
+ - ``checkpoint_dir``: path to the desired checkpoint folder. (one that contains the tag-folder, like ``global_step14``)
+ - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in the file named ``latest`` in the checkpoint folder, e.g., ``global_step14``
+
+ Returns:
+ - ``model`: modified model
+
+ Make sure you have plenty of CPU memory available before you call this function. If you don't
+ have enough use the ``zero_to_fp32.py`` utility to do the conversion. You will find it
+ conveniently placed for you in the checkpoint folder.
+
+ A typical usage might be ::
+
+ from deepspeed.utils.zero_to_fp32 import load_state_dict_from_zero_checkpoint
+ model = load_state_dict_from_zero_checkpoint(trainer.model, checkpoint_dir)
+ # submit to model hub or save the model to share with others
+
+ Note, that once this was run, the ``model`` will no longer be usable in the deepspeed context
+ of the same application. i.e. you will need to re-initialize the deepspeed engine, since
+ ``model.load_state_dict(state_dict)`` will remove all the deepspeed magic from it.
+
+ """
+ logger.info(f"Extracting fp32 weights")
+ state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag)
+
+ logger.info(f"Overwriting model with fp32 weights")
+ model = model.cpu()
+ model.load_state_dict(state_dict, strict=False)
+
+ return model
+
+
+if __name__ == "__main__":
+
+ parser = argparse.ArgumentParser()
+ parser.add_argument("checkpoint_dir",
+ type=str,
+ help="path to the desired checkpoint folder, e.g., path/checkpoint-12")
+ parser.add_argument(
+ "output_file",
+ type=str,
+ help="path to the pytorch fp32 state_dict output file (e.g. path/checkpoint-12/pytorch_model.bin)")
+ parser.add_argument("-t",
+ "--tag",
+ type=str,
+ default=None,
+ help="checkpoint tag used as a unique identifier for checkpoint. e.g., global_step1")
+ parser.add_argument("--exclude_frozen_parameters", action='store_true', help="exclude frozen parameters")
+ parser.add_argument("-d", "--debug", action='store_true', help="enable debug")
+ args = parser.parse_args()
+
+ debug = args.debug
+
+ convert_zero_checkpoint_to_fp32_state_dict(args.checkpoint_dir,
+ args.output_file,
+ tag=args.tag,
+ exclude_frozen_parameters=args.exclude_frozen_parameters)
diff --git a/config.json b/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..9d764aead97a826f7aae3b9cfcfe2606e1d2eeec
--- /dev/null
+++ b/config.json
@@ -0,0 +1,52 @@
+{
+ "_attn_implementation_autoset": true,
+ "_name_or_path": "meta-llama/Llama-3.3-70B-Instruct",
+ "architectures": [
+ "LlamaForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "bos_token_id": 128000,
+ "eos_token_id": 128009,
+ "head_dim": 128,
+ "hidden_act": "silu",
+ "hidden_size": 8192,
+ "initializer_range": 0.02,
+ "intermediate_size": 28672,
+ "max_position_embeddings": 131072,
+ "mlp_bias": false,
+ "model_type": "llama",
+ "num_attention_heads": 64,
+ "num_hidden_layers": 80,
+ "num_key_value_heads": 8,
+ "pretraining_tp": 1,
+ "quantization_config": {
+ "_load_in_4bit": true,
+ "_load_in_8bit": false,
+ "bnb_4bit_compute_dtype": "bfloat16",
+ "bnb_4bit_quant_storage": "bfloat16",
+ "bnb_4bit_quant_type": "nf4",
+ "bnb_4bit_use_double_quant": true,
+ "llm_int8_enable_fp32_cpu_offload": false,
+ "llm_int8_has_fp16_weight": false,
+ "llm_int8_skip_modules": null,
+ "llm_int8_threshold": 6.0,
+ "load_in_4bit": true,
+ "load_in_8bit": false,
+ "quant_method": "bitsandbytes"
+ },
+ "rms_norm_eps": 1e-05,
+ "rope_scaling": {
+ "factor": 8.0,
+ "high_freq_factor": 4.0,
+ "low_freq_factor": 1.0,
+ "original_max_position_embeddings": 8192,
+ "rope_type": "llama3"
+ },
+ "rope_theta": 500000.0,
+ "tie_word_embeddings": false,
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.49.0",
+ "use_cache": false,
+ "vocab_size": 128256
+}
diff --git a/special_tokens_map.json b/special_tokens_map.json
new file mode 100644
index 0000000000000000000000000000000000000000..278b7f0f84be865c4687700ee7b3c63d89a51e18
--- /dev/null
+++ b/special_tokens_map.json
@@ -0,0 +1,23 @@
+{
+ "bos_token": {
+ "content": "<|begin_of_text|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "<|eot_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": {
+ "content": "<|end_of_text|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+}
diff --git a/tokenizer.json b/tokenizer.json
new file mode 100644
index 0000000000000000000000000000000000000000..1c1d8d5c9024994f1d3b00f9662b8dd89ca13cf2
--- /dev/null
+++ b/tokenizer.json
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6b9e4e7fb171f92fd137b777cc2714bf87d11576700a1dcd7a399e7bbe39537b
+size 17209920
diff --git a/tokenizer_config.json b/tokenizer_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..ca91a2ef55f4239a7af81d7c9abb05f53621a07b
--- /dev/null
+++ b/tokenizer_config.json
@@ -0,0 +1,2064 @@
+{
+ "added_tokens_decoder": {
+ "128000": {
+ "content": "<|begin_of_text|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128001": {
+ "content": "<|end_of_text|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128002": {
+ "content": "<|reserved_special_token_0|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128003": {
+ "content": "<|reserved_special_token_1|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128004": {
+ "content": "<|finetune_right_pad_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128005": {
+ "content": "<|reserved_special_token_2|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128006": {
+ "content": "<|start_header_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128007": {
+ "content": "<|end_header_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128008": {
+ "content": "<|eom_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128009": {
+ "content": "<|eot_id|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128010": {
+ "content": "<|python_tag|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128011": {
+ "content": "<|reserved_special_token_3|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128012": {
+ "content": "<|reserved_special_token_4|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128013": {
+ "content": "<|reserved_special_token_5|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128014": {
+ "content": "<|reserved_special_token_6|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128015": {
+ "content": "<|reserved_special_token_7|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128016": {
+ "content": "<|reserved_special_token_8|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128017": {
+ "content": "<|reserved_special_token_9|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128018": {
+ "content": "<|reserved_special_token_10|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128019": {
+ "content": "<|reserved_special_token_11|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128020": {
+ "content": "<|reserved_special_token_12|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128021": {
+ "content": "<|reserved_special_token_13|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128022": {
+ "content": "<|reserved_special_token_14|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128023": {
+ "content": "<|reserved_special_token_15|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128024": {
+ "content": "<|reserved_special_token_16|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128025": {
+ "content": "<|reserved_special_token_17|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128026": {
+ "content": "<|reserved_special_token_18|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128027": {
+ "content": "<|reserved_special_token_19|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128028": {
+ "content": "<|reserved_special_token_20|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128029": {
+ "content": "<|reserved_special_token_21|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128030": {
+ "content": "<|reserved_special_token_22|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128031": {
+ "content": "<|reserved_special_token_23|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128032": {
+ "content": "<|reserved_special_token_24|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128033": {
+ "content": "<|reserved_special_token_25|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128034": {
+ "content": "<|reserved_special_token_26|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128035": {
+ "content": "<|reserved_special_token_27|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128036": {
+ "content": "<|reserved_special_token_28|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128037": {
+ "content": "<|reserved_special_token_29|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128038": {
+ "content": "<|reserved_special_token_30|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128039": {
+ "content": "<|reserved_special_token_31|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128040": {
+ "content": "<|reserved_special_token_32|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128041": {
+ "content": "<|reserved_special_token_33|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128042": {
+ "content": "<|reserved_special_token_34|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128043": {
+ "content": "<|reserved_special_token_35|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128044": {
+ "content": "<|reserved_special_token_36|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128045": {
+ "content": "<|reserved_special_token_37|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128046": {
+ "content": "<|reserved_special_token_38|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128047": {
+ "content": "<|reserved_special_token_39|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128048": {
+ "content": "<|reserved_special_token_40|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128049": {
+ "content": "<|reserved_special_token_41|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128050": {
+ "content": "<|reserved_special_token_42|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128051": {
+ "content": "<|reserved_special_token_43|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128052": {
+ "content": "<|reserved_special_token_44|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128053": {
+ "content": "<|reserved_special_token_45|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128054": {
+ "content": "<|reserved_special_token_46|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128055": {
+ "content": "<|reserved_special_token_47|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128056": {
+ "content": "<|reserved_special_token_48|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128057": {
+ "content": "<|reserved_special_token_49|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128058": {
+ "content": "<|reserved_special_token_50|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128059": {
+ "content": "<|reserved_special_token_51|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128060": {
+ "content": "<|reserved_special_token_52|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128061": {
+ "content": "<|reserved_special_token_53|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128062": {
+ "content": "<|reserved_special_token_54|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128063": {
+ "content": "<|reserved_special_token_55|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128064": {
+ "content": "<|reserved_special_token_56|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128065": {
+ "content": "<|reserved_special_token_57|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128066": {
+ "content": "<|reserved_special_token_58|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128067": {
+ "content": "<|reserved_special_token_59|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128068": {
+ "content": "<|reserved_special_token_60|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128069": {
+ "content": "<|reserved_special_token_61|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128070": {
+ "content": "<|reserved_special_token_62|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128071": {
+ "content": "<|reserved_special_token_63|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128072": {
+ "content": "<|reserved_special_token_64|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128073": {
+ "content": "<|reserved_special_token_65|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128074": {
+ "content": "<|reserved_special_token_66|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128075": {
+ "content": "<|reserved_special_token_67|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128076": {
+ "content": "<|reserved_special_token_68|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128077": {
+ "content": "<|reserved_special_token_69|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128078": {
+ "content": "<|reserved_special_token_70|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128079": {
+ "content": "<|reserved_special_token_71|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128080": {
+ "content": "<|reserved_special_token_72|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128081": {
+ "content": "<|reserved_special_token_73|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128082": {
+ "content": "<|reserved_special_token_74|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128083": {
+ "content": "<|reserved_special_token_75|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128084": {
+ "content": "<|reserved_special_token_76|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128085": {
+ "content": "<|reserved_special_token_77|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128086": {
+ "content": "<|reserved_special_token_78|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128087": {
+ "content": "<|reserved_special_token_79|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128088": {
+ "content": "<|reserved_special_token_80|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128089": {
+ "content": "<|reserved_special_token_81|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128090": {
+ "content": "<|reserved_special_token_82|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128091": {
+ "content": "<|reserved_special_token_83|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128092": {
+ "content": "<|reserved_special_token_84|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128093": {
+ "content": "<|reserved_special_token_85|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128094": {
+ "content": "<|reserved_special_token_86|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128095": {
+ "content": "<|reserved_special_token_87|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128096": {
+ "content": "<|reserved_special_token_88|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128097": {
+ "content": "<|reserved_special_token_89|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128098": {
+ "content": "<|reserved_special_token_90|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128099": {
+ "content": "<|reserved_special_token_91|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128100": {
+ "content": "<|reserved_special_token_92|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128101": {
+ "content": "<|reserved_special_token_93|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128102": {
+ "content": "<|reserved_special_token_94|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128103": {
+ "content": "<|reserved_special_token_95|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128104": {
+ "content": "<|reserved_special_token_96|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128105": {
+ "content": "<|reserved_special_token_97|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128106": {
+ "content": "<|reserved_special_token_98|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128107": {
+ "content": "<|reserved_special_token_99|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128108": {
+ "content": "<|reserved_special_token_100|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128109": {
+ "content": "<|reserved_special_token_101|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128110": {
+ "content": "<|reserved_special_token_102|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128111": {
+ "content": "<|reserved_special_token_103|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128112": {
+ "content": "<|reserved_special_token_104|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128113": {
+ "content": "<|reserved_special_token_105|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128114": {
+ "content": "<|reserved_special_token_106|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128115": {
+ "content": "<|reserved_special_token_107|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128116": {
+ "content": "<|reserved_special_token_108|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128117": {
+ "content": "<|reserved_special_token_109|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128118": {
+ "content": "<|reserved_special_token_110|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128119": {
+ "content": "<|reserved_special_token_111|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128120": {
+ "content": "<|reserved_special_token_112|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128121": {
+ "content": "<|reserved_special_token_113|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128122": {
+ "content": "<|reserved_special_token_114|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128123": {
+ "content": "<|reserved_special_token_115|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128124": {
+ "content": "<|reserved_special_token_116|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128125": {
+ "content": "<|reserved_special_token_117|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128126": {
+ "content": "<|reserved_special_token_118|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128127": {
+ "content": "<|reserved_special_token_119|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128128": {
+ "content": "<|reserved_special_token_120|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128129": {
+ "content": "<|reserved_special_token_121|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128130": {
+ "content": "<|reserved_special_token_122|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128131": {
+ "content": "<|reserved_special_token_123|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128132": {
+ "content": "<|reserved_special_token_124|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128133": {
+ "content": "<|reserved_special_token_125|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128134": {
+ "content": "<|reserved_special_token_126|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128135": {
+ "content": "<|reserved_special_token_127|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128136": {
+ "content": "<|reserved_special_token_128|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128137": {
+ "content": "<|reserved_special_token_129|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128138": {
+ "content": "<|reserved_special_token_130|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128139": {
+ "content": "<|reserved_special_token_131|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128140": {
+ "content": "<|reserved_special_token_132|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128141": {
+ "content": "<|reserved_special_token_133|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128142": {
+ "content": "<|reserved_special_token_134|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128143": {
+ "content": "<|reserved_special_token_135|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128144": {
+ "content": "<|reserved_special_token_136|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128145": {
+ "content": "<|reserved_special_token_137|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128146": {
+ "content": "<|reserved_special_token_138|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128147": {
+ "content": "<|reserved_special_token_139|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128148": {
+ "content": "<|reserved_special_token_140|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128149": {
+ "content": "<|reserved_special_token_141|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128150": {
+ "content": "<|reserved_special_token_142|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128151": {
+ "content": "<|reserved_special_token_143|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128152": {
+ "content": "<|reserved_special_token_144|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128153": {
+ "content": "<|reserved_special_token_145|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128154": {
+ "content": "<|reserved_special_token_146|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128155": {
+ "content": "<|reserved_special_token_147|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128156": {
+ "content": "<|reserved_special_token_148|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128157": {
+ "content": "<|reserved_special_token_149|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128158": {
+ "content": "<|reserved_special_token_150|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128159": {
+ "content": "<|reserved_special_token_151|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128160": {
+ "content": "<|reserved_special_token_152|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128161": {
+ "content": "<|reserved_special_token_153|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128162": {
+ "content": "<|reserved_special_token_154|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128163": {
+ "content": "<|reserved_special_token_155|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128164": {
+ "content": "<|reserved_special_token_156|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128165": {
+ "content": "<|reserved_special_token_157|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128166": {
+ "content": "<|reserved_special_token_158|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128167": {
+ "content": "<|reserved_special_token_159|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128168": {
+ "content": "<|reserved_special_token_160|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128169": {
+ "content": "<|reserved_special_token_161|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128170": {
+ "content": "<|reserved_special_token_162|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128171": {
+ "content": "<|reserved_special_token_163|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128172": {
+ "content": "<|reserved_special_token_164|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128173": {
+ "content": "<|reserved_special_token_165|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128174": {
+ "content": "<|reserved_special_token_166|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128175": {
+ "content": "<|reserved_special_token_167|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128176": {
+ "content": "<|reserved_special_token_168|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128177": {
+ "content": "<|reserved_special_token_169|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128178": {
+ "content": "<|reserved_special_token_170|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128179": {
+ "content": "<|reserved_special_token_171|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128180": {
+ "content": "<|reserved_special_token_172|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128181": {
+ "content": "<|reserved_special_token_173|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128182": {
+ "content": "<|reserved_special_token_174|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128183": {
+ "content": "<|reserved_special_token_175|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128184": {
+ "content": "<|reserved_special_token_176|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128185": {
+ "content": "<|reserved_special_token_177|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128186": {
+ "content": "<|reserved_special_token_178|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128187": {
+ "content": "<|reserved_special_token_179|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128188": {
+ "content": "<|reserved_special_token_180|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128189": {
+ "content": "<|reserved_special_token_181|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128190": {
+ "content": "<|reserved_special_token_182|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128191": {
+ "content": "<|reserved_special_token_183|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128192": {
+ "content": "<|reserved_special_token_184|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128193": {
+ "content": "<|reserved_special_token_185|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128194": {
+ "content": "<|reserved_special_token_186|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128195": {
+ "content": "<|reserved_special_token_187|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128196": {
+ "content": "<|reserved_special_token_188|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128197": {
+ "content": "<|reserved_special_token_189|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128198": {
+ "content": "<|reserved_special_token_190|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128199": {
+ "content": "<|reserved_special_token_191|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128200": {
+ "content": "<|reserved_special_token_192|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128201": {
+ "content": "<|reserved_special_token_193|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128202": {
+ "content": "<|reserved_special_token_194|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128203": {
+ "content": "<|reserved_special_token_195|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128204": {
+ "content": "<|reserved_special_token_196|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128205": {
+ "content": "<|reserved_special_token_197|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128206": {
+ "content": "<|reserved_special_token_198|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128207": {
+ "content": "<|reserved_special_token_199|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128208": {
+ "content": "<|reserved_special_token_200|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128209": {
+ "content": "<|reserved_special_token_201|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128210": {
+ "content": "<|reserved_special_token_202|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128211": {
+ "content": "<|reserved_special_token_203|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128212": {
+ "content": "<|reserved_special_token_204|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128213": {
+ "content": "<|reserved_special_token_205|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128214": {
+ "content": "<|reserved_special_token_206|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128215": {
+ "content": "<|reserved_special_token_207|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128216": {
+ "content": "<|reserved_special_token_208|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128217": {
+ "content": "<|reserved_special_token_209|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128218": {
+ "content": "<|reserved_special_token_210|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128219": {
+ "content": "<|reserved_special_token_211|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128220": {
+ "content": "<|reserved_special_token_212|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128221": {
+ "content": "<|reserved_special_token_213|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128222": {
+ "content": "<|reserved_special_token_214|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128223": {
+ "content": "<|reserved_special_token_215|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128224": {
+ "content": "<|reserved_special_token_216|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128225": {
+ "content": "<|reserved_special_token_217|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128226": {
+ "content": "<|reserved_special_token_218|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128227": {
+ "content": "<|reserved_special_token_219|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128228": {
+ "content": "<|reserved_special_token_220|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128229": {
+ "content": "<|reserved_special_token_221|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128230": {
+ "content": "<|reserved_special_token_222|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128231": {
+ "content": "<|reserved_special_token_223|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128232": {
+ "content": "<|reserved_special_token_224|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128233": {
+ "content": "<|reserved_special_token_225|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128234": {
+ "content": "<|reserved_special_token_226|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128235": {
+ "content": "<|reserved_special_token_227|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128236": {
+ "content": "<|reserved_special_token_228|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128237": {
+ "content": "<|reserved_special_token_229|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128238": {
+ "content": "<|reserved_special_token_230|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128239": {
+ "content": "<|reserved_special_token_231|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128240": {
+ "content": "<|reserved_special_token_232|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128241": {
+ "content": "<|reserved_special_token_233|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128242": {
+ "content": "<|reserved_special_token_234|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128243": {
+ "content": "<|reserved_special_token_235|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128244": {
+ "content": "<|reserved_special_token_236|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128245": {
+ "content": "<|reserved_special_token_237|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128246": {
+ "content": "<|reserved_special_token_238|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128247": {
+ "content": "<|reserved_special_token_239|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128248": {
+ "content": "<|reserved_special_token_240|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128249": {
+ "content": "<|reserved_special_token_241|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128250": {
+ "content": "<|reserved_special_token_242|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128251": {
+ "content": "<|reserved_special_token_243|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128252": {
+ "content": "<|reserved_special_token_244|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128253": {
+ "content": "<|reserved_special_token_245|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128254": {
+ "content": "<|reserved_special_token_246|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "128255": {
+ "content": "<|reserved_special_token_247|>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ }
+ },
+ "bos_token": "<|begin_of_text|>",
+ "chat_template": "{{- bos_token }}\n{%- if custom_tools is defined %}\n {%- set tools = custom_tools %}\n{%- endif %}\n{%- if not tools_in_user_message is defined %}\n {%- set tools_in_user_message = true %}\n{%- endif %}\n{%- if not date_string is defined %}\n {%- set date_string = \"26 Jul 2024\" %}\n{%- endif %}\n{%- if not tools is defined %}\n {%- set tools = none %}\n{%- endif %}\n\n{#- This block extracts the system message, so we can slot it into the right place. #}\n{%- if messages[0]['role'] == 'system' %}\n {%- set system_message = messages[0]['content']|trim %}\n {%- set messages = messages[1:] %}\n{%- else %}\n {%- set system_message = \"\" %}\n{%- endif %}\n\n{#- System message + builtin tools #}\n{{- \"<|start_header_id|>system<|end_header_id|>\\n\\n\" }}\n{%- if builtin_tools is defined or tools is not none %}\n {{- \"Environment: ipython\\n\" }}\n{%- endif %}\n{%- if builtin_tools is defined %}\n {{- \"Tools: \" + builtin_tools | reject('equalto', 'code_interpreter') | join(\", \") + \"\\n\\n\"}}\n{%- endif %}\n{{- \"Cutting Knowledge Date: December 2023\\n\" }}\n{{- \"Today Date: \" + date_string + \"\\n\\n\" }}\n{%- if tools is not none and not tools_in_user_message %}\n {{- \"You have access to the following functions. To call a function, please respond with JSON for a function call.\" }}\n {{- 'Respond in the format {\"name\": function name, \"parameters\": dictionary of argument name and its value}.' 
}}\n {{- \"Do not use variables.\\n\\n\" }}\n {%- for t in tools %}\n {{- t | tojson(indent=4) }}\n {{- \"\\n\\n\" }}\n {%- endfor %}\n{%- endif %}\n{{- system_message }}\n{{- \"<|eot_id|>\" }}\n\n{#- Custom tools are passed in a user message with some extra guidance #}\n{%- if tools_in_user_message and not tools is none %}\n {#- Extract the first user message so we can plug it in here #}\n {%- if messages | length != 0 %}\n {%- set first_user_message = messages[0]['content']|trim %}\n {%- set messages = messages[1:] %}\n {%- else %}\n {{- raise_exception(\"Cannot put tools in the first user message when there's no first user message!\") }}\n{%- endif %}\n {{- '<|start_header_id|>user<|end_header_id|>\\n\\n' -}}\n {{- \"Given the following functions, please respond with a JSON for a function call \" }}\n {{- \"with its proper arguments that best answers the given prompt.\\n\\n\" }}\n {{- 'Respond in the format {\"name\": function name, \"parameters\": dictionary of argument name and its value}.' 
}}\n {{- \"Do not use variables.\\n\\n\" }}\n {%- for t in tools %}\n {{- t | tojson(indent=4) }}\n {{- \"\\n\\n\" }}\n {%- endfor %}\n {{- first_user_message + \"<|eot_id|>\"}}\n{%- endif %}\n\n{%- for message in messages %}\n {%- if not (message.role == 'ipython' or message.role == 'tool' or 'tool_calls' in message) %}\n {{- '<|start_header_id|>' + message['role'] + '<|end_header_id|>\\n\\n'+ message['content'] | trim + '<|eot_id|>' }}\n {%- elif 'tool_calls' in message %}\n {%- if not message.tool_calls|length == 1 %}\n {{- raise_exception(\"This model only supports single tool-calls at once!\") }}\n {%- endif %}\n {%- set tool_call = message.tool_calls[0].function %}\n {%- if builtin_tools is defined and tool_call.name in builtin_tools %}\n {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' -}}\n {{- \"<|python_tag|>\" + tool_call.name + \".call(\" }}\n {%- for arg_name, arg_val in tool_call.arguments | items %}\n {{- arg_name + '=\"' + arg_val + '\"' }}\n {%- if not loop.last %}\n {{- \", \" }}\n {%- endif %}\n {%- endfor %}\n {{- \")\" }}\n {%- else %}\n {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' -}}\n {{- '{\"name\": \"' + tool_call.name + '\", ' }}\n {{- '\"parameters\": ' }}\n {{- tool_call.arguments | tojson }}\n {{- \"}\" }}\n {%- endif %}\n {%- if builtin_tools is defined %}\n {#- This means we're in ipython mode #}\n {{- \"<|eom_id|>\" }}\n {%- else %}\n {{- \"<|eot_id|>\" }}\n {%- endif %}\n {%- elif message.role == \"tool\" or message.role == \"ipython\" %}\n {{- \"<|start_header_id|>ipython<|end_header_id|>\\n\\n\" }}\n {%- if message.content is mapping or message.content is iterable %}\n {{- message.content | tojson }}\n {%- else %}\n {{- message.content }}\n {%- endif %}\n {{- \"<|eot_id|>\" }}\n {%- endif %}\n{%- endfor %}\n{%- if add_generation_prompt %}\n {{- '<|start_header_id|>assistant<|end_header_id|>\\n\\n' }}\n{%- endif %}\n",
+ "clean_up_tokenization_spaces": true,
+ "eos_token": "<|eot_id|>",
+ "extra_special_tokens": {},
+ "model_input_names": [
+ "input_ids",
+ "attention_mask"
+ ],
+ "model_max_length": 131072,
+ "pad_token": "<|end_of_text|>",
+ "tokenizer_class": "PreTrainedTokenizer"
+}