diff --git a/.gitattributes b/.gitattributes index a6344aac8c09253b3b630fb776ae94478aa0275b..c19f00c6634715fdcc13b6d7cbf33861226903fc 100644 --- a/.gitattributes +++ b/.gitattributes @@ -33,3 +33,8 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text *.zip filter=lfs diff=lfs merge=lfs -text *.zst filter=lfs diff=lfs merge=lfs -text *tfevents* filter=lfs diff=lfs merge=lfs -text +checkpoint-epoch1/tokenizer.json filter=lfs diff=lfs merge=lfs -text +checkpoint-epoch2/tokenizer.json filter=lfs diff=lfs merge=lfs -text +checkpoint-epoch3/tokenizer.json filter=lfs diff=lfs merge=lfs -text +checkpoint-epoch4/tokenizer.json filter=lfs diff=lfs merge=lfs -text +checkpoint-epoch5/tokenizer.json filter=lfs diff=lfs merge=lfs -text diff --git a/checkpoint-epoch1/README.md b/checkpoint-epoch1/README.md new file mode 100644 index 0000000000000000000000000000000000000000..eb360f4bae4142aa4d896cd7d618b6bbe284d00b --- /dev/null +++ b/checkpoint-epoch1/README.md @@ -0,0 +1,61 @@ +--- +library_name: transformers +license: other +base_model: /data/yanshi.xy/hf_models/Gemma3-4B-SFT +tags: +- llama-factory +- full +- generated_from_trainer +model-index: +- name: checkpoint-epoch1 + results: [] +--- + + + +# checkpoint-epoch1 + +This model is a fine-tuned version of [/data/yanshi.xy/hf_models/Gemma3-4B-SFT](https://huggingface.co//data/yanshi.xy/hf_models/Gemma3-4B-SFT) on the geo_train_gemma3_1_cw0 dataset. + +## Model description + +More information needed + +## Intended uses & limitations + +More information needed + +## Training and evaluation data + +More information needed + +## Training procedure + +### Training hyperparameters + +The following hyperparameters were used during training: +- learning_rate: 1e-05 +- train_batch_size: 4 +- eval_batch_size: 8 +- seed: 42 +- distributed_type: multi-GPU +- num_devices: 4 +- gradient_accumulation_steps: 8 +- total_train_batch_size: 128 +- total_eval_batch_size: 32 +- optimizer: Use adamw_torch with betas=(0.9,0.999) and epsilon=1e-08 and optimizer_args=No additional optimizer arguments +- lr_scheduler_type: cosine +- lr_scheduler_warmup_ratio: 0.03 +- num_epochs: 1 + +### Training results + + + +### Framework versions + +- Transformers 4.51.3 +- Pytorch 2.6.0+cu124 +- Datasets 3.3.2 +- Tokenizers 0.21.1 diff --git a/checkpoint-epoch1/added_tokens.json b/checkpoint-epoch1/added_tokens.json new file mode 100644 index 0000000000000000000000000000000000000000..e17bde03d42feda32d1abfca6d3b598b9a020df7 --- /dev/null +++ b/checkpoint-epoch1/added_tokens.json @@ -0,0 +1,3 @@ +{ + "<image_soft_token>": 262144 +} diff --git a/checkpoint-epoch1/all_results.json b/checkpoint-epoch1/all_results.json new file mode 100644 index 0000000000000000000000000000000000000000..2f9a4d3c8accfc22cfd8d7d5b0a9b81bf0ccd1ba --- /dev/null +++ b/checkpoint-epoch1/all_results.json @@ -0,0 +1,8 @@ +{ + "epoch": 0.9984, + "total_flos": 119006989647872.0, + "train_loss": 0.4158584708586717, + "train_runtime": 907.9786, + "train_samples_per_second": 11.013, + "train_steps_per_second": 0.086 +} \ No newline at end of file diff --git a/checkpoint-epoch1/chat_template.json b/checkpoint-epoch1/chat_template.json new file mode 100644 index 0000000000000000000000000000000000000000..719b0cd0d7a373a400b0c119ee0e051f41ea88d9 --- /dev/null +++ b/checkpoint-epoch1/chat_template.json @@ -0,0 +1,3 @@ +{ + "chat_template": "{{ bos_token }}\n{%- if messages[0]['role'] == 'system' -%}\n {%- if messages[0]['content'] is string -%}\n {%- set first_user_prefix = messages[0]['content'] + '\n\n' -%}\n {%- else -%}\n {%- set 
first_user_prefix = messages[0]['content'][0]['text'] + '\n\n' -%}\n {%- endif -%}\n {%- set loop_messages = messages[1:] -%}\n{%- else -%}\n {%- set first_user_prefix = \"\" -%}\n {%- set loop_messages = messages -%}\n{%- endif -%}\n{%- for message in loop_messages -%}\n {%- if (message['role'] == 'user') != (loop.index0 % 2 == 0) -%}\n {{ raise_exception(\"Conversation roles must alternate user/assistant/user/assistant/...\") }}\n {%- endif -%}\n {%- if (message['role'] == 'assistant') -%}\n {%- set role = \"model\" -%}\n {%- else -%}\n {%- set role = message['role'] -%}\n {%- endif -%}\n {{ '<start_of_turn>' + role + '\n' + (first_user_prefix if loop.first else \"\") }}\n {%- if message['content'] is string -%}\n {{ message['content'] | trim }}\n {%- elif message['content'] is iterable -%}\n {%- for item in message['content'] -%}\n {%- if item['type'] == 'image' -%}\n {{ '<start_of_image>' }}\n {%- elif item['type'] == 'text' -%}\n {{ item['text'] | trim }}\n {%- endif -%}\n {%- endfor -%}\n {%- else -%}\n {{ raise_exception(\"Invalid content type\") }}\n {%- endif -%}\n {{ '<end_of_turn>\n' }}\n{%- endfor -%}\n{%- if add_generation_prompt -%}\n {{'<start_of_turn>model\n'}}\n{%- endif -%}\n" +} diff --git a/checkpoint-epoch1/config.json b/checkpoint-epoch1/config.json new file mode 100644 index 0000000000000000000000000000000000000000..9405b59c17478f0168aed53bf5c76773450e8c86 --- /dev/null +++ b/checkpoint-epoch1/config.json @@ -0,0 +1,65 @@ +{ + "architectures": [ + "Gemma3ForConditionalGeneration" + ], + "boi_token_index": 255999, + "eoi_token_index": 256000, + "eos_token_id": [ + 1, + 106 + ], + "hidden_size": 2560, + "image_token_index": 262144, + "initializer_range": 0.02, + "mm_tokens_per_image": 256, + "model_type": "gemma3", + "text_config": { + "attention_bias": false, + "attention_dropout": 0.0, + "attn_logit_softcapping": null, + "cache_implementation": "hybrid", + "final_logit_softcapping": null, + "head_dim": 256, + "hidden_activation": "gelu_pytorch_tanh", + "hidden_size": 2560, + "initializer_range": 0.02, + "intermediate_size": 10240, + "max_position_embeddings": 131072, + "model_type": "gemma3_text", + "num_attention_heads": 8, + "num_hidden_layers": 34, + "num_key_value_heads": 4, + "query_pre_attn_scalar": 256, + "rms_norm_eps": 1e-06, + "rope_local_base_freq": 10000.0, + "rope_scaling": { + "factor": 8.0, + "rope_type": "linear" + }, + "rope_theta": 1000000.0, + "sliding_window": 1024, + "sliding_window_pattern": 6, + "torch_dtype": "float32", + "use_cache": false, + "vocab_size": 262208 + }, + "torch_dtype": "bfloat16", + "transformers_version": "4.51.3", + "use_cache": false, + "vision_config": { + "attention_dropout": 0.0, + "hidden_act": "gelu_pytorch_tanh", + "hidden_size": 1152, + "image_size": 896, + "intermediate_size": 4304, + "layer_norm_eps": 1e-06, + "model_type": "siglip_vision_model", + "num_attention_heads": 16, + "num_channels": 3, + "num_hidden_layers": 27, + "patch_size": 14, + "torch_dtype": "float32", + "vision_use_head": false + }, + "vocab_size": 262144 +} diff --git a/checkpoint-epoch1/generation_config.json b/checkpoint-epoch1/generation_config.json new file mode 100644 index 0000000000000000000000000000000000000000..c4400ddce37d459b3517e7431aab4cadd988f4f8 --- /dev/null +++ b/checkpoint-epoch1/generation_config.json @@ -0,0 +1,13 @@ +{ + "bos_token_id": 2, + "cache_implementation": "hybrid", + "do_sample": true, + "eos_token_id": [ + 1, + 106 + ], + "pad_token_id": 0, + "top_k": 64, + "top_p": 0.95, + "transformers_version": "4.51.3" +} diff --git a/checkpoint-epoch1/model-00001-of-00002.safetensors 
b/checkpoint-epoch1/model-00001-of-00002.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..177dd860f1f0a5e11540bee1d56328171b0fb7c2 --- /dev/null +++ b/checkpoint-epoch1/model-00001-of-00002.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:920592f1881b3d213dd59e131eb1382b5f7905d3b0e0a346dd32aa4fddc6157c +size 4961251752 diff --git a/checkpoint-epoch1/model-00002-of-00002.safetensors b/checkpoint-epoch1/model-00002-of-00002.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..300ab0def99dbee79aa6c1de1f3c76c98852f992 --- /dev/null +++ b/checkpoint-epoch1/model-00002-of-00002.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:fdde0e5aa5ced0fa203b3d50f4ab78168b7e3a3e08c6349f5cc9326666e1bb13 +size 3639026128 diff --git a/checkpoint-epoch1/model.safetensors.index.json b/checkpoint-epoch1/model.safetensors.index.json new file mode 100644 index 0000000000000000000000000000000000000000..4b95241f208f06d324d17c9675568ec58dafd9fb --- /dev/null +++ b/checkpoint-epoch1/model.safetensors.index.json @@ -0,0 +1,890 @@ +{ + "metadata": { + "total_size": 8600158944 + }, + "weight_map": { + "language_model.model.embed_tokens.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.0.input_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.0.mlp.down_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.0.mlp.gate_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.0.mlp.up_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.0.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.0.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.0.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.0.self_attn.k_norm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.0.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.0.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.0.self_attn.q_norm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.0.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.0.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.1.input_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.1.mlp.down_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.1.mlp.gate_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.1.mlp.up_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.1.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.1.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.1.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.1.self_attn.k_norm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.1.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.1.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", + 
"language_model.model.layers.1.self_attn.q_norm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.1.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.1.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.10.input_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.10.mlp.down_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.10.mlp.gate_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.10.mlp.up_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.10.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.10.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.10.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.10.self_attn.k_norm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.10.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.10.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.10.self_attn.q_norm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.10.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.10.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.11.input_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.11.mlp.down_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.11.mlp.gate_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.11.mlp.up_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.11.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.11.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.11.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.11.self_attn.k_norm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.11.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.11.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.11.self_attn.q_norm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.11.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.11.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.12.input_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.12.mlp.down_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.12.mlp.gate_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.12.mlp.up_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.12.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.12.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.12.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + 
"language_model.model.layers.12.self_attn.k_norm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.12.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.12.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.12.self_attn.q_norm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.12.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.12.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.13.input_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.13.mlp.down_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.13.mlp.gate_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.13.mlp.up_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.13.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.13.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.13.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.13.self_attn.k_norm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.13.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.13.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.13.self_attn.q_norm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.13.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.13.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.14.input_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.14.mlp.down_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.14.mlp.gate_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.14.mlp.up_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.14.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.14.post_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.14.pre_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.14.self_attn.k_norm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.14.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.14.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.14.self_attn.q_norm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.14.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.14.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.15.input_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.15.mlp.down_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.15.mlp.gate_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.15.mlp.up_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.15.post_attention_layernorm.weight": 
"model-00002-of-00002.safetensors", + "language_model.model.layers.15.post_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.15.pre_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.15.self_attn.k_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.15.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.15.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.15.self_attn.q_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.15.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.15.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.16.input_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.16.mlp.down_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.16.mlp.gate_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.16.mlp.up_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.16.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.16.post_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.16.pre_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.16.self_attn.k_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.16.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.16.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.16.self_attn.q_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.16.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.16.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.17.input_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.17.mlp.down_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.17.mlp.gate_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.17.mlp.up_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.17.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.17.post_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.17.pre_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.17.self_attn.k_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.17.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.17.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.17.self_attn.q_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.17.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.17.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.18.input_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.18.mlp.down_proj.weight": "model-00002-of-00002.safetensors", + 
"language_model.model.layers.18.mlp.gate_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.18.mlp.up_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.18.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.18.post_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.18.pre_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.18.self_attn.k_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.18.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.18.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.18.self_attn.q_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.18.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.18.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.19.input_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.19.mlp.down_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.19.mlp.gate_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.19.mlp.up_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.19.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.19.post_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.19.pre_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.19.self_attn.k_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.19.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.19.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.19.self_attn.q_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.19.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.19.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.2.input_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.2.mlp.down_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.2.mlp.gate_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.2.mlp.up_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.2.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.2.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.2.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.2.self_attn.k_norm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.2.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.2.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.2.self_attn.q_norm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.2.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.2.self_attn.v_proj.weight": 
"model-00001-of-00002.safetensors", + "language_model.model.layers.20.input_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.20.mlp.down_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.20.mlp.gate_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.20.mlp.up_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.20.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.20.post_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.20.pre_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.20.self_attn.k_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.20.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.20.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.20.self_attn.q_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.20.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.20.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.21.input_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.21.mlp.down_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.21.mlp.gate_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.21.mlp.up_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.21.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.21.post_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.21.pre_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.21.self_attn.k_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.21.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.21.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.21.self_attn.q_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.21.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.21.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.22.input_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.22.mlp.down_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.22.mlp.gate_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.22.mlp.up_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.22.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.22.post_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.22.pre_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.22.self_attn.k_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.22.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.22.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", + 
"language_model.model.layers.22.self_attn.q_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.22.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.22.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.23.input_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.23.mlp.down_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.23.mlp.gate_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.23.mlp.up_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.23.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.23.post_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.23.pre_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.23.self_attn.k_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.23.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.23.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.23.self_attn.q_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.23.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.23.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.24.input_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.24.mlp.down_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.24.mlp.gate_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.24.mlp.up_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.24.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.24.post_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.24.pre_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.24.self_attn.k_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.24.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.24.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.24.self_attn.q_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.24.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.24.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.25.input_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.25.mlp.down_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.25.mlp.gate_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.25.mlp.up_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.25.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.25.post_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.25.pre_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + 
"language_model.model.layers.25.self_attn.k_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.25.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.25.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.25.self_attn.q_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.25.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.25.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.26.input_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.26.mlp.down_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.26.mlp.gate_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.26.mlp.up_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.26.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.26.post_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.26.pre_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.26.self_attn.k_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.26.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.26.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.26.self_attn.q_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.26.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.26.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.27.input_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.27.mlp.down_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.27.mlp.gate_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.27.mlp.up_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.27.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.27.post_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.27.pre_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.27.self_attn.k_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.27.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.27.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.27.self_attn.q_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.27.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.27.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.28.input_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.28.mlp.down_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.28.mlp.gate_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.28.mlp.up_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.28.post_attention_layernorm.weight": 
"model-00002-of-00002.safetensors", + "language_model.model.layers.28.post_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.28.pre_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.28.self_attn.k_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.28.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.28.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.28.self_attn.q_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.28.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.28.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.29.input_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.29.mlp.down_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.29.mlp.gate_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.29.mlp.up_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.29.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.29.post_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.29.pre_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.29.self_attn.k_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.29.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.29.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.29.self_attn.q_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.29.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.29.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.3.input_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.3.mlp.down_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.3.mlp.gate_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.3.mlp.up_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.3.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.3.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.3.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.3.self_attn.k_norm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.3.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.3.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.3.self_attn.q_norm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.3.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.3.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.30.input_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.30.mlp.down_proj.weight": "model-00002-of-00002.safetensors", + 
"language_model.model.layers.30.mlp.gate_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.30.mlp.up_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.30.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.30.post_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.30.pre_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.30.self_attn.k_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.30.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.30.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.30.self_attn.q_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.30.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.30.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.31.input_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.31.mlp.down_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.31.mlp.gate_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.31.mlp.up_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.31.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.31.post_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.31.pre_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.31.self_attn.k_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.31.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.31.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.31.self_attn.q_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.31.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.31.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.32.input_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.32.mlp.down_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.32.mlp.gate_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.32.mlp.up_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.32.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.32.post_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.32.pre_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.32.self_attn.k_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.32.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.32.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.32.self_attn.q_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.32.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", + 
"language_model.model.layers.32.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.33.input_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.33.mlp.down_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.33.mlp.gate_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.33.mlp.up_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.33.post_attention_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.33.post_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.33.pre_feedforward_layernorm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.33.self_attn.k_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.33.self_attn.k_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.33.self_attn.o_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.33.self_attn.q_norm.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.33.self_attn.q_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.33.self_attn.v_proj.weight": "model-00002-of-00002.safetensors", + "language_model.model.layers.4.input_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.4.mlp.down_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.4.mlp.gate_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.4.mlp.up_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.4.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.4.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.4.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.4.self_attn.k_norm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.4.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.4.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.4.self_attn.q_norm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.4.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.4.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.5.input_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.5.mlp.down_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.5.mlp.gate_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.5.mlp.up_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.5.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.5.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.5.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.5.self_attn.k_norm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.5.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.5.self_attn.o_proj.weight": 
"model-00001-of-00002.safetensors", + "language_model.model.layers.5.self_attn.q_norm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.5.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.5.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.6.input_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.6.mlp.down_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.6.mlp.gate_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.6.mlp.up_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.6.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.6.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.6.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.6.self_attn.k_norm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.6.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.6.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.6.self_attn.q_norm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.6.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.6.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.7.input_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.7.mlp.down_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.7.mlp.gate_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.7.mlp.up_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.7.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.7.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.7.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.7.self_attn.k_norm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.7.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.7.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.7.self_attn.q_norm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.7.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.7.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.8.input_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.8.mlp.down_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.8.mlp.gate_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.8.mlp.up_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.8.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.8.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.8.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + 
"language_model.model.layers.8.self_attn.k_norm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.8.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.8.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.8.self_attn.q_norm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.8.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.8.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.9.input_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.9.mlp.down_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.9.mlp.gate_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.9.mlp.up_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.9.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.9.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.9.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.9.self_attn.k_norm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.9.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.9.self_attn.o_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.9.self_attn.q_norm.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.9.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.layers.9.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "language_model.model.norm.weight": "model-00002-of-00002.safetensors", + "multi_modal_projector.mm_input_projection_weight": "model-00001-of-00002.safetensors", + "multi_modal_projector.mm_soft_emb_norm.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.embeddings.patch_embedding.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.embeddings.patch_embedding.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.embeddings.position_embedding.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.0.layer_norm1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.0.layer_norm1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.0.layer_norm2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.0.layer_norm2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.0.mlp.fc1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.0.mlp.fc1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.0.mlp.fc2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.0.mlp.fc2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.0.self_attn.k_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.0.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.0.self_attn.out_proj.bias": "model-00001-of-00002.safetensors", + 
"vision_tower.vision_model.encoder.layers.0.self_attn.out_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.0.self_attn.q_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.0.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.0.self_attn.v_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.0.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.1.layer_norm1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.1.layer_norm1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.1.layer_norm2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.1.layer_norm2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.1.mlp.fc1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.1.mlp.fc1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.1.mlp.fc2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.1.mlp.fc2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.1.self_attn.k_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.1.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.1.self_attn.out_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.1.self_attn.out_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.1.self_attn.q_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.1.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.1.self_attn.v_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.1.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.10.layer_norm1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.10.layer_norm1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.10.layer_norm2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.10.layer_norm2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.10.mlp.fc1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.10.mlp.fc1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.10.mlp.fc2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.10.mlp.fc2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.10.self_attn.k_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.10.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.10.self_attn.out_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.10.self_attn.out_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.10.self_attn.q_proj.bias": "model-00001-of-00002.safetensors", + 
"vision_tower.vision_model.encoder.layers.10.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.10.self_attn.v_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.10.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.11.layer_norm1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.11.layer_norm1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.11.layer_norm2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.11.layer_norm2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.11.mlp.fc1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.11.mlp.fc1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.11.mlp.fc2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.11.mlp.fc2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.11.self_attn.k_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.11.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.11.self_attn.out_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.11.self_attn.out_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.11.self_attn.q_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.11.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.11.self_attn.v_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.11.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.12.layer_norm1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.12.layer_norm1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.12.layer_norm2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.12.layer_norm2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.12.mlp.fc1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.12.mlp.fc1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.12.mlp.fc2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.12.mlp.fc2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.12.self_attn.k_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.12.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.12.self_attn.out_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.12.self_attn.out_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.12.self_attn.q_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.12.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.12.self_attn.v_proj.bias": 
"model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.12.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.13.layer_norm1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.13.layer_norm1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.13.layer_norm2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.13.layer_norm2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.13.mlp.fc1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.13.mlp.fc1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.13.mlp.fc2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.13.mlp.fc2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.13.self_attn.k_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.13.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.13.self_attn.out_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.13.self_attn.out_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.13.self_attn.q_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.13.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.13.self_attn.v_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.13.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.14.layer_norm1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.14.layer_norm1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.14.layer_norm2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.14.layer_norm2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.14.mlp.fc1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.14.mlp.fc1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.14.mlp.fc2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.14.mlp.fc2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.14.self_attn.k_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.14.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.14.self_attn.out_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.14.self_attn.out_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.14.self_attn.q_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.14.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.14.self_attn.v_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.14.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + 
"vision_tower.vision_model.encoder.layers.15.layer_norm1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.15.layer_norm1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.15.layer_norm2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.15.layer_norm2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.15.mlp.fc1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.15.mlp.fc1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.15.mlp.fc2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.15.mlp.fc2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.15.self_attn.k_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.15.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.15.self_attn.out_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.15.self_attn.out_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.15.self_attn.q_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.15.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.15.self_attn.v_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.15.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.16.layer_norm1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.16.layer_norm1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.16.layer_norm2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.16.layer_norm2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.16.mlp.fc1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.16.mlp.fc1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.16.mlp.fc2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.16.mlp.fc2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.16.self_attn.k_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.16.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.16.self_attn.out_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.16.self_attn.out_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.16.self_attn.q_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.16.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.16.self_attn.v_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.16.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.17.layer_norm1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.17.layer_norm1.weight": 
"model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.17.layer_norm2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.17.layer_norm2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.17.mlp.fc1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.17.mlp.fc1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.17.mlp.fc2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.17.mlp.fc2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.17.self_attn.k_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.17.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.17.self_attn.out_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.17.self_attn.out_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.17.self_attn.q_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.17.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.17.self_attn.v_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.17.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.18.layer_norm1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.18.layer_norm1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.18.layer_norm2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.18.layer_norm2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.18.mlp.fc1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.18.mlp.fc1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.18.mlp.fc2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.18.mlp.fc2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.18.self_attn.k_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.18.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.18.self_attn.out_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.18.self_attn.out_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.18.self_attn.q_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.18.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.18.self_attn.v_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.18.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.19.layer_norm1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.19.layer_norm1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.19.layer_norm2.bias": "model-00001-of-00002.safetensors", + 
"vision_tower.vision_model.encoder.layers.19.layer_norm2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.19.mlp.fc1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.19.mlp.fc1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.19.mlp.fc2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.19.mlp.fc2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.19.self_attn.k_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.19.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.19.self_attn.out_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.19.self_attn.out_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.19.self_attn.q_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.19.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.19.self_attn.v_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.19.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.2.layer_norm1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.2.layer_norm1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.2.layer_norm2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.2.layer_norm2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.2.mlp.fc1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.2.mlp.fc1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.2.mlp.fc2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.2.mlp.fc2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.2.self_attn.k_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.2.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.2.self_attn.out_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.2.self_attn.out_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.2.self_attn.q_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.2.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.2.self_attn.v_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.2.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.20.layer_norm1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.20.layer_norm1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.20.layer_norm2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.20.layer_norm2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.20.mlp.fc1.bias": "model-00001-of-00002.safetensors", + 
"vision_tower.vision_model.encoder.layers.20.mlp.fc1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.20.mlp.fc2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.20.mlp.fc2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.20.self_attn.k_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.20.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.20.self_attn.out_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.20.self_attn.out_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.20.self_attn.q_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.20.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.20.self_attn.v_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.20.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.21.layer_norm1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.21.layer_norm1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.21.layer_norm2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.21.layer_norm2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.21.mlp.fc1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.21.mlp.fc1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.21.mlp.fc2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.21.mlp.fc2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.21.self_attn.k_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.21.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.21.self_attn.out_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.21.self_attn.out_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.21.self_attn.q_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.21.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.21.self_attn.v_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.21.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.22.layer_norm1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.22.layer_norm1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.22.layer_norm2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.22.layer_norm2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.22.mlp.fc1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.22.mlp.fc1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.22.mlp.fc2.bias": "model-00001-of-00002.safetensors", + 
"vision_tower.vision_model.encoder.layers.22.mlp.fc2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.22.self_attn.k_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.22.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.22.self_attn.out_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.22.self_attn.out_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.22.self_attn.q_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.22.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.22.self_attn.v_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.22.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.23.layer_norm1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.23.layer_norm1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.23.layer_norm2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.23.layer_norm2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.23.mlp.fc1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.23.mlp.fc1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.23.mlp.fc2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.23.mlp.fc2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.23.self_attn.k_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.23.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.23.self_attn.out_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.23.self_attn.out_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.23.self_attn.q_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.23.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.23.self_attn.v_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.23.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.24.layer_norm1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.24.layer_norm1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.24.layer_norm2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.24.layer_norm2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.24.mlp.fc1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.24.mlp.fc1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.24.mlp.fc2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.24.mlp.fc2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.24.self_attn.k_proj.bias": 
"model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.24.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.24.self_attn.out_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.24.self_attn.out_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.24.self_attn.q_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.24.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.24.self_attn.v_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.24.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.25.layer_norm1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.25.layer_norm1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.25.layer_norm2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.25.layer_norm2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.25.mlp.fc1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.25.mlp.fc1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.25.mlp.fc2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.25.mlp.fc2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.25.self_attn.k_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.25.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.25.self_attn.out_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.25.self_attn.out_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.25.self_attn.q_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.25.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.25.self_attn.v_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.25.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.26.layer_norm1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.26.layer_norm1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.26.layer_norm2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.26.layer_norm2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.26.mlp.fc1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.26.mlp.fc1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.26.mlp.fc2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.26.mlp.fc2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.26.self_attn.k_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.26.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + 
"vision_tower.vision_model.encoder.layers.26.self_attn.out_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.26.self_attn.out_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.26.self_attn.q_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.26.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.26.self_attn.v_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.26.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.3.layer_norm1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.3.layer_norm1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.3.layer_norm2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.3.layer_norm2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.3.mlp.fc1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.3.mlp.fc1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.3.mlp.fc2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.3.mlp.fc2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.3.self_attn.k_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.3.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.3.self_attn.out_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.3.self_attn.out_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.3.self_attn.q_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.3.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.3.self_attn.v_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.3.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.4.layer_norm1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.4.layer_norm1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.4.layer_norm2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.4.layer_norm2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.4.mlp.fc1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.4.mlp.fc1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.4.mlp.fc2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.4.mlp.fc2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.4.self_attn.k_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.4.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.4.self_attn.out_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.4.self_attn.out_proj.weight": "model-00001-of-00002.safetensors", + 
"vision_tower.vision_model.encoder.layers.4.self_attn.q_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.4.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.4.self_attn.v_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.4.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.5.layer_norm1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.5.layer_norm1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.5.layer_norm2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.5.layer_norm2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.5.mlp.fc1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.5.mlp.fc1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.5.mlp.fc2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.5.mlp.fc2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.5.self_attn.k_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.5.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.5.self_attn.out_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.5.self_attn.out_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.5.self_attn.q_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.5.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.5.self_attn.v_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.5.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.6.layer_norm1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.6.layer_norm1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.6.layer_norm2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.6.layer_norm2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.6.mlp.fc1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.6.mlp.fc1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.6.mlp.fc2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.6.mlp.fc2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.6.self_attn.k_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.6.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.6.self_attn.out_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.6.self_attn.out_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.6.self_attn.q_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.6.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + 
"vision_tower.vision_model.encoder.layers.6.self_attn.v_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.6.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.7.layer_norm1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.7.layer_norm1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.7.layer_norm2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.7.layer_norm2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.7.mlp.fc1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.7.mlp.fc1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.7.mlp.fc2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.7.mlp.fc2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.7.self_attn.k_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.7.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.7.self_attn.out_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.7.self_attn.out_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.7.self_attn.q_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.7.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.7.self_attn.v_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.7.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.8.layer_norm1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.8.layer_norm1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.8.layer_norm2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.8.layer_norm2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.8.mlp.fc1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.8.mlp.fc1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.8.mlp.fc2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.8.mlp.fc2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.8.self_attn.k_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.8.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.8.self_attn.out_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.8.self_attn.out_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.8.self_attn.q_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.8.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.8.self_attn.v_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.8.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + 
"vision_tower.vision_model.encoder.layers.9.layer_norm1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.9.layer_norm1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.9.layer_norm2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.9.layer_norm2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.9.mlp.fc1.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.9.mlp.fc1.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.9.mlp.fc2.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.9.mlp.fc2.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.9.self_attn.k_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.9.self_attn.k_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.9.self_attn.out_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.9.self_attn.out_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.9.self_attn.q_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.9.self_attn.q_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.9.self_attn.v_proj.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.encoder.layers.9.self_attn.v_proj.weight": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.post_layernorm.bias": "model-00001-of-00002.safetensors", + "vision_tower.vision_model.post_layernorm.weight": "model-00001-of-00002.safetensors" + } +} diff --git a/checkpoint-epoch1/preprocessor_config.json b/checkpoint-epoch1/preprocessor_config.json new file mode 100644 index 0000000000000000000000000000000000000000..b1e00fc184f61b698181821169c6374cd5813e5c --- /dev/null +++ b/checkpoint-epoch1/preprocessor_config.json @@ -0,0 +1,29 @@ +{ + "do_convert_rgb": null, + "do_normalize": true, + "do_pan_and_scan": null, + "do_rescale": true, + "do_resize": true, + "image_mean": [ + 0.5, + 0.5, + 0.5 + ], + "image_processor_type": "Gemma3ImageProcessor", + "image_seq_length": 256, + "image_std": [ + 0.5, + 0.5, + 0.5 + ], + "pan_and_scan_max_num_crops": null, + "pan_and_scan_min_crop_size": null, + "pan_and_scan_min_ratio_to_activate": null, + "processor_class": "Gemma3Processor", + "resample": 2, + "rescale_factor": 0.00392156862745098, + "size": { + "height": 896, + "width": 896 + } +} diff --git a/checkpoint-epoch1/processor_config.json b/checkpoint-epoch1/processor_config.json new file mode 100644 index 0000000000000000000000000000000000000000..453c7966d4b5d0b4a317c585989f64c58c2a6bf0 --- /dev/null +++ b/checkpoint-epoch1/processor_config.json @@ -0,0 +1,4 @@ +{ + "image_seq_length": 256, + "processor_class": "Gemma3Processor" +} diff --git a/checkpoint-epoch1/special_tokens_map.json b/checkpoint-epoch1/special_tokens_map.json new file mode 100644 index 0000000000000000000000000000000000000000..b7a8e475cd5417e123a7a7cdbf33d1e979c5e45f --- /dev/null +++ b/checkpoint-epoch1/special_tokens_map.json @@ -0,0 +1,36 @@ +{ + "additional_special_tokens": [ + "" + ], + "boi_token": "", + "bos_token": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + }, + "eoi_token": "", + "eos_token": { + 
"content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + }, + "image_token": "", + "pad_token": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + }, + "unk_token": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + } +} diff --git a/checkpoint-epoch1/tokenizer.json b/checkpoint-epoch1/tokenizer.json new file mode 100644 index 0000000000000000000000000000000000000000..29401f984828a18bb09a6128d437c6766785eb66 --- /dev/null +++ b/checkpoint-epoch1/tokenizer.json @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4667f2089529e8e7657cfb6d1c19910ae71ff5f28aa7ab2ff2763330affad795 +size 33384568 diff --git a/checkpoint-epoch1/tokenizer.model b/checkpoint-epoch1/tokenizer.model new file mode 100644 index 0000000000000000000000000000000000000000..14f810a829755bae3fafd6f97096dbd2eac556bd --- /dev/null +++ b/checkpoint-epoch1/tokenizer.model @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1299c11d7cf632ef3b4e11937501358ada021bbdf7c47638d13c0ee982f2e79c +size 4689074 diff --git a/checkpoint-epoch1/tokenizer_config.json b/checkpoint-epoch1/tokenizer_config.json new file mode 100644 index 0000000000000000000000000000000000000000..5a4d5f629e553dea3748668210c5d63176af8aa3 --- /dev/null +++ b/checkpoint-epoch1/tokenizer_config.json @@ -0,0 +1,51351 @@ +{ + "add_bos_token": true, + "add_eos_token": false, + "added_tokens_decoder": { + "0": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "1": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "2": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "3": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "4": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "5": { + "content": "[multimodal]", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "6": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "7": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "8": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "9": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "10": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "11": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "12": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "13": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "14": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + 
"special": false + }, + "15": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "16": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "17": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "18": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "19": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "20": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "21": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "22": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "23": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "24": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "25": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "26": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "27": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "28": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "29": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "30": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "31": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "32": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "33": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "34": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "35": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "36": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "37": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "38": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "39": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "40": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + 
}, + "41": { + "content": "<unused35>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "42": { + "content": "<unused36>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "43": { + "content": "<unused37>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "44": { + "content": "<unused38>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "45": { + "content": "<unused39>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "46": { + "content": "<unused40>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "47": { + "content": "<unused41>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "48": { + "content": "<unused42>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "49": { + "content": "<unused43>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "50": { + "content": "<unused44>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "51": { + "content": "<unused45>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "52": { + "content": "<unused46>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "53": { + "content": "<unused47>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "54": { + "content": "<unused48>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "55": { + "content": "<unused49>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "56": { + "content": "<unused50>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "57": { + "content": "<unused51>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "58": { + "content": "<unused52>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "59": { + "content": "<unused53>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "60": { + "content": "<unused54>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "61": { + "content": "<unused55>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "62": { + "content": "<unused56>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "63": { + "content": "<unused57>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "64": { + "content": "<unused58>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "65": { + "content": "<unused59>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "66": { + "content": "<unused60>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "67": { + 
"content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "68": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "69": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "70": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "71": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "72": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "73": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "74": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "75": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "76": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "77": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "78": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "79": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "80": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "81": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "82": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "83": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "84": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "85": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "86": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "87": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "88": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "89": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "90": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "91": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "92": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "93": { + "content": "", + 
"lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "94": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "95": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "96": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "97": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "98": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "99": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "100": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "101": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "102": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "103": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "104": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "105": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "106": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "107": { + "content": "\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "108": { + "content": "\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "109": { + "content": "\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "110": { + "content": "\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "111": { + "content": "\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "112": { + "content": "\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "113": { + "content": "\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "114": { + "content": "\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "115": { + "content": "\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "116": { + "content": "\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "117": { + "content": "\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "118": { + "content": 
"\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "119": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "120": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "121": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "122": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "123": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "124": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "125": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "126": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "127": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "128": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "129": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "130": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "131": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "132": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "133": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "134": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "135": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "136": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "137": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + 
"special": false + }, + "138": { + "content": "▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "139": { + "content": "▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "140": { + "content": "▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "141": { + "content": "▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "142": { + "content": "▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "143": { + "content": "▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "144": { + "content": "▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "145": { + "content": "▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "146": { + "content": "▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "147": { + "content": "▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "148": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "149": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "150": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "151": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "152": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "153": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "154": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "155": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "156": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "157": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "158": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "159": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "160": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "161": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + 
"normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "162": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "163": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "164": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "165": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "166": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "167": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "168": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "169": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "171": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "172": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "173": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "174": { + "content": "
", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "170": { + "content": "
", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "175": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "176": { + "content": "
", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "177": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "178": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "179": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "180": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "181": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "182": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "183": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "184": { + "content": "

", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "185": { + "content": "

", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "186": { + "content": "

", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "187": { + "content": "

", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "188": { + "content": "

", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "189": { + "content": "
", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "190": { + "content": "
", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "191": { + "content": "
", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "192": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "193": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "194": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "195": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "196": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "197": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "198": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "199": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "200": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "201": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "202": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "203": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "204": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "205": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "206": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "207": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "208": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "209": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "210": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "211": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "212": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "213": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "214": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "215": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "216": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "217": { + 
"content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "218": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "219": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "220": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "221": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "222": { + "content": "