diff --git a/.gitattributes b/.gitattributes
index 43557b3ad941a0fe1987fa243b59cbd7e958d5d0..b280943e3c0eb8a95bf507c6d0419c7a0485b636 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -90,3 +90,9 @@ phi3/phi3_4k__fft__instruct__masked__75/gguf/phi3_4k__fft__instruct__masked__75_
 qwen25/qwen25_7b__fft__instruct__masked/merged/checkpoint-1500/tokenizer.json filter=lfs diff=lfs merge=lfs -text
 qwen25/qwen25_7b__fft__instruct__masked/merged/checkpoint-1626/tokenizer.json filter=lfs diff=lfs merge=lfs -text
 qwen25/qwen25_7b__fft__instruct__masked/merged/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+gemma2/gemma2_9b__fft__instruct__masked/awq/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+gemma2/gemma2_9b__fft__instruct__masked/gguf/gemma2_9b__fft__instruct__masked_q8_0.gguf filter=lfs diff=lfs merge=lfs -text
+gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-200/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-300/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-400/tokenizer.json filter=lfs diff=lfs merge=lfs -text
+gemma2/gemma2_9b__fft__instruct__masked/merged/tokenizer.json filter=lfs diff=lfs merge=lfs -text
diff --git a/gemma2/gemma2_9b__fft__instruct__masked/TRAINING_TAGS.json b/gemma2/gemma2_9b__fft__instruct__masked/TRAINING_TAGS.json
new file mode 100644
index 0000000000000000000000000000000000000000..90fea09c6e92dce17a5b648519a04a2973724ea8
--- /dev/null
+++ b/gemma2/gemma2_9b__fft__instruct__masked/TRAINING_TAGS.json
@@ -0,0 +1,23 @@
+{
+  "variant_name": "gemma2_9b__fft__instruct__masked",
+  "model_id": "google/gemma-2-9b-it",
+  "method": "fft",
+  "is_instruct": true,
+  "loss_mode": "masked",
+  "max_length": 1024,
+  "epochs": 3,
+  "lr": 2e-05,
+  "bs": 1,
+  "grad_accum": 16,
+  "dtype": "bfloat16",
+  "num_gpus": 2,
+  "split_sizes": {
+    "train": 4119,
+    "validation": 217
+  },
+  "outputs": {
+    "merged": "./outputs_gemma2/gemma2_9b__fft__instruct__masked/merged",
+    "gguf": "./outputs_gemma2/gemma2_9b__fft__instruct__masked/gguf",
+    "awq": "./outputs_gemma2/gemma2_9b__fft__instruct__masked/awq"
+  }
+}
\ No newline at end of file
diff --git a/gemma2/gemma2_9b__fft__instruct__masked/awq/chat_template.jinja b/gemma2/gemma2_9b__fft__instruct__masked/awq/chat_template.jinja
new file mode 100644
index 0000000000000000000000000000000000000000..923ec253c8dbefbb41cf084db7251df41d000f6d
--- /dev/null
+++ b/gemma2/gemma2_9b__fft__instruct__masked/awq/chat_template.jinja
@@ -0,0 +1,4 @@
+{{ bos_token }}{% if messages[0]['role'] == 'system' %}{{ raise_exception('System role not supported') }}{% endif %}{% for message in messages %}{% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %}{{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }}{% endif %}{% if (message['role'] == 'assistant') %}{% set role = 'model' %}{% else %}{% set role = message['role'] %}{% endif %}{{ '<start_of_turn>' + role + '
+' + message['content'] | trim + '<end_of_turn>
+' }}{% endfor %}{% if add_generation_prompt %}{{'<start_of_turn>model
+'}}{% endif %}
\ No newline at end of file
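Note on the two files above: TRAINING_TAGS.json records a full fine-tune ("fft") of google/gemma-2-9b-it with the loss masked to completion tokens, giving an effective batch size of bs x grad_accum x num_gpus = 1 x 16 x 2 = 32 sequences per optimizer step, and chat_template.jinja is the standard Gemma turn format, which maps the assistant role to Gemma's "model" turn and rejects system messages. A minimal sketch of exercising that template through the Hugging Face transformers tokenizer follows; the local path is illustrative and assumes the awq/ directory has been checked out together with its LFS objects.

from transformers import AutoTokenizer

# Illustrative path: point this at the awq/ directory added in this diff.
tok = AutoTokenizer.from_pretrained("gemma2/gemma2_9b__fft__instruct__masked/awq")

messages = [
    {"role": "user", "content": "Summarize the training setup in one sentence."},
    {"role": "assistant", "content": "A full fine-tune of gemma-2-9b-it with a masked loss."},
    {"role": "user", "content": "And the effective batch size?"},
]

# add_generation_prompt=True appends the trailing '<start_of_turn>model' so the
# model is prompted to produce its own turn; roles must alternate user/assistant.
prompt = tok.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
print(prompt)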
"attention_dropout": 0.0, + "attn_logit_softcapping": 50.0, + "bos_token_id": 2, + "cache_implementation": "hybrid", + "eos_token_id": 1, + "final_logit_softcapping": 30.0, + "head_dim": 256, + "hidden_act": "gelu_pytorch_tanh", + "hidden_activation": "gelu_pytorch_tanh", + "hidden_size": 3584, + "initializer_range": 0.02, + "intermediate_size": 14336, + "max_position_embeddings": 8192, + "model_type": "gemma2", + "num_attention_heads": 16, + "num_hidden_layers": 42, + "num_key_value_heads": 8, + "pad_token_id": 0, + "quantization_config": { + "bits": 4, + "group_size": 128, + "modules_to_not_convert": null, + "quant_method": "awq", + "version": "gemm", + "zero_point": true + }, + "query_pre_attn_scalar": 256, + "rms_norm_eps": 1e-06, + "rope_theta": 10000.0, + "sliding_window": 4096, + "sliding_window_size": 4096, + "torch_dtype": "float16", + "transformers_version": "4.52.4", + "use_cache": false, + "vocab_size": 256000 +} diff --git a/gemma2/gemma2_9b__fft__instruct__masked/awq/generation_config.json b/gemma2/gemma2_9b__fft__instruct__masked/awq/generation_config.json new file mode 100644 index 0000000000000000000000000000000000000000..e0923189337fd71dc3294f08c5c9b281052209cf --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/awq/generation_config.json @@ -0,0 +1,9 @@ +{ + "_from_model_config": true, + "bos_token_id": 2, + "cache_implementation": "hybrid", + "do_sample": true, + "eos_token_id": 1, + "pad_token_id": 0, + "transformers_version": "4.52.4" +} diff --git a/gemma2/gemma2_9b__fft__instruct__masked/awq/model-00001-of-00002.safetensors b/gemma2/gemma2_9b__fft__instruct__masked/awq/model-00001-of-00002.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..d121ff9d072ecd42c6b58c9b60ad1f997dc27c00 --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/awq/model-00001-of-00002.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8f4b251c2eec38c5ce4cd1377e990b06c244241832d7d36420fc866f40f10346 +size 4325699968 diff --git a/gemma2/gemma2_9b__fft__instruct__masked/awq/model-00002-of-00002.safetensors b/gemma2/gemma2_9b__fft__instruct__masked/awq/model-00002-of-00002.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..4033b63b94093d311affee37a78bbdc48781857c --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/awq/model-00002-of-00002.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:f011ad801a065b0910447adf75b99f70f37a0fc69a74f73d7697a24c70859062 +size 1835008128 diff --git a/gemma2/gemma2_9b__fft__instruct__masked/awq/model.safetensors.index.json b/gemma2/gemma2_9b__fft__instruct__masked/awq/model.safetensors.index.json new file mode 100644 index 0000000000000000000000000000000000000000..02d05d52cfef7d1943d18e341cd0b9e6f5a1083b --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/awq/model.safetensors.index.json @@ -0,0 +1,1060 @@ +{ + "metadata": { + "total_size": 6160587776, + "model.embed_tokens.weight": "lm_head.weight" + }, + "weight_map": { + "model.layers.0.self_attn.q_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.0.self_attn.q_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.0.self_attn.q_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.0.self_attn.k_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.0.self_attn.k_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.0.self_attn.k_proj.scales": "model-00001-of-00002.safetensors", + 
"model.layers.0.self_attn.v_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.0.self_attn.v_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.0.self_attn.v_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.0.self_attn.o_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.0.self_attn.o_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.0.self_attn.o_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.0.mlp.gate_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.0.mlp.gate_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.0.mlp.gate_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.0.mlp.up_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.0.mlp.up_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.0.mlp.up_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.0.mlp.down_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.0.mlp.down_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.0.mlp.down_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.0.input_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.0.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.0.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.1.self_attn.q_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.1.self_attn.q_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.1.self_attn.q_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.1.self_attn.k_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.1.self_attn.k_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.1.self_attn.k_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.1.self_attn.v_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.1.self_attn.v_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.1.self_attn.v_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.1.self_attn.o_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.1.self_attn.o_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.1.self_attn.o_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.1.mlp.gate_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.1.mlp.gate_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.1.mlp.gate_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.1.mlp.up_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.1.mlp.up_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.1.mlp.up_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.1.mlp.down_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.1.mlp.down_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.1.mlp.down_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.1.input_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.1.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.1.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + 
"model.layers.2.self_attn.q_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.2.self_attn.q_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.2.self_attn.q_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.2.self_attn.k_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.2.self_attn.k_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.2.self_attn.k_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.2.self_attn.v_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.2.self_attn.v_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.2.self_attn.v_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.2.self_attn.o_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.2.self_attn.o_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.2.self_attn.o_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.2.mlp.gate_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.2.mlp.gate_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.2.mlp.gate_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.2.mlp.up_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.2.mlp.up_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.2.mlp.up_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.2.mlp.down_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.2.mlp.down_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.2.mlp.down_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.2.input_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.2.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.2.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.3.self_attn.q_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.3.self_attn.q_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.3.self_attn.q_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.3.self_attn.k_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.3.self_attn.k_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.3.self_attn.k_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.3.self_attn.v_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.3.self_attn.v_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.3.self_attn.v_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.3.self_attn.o_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.3.self_attn.o_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.3.self_attn.o_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.3.mlp.gate_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.3.mlp.gate_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.3.mlp.gate_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.3.mlp.up_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.3.mlp.up_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.3.mlp.up_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.3.mlp.down_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.3.mlp.down_proj.qzeros": 
"model-00001-of-00002.safetensors", + "model.layers.3.mlp.down_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.3.input_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.3.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.3.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.4.self_attn.q_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.4.self_attn.q_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.4.self_attn.q_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.4.self_attn.k_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.4.self_attn.k_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.4.self_attn.k_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.4.self_attn.v_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.4.self_attn.v_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.4.self_attn.v_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.4.self_attn.o_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.4.self_attn.o_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.4.self_attn.o_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.4.mlp.gate_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.4.mlp.gate_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.4.mlp.gate_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.4.mlp.up_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.4.mlp.up_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.4.mlp.up_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.4.mlp.down_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.4.mlp.down_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.4.mlp.down_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.4.input_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.4.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.4.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.5.self_attn.q_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.5.self_attn.q_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.5.self_attn.q_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.5.self_attn.k_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.5.self_attn.k_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.5.self_attn.k_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.5.self_attn.v_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.5.self_attn.v_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.5.self_attn.v_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.5.self_attn.o_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.5.self_attn.o_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.5.self_attn.o_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.5.mlp.gate_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.5.mlp.gate_proj.qzeros": 
"model-00001-of-00002.safetensors", + "model.layers.5.mlp.gate_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.5.mlp.up_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.5.mlp.up_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.5.mlp.up_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.5.mlp.down_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.5.mlp.down_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.5.mlp.down_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.5.input_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.5.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.5.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.6.self_attn.q_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.6.self_attn.q_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.6.self_attn.q_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.6.self_attn.k_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.6.self_attn.k_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.6.self_attn.k_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.6.self_attn.v_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.6.self_attn.v_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.6.self_attn.v_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.6.self_attn.o_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.6.self_attn.o_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.6.self_attn.o_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.6.mlp.gate_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.6.mlp.gate_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.6.mlp.gate_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.6.mlp.up_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.6.mlp.up_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.6.mlp.up_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.6.mlp.down_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.6.mlp.down_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.6.mlp.down_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.6.input_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.6.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.6.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.7.self_attn.q_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.7.self_attn.q_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.7.self_attn.q_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.7.self_attn.k_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.7.self_attn.k_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.7.self_attn.k_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.7.self_attn.v_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.7.self_attn.v_proj.qzeros": 
"model-00001-of-00002.safetensors", + "model.layers.7.self_attn.v_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.7.self_attn.o_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.7.self_attn.o_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.7.self_attn.o_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.7.mlp.gate_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.7.mlp.gate_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.7.mlp.gate_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.7.mlp.up_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.7.mlp.up_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.7.mlp.up_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.7.mlp.down_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.7.mlp.down_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.7.mlp.down_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.7.input_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.7.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.7.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.8.self_attn.q_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.8.self_attn.q_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.8.self_attn.q_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.8.self_attn.k_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.8.self_attn.k_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.8.self_attn.k_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.8.self_attn.v_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.8.self_attn.v_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.8.self_attn.v_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.8.self_attn.o_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.8.self_attn.o_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.8.self_attn.o_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.8.mlp.gate_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.8.mlp.gate_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.8.mlp.gate_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.8.mlp.up_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.8.mlp.up_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.8.mlp.up_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.8.mlp.down_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.8.mlp.down_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.8.mlp.down_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.8.input_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.8.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.8.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.8.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.9.self_attn.q_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.9.self_attn.q_proj.qzeros": "model-00001-of-00002.safetensors", 
+ "model.layers.9.self_attn.q_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.9.self_attn.k_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.9.self_attn.k_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.9.self_attn.k_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.9.self_attn.v_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.9.self_attn.v_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.9.self_attn.v_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.9.self_attn.o_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.9.self_attn.o_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.9.self_attn.o_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.9.mlp.gate_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.9.mlp.gate_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.9.mlp.gate_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.9.mlp.up_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.9.mlp.up_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.9.mlp.up_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.9.mlp.down_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.9.mlp.down_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.9.mlp.down_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.9.input_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.9.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.9.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.9.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.10.self_attn.q_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.10.self_attn.q_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.10.self_attn.q_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.10.self_attn.k_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.10.self_attn.k_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.10.self_attn.k_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.10.self_attn.v_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.10.self_attn.v_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.10.self_attn.v_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.10.self_attn.o_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.10.self_attn.o_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.10.self_attn.o_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.10.mlp.gate_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.10.mlp.gate_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.10.mlp.gate_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.10.mlp.up_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.10.mlp.up_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.10.mlp.up_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.10.mlp.down_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.10.mlp.down_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.10.mlp.down_proj.scales": "model-00001-of-00002.safetensors", + 
"model.layers.10.input_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.10.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.10.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.10.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.11.self_attn.q_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.11.self_attn.q_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.11.self_attn.q_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.11.self_attn.k_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.11.self_attn.k_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.11.self_attn.k_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.11.self_attn.v_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.11.self_attn.v_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.11.self_attn.v_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.11.self_attn.o_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.11.self_attn.o_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.11.self_attn.o_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.11.mlp.gate_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.11.mlp.gate_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.11.mlp.gate_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.11.mlp.up_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.11.mlp.up_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.11.mlp.up_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.11.mlp.down_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.11.mlp.down_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.11.mlp.down_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.11.input_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.11.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.11.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.11.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.12.self_attn.q_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.12.self_attn.q_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.12.self_attn.q_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.12.self_attn.k_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.12.self_attn.k_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.12.self_attn.k_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.12.self_attn.v_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.12.self_attn.v_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.12.self_attn.v_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.12.self_attn.o_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.12.self_attn.o_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.12.self_attn.o_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.12.mlp.gate_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.12.mlp.gate_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.12.mlp.gate_proj.scales": 
"model-00001-of-00002.safetensors", + "model.layers.12.mlp.up_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.12.mlp.up_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.12.mlp.up_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.12.mlp.down_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.12.mlp.down_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.12.mlp.down_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.12.input_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.12.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.12.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.12.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.13.self_attn.q_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.13.self_attn.q_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.13.self_attn.q_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.13.self_attn.k_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.13.self_attn.k_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.13.self_attn.k_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.13.self_attn.v_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.13.self_attn.v_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.13.self_attn.v_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.13.self_attn.o_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.13.self_attn.o_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.13.self_attn.o_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.13.mlp.gate_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.13.mlp.gate_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.13.mlp.gate_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.13.mlp.up_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.13.mlp.up_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.13.mlp.up_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.13.mlp.down_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.13.mlp.down_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.13.mlp.down_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.13.input_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.13.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.13.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.13.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.14.self_attn.q_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.14.self_attn.q_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.14.self_attn.q_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.14.self_attn.k_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.14.self_attn.k_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.14.self_attn.k_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.14.self_attn.v_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.14.self_attn.v_proj.qzeros": "model-00001-of-00002.safetensors", + 
"model.layers.14.self_attn.v_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.14.self_attn.o_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.14.self_attn.o_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.14.self_attn.o_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.14.mlp.gate_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.14.mlp.gate_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.14.mlp.gate_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.14.mlp.up_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.14.mlp.up_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.14.mlp.up_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.14.mlp.down_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.14.mlp.down_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.14.mlp.down_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.14.input_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.14.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.14.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.14.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.15.self_attn.q_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.15.self_attn.q_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.15.self_attn.q_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.15.self_attn.k_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.15.self_attn.k_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.15.self_attn.k_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.15.self_attn.v_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.15.self_attn.v_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.15.self_attn.v_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.15.self_attn.o_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.15.self_attn.o_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.15.self_attn.o_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.15.mlp.gate_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.15.mlp.gate_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.15.mlp.gate_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.15.mlp.up_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.15.mlp.up_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.15.mlp.up_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.15.mlp.down_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.15.mlp.down_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.15.mlp.down_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.15.input_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.15.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.15.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.15.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.16.self_attn.q_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.16.self_attn.q_proj.qzeros": 
"model-00001-of-00002.safetensors", + "model.layers.16.self_attn.q_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.16.self_attn.k_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.16.self_attn.k_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.16.self_attn.k_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.16.self_attn.v_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.16.self_attn.v_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.16.self_attn.v_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.16.self_attn.o_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.16.self_attn.o_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.16.self_attn.o_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.16.mlp.gate_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.16.mlp.gate_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.16.mlp.gate_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.16.mlp.up_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.16.mlp.up_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.16.mlp.up_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.16.mlp.down_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.16.mlp.down_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.16.mlp.down_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.16.input_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.16.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.16.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.16.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.17.self_attn.q_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.17.self_attn.q_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.17.self_attn.q_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.17.self_attn.k_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.17.self_attn.k_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.17.self_attn.k_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.17.self_attn.v_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.17.self_attn.v_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.17.self_attn.v_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.17.self_attn.o_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.17.self_attn.o_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.17.self_attn.o_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.17.mlp.gate_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.17.mlp.gate_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.17.mlp.gate_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.17.mlp.up_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.17.mlp.up_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.17.mlp.up_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.17.mlp.down_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.17.mlp.down_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.17.mlp.down_proj.scales": 
"model-00001-of-00002.safetensors", + "model.layers.17.input_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.17.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.17.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.17.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.18.self_attn.q_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.18.self_attn.q_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.18.self_attn.q_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.18.self_attn.k_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.18.self_attn.k_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.18.self_attn.k_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.18.self_attn.v_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.18.self_attn.v_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.18.self_attn.v_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.18.self_attn.o_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.18.self_attn.o_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.18.self_attn.o_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.18.mlp.gate_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.18.mlp.gate_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.18.mlp.gate_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.18.mlp.up_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.18.mlp.up_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.18.mlp.up_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.18.mlp.down_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.18.mlp.down_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.18.mlp.down_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.18.input_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.18.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.18.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.18.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.19.self_attn.q_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.19.self_attn.q_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.19.self_attn.q_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.19.self_attn.k_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.19.self_attn.k_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.19.self_attn.k_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.19.self_attn.v_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.19.self_attn.v_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.19.self_attn.v_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.19.self_attn.o_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.19.self_attn.o_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.19.self_attn.o_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.19.mlp.gate_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.19.mlp.gate_proj.qzeros": "model-00001-of-00002.safetensors", + 
"model.layers.19.mlp.gate_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.19.mlp.up_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.19.mlp.up_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.19.mlp.up_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.19.mlp.down_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.19.mlp.down_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.19.mlp.down_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.19.input_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.19.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.19.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.19.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.20.self_attn.q_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.20.self_attn.q_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.20.self_attn.q_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.20.self_attn.k_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.20.self_attn.k_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.20.self_attn.k_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.20.self_attn.v_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.20.self_attn.v_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.20.self_attn.v_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.20.self_attn.o_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.20.self_attn.o_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.20.self_attn.o_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.20.mlp.gate_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.20.mlp.gate_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.20.mlp.gate_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.20.mlp.up_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.20.mlp.up_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.20.mlp.up_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.20.mlp.down_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.20.mlp.down_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.20.mlp.down_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.20.input_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.20.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.20.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.20.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.21.self_attn.q_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.21.self_attn.q_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.21.self_attn.q_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.21.self_attn.k_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.21.self_attn.k_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.21.self_attn.k_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.21.self_attn.v_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.21.self_attn.v_proj.qzeros": 
"model-00001-of-00002.safetensors", + "model.layers.21.self_attn.v_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.21.self_attn.o_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.21.self_attn.o_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.21.self_attn.o_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.21.mlp.gate_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.21.mlp.gate_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.21.mlp.gate_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.21.mlp.up_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.21.mlp.up_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.21.mlp.up_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.21.mlp.down_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.21.mlp.down_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.21.mlp.down_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.21.input_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.21.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.21.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.21.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.22.self_attn.q_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.22.self_attn.q_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.22.self_attn.q_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.22.self_attn.k_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.22.self_attn.k_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.22.self_attn.k_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.22.self_attn.v_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.22.self_attn.v_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.22.self_attn.v_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.22.self_attn.o_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.22.self_attn.o_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.22.self_attn.o_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.22.mlp.gate_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.22.mlp.gate_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.22.mlp.gate_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.22.mlp.up_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.22.mlp.up_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.22.mlp.up_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.22.mlp.down_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.22.mlp.down_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.22.mlp.down_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.22.input_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.22.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.22.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.22.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.23.self_attn.q_proj.qweight": "model-00001-of-00002.safetensors", + 
"model.layers.23.self_attn.q_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.23.self_attn.q_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.23.self_attn.k_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.23.self_attn.k_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.23.self_attn.k_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.23.self_attn.v_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.23.self_attn.v_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.23.self_attn.v_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.23.self_attn.o_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.23.self_attn.o_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.23.self_attn.o_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.23.mlp.gate_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.23.mlp.gate_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.23.mlp.gate_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.23.mlp.up_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.23.mlp.up_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.23.mlp.up_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.23.mlp.down_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.23.mlp.down_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.23.mlp.down_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.23.input_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.23.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.23.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.23.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.24.self_attn.q_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.24.self_attn.q_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.24.self_attn.q_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.24.self_attn.k_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.24.self_attn.k_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.24.self_attn.k_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.24.self_attn.v_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.24.self_attn.v_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.24.self_attn.v_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.24.self_attn.o_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.24.self_attn.o_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.24.self_attn.o_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.24.mlp.gate_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.24.mlp.gate_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.24.mlp.gate_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.24.mlp.up_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.24.mlp.up_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.24.mlp.up_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.24.mlp.down_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.24.mlp.down_proj.qzeros": "model-00001-of-00002.safetensors", + 
"model.layers.24.mlp.down_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.24.input_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.24.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.24.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.24.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.25.self_attn.q_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.25.self_attn.q_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.25.self_attn.q_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.25.self_attn.k_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.25.self_attn.k_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.25.self_attn.k_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.25.self_attn.v_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.25.self_attn.v_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.25.self_attn.v_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.25.self_attn.o_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.25.self_attn.o_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.25.self_attn.o_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.25.mlp.gate_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.25.mlp.gate_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.25.mlp.gate_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.25.mlp.up_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.25.mlp.up_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.25.mlp.up_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.25.mlp.down_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.25.mlp.down_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.25.mlp.down_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.25.input_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.25.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.25.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.25.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.26.self_attn.q_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.26.self_attn.q_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.26.self_attn.q_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.26.self_attn.k_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.26.self_attn.k_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.26.self_attn.k_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.26.self_attn.v_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.26.self_attn.v_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.26.self_attn.v_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.26.self_attn.o_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.26.self_attn.o_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.26.self_attn.o_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.26.mlp.gate_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.26.mlp.gate_proj.qzeros": 
"model-00001-of-00002.safetensors", + "model.layers.26.mlp.gate_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.26.mlp.up_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.26.mlp.up_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.26.mlp.up_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.26.mlp.down_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.26.mlp.down_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.26.mlp.down_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.26.input_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.26.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.26.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.26.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.27.self_attn.q_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.27.self_attn.q_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.27.self_attn.q_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.27.self_attn.k_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.27.self_attn.k_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.27.self_attn.k_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.27.self_attn.v_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.27.self_attn.v_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.27.self_attn.v_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.27.self_attn.o_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.27.self_attn.o_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.27.self_attn.o_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.27.mlp.gate_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.27.mlp.gate_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.27.mlp.gate_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.27.mlp.up_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.27.mlp.up_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.27.mlp.up_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.27.mlp.down_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.27.mlp.down_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.27.mlp.down_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.27.input_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.27.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.27.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.27.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.28.self_attn.q_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.28.self_attn.q_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.28.self_attn.q_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.28.self_attn.k_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.28.self_attn.k_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.28.self_attn.k_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.28.self_attn.v_proj.qweight": "model-00001-of-00002.safetensors", + 
"model.layers.28.self_attn.v_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.28.self_attn.v_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.28.self_attn.o_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.28.self_attn.o_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.28.self_attn.o_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.28.mlp.gate_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.28.mlp.gate_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.28.mlp.gate_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.28.mlp.up_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.28.mlp.up_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.28.mlp.up_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.28.mlp.down_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.28.mlp.down_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.28.mlp.down_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.28.input_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.28.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.28.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.28.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.29.self_attn.q_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.29.self_attn.q_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.29.self_attn.q_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.29.self_attn.k_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.29.self_attn.k_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.29.self_attn.k_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.29.self_attn.v_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.29.self_attn.v_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.29.self_attn.v_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.29.self_attn.o_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.29.self_attn.o_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.29.self_attn.o_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.29.mlp.gate_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.29.mlp.gate_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.29.mlp.gate_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.29.mlp.up_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.29.mlp.up_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.29.mlp.up_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.29.mlp.down_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.29.mlp.down_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.29.mlp.down_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.29.input_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.29.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.29.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.29.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.30.self_attn.q_proj.qweight": 
"model-00001-of-00002.safetensors", + "model.layers.30.self_attn.q_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.30.self_attn.q_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.30.self_attn.k_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.30.self_attn.k_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.30.self_attn.k_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.30.self_attn.v_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.30.self_attn.v_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.30.self_attn.v_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.30.self_attn.o_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.30.self_attn.o_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.30.self_attn.o_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.30.mlp.gate_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.30.mlp.gate_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.30.mlp.gate_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.30.mlp.up_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.30.mlp.up_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.30.mlp.up_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.30.mlp.down_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.30.mlp.down_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.30.mlp.down_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.30.input_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.30.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.30.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.30.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.31.self_attn.q_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.31.self_attn.q_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.31.self_attn.q_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.31.self_attn.k_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.31.self_attn.k_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.31.self_attn.k_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.31.self_attn.v_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.31.self_attn.v_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.31.self_attn.v_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.31.self_attn.o_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.31.self_attn.o_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.31.self_attn.o_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.31.mlp.gate_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.31.mlp.gate_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.31.mlp.gate_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.31.mlp.up_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.31.mlp.up_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.31.mlp.up_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.31.mlp.down_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.31.mlp.down_proj.qzeros": 
"model-00001-of-00002.safetensors", + "model.layers.31.mlp.down_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.31.input_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.31.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.31.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.31.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.32.self_attn.q_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.32.self_attn.q_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.32.self_attn.q_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.32.self_attn.k_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.32.self_attn.k_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.32.self_attn.k_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.32.self_attn.v_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.32.self_attn.v_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.32.self_attn.v_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.32.self_attn.o_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.32.self_attn.o_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.32.self_attn.o_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.32.mlp.gate_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.32.mlp.gate_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.32.mlp.gate_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.32.mlp.up_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.32.mlp.up_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.32.mlp.up_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.32.mlp.down_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.32.mlp.down_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.32.mlp.down_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.32.input_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.32.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.32.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.32.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.33.self_attn.q_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.33.self_attn.q_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.33.self_attn.q_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.33.self_attn.k_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.33.self_attn.k_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.33.self_attn.k_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.33.self_attn.v_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.33.self_attn.v_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.33.self_attn.v_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.33.self_attn.o_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.33.self_attn.o_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.33.self_attn.o_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.33.mlp.gate_proj.qweight": "model-00001-of-00002.safetensors", + 
"model.layers.33.mlp.gate_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.33.mlp.gate_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.33.mlp.up_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.33.mlp.up_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.33.mlp.up_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.33.mlp.down_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.33.mlp.down_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.33.mlp.down_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.33.input_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.33.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.33.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.33.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.34.self_attn.q_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.34.self_attn.q_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.34.self_attn.q_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.34.self_attn.k_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.34.self_attn.k_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.34.self_attn.k_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.34.self_attn.v_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.34.self_attn.v_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.34.self_attn.v_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.34.self_attn.o_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.34.self_attn.o_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.34.self_attn.o_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.34.mlp.gate_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.34.mlp.gate_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.34.mlp.gate_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.34.mlp.up_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.34.mlp.up_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.34.mlp.up_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.34.mlp.down_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.34.mlp.down_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.34.mlp.down_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.34.input_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.34.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.34.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.34.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.35.self_attn.q_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.35.self_attn.q_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.35.self_attn.q_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.35.self_attn.k_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.35.self_attn.k_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.35.self_attn.k_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.35.self_attn.v_proj.qweight": 
"model-00001-of-00002.safetensors", + "model.layers.35.self_attn.v_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.35.self_attn.v_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.35.self_attn.o_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.35.self_attn.o_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.35.self_attn.o_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.35.mlp.gate_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.35.mlp.gate_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.35.mlp.gate_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.35.mlp.up_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.35.mlp.up_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.35.mlp.up_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.35.mlp.down_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.35.mlp.down_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.35.mlp.down_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.35.input_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.35.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.35.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.35.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.36.self_attn.q_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.36.self_attn.q_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.36.self_attn.q_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.36.self_attn.k_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.36.self_attn.k_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.36.self_attn.k_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.36.self_attn.v_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.36.self_attn.v_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.36.self_attn.v_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.36.self_attn.o_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.36.self_attn.o_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.36.self_attn.o_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.36.mlp.gate_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.36.mlp.gate_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.36.mlp.gate_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.36.mlp.up_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.36.mlp.up_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.36.mlp.up_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.36.mlp.down_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.36.mlp.down_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.36.mlp.down_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.36.input_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.36.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.36.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.36.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + 
"model.layers.37.self_attn.q_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.37.self_attn.q_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.37.self_attn.q_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.37.self_attn.k_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.37.self_attn.k_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.37.self_attn.k_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.37.self_attn.v_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.37.self_attn.v_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.37.self_attn.v_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.37.self_attn.o_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.37.self_attn.o_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.37.self_attn.o_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.37.mlp.gate_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.37.mlp.gate_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.37.mlp.gate_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.37.mlp.up_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.37.mlp.up_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.37.mlp.up_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.37.mlp.down_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.37.mlp.down_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.37.mlp.down_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.37.input_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.37.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.37.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.37.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.38.self_attn.q_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.38.self_attn.q_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.38.self_attn.q_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.38.self_attn.k_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.38.self_attn.k_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.38.self_attn.k_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.38.self_attn.v_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.38.self_attn.v_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.38.self_attn.v_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.38.self_attn.o_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.38.self_attn.o_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.38.self_attn.o_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.38.mlp.gate_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.38.mlp.gate_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.38.mlp.gate_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.38.mlp.up_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.38.mlp.up_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.38.mlp.up_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.38.mlp.down_proj.qweight": "model-00001-of-00002.safetensors", + 
"model.layers.38.mlp.down_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.38.mlp.down_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.38.input_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.38.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.38.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.38.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.39.self_attn.q_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.39.self_attn.q_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.39.self_attn.q_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.39.self_attn.k_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.39.self_attn.k_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.39.self_attn.k_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.39.self_attn.v_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.39.self_attn.v_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.39.self_attn.v_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.39.self_attn.o_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.39.self_attn.o_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.39.self_attn.o_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.39.mlp.gate_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.39.mlp.gate_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.39.mlp.gate_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.39.mlp.up_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.39.mlp.up_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.39.mlp.up_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.39.mlp.down_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.39.mlp.down_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.39.mlp.down_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.39.input_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.39.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.39.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.39.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.40.self_attn.q_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.40.self_attn.q_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.40.self_attn.q_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.40.self_attn.k_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.40.self_attn.k_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.40.self_attn.k_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.40.self_attn.v_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.40.self_attn.v_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.40.self_attn.v_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.40.self_attn.o_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.40.self_attn.o_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.40.self_attn.o_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.40.mlp.gate_proj.qweight": 
"model-00001-of-00002.safetensors", + "model.layers.40.mlp.gate_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.40.mlp.gate_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.40.mlp.up_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.40.mlp.up_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.40.mlp.up_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.40.mlp.down_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.40.mlp.down_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.40.mlp.down_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.40.input_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.40.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.40.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.40.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.41.self_attn.q_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.41.self_attn.q_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.41.self_attn.q_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.41.self_attn.k_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.41.self_attn.k_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.41.self_attn.k_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.41.self_attn.v_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.41.self_attn.v_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.41.self_attn.v_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.41.self_attn.o_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.41.self_attn.o_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.41.self_attn.o_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.41.mlp.gate_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.41.mlp.gate_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.41.mlp.gate_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.41.mlp.up_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.41.mlp.up_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.41.mlp.up_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.41.mlp.down_proj.qweight": "model-00001-of-00002.safetensors", + "model.layers.41.mlp.down_proj.qzeros": "model-00001-of-00002.safetensors", + "model.layers.41.mlp.down_proj.scales": "model-00001-of-00002.safetensors", + "model.layers.41.input_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.41.post_attention_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.41.pre_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.layers.41.post_feedforward_layernorm.weight": "model-00001-of-00002.safetensors", + "model.norm.weight": "model-00001-of-00002.safetensors", + "lm_head.weight": "model-00002-of-00002.safetensors" + } +} \ No newline at end of file diff --git a/gemma2/gemma2_9b__fft__instruct__masked/awq/special_tokens_map.json b/gemma2/gemma2_9b__fft__instruct__masked/awq/special_tokens_map.json new file mode 100644 index 0000000000000000000000000000000000000000..8d6368f7e735fbe4781bf6e956b7c6ad0586df80 --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/awq/special_tokens_map.json @@ -0,0 +1,34 @@ +{ + 
"additional_special_tokens": [ + "", + "" + ], + "bos_token": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + }, + "eos_token": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + }, + "pad_token": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + }, + "unk_token": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + } +} diff --git a/gemma2/gemma2_9b__fft__instruct__masked/awq/tokenizer.json b/gemma2/gemma2_9b__fft__instruct__masked/awq/tokenizer.json new file mode 100644 index 0000000000000000000000000000000000000000..a4a305d1de4d8f47c0252b4d7fe65a10dd8e2c22 --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/awq/tokenizer.json @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:5f7eee611703c5ce5d1eee32d9cdcfe465647b8aff0c1dfb3bed7ad7dbb05060 +size 34362873 diff --git a/gemma2/gemma2_9b__fft__instruct__masked/awq/tokenizer.model b/gemma2/gemma2_9b__fft__instruct__masked/awq/tokenizer.model new file mode 100644 index 0000000000000000000000000000000000000000..796efe9ab515c15e146ce7588e6d7b9b8134dbf8 --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/awq/tokenizer.model @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:61a7b147390c64585d6c3543dd6fc636906c9af3865a5548f27f31aee1d4c8e2 +size 4241003 diff --git a/gemma2/gemma2_9b__fft__instruct__masked/awq/tokenizer_config.json b/gemma2/gemma2_9b__fft__instruct__masked/awq/tokenizer_config.json new file mode 100644 index 0000000000000000000000000000000000000000..8f771152679b280c921fb7618212543f7b67b7b9 --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/awq/tokenizer_config.json @@ -0,0 +1,2017 @@ +{ + "add_bos_token": true, + "add_eos_token": false, + "added_tokens_decoder": { + "0": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "1": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "2": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "3": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "4": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "5": { + "content": "<2mass>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "6": { + "content": "[@BOS@]", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "7": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "8": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "9": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "10": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "11": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + 
"special": false + }, + "12": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "13": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "14": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "15": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "16": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "17": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "18": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "19": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "20": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "21": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "22": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "23": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "24": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "25": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "26": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "27": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "28": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "29": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "30": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "31": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "32": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "33": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "34": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "35": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "36": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "37": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + 
}, + "38": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "39": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "40": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "41": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "42": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "43": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "44": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "45": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "46": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "47": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "48": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "49": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "50": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "51": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "52": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "53": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "54": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "55": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "56": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "57": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "58": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "59": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "60": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "61": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "62": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "63": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "64": { + 
"content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "65": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "66": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "67": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "68": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "69": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "70": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "71": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "72": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "73": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "74": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "75": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "76": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "77": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "78": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "79": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "80": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "81": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "82": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "83": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "84": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "85": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "86": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "87": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "88": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "89": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "90": { + "content": "", + 
"lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "91": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "92": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "93": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "94": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "95": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "96": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "97": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "98": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "99": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "100": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "101": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "102": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "103": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "104": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "105": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "106": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "107": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "108": { + "content": "\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "109": { + "content": "\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "110": { + "content": "\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "111": { + "content": "\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "112": { + "content": "\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "113": { + "content": "\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "114": { + "content": "\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "115": { + "content": "\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + 
"single_word": false, + "special": false + }, + "116": { + "content": "\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "117": { + "content": "\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "118": { + "content": "\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "119": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "120": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "121": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "122": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "123": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "124": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "125": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "126": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "127": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "128": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "129": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "130": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "131": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "132": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "133": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "134": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "135": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "136": { + "content": 
"\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "137": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "138": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "139": { + "content": "▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "140": { + "content": "▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "141": { + "content": "▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "142": { + "content": "▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "143": { + "content": "▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "144": { + "content": "▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "145": { + "content": "▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "146": { + "content": "▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "147": { + "content": "▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "148": { + "content": "▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "149": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "150": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "151": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "152": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "153": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "154": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "155": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "156": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "157": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "158": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "159": { + 
"content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "160": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "161": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "162": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "163": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "164": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "165": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "166": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "167": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "168": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "169": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "170": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "172": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "173": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "174": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "175": { + "content": "
", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "171": { + "content": "
", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "176": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "177": { + "content": "
", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "178": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "179": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "180": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "181": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "182": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "183": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "184": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "185": { + "content": "

", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "186": { + "content": "

", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "187": { + "content": "

", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "188": { + "content": "

", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "189": { + "content": "

", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "190": { + "content": "
", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "191": { + "content": "
", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "192": { + "content": "
", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "193": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "194": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "195": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "196": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "197": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "198": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "199": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "200": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "201": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "202": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "203": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "204": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "205": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "206": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "207": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "208": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "209": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "210": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "211": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "212": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "213": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "214": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "215": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "216": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255968": { + "content": "[toxicity=0]", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + 
"255969": { + "content": "\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255970": { + "content": "\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255971": { + "content": "\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255972": { + "content": "\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255973": { + "content": "\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255974": { + "content": "\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255975": { + "content": "\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255976": { + "content": "\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255977": { + "content": "\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255978": { + "content": "\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255979": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255980": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255981": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255982": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255983": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255984": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255985": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255986": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255987": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255988": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255989": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255990": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": 
false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255991": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255992": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255993": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255994": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255995": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255996": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255997": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255998": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255999": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + } + }, + "additional_special_tokens": [ + "<start_of_turn>", + "<end_of_turn>" + ], + "bos_token": "<bos>", + "clean_up_tokenization_spaces": false, + "eos_token": "<eos>", + "extra_special_tokens": {}, + "max_length": 1024, + "model_max_length": 1000000000000000019884624838656, + "pad_token": "<pad>", + "sp_model_kwargs": {}, + "spaces_between_special_tokens": false, + "stride": 0, + "tokenizer_class": "GemmaTokenizer", + "truncation_side": "right", + "truncation_strategy": "longest_first", + "unk_token": "<unk>", + "use_default_system_prompt": false +} diff --git a/gemma2/gemma2_9b__fft__instruct__masked/eval_metrics.json b/gemma2/gemma2_9b__fft__instruct__masked/eval_metrics.json new file mode 100644 index 0000000000000000000000000000000000000000..2e56bdfe38861996d67e1764c25f22f2b4bbba12 --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/eval_metrics.json @@ -0,0 +1,8 @@ +{ + "train_loss": 0.5453019758554104, + "eval_loss": 1.0891785621643066, + "eval_runtime": 70.6901, + "eval_samples_per_second": 6.139, + "eval_steps_per_second": 0.778, + "epoch": 3.0 +} \ No newline at end of file diff --git a/gemma2/gemma2_9b__fft__instruct__masked/gguf/gemma2_9b__fft__instruct__masked_q8_0.gguf b/gemma2/gemma2_9b__fft__instruct__masked/gguf/gemma2_9b__fft__instruct__masked_q8_0.gguf new file mode 100644 index 0000000000000000000000000000000000000000..8c190e2537bca69d3ecc7eed260f46121fe71aa5 --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/gguf/gemma2_9b__fft__instruct__masked_q8_0.gguf @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:b62e9389dd1055920daeb028882d5c7e175840533de08792d628d3ce977eec85 +size 9827148576 diff --git a/gemma2/gemma2_9b__fft__instruct__masked/merged/chat_template.jinja b/gemma2/gemma2_9b__fft__instruct__masked/merged/chat_template.jinja new
file mode 100644 index 0000000000000000000000000000000000000000..923ec253c8dbefbb41cf084db7251df41d000f6d --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/merged/chat_template.jinja @@ -0,0 +1,4 @@ +{{ bos_token }}{% if messages[0]['role'] == 'system' %}{{ raise_exception('System role not supported') }}{% endif %}{% for message in messages %}{% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %}{{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }}{% endif %}{% if (message['role'] == 'assistant') %}{% set role = 'model' %}{% else %}{% set role = message['role'] %}{% endif %}{{ '<start_of_turn>' + role + ' +' + message['content'] | trim + '<end_of_turn> +' }}{% endfor %}{% if add_generation_prompt %}{{'<start_of_turn>model +'}}{% endif %} \ No newline at end of file diff --git a/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-200/chat_template.jinja b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-200/chat_template.jinja new file mode 100644 index 0000000000000000000000000000000000000000..923ec253c8dbefbb41cf084db7251df41d000f6d --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-200/chat_template.jinja @@ -0,0 +1,4 @@ +{{ bos_token }}{% if messages[0]['role'] == 'system' %}{{ raise_exception('System role not supported') }}{% endif %}{% for message in messages %}{% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %}{{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }}{% endif %}{% if (message['role'] == 'assistant') %}{% set role = 'model' %}{% else %}{% set role = message['role'] %}{% endif %}{{ '<start_of_turn>' + role + ' +' + message['content'] | trim + '<end_of_turn> +' }}{% endfor %}{% if add_generation_prompt %}{{'<start_of_turn>model +'}}{% endif %} \ No newline at end of file diff --git a/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-200/config.json b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-200/config.json new file mode 100644 index 0000000000000000000000000000000000000000..2b1c61ddb476d5def6121706e2d418bbe700f4f5 --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-200/config.json @@ -0,0 +1,33 @@ +{ + "architectures": [ + "Gemma2ForCausalLM" + ], + "attention_bias": false, + "attention_dropout": 0.0, + "attn_logit_softcapping": 50.0, + "bos_token_id": 2, + "cache_implementation": "hybrid", + "eos_token_id": 1, + "final_logit_softcapping": 30.0, + "head_dim": 256, + "hidden_act": "gelu_pytorch_tanh", + "hidden_activation": "gelu_pytorch_tanh", + "hidden_size": 3584, + "initializer_range": 0.02, + "intermediate_size": 14336, + "max_position_embeddings": 8192, + "model_type": "gemma2", + "num_attention_heads": 16, + "num_hidden_layers": 42, + "num_key_value_heads": 8, + "pad_token_id": 0, + "query_pre_attn_scalar": 256, + "rms_norm_eps": 1e-06, + "rope_theta": 10000.0, + "sliding_window": 4096, + "sliding_window_size": 4096, + "torch_dtype": "bfloat16", + "transformers_version": "4.52.4", + "use_cache": false, + "vocab_size": 256000 +} diff --git a/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-200/generation_config.json b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-200/generation_config.json new file mode 100644 index 0000000000000000000000000000000000000000..7c4f1b1f4d5abfaef8f03bbf8a4681ad628ceb70 --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-200/generation_config.json @@ -0,0 +1,8 @@ +{ + "_from_model_config": true, + "bos_token_id": 2, + "cache_implementation": "hybrid", + "eos_token_id": 1, + 
"pad_token_id": 0, + "transformers_version": "4.52.4" +} diff --git a/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-200/model-00001-of-00004.safetensors b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-200/model-00001-of-00004.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..5905e31203cc10092fe6ec91faf0ca689e595df4 --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-200/model-00001-of-00004.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:8d26af890be856b9d6498061fc9c02b2253b4688570d6c9762e5fb7ddeace045 +size 4903351912 diff --git a/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-200/model-00002-of-00004.safetensors b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-200/model-00002-of-00004.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..e6fe7f18ecddba33b01c00cbb73cfef063486937 --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-200/model-00002-of-00004.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:169006a695cc70ffc09582a09a73c31105df0e03ed2adef6fef3187f79f19e9d +size 4947570872 diff --git a/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-200/model-00003-of-00004.safetensors b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-200/model-00003-of-00004.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..179d74a9963a5e7990a464f0801c540defe07dd5 --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-200/model-00003-of-00004.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d79c1e8d22504c5a04746899ec7ced800c738195a5ad9d7c0755317a255a3cb8 +size 4962221464 diff --git a/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-200/model-00004-of-00004.safetensors b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-200/model-00004-of-00004.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..e011c47ac6b5330ac0a196e0f49f1ad0d6d68dda --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-200/model-00004-of-00004.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:99da6a3e1379ba3945d16921faad88154b18a46c3080a5a412ca983f79895581 +size 3670322200 diff --git a/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-200/model.safetensors.index.json b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-200/model.safetensors.index.json new file mode 100644 index 0000000000000000000000000000000000000000..43bf0e426e9bf12945463de98856c221831faf66 --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-200/model.safetensors.index.json @@ -0,0 +1,471 @@ +{ + "metadata": { + "total_size": 18483411968 + }, + "weight_map": { + "model.embed_tokens.weight": "model-00001-of-00004.safetensors", + "model.layers.0.input_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.0.post_feedforward_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.0.pre_feedforward_layernorm.weight": "model-00001-of-00004.safetensors", + 
"model.layers.0.self_attn.k_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.input_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.1.post_feedforward_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.1.pre_feedforward_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.10.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.10.post_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.10.pre_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.10.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.11.post_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.11.pre_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.11.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.12.post_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + 
"model.layers.12.pre_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.12.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.13.post_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.13.pre_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.13.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.14.post_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.14.pre_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.14.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.15.post_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.15.pre_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.15.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.16.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.16.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.16.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.16.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.16.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + 
"model.layers.16.post_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.16.pre_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.16.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.16.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.16.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.16.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.17.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.17.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.17.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.17.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.17.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.17.post_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.17.pre_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.17.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.17.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.17.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.17.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.18.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.18.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.18.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.18.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.18.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.18.post_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.18.pre_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.18.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.18.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.18.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.18.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.19.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.19.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.19.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.19.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.19.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.19.post_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.19.pre_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.19.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.19.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.19.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.19.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.2.input_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.up_proj.weight": "model-00001-of-00004.safetensors", + 
"model.layers.2.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.2.post_feedforward_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.2.pre_feedforward_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.20.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.20.post_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.20.pre_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.20.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.20.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.20.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.20.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.21.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.21.post_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.21.pre_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.21.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.22.post_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.22.pre_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.22.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.23.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.23.post_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.23.pre_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.23.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.24.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.24.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.24.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.24.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.24.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.24.post_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.24.pre_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.24.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.24.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.24.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.24.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.25.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.25.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.25.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.25.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.25.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.25.post_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.25.pre_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.25.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.25.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.25.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.25.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.26.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.26.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.26.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.26.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.26.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.26.post_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.26.pre_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.26.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.26.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.26.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.26.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.27.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.27.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.27.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.27.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.27.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.27.post_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.27.pre_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.27.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.27.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.27.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.27.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.28.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.28.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.28.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.28.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.28.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.28.post_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.28.pre_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.28.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.28.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.28.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.28.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.29.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.29.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.29.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.29.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.29.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.29.post_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.29.pre_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.29.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.29.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.29.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.29.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.3.input_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.3.post_feedforward_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.3.pre_feedforward_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.30.input_layernorm.weight": "model-00003-of-00004.safetensors", + 
"model.layers.30.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.30.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.30.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.30.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.30.post_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.30.pre_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.30.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.30.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.30.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.30.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.31.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.31.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.31.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.31.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.31.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.31.post_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.31.pre_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.31.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.31.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.31.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.31.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.32.input_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.32.mlp.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.32.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.32.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.32.post_attention_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.32.post_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.32.pre_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.32.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.32.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.32.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.32.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.33.input_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.33.mlp.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.33.mlp.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.33.mlp.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.33.post_attention_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.33.post_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.33.pre_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.33.self_attn.k_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.33.self_attn.o_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.33.self_attn.q_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.33.self_attn.v_proj.weight": "model-00004-of-00004.safetensors", + 
"model.layers.34.input_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.34.mlp.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.34.mlp.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.34.mlp.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.34.post_attention_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.34.post_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.34.pre_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.34.self_attn.k_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.34.self_attn.o_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.34.self_attn.q_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.34.self_attn.v_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.35.input_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.35.mlp.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.35.mlp.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.35.mlp.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.35.post_attention_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.35.post_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.35.pre_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.35.self_attn.k_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.35.self_attn.o_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.35.self_attn.q_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.35.self_attn.v_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.36.input_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.36.mlp.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.36.mlp.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.36.mlp.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.36.post_attention_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.36.post_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.36.pre_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.36.self_attn.k_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.36.self_attn.o_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.36.self_attn.q_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.36.self_attn.v_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.37.input_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.37.mlp.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.37.mlp.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.37.mlp.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.37.post_attention_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.37.post_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.37.pre_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.37.self_attn.k_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.37.self_attn.o_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.37.self_attn.q_proj.weight": "model-00004-of-00004.safetensors", + 
"model.layers.37.self_attn.v_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.38.input_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.38.mlp.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.38.mlp.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.38.mlp.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.38.post_attention_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.38.post_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.38.pre_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.38.self_attn.k_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.38.self_attn.o_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.38.self_attn.q_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.38.self_attn.v_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.39.input_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.39.mlp.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.39.mlp.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.39.mlp.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.39.post_attention_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.39.post_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.39.pre_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.39.self_attn.k_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.39.self_attn.o_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.39.self_attn.q_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.39.self_attn.v_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.4.input_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.4.post_feedforward_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.4.pre_feedforward_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.40.input_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.40.mlp.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.40.mlp.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.40.mlp.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.40.post_attention_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.40.post_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.40.pre_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.40.self_attn.k_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.40.self_attn.o_proj.weight": "model-00004-of-00004.safetensors", + 
"model.layers.40.self_attn.q_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.40.self_attn.v_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.41.input_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.41.mlp.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.41.mlp.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.41.mlp.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.41.post_attention_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.41.post_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.41.pre_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.41.self_attn.k_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.41.self_attn.o_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.41.self_attn.q_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.41.self_attn.v_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.5.input_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.5.post_feedforward_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.5.pre_feedforward_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.input_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.6.post_feedforward_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.6.pre_feedforward_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.7.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.7.mlp.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.7.post_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.7.pre_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.self_attn.o_proj.weight": 
"model-00001-of-00004.safetensors", + "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.8.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.8.post_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.8.pre_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.8.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.9.post_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.9.pre_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.9.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.norm.weight": "model-00004-of-00004.safetensors" + } +} diff --git a/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-200/optimizer.pt b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-200/optimizer.pt new file mode 100644 index 0000000000000000000000000000000000000000..ab40165eff04eae26771a2d079f3ab6f8c1c6440 --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-200/optimizer.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c2e7060da32177e788b4d10884689fc6b1dd1f7e77c5314ecc1ef3af20807736 +size 18776303565 diff --git a/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-200/rng_state.pth b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-200/rng_state.pth new file mode 100644 index 0000000000000000000000000000000000000000..065f385e762194a148dec7fed295c58a3e7c17fa --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-200/rng_state.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:2534e434cd5abbb8f7668d3eab0549db0ef95d6a797a3efa86b712e8e32266a7 +size 14645 diff --git a/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-200/scheduler.pt b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-200/scheduler.pt new file mode 100644 index 0000000000000000000000000000000000000000..1fc3273db5670c0f26487004e88698fe9e6cdbd8 --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-200/scheduler.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:e82973834563628612426f16c2df07e087a06161ca7be3412ce0f9d8a2251f2c +size 1465 diff --git a/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-200/special_tokens_map.json b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-200/special_tokens_map.json new file mode 100644 index 0000000000000000000000000000000000000000..8d6368f7e735fbe4781bf6e956b7c6ad0586df80 --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-200/special_tokens_map.json @@ -0,0 +1,34 @@ +{ + "additional_special_tokens": [ + "", + "" + ], + "bos_token": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + }, + "eos_token": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + }, + "pad_token": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + }, + "unk_token": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + } +} diff --git a/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-200/tokenizer.json b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-200/tokenizer.json new file mode 100644 index 0000000000000000000000000000000000000000..bda55fd2796bc3a106b77e25610a354c8a69b338 --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-200/tokenizer.json @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e6ce83119bb404f7f0a6e621b76759d476357dcd01241a90f9ca136ae2b3c11c +size 34362972 diff --git a/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-200/tokenizer.model b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-200/tokenizer.model new file mode 100644 index 0000000000000000000000000000000000000000..796efe9ab515c15e146ce7588e6d7b9b8134dbf8 --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-200/tokenizer.model @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:61a7b147390c64585d6c3543dd6fc636906c9af3865a5548f27f31aee1d4c8e2 +size 4241003 diff --git a/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-200/tokenizer_config.json b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-200/tokenizer_config.json new file mode 100644 index 0000000000000000000000000000000000000000..3ade6be57196932063a48c1329fbf9c2e7e154d1 --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-200/tokenizer_config.json @@ -0,0 +1,2013 @@ +{ + "add_bos_token": true, + "add_eos_token": false, + "added_tokens_decoder": { + "0": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "1": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "2": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "3": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "4": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "5": { + "content": "<2mass>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "6": { + "content": "[@BOS@]", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false 
+ }, + "7": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "8": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "9": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "10": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "11": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "12": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "13": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "14": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "15": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "16": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "17": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "18": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "19": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "20": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "21": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "22": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "23": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "24": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "25": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "26": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "27": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "28": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "29": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "30": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "31": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "32": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "33": { + 
"content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "34": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "35": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "36": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "37": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "38": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "39": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "40": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "41": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "42": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "43": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "44": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "45": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "46": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "47": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "48": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "49": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "50": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "51": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "52": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "53": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "54": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "55": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "56": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "57": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "58": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "59": { + "content": "", + 
"lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "60": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "61": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "62": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "63": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "64": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "65": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "66": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "67": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "68": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "69": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "70": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "71": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "72": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "73": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "74": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "75": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "76": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "77": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "78": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "79": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "80": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "81": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "82": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "83": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "84": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "85": { + "content": "", + "lstrip": false, + 
"normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "86": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "87": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "88": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "89": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "90": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "91": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "92": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "93": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "94": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "95": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "96": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "97": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "98": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "99": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "100": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "101": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "102": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "103": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "104": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "105": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "106": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "107": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "108": { + "content": "\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "109": { + "content": "\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "110": { + "content": "\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "111": { + "content": "\n\n\n\n", + 
"lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "112": { + "content": "\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "113": { + "content": "\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "114": { + "content": "\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "115": { + "content": "\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "116": { + "content": "\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "117": { + "content": "\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "118": { + "content": "\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "119": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "120": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "121": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "122": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "123": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "124": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "125": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "126": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "127": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "128": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "129": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "130": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "131": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "132": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + 
"rstrip": false, + "single_word": false, + "special": false + }, + "133": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "134": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "135": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "136": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "137": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "138": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "139": { + "content": "▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "140": { + "content": "▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "141": { + "content": "▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "142": { + "content": "▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "143": { + "content": "▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "144": { + "content": "▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "145": { + "content": "▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "146": { + "content": "▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "147": { + "content": "▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "148": { + "content": "▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "149": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "150": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "151": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "152": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "153": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "154": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + 
"special": false + }, + "155": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "156": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "157": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "158": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "159": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "160": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "161": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "162": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "163": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "164": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "165": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "166": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "167": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "168": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "169": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "170": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "172": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "173": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "174": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "175": { + "content": "
", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "171": { + "content": "
", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "176": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "177": { + "content": "
", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "178": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "179": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "180": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "181": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "182": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "183": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "184": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "185": { + "content": "

", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "186": { + "content": "

", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "187": { + "content": "

", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "188": { + "content": "

", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "189": { + "content": "

", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "190": { + "content": "
", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "191": { + "content": "
", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "192": { + "content": "
", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "193": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "194": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "195": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "196": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "197": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "198": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "199": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "200": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "201": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "202": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "203": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "204": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "205": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "206": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "207": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "208": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "209": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "210": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "211": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "212": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "213": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "214": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "215": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "216": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255968": { + "content": "[toxicity=0]", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + 
"255969": { + "content": "\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255970": { + "content": "\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255971": { + "content": "\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255972": { + "content": "\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255973": { + "content": "\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255974": { + "content": "\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255975": { + "content": "\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255976": { + "content": "\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255977": { + "content": "\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255978": { + "content": "\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255979": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255980": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255981": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255982": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255983": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255984": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255985": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255986": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255987": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255988": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255989": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255990": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": 
false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255991": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255992": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255993": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255994": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255995": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255996": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255997": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255998": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255999": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + } + }, + "additional_special_tokens": [ + "", + "" + ], + "bos_token": "", + "clean_up_tokenization_spaces": false, + "eos_token": "", + "extra_special_tokens": {}, + "model_max_length": 1000000000000000019884624838656, + "pad_token": "", + "sp_model_kwargs": {}, + "spaces_between_special_tokens": false, + "tokenizer_class": "GemmaTokenizer", + "unk_token": "", + "use_default_system_prompt": false +} diff --git a/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-200/trainer_state.json b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-200/trainer_state.json new file mode 100644 index 0000000000000000000000000000000000000000..925eba45cf4eca9cc9ea8f6083cbe67d9cfff81c --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-200/trainer_state.json @@ -0,0 +1,115 @@ +{ + "best_global_step": null, + "best_metric": Infinity, + "best_model_checkpoint": null, + "epoch": 0.7768875940762321, + "eval_steps": 100, + "global_step": 200, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 0.09711094925952901, + "grad_norm": 9.0, + "learning_rate": 2e-05, + "loss": 1.5677, + "step": 25 + }, + { + "epoch": 0.19422189851905802, + "grad_norm": 9.0625, + "learning_rate": 1.9333333333333333e-05, + "loss": 1.241, + "step": 50 + }, + { + "epoch": 0.29133284777858703, + "grad_norm": 6.96875, + "learning_rate": 1.866666666666667e-05, + "loss": 1.2122, + "step": 75 + }, + { + "epoch": 0.38844379703811605, + "grad_norm": 7.875, + "learning_rate": 1.8e-05, + "loss": 1.1718, + "step": 100 + }, + { + "epoch": 0.38844379703811605, + "eval_loss": NaN, + "eval_runtime": 22.1669, + "eval_samples_per_second": 9.789, + "eval_steps_per_second": 9.789, + "step": 100 + }, + 
{ + "epoch": 0.48555474629764506, + "grad_norm": 7.71875, + "learning_rate": 1.7333333333333336e-05, + "loss": 1.1665, + "step": 125 + }, + { + "epoch": 0.5826656955571741, + "grad_norm": 7.5, + "learning_rate": 1.6666666666666667e-05, + "loss": 1.1739, + "step": 150 + }, + { + "epoch": 0.6797766448167031, + "grad_norm": 6.875, + "learning_rate": 1.6000000000000003e-05, + "loss": 1.1071, + "step": 175 + }, + { + "epoch": 0.7768875940762321, + "grad_norm": 8.6875, + "learning_rate": 1.5333333333333334e-05, + "loss": 1.104, + "step": 200 + }, + { + "epoch": 0.7768875940762321, + "eval_loss": NaN, + "eval_runtime": 22.2611, + "eval_samples_per_second": 9.748, + "eval_steps_per_second": 9.748, + "step": 200 + } + ], + "logging_steps": 25, + "max_steps": 774, + "num_input_tokens_seen": 0, + "num_train_epochs": 3, + "save_steps": 100, + "stateful_callbacks": { + "EarlyStoppingCallback": { + "args": { + "early_stopping_patience": 3, + "early_stopping_threshold": 0.0 + }, + "attributes": { + "early_stopping_patience_counter": 1 + } + }, + "TrainerControl": { + "args": { + "should_epoch_stop": false, + "should_evaluate": false, + "should_log": false, + "should_save": true, + "should_training_stop": false + }, + "attributes": {} + } + }, + "total_flos": 1.072636356605399e+17, + "train_batch_size": 1, + "trial_name": null, + "trial_params": null +} diff --git a/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-200/training_args.bin b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-200/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..c472bbe549d517f9f19f2c10077c672bdcaee5f2 --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-200/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d01aa2850197d71abb4ce801639374be528686772e506ff6dcc138d8b2f548f7 +size 5777 diff --git a/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-300/chat_template.jinja b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-300/chat_template.jinja new file mode 100644 index 0000000000000000000000000000000000000000..923ec253c8dbefbb41cf084db7251df41d000f6d --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-300/chat_template.jinja @@ -0,0 +1,4 @@ +{{ bos_token }}{% if messages[0]['role'] == 'system' %}{{ raise_exception('System role not supported') }}{% endif %}{% for message in messages %}{% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %}{{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }}{% endif %}{% if (message['role'] == 'assistant') %}{% set role = 'model' %}{% else %}{% set role = message['role'] %}{% endif %}{{ '' + role + ' +' + message['content'] | trim + ' +' }}{% endfor %}{% if add_generation_prompt %}{{'model +'}}{% endif %} \ No newline at end of file diff --git a/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-300/config.json b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-300/config.json new file mode 100644 index 0000000000000000000000000000000000000000..2b1c61ddb476d5def6121706e2d418bbe700f4f5 --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-300/config.json @@ -0,0 +1,33 @@ +{ + "architectures": [ + "Gemma2ForCausalLM" + ], + "attention_bias": false, + "attention_dropout": 0.0, + "attn_logit_softcapping": 50.0, + "bos_token_id": 2, + "cache_implementation": "hybrid", + "eos_token_id": 1, + "final_logit_softcapping": 30.0, + "head_dim": 256, + "hidden_act": 
"gelu_pytorch_tanh", + "hidden_activation": "gelu_pytorch_tanh", + "hidden_size": 3584, + "initializer_range": 0.02, + "intermediate_size": 14336, + "max_position_embeddings": 8192, + "model_type": "gemma2", + "num_attention_heads": 16, + "num_hidden_layers": 42, + "num_key_value_heads": 8, + "pad_token_id": 0, + "query_pre_attn_scalar": 256, + "rms_norm_eps": 1e-06, + "rope_theta": 10000.0, + "sliding_window": 4096, + "sliding_window_size": 4096, + "torch_dtype": "bfloat16", + "transformers_version": "4.52.4", + "use_cache": false, + "vocab_size": 256000 +} diff --git a/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-300/generation_config.json b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-300/generation_config.json new file mode 100644 index 0000000000000000000000000000000000000000..7c4f1b1f4d5abfaef8f03bbf8a4681ad628ceb70 --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-300/generation_config.json @@ -0,0 +1,8 @@ +{ + "_from_model_config": true, + "bos_token_id": 2, + "cache_implementation": "hybrid", + "eos_token_id": 1, + "pad_token_id": 0, + "transformers_version": "4.52.4" +} diff --git a/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-300/model-00001-of-00004.safetensors b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-300/model-00001-of-00004.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..1435668f4b10db21f8f7de4ccad428be01e571cb --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-300/model-00001-of-00004.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:aada41271afe0a63bf429f28e01aff1143f04c129efb06f050c690379316745c +size 4903351912 diff --git a/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-300/model-00002-of-00004.safetensors b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-300/model-00002-of-00004.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..2dd85ea0d31bd1442e759376218373f1bf92d9fc --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-300/model-00002-of-00004.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:680c45264c1702e7ed6ae1033ebc02b7b88c0254902cf8d9105884b08dd941af +size 4947570872 diff --git a/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-300/model-00003-of-00004.safetensors b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-300/model-00003-of-00004.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..7c17b723e456147c9263a90ac0a223eedd773d59 --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-300/model-00003-of-00004.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:97167fe0c5277399afc4c79aff24ea744b1eff4d12b4d0df3078f35bc1f5b72f +size 4962221464 diff --git a/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-300/model-00004-of-00004.safetensors b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-300/model-00004-of-00004.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..fd39cd4d23ce9fcce49bd38361306f5cfdfbddd7 --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-300/model-00004-of-00004.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:84f6c1b6a4f93364626c39e2a4acf8cbf303efff7a0d2145b203b5725ae2bcea +size 3670322200 diff --git 
a/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-300/model.safetensors.index.json b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-300/model.safetensors.index.json new file mode 100644 index 0000000000000000000000000000000000000000..43bf0e426e9bf12945463de98856c221831faf66 --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-300/model.safetensors.index.json @@ -0,0 +1,471 @@ +{ + "metadata": { + "total_size": 18483411968 + }, + "weight_map": { + "model.embed_tokens.weight": "model-00001-of-00004.safetensors", + "model.layers.0.input_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.0.post_feedforward_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.0.pre_feedforward_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.input_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.1.post_feedforward_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.1.pre_feedforward_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.10.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.10.post_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.10.pre_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.10.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.up_proj.weight": 
"model-00002-of-00004.safetensors", + "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.11.post_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.11.pre_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.11.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.12.post_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.12.pre_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.12.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.13.post_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.13.pre_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.13.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.14.post_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.14.pre_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.14.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.gate_proj.weight": 
"model-00002-of-00004.safetensors", + "model.layers.15.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.15.post_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.15.pre_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.15.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.16.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.16.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.16.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.16.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.16.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.16.post_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.16.pre_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.16.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.16.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.16.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.16.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.17.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.17.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.17.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.17.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.17.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.17.post_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.17.pre_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.17.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.17.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.17.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.17.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.18.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.18.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.18.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.18.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.18.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.18.post_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.18.pre_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.18.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.18.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.18.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.18.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.19.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.19.mlp.down_proj.weight": 
"model-00002-of-00004.safetensors", + "model.layers.19.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.19.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.19.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.19.post_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.19.pre_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.19.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.19.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.19.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.19.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.2.input_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.2.post_feedforward_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.2.pre_feedforward_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.20.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.20.post_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.20.pre_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.20.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.20.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.20.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.20.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.21.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.21.post_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.21.pre_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.21.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.input_layernorm.weight": 
"model-00003-of-00004.safetensors", + "model.layers.22.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.22.post_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.22.pre_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.22.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.23.post_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.23.pre_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.23.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.24.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.24.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.24.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.24.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.24.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.24.post_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.24.pre_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.24.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.24.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.24.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.24.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.25.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.25.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.25.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.25.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.25.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.25.post_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.25.pre_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.25.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.25.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.25.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.25.self_attn.v_proj.weight": 
"model-00003-of-00004.safetensors", + "model.layers.26.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.26.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.26.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.26.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.26.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.26.post_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.26.pre_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.26.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.26.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.26.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.26.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.27.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.27.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.27.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.27.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.27.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.27.post_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.27.pre_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.27.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.27.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.27.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.27.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.28.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.28.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.28.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.28.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.28.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.28.post_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.28.pre_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.28.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.28.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.28.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.28.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.29.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.29.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.29.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.29.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.29.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.29.post_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.29.pre_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.29.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.29.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.29.self_attn.q_proj.weight": 
"model-00003-of-00004.safetensors", + "model.layers.29.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.3.input_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.3.post_feedforward_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.3.pre_feedforward_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.30.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.30.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.30.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.30.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.30.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.30.post_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.30.pre_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.30.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.30.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.30.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.30.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.31.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.31.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.31.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.31.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.31.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.31.post_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.31.pre_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.31.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.31.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.31.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.31.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.32.input_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.32.mlp.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.32.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.32.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.32.post_attention_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.32.post_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.32.pre_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.32.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.32.self_attn.o_proj.weight": 
"model-00003-of-00004.safetensors", + "model.layers.32.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.32.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.33.input_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.33.mlp.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.33.mlp.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.33.mlp.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.33.post_attention_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.33.post_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.33.pre_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.33.self_attn.k_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.33.self_attn.o_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.33.self_attn.q_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.33.self_attn.v_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.34.input_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.34.mlp.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.34.mlp.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.34.mlp.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.34.post_attention_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.34.post_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.34.pre_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.34.self_attn.k_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.34.self_attn.o_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.34.self_attn.q_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.34.self_attn.v_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.35.input_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.35.mlp.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.35.mlp.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.35.mlp.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.35.post_attention_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.35.post_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.35.pre_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.35.self_attn.k_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.35.self_attn.o_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.35.self_attn.q_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.35.self_attn.v_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.36.input_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.36.mlp.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.36.mlp.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.36.mlp.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.36.post_attention_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.36.post_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.36.pre_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.36.self_attn.k_proj.weight": 
"model-00004-of-00004.safetensors", + "model.layers.36.self_attn.o_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.36.self_attn.q_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.36.self_attn.v_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.37.input_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.37.mlp.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.37.mlp.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.37.mlp.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.37.post_attention_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.37.post_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.37.pre_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.37.self_attn.k_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.37.self_attn.o_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.37.self_attn.q_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.37.self_attn.v_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.38.input_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.38.mlp.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.38.mlp.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.38.mlp.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.38.post_attention_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.38.post_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.38.pre_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.38.self_attn.k_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.38.self_attn.o_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.38.self_attn.q_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.38.self_attn.v_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.39.input_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.39.mlp.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.39.mlp.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.39.mlp.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.39.post_attention_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.39.post_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.39.pre_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.39.self_attn.k_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.39.self_attn.o_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.39.self_attn.q_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.39.self_attn.v_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.4.input_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.4.post_feedforward_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.4.pre_feedforward_layernorm.weight": 
"model-00001-of-00004.safetensors", + "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.40.input_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.40.mlp.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.40.mlp.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.40.mlp.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.40.post_attention_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.40.post_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.40.pre_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.40.self_attn.k_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.40.self_attn.o_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.40.self_attn.q_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.40.self_attn.v_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.41.input_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.41.mlp.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.41.mlp.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.41.mlp.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.41.post_attention_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.41.post_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.41.pre_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.41.self_attn.k_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.41.self_attn.o_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.41.self_attn.q_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.41.self_attn.v_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.5.input_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.5.post_feedforward_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.5.pre_feedforward_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.input_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.6.post_feedforward_layernorm.weight": "model-00001-of-00004.safetensors", + 
"model.layers.6.pre_feedforward_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.7.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.7.mlp.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.7.post_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.7.pre_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.8.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.8.post_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.8.pre_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.8.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.9.post_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.9.pre_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.9.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.norm.weight": "model-00004-of-00004.safetensors" + } +} diff --git a/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-300/optimizer.pt b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-300/optimizer.pt new file mode 100644 index 0000000000000000000000000000000000000000..649026cf17d90dc7ff386d8ad64d6d226d3fd180 --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-300/optimizer.pt @@ -0,0 +1,3 @@ +version 
https://git-lfs.github.com/spec/v1 +oid sha256:a9508997c5c71ecb933b935c37bc1365fa9a9f0d64dca2b72cbe2f56bdd72d1c +size 18776304013 diff --git a/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-300/rng_state.pth b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-300/rng_state.pth new file mode 100644 index 0000000000000000000000000000000000000000..7c8e918dcb7d690dfe4baf0182ea6cb4bc24a427 --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-300/rng_state.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e5b517d1b8e2b0f837c8b00170b154961d4d989feba4326ac25583df7a55c57a +size 14645 diff --git a/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-300/scheduler.pt b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-300/scheduler.pt new file mode 100644 index 0000000000000000000000000000000000000000..836bf2c401dcb40b3a6675d3565d0d8e406c147f --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-300/scheduler.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c2b34d51f09e53d1743e55c9c100a37e7f558d1aa9d3051217835005bd6f9300 +size 1465 diff --git a/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-300/special_tokens_map.json b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-300/special_tokens_map.json new file mode 100644 index 0000000000000000000000000000000000000000..8d6368f7e735fbe4781bf6e956b7c6ad0586df80 --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-300/special_tokens_map.json @@ -0,0 +1,34 @@ +{ + "additional_special_tokens": [ + "<start_of_turn>", + "<end_of_turn>" + ], + "bos_token": { + "content": "<bos>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + }, + "eos_token": { + "content": "<eos>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + }, + "pad_token": { + "content": "<pad>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + }, + "unk_token": { + "content": "<unk>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + } +} diff --git a/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-300/tokenizer.json b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-300/tokenizer.json new file mode 100644 index 0000000000000000000000000000000000000000..bda55fd2796bc3a106b77e25610a354c8a69b338 --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-300/tokenizer.json @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e6ce83119bb404f7f0a6e621b76759d476357dcd01241a90f9ca136ae2b3c11c +size 34362972 diff --git a/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-300/tokenizer.model b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-300/tokenizer.model new file mode 100644 index 0000000000000000000000000000000000000000..796efe9ab515c15e146ce7588e6d7b9b8134dbf8 --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-300/tokenizer.model @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:61a7b147390c64585d6c3543dd6fc636906c9af3865a5548f27f31aee1d4c8e2 +size 4241003 diff --git a/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-300/tokenizer_config.json b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-300/tokenizer_config.json new file mode 100644 index 0000000000000000000000000000000000000000..3ade6be57196932063a48c1329fbf9c2e7e154d1 --- /dev/null +++
b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-300/tokenizer_config.json @@ -0,0 +1,2013 @@ +{ + "add_bos_token": true, + "add_eos_token": false, + "added_tokens_decoder": { + "0": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "1": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "2": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "3": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "4": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "5": { + "content": "<2mass>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "6": { + "content": "[@BOS@]", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "7": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "8": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "9": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "10": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "11": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "12": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "13": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "14": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "15": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "16": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "17": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "18": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "19": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "20": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "21": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "22": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "23": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "24": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + 
"single_word": false, + "special": false + }, + "25": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "26": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "27": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "28": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "29": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "30": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "31": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "32": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "33": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "34": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "35": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "36": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "37": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "38": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "39": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "40": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "41": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "42": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "43": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "44": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "45": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "46": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "47": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "48": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "49": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "50": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": 
false, + "special": false + }, + "51": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "52": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "53": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "54": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "55": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "56": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "57": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "58": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "59": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "60": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "61": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "62": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "63": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "64": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "65": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "66": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "67": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "68": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "69": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "70": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "71": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "72": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "73": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "74": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "75": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "76": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + 
"special": false + }, + "77": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "78": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "79": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "80": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "81": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "82": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "83": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "84": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "85": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "86": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "87": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "88": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "89": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "90": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "91": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "92": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "93": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "94": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "95": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "96": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "97": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "98": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "99": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "100": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "101": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "102": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": 
false + }, + "103": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "104": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "105": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "106": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "107": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "108": { + "content": "\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "109": { + "content": "\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "110": { + "content": "\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "111": { + "content": "\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "112": { + "content": "\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "113": { + "content": "\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "114": { + "content": "\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "115": { + "content": "\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "116": { + "content": "\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "117": { + "content": "\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "118": { + "content": "\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "119": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "120": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "121": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "122": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "123": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "124": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "125": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "126": { + "content": 
"\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "127": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "128": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "129": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "130": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "131": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "132": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "133": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "134": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "135": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "136": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "137": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "138": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "139": { + "content": "▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "140": { + "content": "▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "141": { + "content": "▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "142": { + "content": "▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "143": { + "content": "▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "144": { + "content": "▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "145": { + "content": "▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "146": { + "content": "▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + 
"147": { + "content": "▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "148": { + "content": "▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "149": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "150": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "151": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "152": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "153": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "154": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "155": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "156": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "157": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "158": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "159": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "160": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "161": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "162": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "163": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "164": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "165": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "166": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "167": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "168": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "169": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + 
"single_word": false, + "special": false + }, + "170": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "172": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "173": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "174": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "175": { + "content": "
", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "171": { + "content": "
", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "176": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "177": { + "content": "
", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "178": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "179": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "180": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "181": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "182": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "183": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "184": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "185": { + "content": "

", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "186": { + "content": "

", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "187": { + "content": "

", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "188": { + "content": "

", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "189": { + "content": "

", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "190": { + "content": "
", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "191": { + "content": "
", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "192": { + "content": "
", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "193": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "194": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "195": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "196": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "197": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "198": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "199": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "200": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "201": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "202": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "203": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "204": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "205": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "206": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "207": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "208": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "209": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "210": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "211": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "212": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "213": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "214": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "215": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "216": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255968": { + "content": "[toxicity=0]", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + 
"255969": { + "content": "\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255970": { + "content": "\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255971": { + "content": "\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255972": { + "content": "\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255973": { + "content": "\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255974": { + "content": "\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255975": { + "content": "\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255976": { + "content": "\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255977": { + "content": "\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255978": { + "content": "\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255979": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255980": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255981": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255982": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255983": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255984": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255985": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255986": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255987": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255988": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255989": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255990": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": 
false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255991": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255992": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255993": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255994": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255995": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255996": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255997": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255998": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255999": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + } + }, + "additional_special_tokens": [ + "", + "" + ], + "bos_token": "", + "clean_up_tokenization_spaces": false, + "eos_token": "", + "extra_special_tokens": {}, + "model_max_length": 1000000000000000019884624838656, + "pad_token": "", + "sp_model_kwargs": {}, + "spaces_between_special_tokens": false, + "tokenizer_class": "GemmaTokenizer", + "unk_token": "", + "use_default_system_prompt": false +} diff --git a/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-300/trainer_state.json b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-300/trainer_state.json new file mode 100644 index 0000000000000000000000000000000000000000..4036e92c53aa7d986eb9962129b232e57c7d7ecf --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-300/trainer_state.json @@ -0,0 +1,151 @@ +{ + "best_global_step": null, + "best_metric": Infinity, + "best_model_checkpoint": null, + "epoch": 1.1631463947560088, + "eval_steps": 100, + "global_step": 300, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 0.09711094925952901, + "grad_norm": 9.0, + "learning_rate": 2e-05, + "loss": 1.5677, + "step": 25 + }, + { + "epoch": 0.19422189851905802, + "grad_norm": 9.0625, + "learning_rate": 1.9333333333333333e-05, + "loss": 1.241, + "step": 50 + }, + { + "epoch": 0.29133284777858703, + "grad_norm": 6.96875, + "learning_rate": 1.866666666666667e-05, + "loss": 1.2122, + "step": 75 + }, + { + "epoch": 0.38844379703811605, + "grad_norm": 7.875, + "learning_rate": 1.8e-05, + "loss": 1.1718, + "step": 100 + }, + { + "epoch": 0.38844379703811605, + "eval_loss": NaN, + "eval_runtime": 22.1669, + "eval_samples_per_second": 9.789, + "eval_steps_per_second": 9.789, + "step": 100 + }, + 
{ + "epoch": 0.48555474629764506, + "grad_norm": 7.71875, + "learning_rate": 1.7333333333333336e-05, + "loss": 1.1665, + "step": 125 + }, + { + "epoch": 0.5826656955571741, + "grad_norm": 7.5, + "learning_rate": 1.6666666666666667e-05, + "loss": 1.1739, + "step": 150 + }, + { + "epoch": 0.6797766448167031, + "grad_norm": 6.875, + "learning_rate": 1.6000000000000003e-05, + "loss": 1.1071, + "step": 175 + }, + { + "epoch": 0.7768875940762321, + "grad_norm": 8.6875, + "learning_rate": 1.5333333333333334e-05, + "loss": 1.104, + "step": 200 + }, + { + "epoch": 0.7768875940762321, + "eval_loss": NaN, + "eval_runtime": 22.2611, + "eval_samples_per_second": 9.748, + "eval_steps_per_second": 9.748, + "step": 200 + }, + { + "epoch": 0.8739985433357611, + "grad_norm": 6.03125, + "learning_rate": 1.4666666666666666e-05, + "loss": 1.0936, + "step": 225 + }, + { + "epoch": 0.9711094925952901, + "grad_norm": 7.65625, + "learning_rate": 1.4e-05, + "loss": 1.0604, + "step": 250 + }, + { + "epoch": 1.0660354454964798, + "grad_norm": 8.5625, + "learning_rate": 1.3333333333333333e-05, + "loss": 0.6133, + "step": 275 + }, + { + "epoch": 1.1631463947560088, + "grad_norm": 7.0625, + "learning_rate": 1.2666666666666667e-05, + "loss": 0.3797, + "step": 300 + }, + { + "epoch": 1.1631463947560088, + "eval_loss": NaN, + "eval_runtime": 21.9634, + "eval_samples_per_second": 9.88, + "eval_steps_per_second": 9.88, + "step": 300 + } + ], + "logging_steps": 25, + "max_steps": 774, + "num_input_tokens_seen": 0, + "num_train_epochs": 3, + "save_steps": 100, + "stateful_callbacks": { + "EarlyStoppingCallback": { + "args": { + "early_stopping_patience": 3, + "early_stopping_threshold": 0.0 + }, + "attributes": { + "early_stopping_patience_counter": 2 + } + }, + "TrainerControl": { + "args": { + "should_epoch_stop": false, + "should_evaluate": false, + "should_log": false, + "should_save": true, + "should_training_stop": false + }, + "attributes": {} + } + }, + "total_flos": 1.609857544339323e+17, + "train_batch_size": 1, + "trial_name": null, + "trial_params": null +} diff --git a/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-300/training_args.bin b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-300/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..c472bbe549d517f9f19f2c10077c672bdcaee5f2 --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-300/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d01aa2850197d71abb4ce801639374be528686772e506ff6dcc138d8b2f548f7 +size 5777 diff --git a/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-400/chat_template.jinja b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-400/chat_template.jinja new file mode 100644 index 0000000000000000000000000000000000000000..923ec253c8dbefbb41cf084db7251df41d000f6d --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-400/chat_template.jinja @@ -0,0 +1,4 @@ +{{ bos_token }}{% if messages[0]['role'] == 'system' %}{{ raise_exception('System role not supported') }}{% endif %}{% for message in messages %}{% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %}{{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }}{% endif %}{% if (message['role'] == 'assistant') %}{% set role = 'model' %}{% else %}{% set role = message['role'] %}{% endif %}{{ '' + role + ' +' + message['content'] | trim + ' +' }}{% endfor %}{% if add_generation_prompt %}{{'model +'}}{% 
endif %} \ No newline at end of file diff --git a/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-400/config.json b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-400/config.json new file mode 100644 index 0000000000000000000000000000000000000000..2b1c61ddb476d5def6121706e2d418bbe700f4f5 --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-400/config.json @@ -0,0 +1,33 @@ +{ + "architectures": [ + "Gemma2ForCausalLM" + ], + "attention_bias": false, + "attention_dropout": 0.0, + "attn_logit_softcapping": 50.0, + "bos_token_id": 2, + "cache_implementation": "hybrid", + "eos_token_id": 1, + "final_logit_softcapping": 30.0, + "head_dim": 256, + "hidden_act": "gelu_pytorch_tanh", + "hidden_activation": "gelu_pytorch_tanh", + "hidden_size": 3584, + "initializer_range": 0.02, + "intermediate_size": 14336, + "max_position_embeddings": 8192, + "model_type": "gemma2", + "num_attention_heads": 16, + "num_hidden_layers": 42, + "num_key_value_heads": 8, + "pad_token_id": 0, + "query_pre_attn_scalar": 256, + "rms_norm_eps": 1e-06, + "rope_theta": 10000.0, + "sliding_window": 4096, + "sliding_window_size": 4096, + "torch_dtype": "bfloat16", + "transformers_version": "4.52.4", + "use_cache": false, + "vocab_size": 256000 +} diff --git a/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-400/generation_config.json b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-400/generation_config.json new file mode 100644 index 0000000000000000000000000000000000000000..7c4f1b1f4d5abfaef8f03bbf8a4681ad628ceb70 --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-400/generation_config.json @@ -0,0 +1,8 @@ +{ + "_from_model_config": true, + "bos_token_id": 2, + "cache_implementation": "hybrid", + "eos_token_id": 1, + "pad_token_id": 0, + "transformers_version": "4.52.4" +} diff --git a/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-400/model-00001-of-00004.safetensors b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-400/model-00001-of-00004.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..17042eaf39f453174a2251d3e2de2115f3f095e5 --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-400/model-00001-of-00004.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:afff2eeb3fd03674c8eb4ec3496aec2cff6c8cda9ae768fea09331e427b87923 +size 4903351912 diff --git a/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-400/model-00002-of-00004.safetensors b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-400/model-00002-of-00004.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..0216f4561ead9aaf7884827c6cecdba6e322c09c --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-400/model-00002-of-00004.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:91ee71af6367a23880226c27bb72b38de7558aa9b8c4e57ad744527bc166e241 +size 4947570872 diff --git a/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-400/model-00003-of-00004.safetensors b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-400/model-00003-of-00004.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..6842e2664f0da4a7d02d8b818f99defa9138bf43 --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-400/model-00003-of-00004.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:beb9ac0fd197e4460d29fa230ff1db64da38ce3c94cd4e4e60959f3be5718dd2 +size 4962221464 diff --git a/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-400/model-00004-of-00004.safetensors b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-400/model-00004-of-00004.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..8b66f2afea07f0b52d881dde26747a52efebfa8c --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-400/model-00004-of-00004.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:1148f716544c3c4067f701c77c4abdabb483c6444e89ccc636960ea03dbaacd7 +size 3670322200 diff --git a/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-400/model.safetensors.index.json b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-400/model.safetensors.index.json new file mode 100644 index 0000000000000000000000000000000000000000..43bf0e426e9bf12945463de98856c221831faf66 --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-400/model.safetensors.index.json @@ -0,0 +1,471 @@ +{ + "metadata": { + "total_size": 18483411968 + }, + "weight_map": { + "model.embed_tokens.weight": "model-00001-of-00004.safetensors", + "model.layers.0.input_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.0.post_feedforward_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.0.pre_feedforward_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.input_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.1.post_feedforward_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.1.pre_feedforward_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.10.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.10.post_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + 
"model.layers.10.pre_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.10.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.11.post_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.11.pre_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.11.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.12.post_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.12.pre_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.12.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.13.post_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.13.pre_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.13.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + 
"model.layers.14.post_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.14.pre_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.14.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.15.post_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.15.pre_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.15.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.16.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.16.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.16.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.16.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.16.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.16.post_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.16.pre_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.16.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.16.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.16.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.16.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.17.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.17.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.17.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.17.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.17.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.17.post_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.17.pre_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.17.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.17.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.17.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.17.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.18.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.18.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.18.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.18.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + 
"model.layers.18.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.18.post_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.18.pre_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.18.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.18.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.18.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.18.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.19.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.19.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.19.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.19.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.19.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.19.post_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.19.pre_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.19.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.19.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.19.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.19.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.2.input_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.2.post_feedforward_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.2.pre_feedforward_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.20.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.20.post_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.20.pre_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.20.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.20.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.20.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.20.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.21.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.21.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.21.post_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.21.pre_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.21.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.22.post_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.22.pre_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.22.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.23.post_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.23.pre_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.23.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.24.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.24.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.24.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.24.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.24.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.24.post_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.24.pre_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.24.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.24.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.24.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.24.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.25.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.25.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.25.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.25.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.25.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.25.post_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.25.pre_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.25.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.25.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.25.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.25.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.26.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.26.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.26.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.26.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.26.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.26.post_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.26.pre_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.26.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.26.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.26.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.26.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.27.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.27.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.27.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.27.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.27.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.27.post_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.27.pre_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.27.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.27.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.27.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.27.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.28.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.28.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.28.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.28.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.28.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.28.post_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.28.pre_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.28.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.28.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.28.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.28.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.29.input_layernorm.weight": "model-00003-of-00004.safetensors", + 
"model.layers.29.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.29.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.29.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.29.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.29.post_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.29.pre_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.29.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.29.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.29.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.29.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.3.input_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.3.post_feedforward_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.3.pre_feedforward_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.30.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.30.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.30.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.30.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.30.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.30.post_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.30.pre_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.30.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.30.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.30.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.30.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.31.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.31.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.31.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.31.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.31.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.31.post_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.31.pre_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.31.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.31.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.31.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.31.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.32.input_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.32.mlp.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.32.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.32.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.32.post_attention_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.32.post_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.32.pre_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.32.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.32.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.32.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.32.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.33.input_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.33.mlp.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.33.mlp.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.33.mlp.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.33.post_attention_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.33.post_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.33.pre_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.33.self_attn.k_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.33.self_attn.o_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.33.self_attn.q_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.33.self_attn.v_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.34.input_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.34.mlp.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.34.mlp.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.34.mlp.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.34.post_attention_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.34.post_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.34.pre_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.34.self_attn.k_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.34.self_attn.o_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.34.self_attn.q_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.34.self_attn.v_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.35.input_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.35.mlp.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.35.mlp.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.35.mlp.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.35.post_attention_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.35.post_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.35.pre_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.35.self_attn.k_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.35.self_attn.o_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.35.self_attn.q_proj.weight": "model-00004-of-00004.safetensors", + 
"model.layers.35.self_attn.v_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.36.input_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.36.mlp.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.36.mlp.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.36.mlp.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.36.post_attention_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.36.post_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.36.pre_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.36.self_attn.k_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.36.self_attn.o_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.36.self_attn.q_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.36.self_attn.v_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.37.input_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.37.mlp.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.37.mlp.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.37.mlp.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.37.post_attention_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.37.post_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.37.pre_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.37.self_attn.k_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.37.self_attn.o_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.37.self_attn.q_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.37.self_attn.v_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.38.input_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.38.mlp.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.38.mlp.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.38.mlp.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.38.post_attention_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.38.post_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.38.pre_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.38.self_attn.k_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.38.self_attn.o_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.38.self_attn.q_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.38.self_attn.v_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.39.input_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.39.mlp.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.39.mlp.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.39.mlp.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.39.post_attention_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.39.post_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.39.pre_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.39.self_attn.k_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.39.self_attn.o_proj.weight": "model-00004-of-00004.safetensors", + 
"model.layers.39.self_attn.q_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.39.self_attn.v_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.4.input_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.4.post_feedforward_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.4.pre_feedforward_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.40.input_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.40.mlp.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.40.mlp.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.40.mlp.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.40.post_attention_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.40.post_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.40.pre_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.40.self_attn.k_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.40.self_attn.o_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.40.self_attn.q_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.40.self_attn.v_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.41.input_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.41.mlp.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.41.mlp.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.41.mlp.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.41.post_attention_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.41.post_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.41.pre_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.41.self_attn.k_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.41.self_attn.o_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.41.self_attn.q_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.41.self_attn.v_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.5.input_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.5.post_feedforward_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.5.pre_feedforward_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00004.safetensors", + 
"model.layers.5.self_attn.o_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.input_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.6.post_feedforward_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.6.pre_feedforward_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.7.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.7.mlp.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.7.post_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.7.pre_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.8.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.8.post_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.8.pre_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.8.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.9.post_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.9.pre_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.9.self_attn.k_proj.weight": 
"model-00002-of-00004.safetensors", + "model.layers.9.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.norm.weight": "model-00004-of-00004.safetensors" + } +} diff --git a/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-400/optimizer.pt b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-400/optimizer.pt new file mode 100644 index 0000000000000000000000000000000000000000..b8da3bff49788e881b502442abe2622dfc053d50 --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-400/optimizer.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:07d4b386c738d094807e629cdee55c786c5702dc0a6570b47d3c05af31d27717 +size 18776304013 diff --git a/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-400/rng_state.pth b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-400/rng_state.pth new file mode 100644 index 0000000000000000000000000000000000000000..1e385ace93ad48efb33fbe42c93cd2ae538e1f75 --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-400/rng_state.pth @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:9fd9432b50864b1799d071a2391f13a188cc959e985675ab69fd688672db2853 +size 14645 diff --git a/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-400/scheduler.pt b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-400/scheduler.pt new file mode 100644 index 0000000000000000000000000000000000000000..4c0f1adc99b567442b6a388e08600c4d48cf76c4 --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-400/scheduler.pt @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:a9b08bfc330b53aba72dd6e10fe131aac1060248a0629484ab35abe30890ac23 +size 1465 diff --git a/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-400/special_tokens_map.json b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-400/special_tokens_map.json new file mode 100644 index 0000000000000000000000000000000000000000..8d6368f7e735fbe4781bf6e956b7c6ad0586df80 --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-400/special_tokens_map.json @@ -0,0 +1,34 @@ +{ + "additional_special_tokens": [ + "", + "" + ], + "bos_token": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + }, + "eos_token": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + }, + "pad_token": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + }, + "unk_token": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + } +} diff --git a/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-400/tokenizer.json b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-400/tokenizer.json new file mode 100644 index 0000000000000000000000000000000000000000..bda55fd2796bc3a106b77e25610a354c8a69b338 --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-400/tokenizer.json @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e6ce83119bb404f7f0a6e621b76759d476357dcd01241a90f9ca136ae2b3c11c +size 34362972 diff --git a/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-400/tokenizer.model 
b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-400/tokenizer.model new file mode 100644 index 0000000000000000000000000000000000000000..796efe9ab515c15e146ce7588e6d7b9b8134dbf8 --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-400/tokenizer.model @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:61a7b147390c64585d6c3543dd6fc636906c9af3865a5548f27f31aee1d4c8e2 +size 4241003 diff --git a/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-400/tokenizer_config.json b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-400/tokenizer_config.json new file mode 100644 index 0000000000000000000000000000000000000000..3ade6be57196932063a48c1329fbf9c2e7e154d1 --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-400/tokenizer_config.json @@ -0,0 +1,2013 @@ +{ + "add_bos_token": true, + "add_eos_token": false, + "added_tokens_decoder": { + "0": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "1": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "2": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "3": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "4": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "5": { + "content": "<2mass>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "6": { + "content": "[@BOS@]", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "7": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "8": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "9": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "10": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "11": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "12": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "13": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "14": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "15": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "16": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "17": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "18": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "19": { + "content": "", + 
"lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "20": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "21": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "22": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "23": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "24": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "25": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "26": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "27": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "28": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "29": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "30": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "31": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "32": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "33": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "34": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "35": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "36": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "37": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "38": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "39": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "40": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "41": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "42": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "43": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "44": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "45": { + "content": "", + "lstrip": false, + 
"normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "46": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "47": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "48": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "49": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "50": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "51": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "52": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "53": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "54": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "55": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "56": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "57": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "58": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "59": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "60": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "61": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "62": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "63": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "64": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "65": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "66": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "67": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "68": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "69": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "70": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "71": { + "content": "", + "lstrip": false, + "normalized": 
false, + "rstrip": false, + "single_word": false, + "special": false + }, + "72": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "73": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "74": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "75": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "76": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "77": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "78": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "79": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "80": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "81": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "82": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "83": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "84": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "85": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "86": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "87": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "88": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "89": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "90": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "91": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "92": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "93": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "94": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "95": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "96": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "97": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": 
false, + "single_word": false, + "special": false + }, + "98": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "99": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "100": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "101": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "102": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "103": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "104": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "105": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "106": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "107": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "108": { + "content": "\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "109": { + "content": "\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "110": { + "content": "\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "111": { + "content": "\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "112": { + "content": "\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "113": { + "content": "\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "114": { + "content": "\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "115": { + "content": "\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "116": { + "content": "\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "117": { + "content": "\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "118": { + "content": "\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "119": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "120": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "121": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + 
"122": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "123": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "124": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "125": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "126": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "127": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "128": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "129": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "130": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "131": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "132": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "133": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "134": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "135": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "136": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "137": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "138": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "139": { + "content": "▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "140": { + "content": "▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "141": { + "content": "▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "142": { + "content": 
"▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "143": { + "content": "▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "144": { + "content": "▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "145": { + "content": "▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "146": { + "content": "▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "147": { + "content": "▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "148": { + "content": "▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "149": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "150": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "151": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "152": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "153": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "154": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "155": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "156": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "157": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "158": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "159": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "160": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "161": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "162": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "163": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "164": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "165": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + 
"lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "166": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "167": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "168": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "169": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "170": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "172": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "173": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "174": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "175": { + "content": "
", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "171": { + "content": "
", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "176": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "177": { + "content": "
", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "178": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "179": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "180": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "181": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "182": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "183": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "184": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "185": { + "content": "

", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "186": { + "content": "

", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "187": { + "content": "

", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "188": { + "content": "

", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "189": { + "content": "

", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "190": { + "content": "
", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "191": { + "content": "
", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "192": { + "content": "
", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "193": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "194": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "195": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "196": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "197": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "198": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "199": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "200": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "201": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "202": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "203": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "204": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "205": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "206": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "207": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "208": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "209": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "210": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "211": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "212": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "213": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "214": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "215": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "216": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255968": { + "content": "[toxicity=0]", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + 
"255969": { + "content": "\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255970": { + "content": "\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255971": { + "content": "\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255972": { + "content": "\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255973": { + "content": "\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255974": { + "content": "\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255975": { + "content": "\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255976": { + "content": "\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255977": { + "content": "\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255978": { + "content": "\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255979": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255980": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255981": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255982": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255983": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255984": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255985": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255986": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255987": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255988": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255989": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255990": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": 
false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255991": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255992": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255993": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255994": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255995": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255996": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255997": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255998": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255999": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + } + }, + "additional_special_tokens": [ + "", + "" + ], + "bos_token": "", + "clean_up_tokenization_spaces": false, + "eos_token": "", + "extra_special_tokens": {}, + "model_max_length": 1000000000000000019884624838656, + "pad_token": "", + "sp_model_kwargs": {}, + "spaces_between_special_tokens": false, + "tokenizer_class": "GemmaTokenizer", + "unk_token": "", + "use_default_system_prompt": false +} diff --git a/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-400/trainer_state.json b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-400/trainer_state.json new file mode 100644 index 0000000000000000000000000000000000000000..91df8132ec0453b5d486d69565499e53e2f9aa90 --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-400/trainer_state.json @@ -0,0 +1,187 @@ +{ + "best_global_step": null, + "best_metric": Infinity, + "best_model_checkpoint": null, + "epoch": 1.5515901917941248, + "eval_steps": 100, + "global_step": 400, + "is_hyper_param_search": false, + "is_local_process_zero": true, + "is_world_process_zero": true, + "log_history": [ + { + "epoch": 0.09711094925952901, + "grad_norm": 9.0, + "learning_rate": 2e-05, + "loss": 1.5677, + "step": 25 + }, + { + "epoch": 0.19422189851905802, + "grad_norm": 9.0625, + "learning_rate": 1.9333333333333333e-05, + "loss": 1.241, + "step": 50 + }, + { + "epoch": 0.29133284777858703, + "grad_norm": 6.96875, + "learning_rate": 1.866666666666667e-05, + "loss": 1.2122, + "step": 75 + }, + { + "epoch": 0.38844379703811605, + "grad_norm": 7.875, + "learning_rate": 1.8e-05, + "loss": 1.1718, + "step": 100 + }, + { + "epoch": 0.38844379703811605, + "eval_loss": NaN, + "eval_runtime": 22.1669, + "eval_samples_per_second": 9.789, + "eval_steps_per_second": 9.789, + "step": 100 + }, + 
{ + "epoch": 0.48555474629764506, + "grad_norm": 7.71875, + "learning_rate": 1.7333333333333336e-05, + "loss": 1.1665, + "step": 125 + }, + { + "epoch": 0.5826656955571741, + "grad_norm": 7.5, + "learning_rate": 1.6666666666666667e-05, + "loss": 1.1739, + "step": 150 + }, + { + "epoch": 0.6797766448167031, + "grad_norm": 6.875, + "learning_rate": 1.6000000000000003e-05, + "loss": 1.1071, + "step": 175 + }, + { + "epoch": 0.7768875940762321, + "grad_norm": 8.6875, + "learning_rate": 1.5333333333333334e-05, + "loss": 1.104, + "step": 200 + }, + { + "epoch": 0.7768875940762321, + "eval_loss": NaN, + "eval_runtime": 22.2611, + "eval_samples_per_second": 9.748, + "eval_steps_per_second": 9.748, + "step": 200 + }, + { + "epoch": 0.8739985433357611, + "grad_norm": 6.03125, + "learning_rate": 1.4666666666666666e-05, + "loss": 1.0936, + "step": 225 + }, + { + "epoch": 0.9711094925952901, + "grad_norm": 7.65625, + "learning_rate": 1.4e-05, + "loss": 1.0604, + "step": 250 + }, + { + "epoch": 1.0660354454964798, + "grad_norm": 8.5625, + "learning_rate": 1.3333333333333333e-05, + "loss": 0.6133, + "step": 275 + }, + { + "epoch": 1.1631463947560088, + "grad_norm": 7.0625, + "learning_rate": 1.2666666666666667e-05, + "loss": 0.3797, + "step": 300 + }, + { + "epoch": 1.1631463947560088, + "eval_loss": NaN, + "eval_runtime": 21.9634, + "eval_samples_per_second": 9.88, + "eval_steps_per_second": 9.88, + "step": 300 + }, + { + "epoch": 1.2602573440155378, + "grad_norm": 6.9375, + "learning_rate": 1.2e-05, + "loss": 0.3776, + "step": 325 + }, + { + "epoch": 1.3573682932750668, + "grad_norm": 7.625, + "learning_rate": 1.1333333333333334e-05, + "loss": 0.3634, + "step": 350 + }, + { + "epoch": 1.4544792425345958, + "grad_norm": 6.375, + "learning_rate": 1.0666666666666667e-05, + "loss": 0.3604, + "step": 375 + }, + { + "epoch": 1.5515901917941248, + "grad_norm": 7.0, + "learning_rate": 1e-05, + "loss": 0.3631, + "step": 400 + }, + { + "epoch": 1.5515901917941248, + "eval_loss": NaN, + "eval_runtime": 22.1023, + "eval_samples_per_second": 9.818, + "eval_steps_per_second": 9.818, + "step": 400 + } + ], + "logging_steps": 25, + "max_steps": 774, + "num_input_tokens_seen": 0, + "num_train_epochs": 3, + "save_steps": 100, + "stateful_callbacks": { + "EarlyStoppingCallback": { + "args": { + "early_stopping_patience": 3, + "early_stopping_threshold": 0.0 + }, + "attributes": { + "early_stopping_patience_counter": 3 + } + }, + "TrainerControl": { + "args": { + "should_epoch_stop": false, + "should_evaluate": false, + "should_log": false, + "should_save": true, + "should_training_stop": true + }, + "attributes": {} + } + }, + "total_flos": 2.1461487522275942e+17, + "train_batch_size": 1, + "trial_name": null, + "trial_params": null +} diff --git a/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-400/training_args.bin b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-400/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..c472bbe549d517f9f19f2c10077c672bdcaee5f2 --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/merged/checkpoint-400/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d01aa2850197d71abb4ce801639374be528686772e506ff6dcc138d8b2f548f7 +size 5777 diff --git a/gemma2/gemma2_9b__fft__instruct__masked/merged/config.json b/gemma2/gemma2_9b__fft__instruct__masked/merged/config.json new file mode 100644 index 0000000000000000000000000000000000000000..2b1c61ddb476d5def6121706e2d418bbe700f4f5 --- /dev/null +++ 
b/gemma2/gemma2_9b__fft__instruct__masked/merged/config.json @@ -0,0 +1,33 @@ +{ + "architectures": [ + "Gemma2ForCausalLM" + ], + "attention_bias": false, + "attention_dropout": 0.0, + "attn_logit_softcapping": 50.0, + "bos_token_id": 2, + "cache_implementation": "hybrid", + "eos_token_id": 1, + "final_logit_softcapping": 30.0, + "head_dim": 256, + "hidden_act": "gelu_pytorch_tanh", + "hidden_activation": "gelu_pytorch_tanh", + "hidden_size": 3584, + "initializer_range": 0.02, + "intermediate_size": 14336, + "max_position_embeddings": 8192, + "model_type": "gemma2", + "num_attention_heads": 16, + "num_hidden_layers": 42, + "num_key_value_heads": 8, + "pad_token_id": 0, + "query_pre_attn_scalar": 256, + "rms_norm_eps": 1e-06, + "rope_theta": 10000.0, + "sliding_window": 4096, + "sliding_window_size": 4096, + "torch_dtype": "bfloat16", + "transformers_version": "4.52.4", + "use_cache": false, + "vocab_size": 256000 +} diff --git a/gemma2/gemma2_9b__fft__instruct__masked/merged/generation_config.json b/gemma2/gemma2_9b__fft__instruct__masked/merged/generation_config.json new file mode 100644 index 0000000000000000000000000000000000000000..7c4f1b1f4d5abfaef8f03bbf8a4681ad628ceb70 --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/merged/generation_config.json @@ -0,0 +1,8 @@ +{ + "_from_model_config": true, + "bos_token_id": 2, + "cache_implementation": "hybrid", + "eos_token_id": 1, + "pad_token_id": 0, + "transformers_version": "4.52.4" +} diff --git a/gemma2/gemma2_9b__fft__instruct__masked/merged/model-00001-of-00004.safetensors b/gemma2/gemma2_9b__fft__instruct__masked/merged/model-00001-of-00004.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..17042eaf39f453174a2251d3e2de2115f3f095e5 --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/merged/model-00001-of-00004.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:afff2eeb3fd03674c8eb4ec3496aec2cff6c8cda9ae768fea09331e427b87923 +size 4903351912 diff --git a/gemma2/gemma2_9b__fft__instruct__masked/merged/model-00002-of-00004.safetensors b/gemma2/gemma2_9b__fft__instruct__masked/merged/model-00002-of-00004.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..0216f4561ead9aaf7884827c6cecdba6e322c09c --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/merged/model-00002-of-00004.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:91ee71af6367a23880226c27bb72b38de7558aa9b8c4e57ad744527bc166e241 +size 4947570872 diff --git a/gemma2/gemma2_9b__fft__instruct__masked/merged/model-00003-of-00004.safetensors b/gemma2/gemma2_9b__fft__instruct__masked/merged/model-00003-of-00004.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..6842e2664f0da4a7d02d8b818f99defa9138bf43 --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/merged/model-00003-of-00004.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:beb9ac0fd197e4460d29fa230ff1db64da38ce3c94cd4e4e60959f3be5718dd2 +size 4962221464 diff --git a/gemma2/gemma2_9b__fft__instruct__masked/merged/model-00004-of-00004.safetensors b/gemma2/gemma2_9b__fft__instruct__masked/merged/model-00004-of-00004.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..8b66f2afea07f0b52d881dde26747a52efebfa8c --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/merged/model-00004-of-00004.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:1148f716544c3c4067f701c77c4abdabb483c6444e89ccc636960ea03dbaacd7 +size 3670322200 diff --git a/gemma2/gemma2_9b__fft__instruct__masked/merged/model.safetensors.index.json b/gemma2/gemma2_9b__fft__instruct__masked/merged/model.safetensors.index.json new file mode 100644 index 0000000000000000000000000000000000000000..43bf0e426e9bf12945463de98856c221831faf66 --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/merged/model.safetensors.index.json @@ -0,0 +1,471 @@ +{ + "metadata": { + "total_size": 18483411968 + }, + "weight_map": { + "model.embed_tokens.weight": "model-00001-of-00004.safetensors", + "model.layers.0.input_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.mlp.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.0.post_feedforward_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.0.pre_feedforward_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.input_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.mlp.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.1.post_feedforward_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.1.pre_feedforward_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.10.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.10.post_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.10.pre_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.10.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.10.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + 
"model.layers.11.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.11.post_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.11.pre_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.11.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.11.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.12.post_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.12.pre_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.12.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.12.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.13.post_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.13.pre_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.13.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.13.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.14.post_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.14.pre_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.14.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.14.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + 
"model.layers.15.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.15.post_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.15.pre_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.15.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.15.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.16.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.16.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.16.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.16.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.16.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.16.post_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.16.pre_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.16.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.16.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.16.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.16.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.17.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.17.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.17.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.17.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.17.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.17.post_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.17.pre_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.17.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.17.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.17.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.17.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.18.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.18.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.18.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.18.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.18.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.18.post_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.18.pre_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.18.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.18.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.18.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.18.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.19.input_layernorm.weight": "model-00002-of-00004.safetensors", + 
"model.layers.19.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.19.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.19.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.19.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.19.post_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.19.pre_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.19.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.19.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.19.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.19.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.2.input_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.mlp.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.2.post_feedforward_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.2.pre_feedforward_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.20.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.20.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.20.post_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.20.pre_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.20.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.20.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.20.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.20.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.21.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.21.post_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.21.pre_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.21.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.21.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.22.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.22.post_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.22.pre_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.22.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.22.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.23.post_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.23.pre_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.23.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.23.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.24.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.24.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.24.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.24.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.24.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.24.post_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.24.pre_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.24.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.24.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.24.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.24.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.25.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.25.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.25.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.25.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.25.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.25.post_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.25.pre_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.25.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.25.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.25.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.25.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.26.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.26.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.26.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.26.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.26.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.26.post_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.26.pre_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.26.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.26.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.26.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.26.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.27.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.27.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.27.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.27.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.27.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.27.post_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.27.pre_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.27.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.27.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.27.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.27.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.28.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.28.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.28.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.28.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.28.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.28.post_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.28.pre_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.28.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.28.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.28.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.28.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.29.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.29.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.29.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.29.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.29.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.29.post_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.29.pre_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.29.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.29.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.29.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.29.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.3.input_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.mlp.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.3.post_feedforward_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.3.pre_feedforward_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.30.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.30.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.30.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.30.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.30.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.30.post_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.30.pre_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.30.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.30.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.30.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.30.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.31.input_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.31.mlp.down_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.31.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.31.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.31.post_attention_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.31.post_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.31.pre_feedforward_layernorm.weight": "model-00003-of-00004.safetensors", + "model.layers.31.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.31.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.31.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.31.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.32.input_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.32.mlp.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.32.mlp.gate_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.32.mlp.up_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.32.post_attention_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.32.post_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.32.pre_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.32.self_attn.k_proj.weight": "model-00003-of-00004.safetensors", + 
"model.layers.32.self_attn.o_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.32.self_attn.q_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.32.self_attn.v_proj.weight": "model-00003-of-00004.safetensors", + "model.layers.33.input_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.33.mlp.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.33.mlp.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.33.mlp.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.33.post_attention_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.33.post_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.33.pre_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.33.self_attn.k_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.33.self_attn.o_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.33.self_attn.q_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.33.self_attn.v_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.34.input_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.34.mlp.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.34.mlp.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.34.mlp.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.34.post_attention_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.34.post_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.34.pre_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.34.self_attn.k_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.34.self_attn.o_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.34.self_attn.q_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.34.self_attn.v_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.35.input_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.35.mlp.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.35.mlp.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.35.mlp.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.35.post_attention_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.35.post_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.35.pre_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.35.self_attn.k_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.35.self_attn.o_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.35.self_attn.q_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.35.self_attn.v_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.36.input_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.36.mlp.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.36.mlp.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.36.mlp.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.36.post_attention_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.36.post_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.36.pre_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + 
"model.layers.36.self_attn.k_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.36.self_attn.o_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.36.self_attn.q_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.36.self_attn.v_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.37.input_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.37.mlp.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.37.mlp.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.37.mlp.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.37.post_attention_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.37.post_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.37.pre_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.37.self_attn.k_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.37.self_attn.o_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.37.self_attn.q_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.37.self_attn.v_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.38.input_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.38.mlp.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.38.mlp.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.38.mlp.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.38.post_attention_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.38.post_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.38.pre_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.38.self_attn.k_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.38.self_attn.o_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.38.self_attn.q_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.38.self_attn.v_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.39.input_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.39.mlp.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.39.mlp.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.39.mlp.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.39.post_attention_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.39.post_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.39.pre_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.39.self_attn.k_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.39.self_attn.o_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.39.self_attn.q_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.39.self_attn.v_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.4.input_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.mlp.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.4.post_feedforward_layernorm.weight": "model-00001-of-00004.safetensors", + 
"model.layers.4.pre_feedforward_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.40.input_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.40.mlp.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.40.mlp.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.40.mlp.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.40.post_attention_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.40.post_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.40.pre_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.40.self_attn.k_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.40.self_attn.o_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.40.self_attn.q_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.40.self_attn.v_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.41.input_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.41.mlp.down_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.41.mlp.gate_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.41.mlp.up_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.41.post_attention_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.41.post_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.41.pre_feedforward_layernorm.weight": "model-00004-of-00004.safetensors", + "model.layers.41.self_attn.k_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.41.self_attn.o_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.41.self_attn.q_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.41.self_attn.v_proj.weight": "model-00004-of-00004.safetensors", + "model.layers.5.input_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.mlp.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.5.post_feedforward_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.5.pre_feedforward_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.input_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.down_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.mlp.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00004.safetensors", + 
"model.layers.6.post_feedforward_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.6.pre_feedforward_layernorm.weight": "model-00001-of-00004.safetensors", + "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.7.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.7.mlp.gate_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.mlp.up_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.7.post_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.7.pre_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00004.safetensors", + "model.layers.8.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.8.post_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.8.pre_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.8.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.8.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.input_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.down_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.gate_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.mlp.up_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.post_attention_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.9.post_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.9.pre_feedforward_layernorm.weight": "model-00002-of-00004.safetensors", + "model.layers.9.self_attn.k_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.self_attn.o_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.self_attn.q_proj.weight": "model-00002-of-00004.safetensors", + "model.layers.9.self_attn.v_proj.weight": "model-00002-of-00004.safetensors", + "model.norm.weight": "model-00004-of-00004.safetensors" + } +} diff --git a/gemma2/gemma2_9b__fft__instruct__masked/merged/special_tokens_map.json b/gemma2/gemma2_9b__fft__instruct__masked/merged/special_tokens_map.json new file mode 100644 index 0000000000000000000000000000000000000000..8d6368f7e735fbe4781bf6e956b7c6ad0586df80 --- /dev/null +++ 
b/gemma2/gemma2_9b__fft__instruct__masked/merged/special_tokens_map.json @@ -0,0 +1,34 @@ +{ + "additional_special_tokens": [ + "", + "" + ], + "bos_token": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + }, + "eos_token": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + }, + "pad_token": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + }, + "unk_token": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false + } +} diff --git a/gemma2/gemma2_9b__fft__instruct__masked/merged/tokenizer.json b/gemma2/gemma2_9b__fft__instruct__masked/merged/tokenizer.json new file mode 100644 index 0000000000000000000000000000000000000000..bda55fd2796bc3a106b77e25610a354c8a69b338 --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/merged/tokenizer.json @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:e6ce83119bb404f7f0a6e621b76759d476357dcd01241a90f9ca136ae2b3c11c +size 34362972 diff --git a/gemma2/gemma2_9b__fft__instruct__masked/merged/tokenizer.model b/gemma2/gemma2_9b__fft__instruct__masked/merged/tokenizer.model new file mode 100644 index 0000000000000000000000000000000000000000..796efe9ab515c15e146ce7588e6d7b9b8134dbf8 --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/merged/tokenizer.model @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:61a7b147390c64585d6c3543dd6fc636906c9af3865a5548f27f31aee1d4c8e2 +size 4241003 diff --git a/gemma2/gemma2_9b__fft__instruct__masked/merged/tokenizer_config.json b/gemma2/gemma2_9b__fft__instruct__masked/merged/tokenizer_config.json new file mode 100644 index 0000000000000000000000000000000000000000..3ade6be57196932063a48c1329fbf9c2e7e154d1 --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/merged/tokenizer_config.json @@ -0,0 +1,2013 @@ +{ + "add_bos_token": true, + "add_eos_token": false, + "added_tokens_decoder": { + "0": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "1": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "2": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "3": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "4": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "5": { + "content": "<2mass>", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "6": { + "content": "[@BOS@]", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "7": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "8": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "9": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "10": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + 
"11": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "12": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "13": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "14": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "15": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "16": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "17": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "18": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "19": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "20": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "21": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "22": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "23": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "24": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "25": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "26": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "27": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "28": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "29": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "30": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "31": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "32": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "33": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "34": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "35": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "36": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "37": { + 
"content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "38": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "39": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "40": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "41": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "42": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "43": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "44": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "45": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "46": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "47": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "48": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "49": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "50": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "51": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "52": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "53": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "54": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "55": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "56": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "57": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "58": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "59": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "60": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "61": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "62": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "63": { + "content": "", + 
"lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "64": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "65": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "66": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "67": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "68": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "69": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "70": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "71": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "72": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "73": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "74": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "75": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "76": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "77": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "78": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "79": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "80": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "81": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "82": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "83": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "84": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "85": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "86": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "87": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "88": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "89": { + "content": "", + "lstrip": false, + 
"normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "90": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "91": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "92": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "93": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "94": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "95": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "96": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "97": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "98": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "99": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "100": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "101": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "102": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "103": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "104": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "105": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "106": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "107": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": true + }, + "108": { + "content": "\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "109": { + "content": "\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "110": { + "content": "\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "111": { + "content": "\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "112": { + "content": "\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "113": { + "content": "\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "114": { + "content": "\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + 
}, + "115": { + "content": "\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "116": { + "content": "\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "117": { + "content": "\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "118": { + "content": "\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "119": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "120": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "121": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "122": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "123": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "124": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "125": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "126": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "127": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "128": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "129": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "130": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "131": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "132": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "133": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "134": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "135": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + 
"normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "136": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "137": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "138": { + "content": "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "139": { + "content": "▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "140": { + "content": "▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "141": { + "content": "▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "142": { + "content": "▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "143": { + "content": "▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "144": { + "content": "▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "145": { + "content": "▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "146": { + "content": "▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "147": { + "content": "▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "148": { + "content": "▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "149": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "150": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "151": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "152": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "153": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "154": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "155": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "156": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "157": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "158": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": 
false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "159": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "160": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "161": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "162": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "163": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "164": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "165": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "166": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "167": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "168": { + "content": "▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁▁", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "169": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "170": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "172": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "173": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "174": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "175": { + "content": "
", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "171": { + "content": "
", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "176": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "177": { + "content": "
", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "178": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "179": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "180": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "181": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "182": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "183": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "184": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "185": { + "content": "

", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "186": { + "content": "

", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "187": { + "content": "

", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "188": { + "content": "

", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "189": { + "content": "

", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "190": { + "content": "
", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "191": { + "content": "
", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "192": { + "content": "
", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "193": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "194": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "195": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "196": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "197": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "198": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "199": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "200": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "201": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "202": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "203": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "204": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "205": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "206": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "207": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "208": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "209": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "210": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "211": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "212": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "213": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "214": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "215": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "216": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255968": { + "content": "[toxicity=0]", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + 
"255969": { + "content": "\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255970": { + "content": "\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255971": { + "content": "\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255972": { + "content": "\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255973": { + "content": "\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255974": { + "content": "\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255975": { + "content": "\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255976": { + "content": "\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255977": { + "content": "\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255978": { + "content": "\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255979": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255980": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255981": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255982": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255983": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255984": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255985": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255986": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255987": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255988": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255989": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255990": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": 
false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255991": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255992": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255993": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255994": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255995": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255996": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255997": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255998": { + "content": "\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + }, + "255999": { + "content": "", + "lstrip": false, + "normalized": false, + "rstrip": false, + "single_word": false, + "special": false + } + }, + "additional_special_tokens": [ + "", + "" + ], + "bos_token": "", + "clean_up_tokenization_spaces": false, + "eos_token": "", + "extra_special_tokens": {}, + "model_max_length": 1000000000000000019884624838656, + "pad_token": "", + "sp_model_kwargs": {}, + "spaces_between_special_tokens": false, + "tokenizer_class": "GemmaTokenizer", + "unk_token": "", + "use_default_system_prompt": false +} diff --git a/gemma2/gemma2_9b__fft__instruct__masked/merged/training_args.bin b/gemma2/gemma2_9b__fft__instruct__masked/merged/training_args.bin new file mode 100644 index 0000000000000000000000000000000000000000..c472bbe549d517f9f19f2c10077c672bdcaee5f2 --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/merged/training_args.bin @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:d01aa2850197d71abb4ce801639374be528686772e506ff6dcc138d8b2f548f7 +size 5777 diff --git a/gemma2/gemma2_9b__fft__instruct__masked/training_complete.flag b/gemma2/gemma2_9b__fft__instruct__masked/training_complete.flag new file mode 100644 index 0000000000000000000000000000000000000000..8b46a0256f676483fb4d4662826e8e6e0650b727 --- /dev/null +++ b/gemma2/gemma2_9b__fft__instruct__masked/training_complete.flag @@ -0,0 +1 @@ +Completed in 79.5 min