```json
{
  "model_type": "phi3",
  "architectures": [
    "Phi3ForCausalLM"
  ],
  "attention_bias": false,
  "attention_dropout": 0.0,
  "auto_map": {
    "AutoConfig": "microsoft/Phi-3-mini-128k-instruct--configuration_phi3.Phi3Config",
    "AutoModelForCausalLM": "microsoft/Phi-3-mini-128k-instruct--modeling_phi3.Phi3ForCausalLM"
  },
  "bos_token_id": 1,
  "embd_pdrop": 0.0,
  "eos_token_id": 32000,
  "hidden_act": "silu",
  "hidden_size": 3072,
  "initializer_range": 0.02,
  "intermediate_size": 8192,
  "max_position_embeddings": 131072,
  "num_hidden_layers": 32,
  "original_max_position_embeddings": 4096,
  "pad_token_id": 32000,
  "resid_pdrop": 0.0,
  "rms_norm_eps": 1e-05,
  "rope_scaling": {
    "long_factor": [
      1.0700000524520874,
      1.1200000047683716,
      1.149999976158142,
      1.4199999570846558,
      1.5699999332427979,
      1.7999999523162842,
      2.129999876022339,
      2.129999876022339,
      3.009999990463257,
      5.910000324249268,
      6.950000286102295,
      9.070000648498535,
      9.930000305175781,
      10.710000038146973,
      11.130000114440918,
      14.609999656677246,
      15.409998893737793,
      19.809999465942383,
      37.279998779296875,
      38.279998779296875,
      38.599998474121094,
      40.12000274658203,
      46.20000457763672,
      50.940006256103516,
      53.66000747680664,
      54.9373893737793,
      56.89738845825195,
      57.28738784790039,
      59.98738479614258,
      60.86738586425781,
      60.887386322021484,
      61.71739196777344,
      62.91739273071289,
      62.957393646240234,
      63.41739273071289,
      63.8173942565918,
      63.83739471435547,
      63.897396087646484,
      63.93739700317383,
      64.06739807128906,
      64.11434936523438,
      64.12435150146484,
      64.15435028076172,
      64.19435119628906,
      64.24435424804688,
      64.57435607910156,
      64.69000244140625,
      64.76000213623047
    ],
    "short_factor": [
      1.1,
      1.1,
      1.1,
      1.3000000000000003,
      1.3500000000000003,
      1.3500000000000003,
      1.4000000000000004,
      1.5500000000000005,
      2.000000000000001,
      2.000000000000001,
      2.000000000000001,
      2.000000000000001,
      2.000000000000001,
      2.000000000000001,
      2.000000000000001,
      2.000000000000001,
      2.000000000000001,
      2.000000000000001,
      2.000000000000001,
      2.000000000000001,
      2.000000000000001,
      2.000000000000001,
      2.000000000000001,
      2.000000000000001,
      2.000000000000001,
      2.0500000000000007,
      2.0500000000000007,
      2.0500000000000007,
      2.0500000000000007,
      2.0500000000000007,
      2.0500000000000007,
      2.1000000000000005,
      2.1000000000000005,
      2.1500000000000004,
      2.25,
      2.25,
      2.25,
      2.25,
      2.25,
      2.3999999999999995,
      2.4499999999999993,
      2.499999999999999,
      2.6999999999999984,
      2.6999999999999984,
      2.7499999999999982,
      2.799999999999998,
      2.8999999999999977,
      3.049999999999997
    ],
    "type": "longrope"
  },
  "rope_theta": 10000.0,
  "sliding_window": 262144,
  "tie_word_embeddings": false,
  "torch_dtype": "float32",
  "transformers_version": "4.42.4",
  "use_cache": true,
  "vocab_size": 32064,
  "peft_type": "LORA",
  "lora_alpha": 32,
  "lora_dropout": 0.05,
  "r": 16,
  "target_modules": [
    "model.layers.0.self_attn.qkv_proj",
    "model.layers.0.self_attn.o_proj",
    "model.layers.0.mlp.gate_up_proj",
    "model.layers.0.mlp.down_proj",
    "model.layers.1.self_attn.qkv_proj",
    "model.layers.1.self_attn.o_proj",
    "model.layers.1.mlp.gate_up_proj",
    "model.layers.1.mlp.down_proj",
    "model.layers.2.self_attn.qkv_proj",
    "model.layers.2.self_attn.o_proj",
    "model.layers.2.mlp.gate_up_proj",
    "model.layers.2.mlp.down_proj",
    "model.layers.3.self_attn.qkv_proj",
    "model.layers.3.self_attn.o_proj",
    "model.layers.3.mlp.gate_up_proj",
    "model.layers.3.mlp.down_proj",
    "model.layers.4.self_attn.qkv_proj",
    "model.layers.4.self_attn.o_proj",
    "model.layers.4.mlp.gate_up_proj",
    "model.layers.4.mlp.down_proj",
    "model.layers.5.self_attn.qkv_proj",
    "model.layers.5.self_attn.o_proj",
    "model.layers.5.mlp.gate_up_proj",
    "model.layers.5.mlp.down_proj",
    "model.layers.6.self_attn.qkv_proj",
    "model.layers.6.self_attn.o_proj",
    "model.layers.6.mlp.gate_up_proj",
    "model.layers.6.mlp.down_proj",
    "model.layers.7.self_attn.qkv_proj",
    "model.layers.7.self_attn.o_proj",
    "model.layers.7.mlp.gate_up_proj",
    "model.layers.7.mlp.down_proj",
    "model.layers.8.self_attn.qkv_proj",
    "model.layers.8.self_attn.o_proj",
    "model.layers.8.mlp.gate_up_proj",
    "model.layers.8.mlp.down_proj",
    "model.layers.9.self_attn.qkv_proj",
    "model.layers.9.self_attn.o_proj",
    "model.layers.9.mlp.gate_up_proj",
    "model.layers.9.mlp.down_proj",
    "model.layers.10.self_attn.qkv_proj",
    "model.layers.10.self_attn.o_proj",
    "model.layers.10.mlp.gate_up_proj",
    "model.layers.10.mlp.down_proj",
    "model.layers.11.self_attn.qkv_proj",
    "model.layers.11.self_attn.o_proj",
    "model.layers.11.mlp.gate_up_proj",
    "model.layers.11.mlp.down_proj",
    "model.layers.12.self_attn.qkv_proj",
    "model.layers.12.self_attn.o_proj",
    "model.layers.12.mlp.gate_up_proj",
    "model.layers.12.mlp.down_proj",
    "model.layers.13.self_attn.qkv_proj",
    "model.layers.13.self_attn.o_proj",
    "model.layers.13.mlp.gate_up_proj",
    "model.layers.13.mlp.down_proj",
    "model.layers.14.self_attn.qkv_proj",
    "model.layers.14.self_attn.o_proj",
    "model.layers.14.mlp.gate_up_proj",
    "model.layers.14.mlp.down_proj",
    "model.layers.15.self_attn.qkv_proj",
    "model.layers.15.self_attn.o_proj",
    "model.layers.15.mlp.gate_up_proj",
    "model.layers.15.mlp.down_proj",
    "model.layers.16.self_attn.qkv_proj",
    "model.layers.16.self_attn.o_proj",
    "model.layers.16.mlp.gate_up_proj",
    "model.layers.16.mlp.down_proj",
    "model.layers.17.self_attn.qkv_proj",
    "model.layers.17.self_attn.o_proj",
    "model.layers.17.mlp.gate_up_proj",
    "model.layers.17.mlp.down_proj",
    "model.layers.18.self_attn.qkv_proj",
    "model.layers.18.self_attn.o_proj",
    "model.layers.18.mlp.gate_up_proj",
    "model.layers.18.mlp.down_proj",
    "model.layers.19.self_attn.qkv_proj",
    "model.layers.19.self_attn.o_proj",
    "model.layers.19.mlp.gate_up_proj",
    "model.layers.19.mlp.down_proj",
    "model.layers.20.self_attn.qkv_proj",
    "model.layers.20.self_attn.o_proj",
    "model.layers.20.mlp.gate_up_proj",
    "model.layers.20.mlp.down_proj",
    "model.layers.21.self_attn.qkv_proj",
    "model.layers.21.self_attn.o_proj",
    "model.layers.21.mlp.gate_up_proj",
    "model.layers.21.mlp.down_proj",
    "model.layers.22.self_attn.qkv_proj",
    "model.layers.22.self_attn.o_proj",
    "model.layers.22.mlp.gate_up_proj",
    "model.layers.22.mlp.down_proj",
    "model.layers.23.self_attn.qkv_proj",
    "model.layers.23.self_attn.o_proj",
    "model.layers.23.mlp.gate_up_proj",
    "model.layers.23.mlp.down_proj",
    "model.layers.24.self_attn.qkv_proj",
    "model.layers.24.self_attn.o_proj",
    "model.layers.24.mlp.gate_up_proj",
    "model.layers.24.mlp.down_proj",
    "model.layers.25.self_attn.qkv_proj",
    "model.layers.25.self_attn.o_proj",
    "model.layers.25.mlp.gate_up_proj",
    "model.layers.25.mlp.down_proj",
    "model.layers.26.self_attn.qkv_proj",
    "model.layers.26.self_attn.o_proj",
    "model.layers.26.mlp.gate_up_proj",
    "model.layers.26.mlp.down_proj",
    "model.layers.27.self_attn.qkv_proj",
    "model.layers.27.self_attn.o_proj",
    "model.layers.27.mlp.gate_up_proj",
    "model.layers.27.mlp.down_proj",
    "model.layers.28.self_attn.qkv_proj",
    "model.layers.28.self_attn.o_proj",
    "model.layers.28.mlp.gate_up_proj",
    "model.layers.28.mlp.down_proj",
    "model.layers.29.self_attn.qkv_proj",
    "model.layers.29.self_attn.o_proj",
    "model.layers.29.mlp.gate_up_proj",
    "model.layers.29.mlp.down_proj",
    "model.layers.30.self_attn.qkv_proj",
    "model.layers.30.self_attn.o_proj",
    "model.layers.30.mlp.gate_up_proj",
    "model.layers.30.mlp.down_proj",
    "model.layers.31.self_attn.qkv_proj",
    "model.layers.31.self_attn.o_proj",
    "model.layers.31.mlp.gate_up_proj",
    "model.layers.31.mlp.down_proj"
  ]
}
```
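
The `rope_scaling` block is the LongRoPE configuration carried over from `microsoft/Phi-3-mini-128k-instruct`: `short_factor` rescales the rotary frequencies for sequences up to `original_max_position_embeddings` (4096) tokens, and `long_factor` takes over beyond that, stretching the context to `max_position_embeddings` (131072). The sketch below works through the derived quantities. The attention-rescaling formula is the one used by the `longrope` path in recent `transformers` releases, so treat it as illustrative rather than normative; likewise, the head count (32 heads, head dim 96, hence 48 entries per factor list) is inferred from the upstream Phi-3-mini config, since it is not stated in this file.

```python
# Back-of-the-envelope check of the LongRoPE fields above. The attention
# rescaling formula mirrors the `longrope` rope-scaling path in recent
# `transformers` releases; this is a sketch, not a normative spec.
import math

max_pos = 131072        # "max_position_embeddings"
orig_max_pos = 4096     # "original_max_position_embeddings"

# How far past the pretraining context this config stretches RoPE.
scale = max_pos / orig_max_pos  # 32.0

# When extrapolating (scale > 1), attention logits are rescaled by this
# factor; here log(32)/log(4096) = 5/12 exactly, since both are powers of 2.
attention_factor = math.sqrt(1 + math.log(scale) / math.log(orig_max_pos))
print(scale, round(attention_factor, 4))  # 32.0 1.1902

# short_factor applies at or below orig_max_pos tokens, long_factor above it.
# Each of the 48 entries rescales one rotary frequency: 48 = head_dim // 2,
# with head_dim = 96 inferred from the upstream Phi-3-mini config
# (3072 hidden size / 32 attention heads).
```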
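
The trailing fields (`peft_type` through `target_modules`) are LoRA adapter settings that have been merged into the base-model config rather than kept in a separate `adapter_config.json`. The 128 `target_modules` entries enumerate `self_attn.qkv_proj`, `self_attn.o_proj`, `mlp.gate_up_proj`, and `mlp.down_proj` for every one of the 32 layers, so the adapter touches all attention and MLP projections. Below is a minimal sketch of reconstructing that setup with Hugging Face `peft`, assuming the base checkpoint is the `microsoft/Phi-3-mini-128k-instruct` repo named in `auto_map`; because the per-layer list covers every layer, the shorter suffix form of `target_modules` used here should be equivalent.

```python
# Minimal sketch: recreate the LoRA setup described by the adapter fields
# above, using Hugging Face `transformers` + `peft`. The base-model id is
# inferred from `auto_map` and is an assumption, not stated by the config.
from transformers import AutoModelForCausalLM
from peft import LoraConfig, get_peft_model

base = AutoModelForCausalLM.from_pretrained(
    "microsoft/Phi-3-mini-128k-instruct",
    trust_remote_code=True,  # `auto_map` resolves classes from remote code
)

lora = LoraConfig(
    r=16,                # "r": adapter rank
    lora_alpha=32,       # "lora_alpha": effective scale = alpha / r = 2.0
    lora_dropout=0.05,   # "lora_dropout"
    # The config lists all 128 fully qualified module names; since every
    # layer is covered, targeting the four name suffixes is equivalent.
    target_modules=["qkv_proj", "o_proj", "gate_up_proj", "down_proj"],
    task_type="CAUSAL_LM",
)

model = get_peft_model(base, lora)
model.print_trainable_parameters()
```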