{
  "_name_or_path": "lllyasviel/omost-phi-3-mini-128k",
  "architectures": [
    "Phi3ForCausalLM"
  ],
  "attention_dropout": 0.0,
  "auto_map": {
    "AutoConfig": "microsoft/Phi-3-mini-128k-instruct--configuration_phi3.Phi3Config",
    "AutoModelForCausalLM": "microsoft/Phi-3-mini-128k-instruct--modeling_phi3.Phi3ForCausalLM"
  },
  "bos_token_id": 1,
  "embd_pdrop": 0.0,
  "eos_token_id": 32000,
  "hidden_act": "silu",
  "hidden_size": 3072,
  "initializer_range": 0.02,
  "intermediate_size": 8192,
  "max_position_embeddings": 131072,
  "model_type": "phi3",
  "num_attention_heads": 32,
  "num_hidden_layers": 32,
  "num_key_value_heads": 32,
  "original_max_position_embeddings": 4096,
  "pad_token_id": 32000,
  "quantization_config": {
    "algo_config": {
      "model_decoder_layers": "model.layers",
      "name": "awq",
      "num_attention_heads": -1,
      "num_key_value_heads": -1,
      "scaling_layers": [
        {
          "inp": "self_attn.qkv_proj",
          "layers": [
            "self_attn.qkv_proj"
          ],
          "module2inspect": "self_attn",
          "prev_op": "input_layernorm"
        },
        {
          "inp": "self_attn.o_proj",
          "layers": [
            "self_attn.o_proj"
          ],
          "prev_op": "self_attn.qkv_proj"
        },
        {
          "inp": "mlp.gate_up_proj",
          "layers": [
            "mlp.gate_up_proj"
          ],
          "module2inspect": "mlp",
          "prev_op": "post_attention_layernorm"
        },
        {
          "inp": "mlp.down_proj",
          "layers": [
            "mlp.down_proj"
          ],
          "prev_op": "mlp.gate_up_proj"
        }
      ]
    },
    "exclude": [],
    "export": {
      "kv_cache_group": [],
      "pack_method": "reorder",
      "weight_format": "real_quantized",
      "weight_merge_groups": null
    },
    "global_quant_config": {
      "bias": null,
      "input_tensors": null,
      "output_tensors": null,
      "target_device": null,
      "weight": {
        "ch_axis": 1,
        "dtype": "uint4",
        "group_size": 128,
        "is_dynamic": false,
        "observer_cls": "PerGroupMinMaxObserver",
        "qscheme": "per_group",
        "round_method": "half_even",
        "scale_type": "float",
        "symmetric": false
      }
    },
    "layer_quant_config": {},
    "layer_type_quant_config": {},
    "quant_method": "quark",
    "quant_mode": "eager_mode"
  },
  "resid_pdrop": 0.0,
  "rms_norm_eps": 1e-05,
  "rope_scaling": {
    "long_factor": [
      1.0299999713897705,
      1.0499999523162842,
      1.0499999523162842,
      1.0799999237060547,
      1.2299998998641968,
      1.2299998998641968,
      1.2999999523162842,
      1.4499999284744263,
      1.5999999046325684,
      1.6499998569488525,
      1.8999998569488525,
      2.859999895095825,
      3.68999981880188,
      5.419999599456787,
      5.489999771118164,
      5.489999771118164,
      9.09000015258789,
      11.579999923706055,
      15.65999984741211,
      15.769999504089355,
      15.789999961853027,
      18.360000610351562,
      21.989999771118164,
      23.079999923706055,
      30.009998321533203,
      32.35000228881836,
      32.590003967285156,
      35.56000518798828,
      39.95000457763672,
      53.840003967285156,
      56.20000457763672,
      57.95000457763672,
      59.29000473022461,
      59.77000427246094,
      59.920005798339844,
      61.190006256103516,
      61.96000671386719,
      62.50000762939453,
      63.3700065612793,
      63.48000717163086,
      63.48000717163086,
      63.66000747680664,
      63.850006103515625,
      64.08000946044922,
      64.760009765625,
      64.80001068115234,
      64.81001281738281,
      64.81001281738281
    ],
    "short_factor": [
      1.05,
      1.05,
      1.05,
      1.1,
      1.1,
      1.1500000000000001,
      1.2000000000000002,
      1.2500000000000002,
      1.3000000000000003,
      1.3500000000000003,
      1.5000000000000004,
      2.000000000000001,
      2.000000000000001,
      2.000000000000001,
      2.000000000000001,
      2.000000000000001,
      2.000000000000001,
      2.000000000000001,
      2.000000000000001,
      2.000000000000001,
      2.000000000000001,
      2.000000000000001,
      2.000000000000001,
      2.000000000000001,
      2.000000000000001,
      2.000000000000001,
      2.000000000000001,
      2.000000000000001,
      2.000000000000001,
      2.000000000000001,
      2.000000000000001,
      2.000000000000001,
      2.0500000000000007,
      2.0500000000000007,
      2.0500000000000007,
      2.1000000000000005,
      2.1000000000000005,
      2.1000000000000005,
      2.1500000000000004,
      2.1500000000000004,
      2.3499999999999996,
      2.549999999999999,
      2.5999999999999988,
      2.5999999999999988,
      2.7499999999999982,
      2.849999999999998,
      2.849999999999998,
      2.9499999999999975
    ],
    "type": "longrope"
  },
  "rope_theta": 10000.0,
  "sliding_window": 262144,
  "tie_word_embeddings": false,
  "torch_dtype": "float16",
  "transformers_version": "4.49.0",
  "use_cache": true,
  "vocab_size": 32064
}
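
The "scaling_layers" entries above name the (prev_op, layer) pairs that AWQ rescales between: the previous op's output channels are divided by a per-channel scale and the next layer's weight columns are multiplied by the same scale, which leaves the computation unchanged while making salient channels easier to quantize. A minimal sketch of that identity with toy shapes and a toy salience-based scale (the actual scale search lives inside the Quark/AWQ implementation):

    import torch

    d = 8
    x = torch.randn(2, d)                            # activations produced by prev_op
    w = torch.randn(16, d)                           # weight of the layer to be quantized
    s = x.abs().mean(dim=0).clamp(min=1e-4) ** 0.5   # toy per-channel scale from activation magnitude

    y_ref = x @ w.t()
    y_scaled = (x / s) @ (w * s).t()                 # scale folded into both sides cancels out
    assert torch.allclose(y_ref, y_scaled, atol=1e-5)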
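
"global_quant_config" declares weight-only quantization: asymmetric uint4 with float scales, min/max observed per group of 128 input channels (ch_axis 1, "per_group"), rounded half-to-even. A sketch of that fake-quantization arithmetic; illustrative only, since the exported checkpoint actually stores real-quantized, reorder-packed tensors:

    import torch

    def fake_quantize_uint4(w: torch.Tensor, group_size: int = 128) -> torch.Tensor:
        out_ch, in_ch = w.shape
        g = w.reshape(out_ch, in_ch // group_size, group_size)
        w_min = g.amin(dim=-1, keepdim=True)
        w_max = g.amax(dim=-1, keepdim=True)
        scale = (w_max - w_min).clamp(min=1e-8) / 15.0         # 2**4 - 1 quantization levels
        zero = torch.round(-w_min / scale)                     # asymmetric zero point
        q = torch.clamp(torch.round(g / scale) + zero, 0, 15)  # uint4 codes; torch.round is half-even
        return ((q - zero) * scale).reshape(out_ch, in_ch)     # dequantized view

    w = torch.randn(3072, 8192)  # e.g. the mlp.down_proj weight shape in this model
    err = (w - fake_quantize_uint4(w)).abs().mean()
    print(f"mean abs quantization error: {err:.5f}")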
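
"rope_scaling" carries one "long_factor" and one "short_factor" per rotary frequency: head_dim = 3072 / 32 = 96, so 48 frequencies, matching the 48 entries in each list. A sketch of how the long factors rescale the RoPE inverse frequencies, following the LongRoPE formula in the Transformers Phi-3 implementation (an assumption on my part, not something stated in this file):

    import json
    import math

    import torch

    cfg = json.load(open("config.json"))  # this file
    head_dim = cfg["hidden_size"] // cfg["num_attention_heads"]     # 96
    long_factor = torch.tensor(cfg["rope_scaling"]["long_factor"])  # 48 == head_dim // 2 entries

    base_inv_freq = 1.0 / (cfg["rope_theta"] ** (torch.arange(0, head_dim, 2) / head_dim))
    inv_freq_long = base_inv_freq / long_factor  # stretched wavelengths beyond the 4096-token window

    # Attention is additionally rescaled once the context exceeds the original window:
    scale = cfg["max_position_embeddings"] / cfg["original_max_position_embeddings"]  # 32.0
    attn_factor = math.sqrt(1 + math.log(scale) / math.log(cfg["original_max_position_embeddings"]))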
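
Finally, a loading sketch. Because "quant_method" is "quark" and the weights are real-quantized, deserialization is expected to need the amd-quark package alongside a recent Transformers; the checkpoint path below is a placeholder, and this has not been verified against this exact checkpoint:

    import torch
    from transformers import AutoModelForCausalLM, AutoTokenizer

    checkpoint = "path/to/this/checkpoint"  # placeholder, not a real repo id

    tokenizer = AutoTokenizer.from_pretrained(checkpoint, trust_remote_code=True)
    model = AutoModelForCausalLM.from_pretrained(
        checkpoint,
        torch_dtype=torch.float16,  # matches "torch_dtype" above
        trust_remote_code=True,     # "auto_map" resolves to remote Phi-3 code
        device_map="auto",
    )

    inputs = tokenizer("generate an image of a cat on a table", return_tensors="pt").to(model.device)
    out = model.generate(**inputs, max_new_tokens=64, pad_token_id=32000)
    print(tokenizer.decode(out[0], skip_special_tokens=True))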