{
  "path": "/root/models/Qwen3.5-0.8B-abliterated-HF",
  "type": "auto",
  "tokenizer_path": "/root/models/Qwen3.5-0.8B-abliterated-HF",
  "eagle_path": null,
  "lora_path": null,
  "gptq_path": null,
  "dst_path": "/root/models/Qwen3.5-0.8B-abliterated-MNN-fixed",
  "verbose": true,
  "test": null,
  "export": "mnn",
  "onnx_slim": false,
  "quant_bit": 4,
  "quant_block": 64,
  "visual_quant_bit": null,
  "visual_quant_block": null,
  "lm_quant_bit": 4,
  "lm_quant_block": 64,
  "mnnconvert": "/root/MNN-tokforge/build-host/MNNConvert",
  "ppl": false,
  "awq": false,
  "hqq": false,
  "omni": false,
  "transformer_fuse": false,
  "group_conv_native": false,
  "smooth": false,
  "sym": false,
  "visual_sym": false,
  "seperate_embed": false,
  "lora_split": false,
  "calib_data": null,
  "act_bit": 16,
  "embed_bit": 16,
  "act_sym": false,
  "quant_config": null,
  "generate_for_npu": false,
  "skip_weight": false,
  "omni_epochs": 20,
  "omni_lr": 0.005,
  "omni_wd": 0.0001,
  "tie_word_embeddings": true
}