{
  "auto_mapping": null,
  "base_model_name_or_path": "Qwen/Qwen3-30B-A3B",
  "encoder_hidden_size": 768,
  "inference_mode": true,
  "modules_to_save": null,
  "num_attention_heads": 4,
  "num_layers": 48,
  "num_transformer_submodules": 1,
  "num_virtual_tokens": 40,
  "peft_type": "PREFIX_TUNING",
  "peft_version": "0.18.0",
  "prefix_projection": true,
  "revision": null,
  "task_type": "CAUSAL_LM",
  "token_dim": 512
}
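
Because `inference_mode` is `true`, this configuration describes a saved prefix-tuning adapter meant to be attached to the base model at load time. The sketch below is a minimal, hedged example of loading such an adapter with the PEFT library; the adapter directory `"path/to/adapter"` is a placeholder, and it assumes `transformers` and `peft` (>= 0.18.0) are installed.

```python
# Minimal sketch: load the Qwen3-30B-A3B base model and attach the
# prefix-tuning adapter described by the config above.
# "path/to/adapter" is a placeholder for the directory that contains
# adapter_config.json and the adapter weights.
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftModel

base_model_id = "Qwen/Qwen3-30B-A3B"

tokenizer = AutoTokenizer.from_pretrained(base_model_id)
base_model = AutoModelForCausalLM.from_pretrained(base_model_id)

# PeftModel reads adapter_config.json from the adapter directory and
# prepends the learned prefix (40 virtual tokens here) at inference time.
model = PeftModel.from_pretrained(base_model, "path/to/adapter")
model.eval()

inputs = tokenizer("Hello, world!", return_tensors="pt")
outputs = model.generate(**inputs, max_new_tokens=32)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))
```

The dimension fields (`token_dim`, `num_attention_heads`, `num_layers`, `encoder_hidden_size`) are normally filled in automatically from the base model's configuration when the adapter is created, so they do not need to be passed manually when loading.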