{
  "alpha_pattern": {},
  "auto_mapping": {
    "base_model_class": "MiniCPMV",
    "parent_library": "transformers_modules.openbmb.MiniCPM-Llama3-V-2_5.320a581d2195ad4a52140bb427a07f7207aeac6e.modeling_minicpmv"
  },
  "base_model_name_or_path": "openbmb/MiniCPM-Llama3-V-2_5",
  "bias": "none",
  "fan_in_fan_out": false,
  "inference_mode": true,
  "init_lora_weights": true,
  "layer_replication": null,
  "layers_pattern": null,
  "layers_to_transform": null,
  "loftq_config": {},
  "lora_alpha": 64,
  "lora_dropout": 0.05,
  "megatron_config": null,
  "megatron_core": "megatron.core",
  "modules_to_save": [
    "embed_tokens",
    "resampler",
    "vpm"
  ],
  "peft_type": "LORA",
  "r": 64,
  "rank_pattern": {},
  "revision": null,
  "target_modules": "llm\\..*layers\\.\\d+\\.(self_attn|vision_layers)\\.(q_proj|k_proj|v_proj|o_proj|attention)",
  "task_type": null,
  "use_dora": false,
  "use_rslora": false
}
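
This is a PEFT LoRA adapter config (`peft_type: "LORA"`, rank 64, alpha 64) whose `target_modules` regex selects attention projections inside the language model, while `modules_to_save` keeps the embeddings, resampler, and vision tower (`vpm`) as fully trained, non-LoRA weights. A minimal loading sketch follows, assuming the adapter checkpoint containing this `adapter_config.json` sits in a local directory named `./minicpmv-lora` (a hypothetical path); `trust_remote_code=True` is needed because the base class `MiniCPMV` comes from the repo's custom modeling code, as the `auto_mapping` entry indicates.

```python
import torch
from peft import PeftModel
from transformers import AutoModel

# Load the base model referenced by "base_model_name_or_path".
# trust_remote_code=True pulls in the custom MiniCPMV modeling code
# named in "auto_mapping.parent_library".
base = AutoModel.from_pretrained(
    "openbmb/MiniCPM-Llama3-V-2_5",
    trust_remote_code=True,
    torch_dtype=torch.float16,
)

# Attach the LoRA adapter; "./minicpmv-lora" is a hypothetical local
# directory holding adapter_config.json and the adapter weights.
model = PeftModel.from_pretrained(base, "./minicpmv-lora")
model.eval()  # matches "inference_mode": true in the config
```

Since `modules_to_save` includes `embed_tokens`, `resampler`, and `vpm`, those full weights are restored from the adapter checkpoint alongside the LoRA matrices, so the loaded model differs from the base in more than just the low-rank updates.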