lora_model / adapter_config.json
Commit 6e04b19: Upload model trained with Unsloth
{
"alpha_pattern": {},
"auto_mapping": {
"base_model_class": "NemotronHForCausalLM",
"parent_library": "transformers_modules.unsloth.Nemotron_hyphen_3_hyphen_Nano_hyphen_30B_hyphen_A3B.b93ba8494bf95b9e5dd7aed6b5d07517db195743.modeling_nemotron_h",
"unsloth_fixed": true
},
"base_model_name_or_path": "unsloth/Nemotron-3-Nano-30B-A3B",
"bias": "none",
"corda_config": null,
"eva_config": null,
"exclude_modules": null,
"fan_in_fan_out": false,
"inference_mode": true,
"init_lora_weights": true,
"layer_replication": null,
"layers_pattern": null,
"layers_to_transform": null,
"loftq_config": {},
"lora_alpha": 16,
"lora_bias": false,
"lora_dropout": 0,
"megatron_config": null,
"megatron_core": "megatron.core",
"modules_to_save": null,
"peft_type": "LORA",
"qalora_group_size": 16,
"r": 8,
"rank_pattern": {},
"revision": null,
"target_modules": [
"q_proj",
"o_proj",
"v_proj",
"in_proj",
"out_proj",
"up_proj",
"down_proj",
"k_proj",
"gate_proj"
],
"target_parameters": null,
"task_type": "CAUSAL_LM",
"trainable_token_indices": null,
"use_dora": false,
"use_qalora": false,
"use_rslora": false
}
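
For reference, here is a minimal sketch of how this config maps onto the PEFT library. It is illustrative, not part of this repo: the local directory name `lora_model` is an assumption based on this repo's layout, and loading the 30B base model requires substantial memory. `trust_remote_code=True` is needed because `auto_mapping` points at the repo's own `modeling_nemotron_h` module.

```python
# Minimal sketch: rebuild the same LoRA settings and load this adapter with PEFT.
# Assumption: the adapter files sit in a local "lora_model/" directory.
from transformers import AutoModelForCausalLM
from peft import LoraConfig, PeftModel

# Equivalent of the adapter_config.json above: rank-8 LoRA with alpha 16,
# no dropout, no bias, applied to all attention and MLP projections.
lora_config = LoraConfig(
    r=8,
    lora_alpha=16,
    lora_dropout=0.0,
    bias="none",
    task_type="CAUSAL_LM",
    target_modules=[
        "q_proj", "o_proj", "v_proj", "in_proj", "out_proj",
        "up_proj", "down_proj", "k_proj", "gate_proj",
    ],
)

# Load the trained adapter on top of the base model named in
# base_model_name_or_path; trust_remote_code is required because the
# NemotronH classes live in the repo's own modeling file.
base = AutoModelForCausalLM.from_pretrained(
    "unsloth/Nemotron-3-Nano-30B-A3B",
    trust_remote_code=True,
)
model = PeftModel.from_pretrained(base, "lora_model")  # reads adapter_config.json
```

The `lora_config` object is only needed when fine-tuning from scratch, where it would be passed to `get_peft_model(base, lora_config)`; when loading this repo's trained weights, `PeftModel.from_pretrained` reads `adapter_config.json` directly.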