madan2248c committed
Commit ba7d724 · verified · 1 Parent(s): fc411d2

Upload model

Files changed (2)
  1. adapter_config.json +28 -11
  2. adapter_model.safetensors +2 -2
adapter_config.json CHANGED
@@ -1,17 +1,34 @@
 {
+  "alpha_pattern": {},
   "auto_mapping": null,
-  "base_model_name_or_path": "unsloth/Llama-3.2-1B-Instruct-bnb-4bit",
+  "base_model_name_or_path": null,
+  "bias": "none",
+  "corda_config": null,
+  "eva_config": null,
+  "exclude_modules": null,
+  "fan_in_fan_out": false,
   "inference_mode": true,
-  "num_attention_heads": 32,
-  "num_layers": 16,
-  "num_transformer_submodules": 1,
-  "num_virtual_tokens": 20,
-  "peft_type": "PROMPT_TUNING",
-  "prompt_tuning_init": "RANDOM",
-  "prompt_tuning_init_text": null,
+  "init_lora_weights": true,
+  "layer_replication": null,
+  "layers_pattern": null,
+  "layers_to_transform": null,
+  "loftq_config": {},
+  "lora_alpha": 32,
+  "lora_bias": false,
+  "lora_dropout": 0.05,
+  "megatron_config": null,
+  "megatron_core": "megatron.core",
+  "modules_to_save": null,
+  "peft_type": "LORA",
+  "r": 8,
+  "rank_pattern": {},
   "revision": null,
+  "target_modules": [
+    "v_proj",
+    "q_proj"
+  ],
   "task_type": "CAUSAL_LM",
-  "token_dim": 2048,
-  "tokenizer_kwargs": null,
-  "tokenizer_name_or_path": "axondendriteplus/contract_drafter-Llama-3.2-1B-Instruct"
+  "trainable_token_indices": null,
+  "use_dora": false,
+  "use_rslora": false
 }
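For reference, the new file replaces a prompt-tuning config with a LoRA one, and its fields match what peft serializes for a LoRA adapter. A minimal sketch of how such a file could be produced, assuming peft's LoraConfig (values taken from the diff; the remaining fields are peft defaults, and the base model is left unset to match "base_model_name_or_path": null):

from peft import LoraConfig

# Sketch only: reproduces the key values from the new adapter_config.json.
config = LoraConfig(
    r=8,                                  # "r": 8
    lora_alpha=32,                        # "lora_alpha": 32
    lora_dropout=0.05,                    # "lora_dropout": 0.05
    target_modules=["v_proj", "q_proj"],  # "target_modules"
    bias="none",                          # "bias": "none"
    task_type="CAUSAL_LM",                # "task_type": "CAUSAL_LM"
)
config.save_pretrained("adapter")  # writes an adapter_config.json with fields like those above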
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:69e68679e3b7a8772fb9de17dbcb4eb120866238afd43f15ad71cbc53082a4e0
-size 163960
+oid sha256:164b23462248dc03ddc4d01f0fce5579f3aebbfc0f0b7d879224f3f5d5b87e30
+size 22576168
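The pointer now references a ~22.6 MB weight file in place of the ~164 KB prompt-tuning embedding, consistent with rank-8 LoRA matrices on q_proj and v_proj. A hypothetical loading sketch: since "base_model_name_or_path" is null in the new config, the base model must be supplied explicitly (the model id below comes from the *old* config and is only an assumption for the new adapter; the adapter path is a placeholder):

from transformers import AutoModelForCausalLM
from peft import PeftModel

# Assumption: the base from the old config still applies to the new adapter.
base = AutoModelForCausalLM.from_pretrained("unsloth/Llama-3.2-1B-Instruct-bnb-4bit")
model = PeftModel.from_pretrained(base, "path/to/this/adapter")  # hypothetical local path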