MelMitchell8 committed on
Commit ef0deb3 · 1 Parent(s): bdd4756

Update config.json

Files changed (1): config.json (+3, -1)
config.json CHANGED
@@ -8,6 +8,7 @@
   "layers_to_transform": null,
   "lora_alpha": 16,
   "lora_dropout": 0.05,
+  "model_type": "llama"
   "modules_to_save": null,
   "peft_type": "LORA",
   "r": 8,
@@ -16,5 +17,6 @@
     "q_proj",
     "v_proj"
   ],
-  "task_type": "CAUSAL_LM"
+  "task_type": "CAUSAL_LM",
+  "tokenizer": "LlamaTokenizer"
 }
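
For context (not part of the commit itself), the fields visible in this config correspond to a PEFT LoRA adapter configuration. Below is a minimal sketch, assuming the peft library is installed, of how an equivalent adapter config could be constructed in Python. The "model_type" and "tokenizer" keys added by this commit are not standard LoraConfig parameters, so they appear only as comments; the object and parameter names mirror the diff above.

# Minimal sketch (assumes peft is installed); mirrors the fields in config.json.
from peft import LoraConfig, TaskType

lora_config = LoraConfig(
    r=8,                                   # "r": 8
    lora_alpha=16,                         # "lora_alpha": 16
    lora_dropout=0.05,                     # "lora_dropout": 0.05
    target_modules=["q_proj", "v_proj"],   # attention projections listed in the diff
    modules_to_save=None,                  # "modules_to_save": null
    task_type=TaskType.CAUSAL_LM,          # "task_type": "CAUSAL_LM"
)
# The "model_type": "llama" and "tokenizer": "LlamaTokenizer" keys added in this
# commit indicate a LLaMA-family base model; that model and tokenizer would be
# loaded separately (e.g. via transformers) before applying the adapter.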