Commit fbe5e84 (verified) · committed by jason-oneal · 1 parent: e05bbe4

LoRA adapter for secgpt-base (canonical JSON enforced)

README.md CHANGED
@@ -1,9 +1,9 @@
 ---
-base_model: jason-oneal/secgpt-base
+base_model: p-e-w/gpt-oss-20b-heretic
 library_name: peft
 pipeline_tag: text-generation
 tags:
-- base_model:adapter:jason-oneal/secgpt-base
+- base_model:adapter:p-e-w/gpt-oss-20b-heretic
 - lora
 - sft
 - transformers
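The README front matter now declares p-e-w/gpt-oss-20b-heretic as the base model. As a reference only, a minimal sketch of loading this adapter on that base with PEFT; "<this-adapter-repo>" is a placeholder for this repository's id, and device_map="auto" assumes accelerate is installed:

```python
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftModel

# Load the declared base model, then attach the LoRA adapter from this repo.
base = AutoModelForCausalLM.from_pretrained(
    "p-e-w/gpt-oss-20b-heretic",
    device_map="auto",  # assumes accelerate; the 20B base is large
)
tokenizer = AutoTokenizer.from_pretrained("p-e-w/gpt-oss-20b-heretic")
model = PeftModel.from_pretrained(base, "<this-adapter-repo>")  # placeholder id
```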
adapter_config.json CHANGED
@@ -3,7 +3,7 @@
   "alpha_pattern": {},
   "arrow_config": null,
   "auto_mapping": null,
-  "base_model_name_or_path": "jason-oneal/secgpt-base",
+  "base_model_name_or_path": "p-e-w/gpt-oss-20b-heretic",
   "bias": "none",
   "corda_config": null,
   "ensure_weight_tying": false,
@@ -16,7 +16,7 @@
   "layers_pattern": null,
   "layers_to_transform": null,
   "loftq_config": {},
-  "lora_alpha": 16,
+  "lora_alpha": 32,
   "lora_bias": false,
   "lora_dropout": 0.05,
   "megatron_config": null,
@@ -25,14 +25,14 @@
   "peft_type": "LORA",
   "peft_version": "0.18.0",
   "qalora_group_size": 16,
-  "r": 8,
+  "r": 16,
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
     "k_proj",
-    "q_proj",
+    "v_proj",
     "o_proj",
-    "v_proj"
+    "q_proj"
   ],
   "target_parameters": null,
   "task_type": "CAUSAL_LM",
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:501e02eb2d444928ff7753411f5762554bbeaf06bbcfbd5a5d106ead3d34d604
-size 15950616
+oid sha256:6756fac31b869f65b43ba594aea3baa607e9c9a60a9e88f809991ace4e1af907
+size 31876192
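LoRA parameter count scales linearly with the rank, so doubling r from 8 to 16 roughly doubles the adapter payload, which matches the LFS pointer growing from 15,950,616 to 31,876,192 bytes. A minimal sketch for sanity-checking the new file locally, assuming safetensors is installed and the weights have been pulled out of Git LFS:

```python
from safetensors.torch import load_file

# Count adapter parameters; at 2 bytes each (bf16/fp16) the total should
# line up roughly with the ~31.9 MB LFS size above.
state = load_file("adapter_model.safetensors")
n_params = sum(t.numel() for t in state.values())
print(f"{n_params:,} params, ~{n_params * 2 / 1e6:.1f} MB at 2 bytes/param")
```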
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:25ab7cbf548d7ff3b1df1cc50e27d7d33fc778af0fd46ae5b6d57c8b223afbf4
+oid sha256:0d4f45aa3369fe5c4684f9c5929aaf4eefe4f265f0032aba20c896e6ba5944a8
 size 6225
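training_args.bin keeps the same 6,225-byte size but carries a new hash; it is the TrainingArguments object pickled by the Hugging Face Trainer. A hedged sketch for inspecting it locally (it is a full pickled object, so weights_only=False is required and only trusted files should be loaded this way):

```python
import torch

# Unpickle the Trainer's TrainingArguments and print the hyperparameters it records.
args = torch.load("training_args.bin", weights_only=False)
print(args)
```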