skar0 committed
Commit 62ecc24 · verified · 1 Parent(s): 5f9ed36

Delete folder reward with huggingface_hub

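Per the commit message, the reward folder was removed with the huggingface_hub client. A minimal sketch of how such a deletion can be issued with HfApi.delete_folder; the repo id is a placeholder and the token handling is an assumption, since neither is shown on this page:

from huggingface_hub import HfApi

api = HfApi()  # assumes a token is available, e.g. via HF_TOKEN or `huggingface-cli login`

# Remove the `reward` folder from the repo in a single commit.
# "your-username/your-repo" is a placeholder; the target repo is not shown here.
api.delete_folder(
    path_in_repo="reward",
    repo_id="your-username/your-repo",
    commit_message="Delete folder reward with huggingface_hub",
)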
reward/adapter_config.json DELETED
@@ -1,49 +0,0 @@
- {
-   "alpha_pattern": {},
-   "auto_mapping": null,
-   "base_model_name_or_path": "AlignmentResearch/Llama-3.3-Tiny-Instruct-boolq",
-   "bias": "none",
-   "corda_config": null,
-   "eva_config": null,
-   "exclude_modules": null,
-   "fan_in_fan_out": false,
-   "inference_mode": true,
-   "init_lora_weights": true,
-   "layer_replication": null,
-   "layers_pattern": null,
-   "layers_to_transform": null,
-   "loftq_config": {},
-   "lora_alpha": 512,
-   "lora_bias": false,
-   "lora_dropout": 0.0,
-   "megatron_config": null,
-   "megatron_core": "megatron.core",
-   "modules_to_save": [
-     "classifier",
-     "score"
-   ],
-   "peft_type": "LORA",
-   "r": 256,
-   "rank_pattern": {},
-   "revision": null,
-   "target_modules": [
-     "model.layers.0.mlp.up_proj",
-     "model.layers.1.self_attn.k_proj",
-     "model.layers.1.mlp.up_proj",
-     "model.layers.0.self_attn.v_proj",
-     "model.layers.1.self_attn.o_proj",
-     "model.layers.0.self_attn.k_proj",
-     "model.layers.0.mlp.down_proj",
-     "model.layers.0.mlp.gate_proj",
-     "model.layers.1.mlp.gate_proj",
-     "model.layers.0.self_attn.q_proj",
-     "model.layers.1.self_attn.v_proj",
-     "model.layers.0.self_attn.o_proj",
-     "model.layers.1.self_attn.q_proj",
-     "model.layers.1.mlp.down_proj"
-   ],
-   "task_type": "SEQ_CLS",
-   "trainable_token_indices": null,
-   "use_dora": false,
-   "use_rslora": false
- }
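The deleted adapter_config.json describes a PEFT LoRA adapter for a sequence-classification (reward) head on a two-layer Llama model. For reference, a peft.LoraConfig reproducing the key fields above might look like the following sketch; only values visible in the diff are set, and library defaults are assumed for the rest:

from peft import LoraConfig, TaskType

# Sketch of the deleted config's key fields; values are copied from the
# removed adapter_config.json above.
config = LoraConfig(
    task_type=TaskType.SEQ_CLS,
    r=256,
    lora_alpha=512,
    lora_dropout=0.0,
    bias="none",
    # All attention and MLP projections of layers 0 and 1.
    target_modules=[
        f"model.layers.{layer}.{proj}"
        for layer in (0, 1)
        for proj in (
            "self_attn.q_proj", "self_attn.k_proj",
            "self_attn.v_proj", "self_attn.o_proj",
            "mlp.gate_proj", "mlp.up_proj", "mlp.down_proj",
        )
    ],
    modules_to_save=["classifier", "score"],
)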
reward/adapter_model.safetensors DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:31c7135c89a1b33b60662c8851027a70417c9a18587e7c9be6582864b557b0f8
- size 36181248
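The second deleted file is a Git LFS pointer rather than the weights themselves: it records the SHA-256 and size (about 36 MB) of the actual safetensors blob. A quick sketch for checking that a local copy matches the recorded oid; the local path is an assumption:

import hashlib

# SHA-256 recorded in the LFS pointer above.
expected = "31c7135c89a1b33b60662c8851027a70417c9a18587e7c9be6582864b557b0f8"

h = hashlib.sha256()
with open("reward/adapter_model.safetensors", "rb") as f:  # assumed local path
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)

print("match" if h.hexdigest() == expected else "mismatch")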