friedeggs01 committed on
Commit 8c6dc07 · verified · 1 Parent(s): 1ff45c9

Training in progress, epoch 1

adapter_config.json ADDED
@@ -0,0 +1,35 @@
+{
+  "alpha_pattern": {},
+  "auto_mapping": {
+    "base_model_class": "PhiForCausalLM",
+    "parent_library": "transformers.models.phi.modeling_phi"
+  },
+  "base_model_name_or_path": "microsoft/phi-1_5",
+  "bias": "none",
+  "eva_config": null,
+  "exclude_modules": null,
+  "fan_in_fan_out": false,
+  "inference_mode": true,
+  "init_lora_weights": true,
+  "layer_replication": null,
+  "layers_pattern": null,
+  "layers_to_transform": null,
+  "loftq_config": {},
+  "lora_alpha": 16,
+  "lora_bias": false,
+  "lora_dropout": 0.05,
+  "megatron_config": null,
+  "megatron_core": "megatron.core",
+  "modules_to_save": null,
+  "peft_type": "LORA",
+  "r": 8,
+  "rank_pattern": {},
+  "revision": null,
+  "target_modules": [
+    "v_proj",
+    "q_proj"
+  ],
+  "task_type": null,
+  "use_dora": false,
+  "use_rslora": false
+}
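For context, this is a standard PEFT LoRA adapter config: a rank-8 adapter with alpha 16 and 0.05 dropout applied to the q_proj/v_proj attention projections of microsoft/phi-1_5. A minimal sketch of how a config like this is typically produced with the peft library follows; the actual training script is not part of this commit, so the code below is illustrative only.

```python
# Minimal sketch of producing an adapter_config.json like the one above
# with the peft library; illustrative, not the author's actual script.
from transformers import AutoModelForCausalLM
from peft import LoraConfig, get_peft_model

base = AutoModelForCausalLM.from_pretrained("microsoft/phi-1_5")

# Mirrors the committed config: rank-8 LoRA on the attention
# query/value projections, alpha 16, dropout 0.05, no bias terms.
lora_config = LoraConfig(
    r=8,
    lora_alpha=16,
    lora_dropout=0.05,
    bias="none",
    target_modules=["q_proj", "v_proj"],
)

model = get_peft_model(base, lora_config)
model.print_trainable_parameters()  # only the LoRA weights are trainable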
adapter_model.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7830f5cdd64382cd2119db4d05d2c08bff2bea246ef47ade23ba52a55c4deb3e
+size 6304096
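The three lines above are a Git LFS pointer: only the oid and byte size live in git, while the ~6.3 MB safetensors file holding the actual LoRA weights is stored in LFS. A sketch of loading the adapter onto the base model follows; the repo id is a placeholder, not the real one.

```python
# Sketch of applying the committed adapter to the base model;
# "friedeggs01/phi-1_5-lora" is a placeholder repo id, not confirmed
# by this commit.
from transformers import AutoModelForCausalLM
from peft import PeftModel

base = AutoModelForCausalLM.from_pretrained("microsoft/phi-1_5")
model = PeftModel.from_pretrained(base, "friedeggs01/phi-1_5-lora")
```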
runs/Mar28_08-48-42_9d4780d44415/events.out.tfevents.1743151723.9d4780d44415.31.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:47363d695832accbe110ca49a851c4ac646df43bd68905da82f3cc202bca0375
+size 4184
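This file is the TensorBoard event log written during the training run. Assuming the repo is cloned with git-lfs so the real file is present, it can be read back with TensorBoard's event accumulator; the scalar tag below is a guess at what the Trainer logged, not confirmed by this commit.

```python
# Sketch of inspecting the committed TensorBoard event file.
from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

ea = EventAccumulator(
    "runs/Mar28_08-48-42_9d4780d44415/events.out.tfevents.1743151723.9d4780d44415.31.0"
)
ea.Reload()
print(ea.Tags()["scalars"])  # list the scalar tags actually logged
for event in ea.Scalars("train/loss"):  # tag name is an assumption
    print(event.step, event.value)
```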
training_args.bin ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1c38fd887748b652074f55c848b5f8e554422704061daf5627c1d2d347d67bba
+size 5304
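training_args.bin is the pickled transformers.TrainingArguments object the Trainer saves alongside checkpoints. A sketch of recovering the hyperparameters from it; note that unpickling executes arbitrary code, so only do this with a trusted repo.

```python
# Sketch of reading back the Trainer hyperparameters; training_args.bin
# is a pickle, so weights_only=False is required and the source must be
# trusted.
import torch

args = torch.load("training_args.bin", weights_only=False)
print(args.learning_rate, args.num_train_epochs, args.per_device_train_batch_size)
```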