venier committed on
Commit
b4a2baf
·
verified ·
1 Parent(s): 8d75525

Initial upload of assistant-daily adapter

Browse files
Files changed (2) hide show
  1. adapter_config.json +43 -0
  2. adapters.safetensors +3 -0
adapter_config.json ADDED
@@ -0,0 +1,43 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "adapter_path": "/Users/giacomo/coding/snaply/writing_assistant_fine_tuning/output/adapters",
3
+ "alpha": 512,
4
+ "batch_size": 2,
5
+ "config": "/Users/giacomo/coding/snaply/writing_assistant_fine_tuning/output/lora_config.yaml",
6
+ "data": "/Users/giacomo/coding/snaply/writing_assistant_fine_tuning/data",
7
+ "dropout": 0.05,
8
+ "fine_tune_type": "lora",
9
+ "grad_accumulation_steps": 8,
10
+ "grad_checkpoint": false,
11
+ "iters": 3500,
12
+ "learning_rate": 2.5e-05,
13
+ "lora_parameters": {
14
+ "rank": 8,
15
+ "dropout": 0.0,
16
+ "scale": 20.0
17
+ },
18
+ "lr_schedule": null,
19
+ "mask_prompt": true,
20
+ "max_seq_length": 2048,
21
+ "model": "mlx-community/Jan-v3-4B-base-instruct-4bit",
22
+ "num_layers": 36,
23
+ "optimizer": "adam",
24
+ "optimizer_config": {
25
+ "adam": {},
26
+ "adamw": {},
27
+ "muon": {},
28
+ "sgd": {},
29
+ "adafactor": {}
30
+ },
31
+ "project_name": null,
32
+ "rank": 256,
33
+ "report_to": null,
34
+ "resume_adapter_file": null,
35
+ "save_every": 50,
36
+ "seed": 0,
37
+ "steps_per_eval": 50,
38
+ "steps_per_report": 10,
39
+ "test": false,
40
+ "test_batches": 500,
41
+ "train": true,
42
+ "val_batches": 36
43
+ }
adapters.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a7c35d78fff78a02235920ff253b56fdcde48700bfee584304ad28d23fd59f0d
3
+ size 66114656