turtle170 committed on
Commit
dfae1e6
·
verified ·
1 Parent(s): 1335eaa

Create ds_config.json

Browse files
Files changed (1) hide show
  1. ds_config.json +40 -0
ds_config.json ADDED
@@ -0,0 +1,40 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "fp16": {
3
+ "enabled": "auto"
4
+ },
5
+ "bf16": {
6
+ "enabled": "auto"
7
+ },
8
+ "optimizer": {
9
+ "type": "AdamW",
10
+ "params": {
11
+ "lr": "auto",
12
+ "betas": "auto",
13
+ "eps": "auto",
14
+ "weight_decay": "auto"
15
+ }
16
+ },
17
+ "scheduler": {
18
+ "type": "WarmupLR",
19
+ "params": {
20
+ "warmup_min_lr": "auto",
21
+ "warmup_max_lr": "auto",
22
+ "warmup_num_steps": "auto"
23
+ }
24
+ },
25
+ "zero_optimization": {
26
+ "stage": 2,
27
+ "allgather_partitions": true,
28
+ "allgather_bucket_size": 2e8,
29
+ "overlap_comm": true,
30
+ "reduce_scatter": true,
31
+ "reduce_bucket_size": 2e8,
32
+ "contiguous_gradients": true
33
+ },
34
+ "gradient_accumulation_steps": "auto",
35
+ "gradient_clipping": "auto",
36
+ "steps_per_print": 100,
37
+ "train_batch_size": "auto",
38
+ "train_micro_batch_size_per_gpu": "auto",
39
+ "wall_clock_breakdown": false
40
+ }