chaoyan committed on
Commit
9b2ad78
·
1 Parent(s): 759cd11

Add deepspeed support

Browse files
Files changed (4) hide show
  1. ds_config_zero2.json +50 -0
  2. ds_config_zero3.json +57 -0
  3. run_cat.sh +3 -2
  4. run_mlm_local.py +1 -2
ds_config_zero2.json ADDED
@@ -0,0 +1,50 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "fp16": {
3
+ "enabled": "auto",
4
+ "loss_scale": 0,
5
+ "loss_scale_window": 1000,
6
+ "initial_scale_power": 16,
7
+ "hysteresis": 2,
8
+ "min_loss_scale": 1
9
+ },
10
+
11
+ "optimizer": {
12
+ "type": "AdamW",
13
+ "params": {
14
+ "lr": "auto",
15
+ "betas": "auto",
16
+ "eps": "auto",
17
+ "weight_decay": "auto"
18
+ }
19
+ },
20
+
21
+ "scheduler": {
22
+ "type": "WarmupLR",
23
+ "params": {
24
+ "warmup_min_lr": "auto",
25
+ "warmup_max_lr": "auto",
26
+ "warmup_num_steps": "auto"
27
+ }
28
+ },
29
+
30
+ "zero_optimization": {
31
+ "stage": 2,
32
+ "offload_optimizer": {
33
+ "device": "cpu",
34
+ "pin_memory": true
35
+ },
36
+ "allgather_partitions": true,
37
+ "allgather_bucket_size": 2e8,
38
+ "overlap_comm": true,
39
+ "reduce_scatter": true,
40
+ "reduce_bucket_size": 2e8,
41
+ "contiguous_gradients": true
42
+ },
43
+
44
+ "gradient_accumulation_steps": "auto",
45
+ "gradient_clipping": "auto",
46
+ "steps_per_print": 2000,
47
+ "train_batch_size": "auto",
48
+ "train_micro_batch_size_per_gpu": "auto",
49
+ "wall_clock_breakdown": false
50
+ }
ds_config_zero3.json ADDED
@@ -0,0 +1,57 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "fp16": {
3
+ "enabled": "auto",
4
+ "loss_scale": 0,
5
+ "loss_scale_window": 1000,
6
+ "initial_scale_power": 16,
7
+ "hysteresis": 2,
8
+ "min_loss_scale": 1
9
+ },
10
+
11
+ "optimizer": {
12
+ "type": "AdamW",
13
+ "params": {
14
+ "lr": "auto",
15
+ "betas": "auto",
16
+ "eps": "auto",
17
+ "weight_decay": "auto"
18
+ }
19
+ },
20
+
21
+ "scheduler": {
22
+ "type": "WarmupLR",
23
+ "params": {
24
+ "warmup_min_lr": "auto",
25
+ "warmup_max_lr": "auto",
26
+ "warmup_num_steps": "auto"
27
+ }
28
+ },
29
+
30
+ "zero_optimization": {
31
+ "stage": 3,
32
+ "offload_optimizer": {
33
+ "device": "cpu",
34
+ "pin_memory": true
35
+ },
36
+ "offload_param": {
37
+ "device": "cpu",
38
+ "pin_memory": true
39
+ },
40
+ "overlap_comm": true,
41
+ "contiguous_gradients": true,
42
+ "sub_group_size": 1e9,
43
+ "reduce_bucket_size": "auto",
44
+ "stage3_prefetch_bucket_size": "auto",
45
+ "stage3_param_persistence_threshold": "auto",
46
+ "stage3_max_live_parameters": 1e9,
47
+ "stage3_max_reuse_distance": 1e9,
48
+ "stage3_gather_16bit_weights_on_model_save": true
49
+ },
50
+
51
+ "gradient_accumulation_steps": "auto",
52
+ "gradient_clipping": "auto",
53
+ "steps_per_print": 2000,
54
+ "train_batch_size": "auto",
55
+ "train_micro_batch_size_per_gpu": "auto",
56
+ "wall_clock_breakdown": false
57
+ }
run_cat.sh CHANGED
@@ -1,8 +1,9 @@
1
- python run_mlm_local.py \
2
  --model_config_id "bert-base-uncased" \
3
  --dataset_id "chaoyan/processed_bert_dataset" \
4
  --tokenizer_id "chaoyan/bert-base-uncased-cat_tokenizer" \
5
  --repository_id "bert-base-uncased-cat_model" \
6
  --max_steps 1000_000 \
7
- --per_device_train_batch_size 32 \
8
  --learning_rate 5e-5 \
 
 
1
+ deepspeed run_mlm_local.py \
2
  --model_config_id "bert-base-uncased" \
3
  --dataset_id "chaoyan/processed_bert_dataset" \
4
  --tokenizer_id "chaoyan/bert-base-uncased-cat_tokenizer" \
5
  --repository_id "bert-base-uncased-cat_model" \
6
  --max_steps 1000_000 \
7
+ --per_device_train_batch_size 64 \
8
  --learning_rate 5e-5 \
9
+ --deepspeed ds_config_zero3.json
run_mlm_local.py CHANGED
@@ -1,4 +1,3 @@
1
- from email.policy import default
2
  import os
3
  import logging
4
  import sys
@@ -63,7 +62,7 @@ class ScriptArguments:
63
  def run_mlm():
64
  # Parse arguments
65
  parser = HfArgumentParser(ScriptArguments)
66
- script_args = parser.parse_args_into_dataclasses()[0]
67
  logger.info(f"Script parameters {script_args}")
68
 
69
  # set seed for reproducibility
 
 
1
  import os
2
  import logging
3
  import sys
 
62
  def run_mlm():
63
  # Parse arguments
64
  parser = HfArgumentParser(ScriptArguments)
65
+ script_args = parser.parse_args_into_dataclasses(return_remaining_strings=True)[0]
66
  logger.info(f"Script parameters {script_args}")
67
 
68
  # set seed for reproducibility