eddierex commited on
Commit
1efc73d
·
verified ·
1 Parent(s): 2405442

Upload GR00T_N1_5 checkpoint 20000 for OpenWBC bottle task

Browse files
config.json ADDED
@@ -0,0 +1,70 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "action_horizon": 50,
3
+ "add_pos_embed": true,
4
+ "apply_sincos_state_encoding": true,
5
+ "architectures": [
6
+ "Gr00tN1d6"
7
+ ],
8
+ "attn_dropout": 0.2,
9
+ "attn_implementation": null,
10
+ "backbone_embedding_dim": 2048,
11
+ "backbone_model_type": "eagle",
12
+ "backbone_trainable_params_fp32": true,
13
+ "collator_overwrite_image_inputs": false,
14
+ "color_jitter_params": {
15
+ "brightness": 0.1,
16
+ "contrast": 0.1,
17
+ "hue": 0.1,
18
+ "saturation": 0.1
19
+ },
20
+ "crop_fraction": 0.95,
21
+ "diffusion_model_cfg": {
22
+ "attention_head_dim": 48,
23
+ "dropout": 0.2,
24
+ "final_dropout": true,
25
+ "interleave_self_attention": true,
26
+ "norm_type": "ada_norm",
27
+ "num_attention_heads": 32,
28
+ "num_layers": 32,
29
+ "output_dim": 1024,
30
+ "positional_embeddings": null
31
+ },
32
+ "eagle_collator": true,
33
+ "formalize_language": true,
34
+ "gemma_collator": false,
35
+ "hidden_size": 1024,
36
+ "image_crop_size": null,
37
+ "image_target_size": null,
38
+ "input_embedding_dim": 1536,
39
+ "load_bf16": true,
40
+ "max_action_dim": 128,
41
+ "max_num_embodiments": 32,
42
+ "max_seq_len": 1024,
43
+ "max_state_dim": 128,
44
+ "model_dtype": "bfloat16",
45
+ "model_name": "nvidia/Eagle-Block2A-2B-v2",
46
+ "model_type": "Gr00tN1d6",
47
+ "noise_beta_alpha": 1.5,
48
+ "noise_beta_beta": 1.0,
49
+ "noise_s": 0.999,
50
+ "num_inference_timesteps": 4,
51
+ "num_timestep_buckets": 1000,
52
+ "random_rotation_angle": null,
53
+ "reproject_vision": false,
54
+ "select_layer": 16,
55
+ "shortest_image_edge": 256,
56
+ "state_dropout_prob": 0.0,
57
+ "torch_dtype": "bfloat16",
58
+ "transformers_version": "4.51.3",
59
+ "tune_diffusion_model": true,
60
+ "tune_llm": false,
61
+ "tune_projector": true,
62
+ "tune_top_llm_layers": 4,
63
+ "tune_visual": false,
64
+ "tune_vlln": true,
65
+ "use_albumentations_transforms": true,
66
+ "use_alternate_vl_dit": true,
67
+ "use_flash_attention": true,
68
+ "use_relative_action": true,
69
+ "use_vlln": true
70
+ }
embodiment_id.json ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "robocasa_panda_omron": 13,
3
+ "gr1": 20,
4
+ "behavior_r1_pro": 24,
5
+ "unitree_g1": 8,
6
+ "oxe_google": 0,
7
+ "oxe_widowx": 1,
8
+ "libero_panda": 2,
9
+ "new_embodiment": 10
10
+ }
experiment_cfg/conf.yaml ADDED
@@ -0,0 +1,263 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ load_config_path: null
2
+ model:
3
+ model_type: Gr00tN1d6
4
+ model_dtype: bfloat16
5
+ model_name: nvidia/Eagle-Block2A-2B-v2
6
+ backbone_model_type: eagle
7
+ model_revision: null
8
+ tune_top_llm_layers: 4
9
+ backbone_embedding_dim: 2048
10
+ tune_llm: false
11
+ tune_visual: false
12
+ select_layer: 16
13
+ reproject_vision: false
14
+ use_flash_attention: true
15
+ load_bf16: false
16
+ collator_overwrite_image_inputs: false
17
+ eagle_collator: true
18
+ backbone_trainable_params_fp32: true
19
+ image_crop_size: null
20
+ image_target_size: null
21
+ shortest_image_edge: 256
22
+ crop_fraction: 0.95
23
+ random_rotation_angle: 5
24
+ color_jitter_params:
25
+ brightness: 0.2
26
+ contrast: 0.2
27
+ saturation: 0.2
28
+ hue: 0.08
29
+ use_albumentations_transforms: true
30
+ formalize_language: true
31
+ apply_sincos_state_encoding: false
32
+ use_relative_action: true
33
+ max_state_dim: 29
34
+ max_action_dim: 29
35
+ action_horizon: 16
36
+ hidden_size: 1024
37
+ input_embedding_dim: 1536
38
+ add_pos_embed: true
39
+ attn_dropout: 0.2
40
+ use_vlln: true
41
+ max_seq_len: 1024
42
+ use_alternate_vl_dit: true
43
+ attend_text_every_n_blocks: 2
44
+ diffusion_model_cfg:
45
+ positional_embeddings: null
46
+ num_layers: 32
47
+ num_attention_heads: 32
48
+ attention_head_dim: 48
49
+ norm_type: ada_norm
50
+ dropout: 0.2
51
+ final_dropout: true
52
+ output_dim: 1024
53
+ interleave_self_attention: true
54
+ num_inference_timesteps: 4
55
+ noise_beta_alpha: 1.5
56
+ noise_beta_beta: 1.0
57
+ noise_s: 0.999
58
+ num_timestep_buckets: 1000
59
+ tune_projector: true
60
+ tune_diffusion_model: true
61
+ tune_vlln: true
62
+ state_dropout_prob: 0.0
63
+ state_additive_noise_scale: 0.0
64
+ max_num_embodiments: 32
65
+ data:
66
+ datasets:
67
+ - dataset_paths:
68
+ - /home/opuser/wtx/vla/data/new/bottombetter_gr00t
69
+ embodiment_tag: new_embodiment
70
+ mix_ratio: 2.0
71
+ dataset_type: physical_embodiment
72
+ val_dataset_path: null
73
+ - dataset_paths:
74
+ - /home/opuser/wtx/vla/data/new/onlybottom_gr00t
75
+ embodiment_tag: new_embodiment
76
+ mix_ratio: 1.0
77
+ dataset_type: physical_embodiment
78
+ val_dataset_path: null
79
+ - dataset_paths:
80
+ - /home/opuser/wtx/vla/data/new/squatandup_gr00t
81
+ embodiment_tag: new_embodiment
82
+ mix_ratio: 1.0
83
+ dataset_type: physical_embodiment
84
+ val_dataset_path: null
85
+ - dataset_paths:
86
+ - /home/opuser/wtx/vla/data/new/up_gr00t
87
+ embodiment_tag: new_embodiment
88
+ mix_ratio: 1.0
89
+ dataset_type: physical_embodiment
90
+ val_dataset_path: null
91
+ modality_configs:
92
+ new_embodiment:
93
+ video:
94
+ delta_indices:
95
+ - 0
96
+ modality_keys:
97
+ - ego_view
98
+ - wrist_left
99
+ - wrist_right
100
+ sin_cos_embedding_keys: null
101
+ mean_std_embedding_keys: null
102
+ action_configs: null
103
+ state:
104
+ delta_indices:
105
+ - 0
106
+ modality_keys:
107
+ - left_arm
108
+ - right_arm
109
+ - left_hand
110
+ - right_hand
111
+ - left_leg
112
+ - right_leg
113
+ sin_cos_embedding_keys:
114
+ - left_arm
115
+ - right_arm
116
+ - left_hand
117
+ - right_hand
118
+ - left_leg
119
+ - right_leg
120
+ mean_std_embedding_keys: []
121
+ action_configs: null
122
+ action:
123
+ delta_indices:
124
+ - 0
125
+ - 1
126
+ - 2
127
+ - 3
128
+ - 4
129
+ - 5
130
+ - 6
131
+ - 7
132
+ - 8
133
+ - 9
134
+ - 10
135
+ - 11
136
+ - 12
137
+ - 13
138
+ - 14
139
+ - 15
140
+ modality_keys:
141
+ - left_arm
142
+ - right_arm
143
+ - left_hand
144
+ - right_hand
145
+ - navigate_command
146
+ - base_height_command
147
+ - waist
148
+ sin_cos_embedding_keys: null
149
+ mean_std_embedding_keys: null
150
+ action_configs:
151
+ - rep: RELATIVE
152
+ type: NON_EEF
153
+ format: DEFAULT
154
+ state_key: null
155
+ - rep: RELATIVE
156
+ type: NON_EEF
157
+ format: DEFAULT
158
+ state_key: null
159
+ - rep: ABSOLUTE
160
+ type: NON_EEF
161
+ format: DEFAULT
162
+ state_key: null
163
+ - rep: ABSOLUTE
164
+ type: NON_EEF
165
+ format: DEFAULT
166
+ state_key: null
167
+ - rep: ABSOLUTE
168
+ type: NON_EEF
169
+ format: DEFAULT
170
+ state_key: null
171
+ - rep: ABSOLUTE
172
+ type: NON_EEF
173
+ format: DEFAULT
174
+ state_key: null
175
+ - rep: ABSOLUTE
176
+ type: NON_EEF
177
+ format: DEFAULT
178
+ state_key: null
179
+ language:
180
+ delta_indices:
181
+ - 0
182
+ modality_keys:
183
+ - annotation.human.action.task_description
184
+ sin_cos_embedding_keys: null
185
+ mean_std_embedding_keys: null
186
+ action_configs: null
187
+ download_cache: false
188
+ shard_size: 1024
189
+ episode_sampling_rate: 0.1
190
+ num_shards_per_epoch: 1000
191
+ override_pretraining_statistics: false
192
+ mode: single_turn
193
+ random_chop: 0.0
194
+ mock_dataset_mode: false
195
+ shuffle: true
196
+ seed: 42
197
+ multiprocessing_context: fork
198
+ allow_padding: false
199
+ subsample_ratio: 1.0
200
+ image_crop_size:
201
+ - 244
202
+ - 244
203
+ image_target_size:
204
+ - 224
205
+ - 224
206
+ video_backend: torchcodec
207
+ training:
208
+ output_dir: ./save/test
209
+ experiment_name: null
210
+ max_steps: 20000
211
+ global_batch_size: 96
212
+ batch_size: null
213
+ gradient_accumulation_steps: 1
214
+ learning_rate: 0.0001
215
+ lr_scheduler_type: cosine
216
+ weight_decay: 1.0e-05
217
+ warmup_ratio: 0.05
218
+ warmup_steps: 0
219
+ max_grad_norm: 1.0
220
+ optim: adamw_torch
221
+ start_from_checkpoint: ../models/GR00T-N1.6-G1-PnPAppleToPlate/
222
+ tf32: true
223
+ fp16: false
224
+ bf16: true
225
+ eval_bf16: true
226
+ logging_steps: 10
227
+ save_steps: 1000
228
+ save_total_limit: 5
229
+ save_vl_model: false
230
+ upload_checkpoints: false
231
+ upload_every: 1000
232
+ upload_last_n_checkpoints: 5
233
+ max_concurrent_uploads: 2
234
+ eval_strategy: 'no'
235
+ eval_steps: 500
236
+ eval_set_split_ratio: 0.1
237
+ eval_batch_size: 2
238
+ save_best_eval_metric_name: ''
239
+ save_best_eval_metric_greater_is_better: true
240
+ deepspeed_stage: 2
241
+ gradient_checkpointing: false
242
+ transformers_trust_remote_code: true
243
+ transformers_local_files_only: false
244
+ transformers_cache_dir: null
245
+ transformers_access_token: null
246
+ use_ddp: false
247
+ ddp_bucket_cap_mb: 100
248
+ num_gpus: 2
249
+ dataloader_num_workers: 2
250
+ remove_unused_columns: false
251
+ use_wandb: true
252
+ wandb_project: finetune-gr00t-n1d6
253
+ enable_profiling: false
254
+ max_retries: 3
255
+ assert_loss_less_than: null
256
+ add_rl_callback: false
257
+ enable_open_loop_eval: false
258
+ open_loop_eval_traj_ids:
259
+ - 0
260
+ open_loop_eval_steps_per_traj: 100
261
+ open_loop_eval_plot_indices: null
262
+ max_steps: 20000
263
+ save_steps: 1000
experiment_cfg/config.yaml ADDED
@@ -0,0 +1,305 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ !!python/object:gr00t.configs.base_config.Config
2
+ data: !!python/object:gr00t.configs.data.data_config.DataConfig
3
+ allow_padding: false
4
+ datasets:
5
+ - !!python/object:gr00t.configs.data.data_config.SingleDatasetConfig
6
+ dataset_paths:
7
+ - /home/opuser/wtx/vla/data/new/bottombetter_gr00t
8
+ dataset_type: physical_embodiment
9
+ embodiment_tag: new_embodiment
10
+ mix_ratio: 2.0
11
+ val_dataset_path: null
12
+ - !!python/object:gr00t.configs.data.data_config.SingleDatasetConfig
13
+ dataset_paths:
14
+ - /home/opuser/wtx/vla/data/new/onlybottom_gr00t
15
+ dataset_type: physical_embodiment
16
+ embodiment_tag: new_embodiment
17
+ mix_ratio: 1.0
18
+ val_dataset_path: null
19
+ - !!python/object:gr00t.configs.data.data_config.SingleDatasetConfig
20
+ dataset_paths:
21
+ - /home/opuser/wtx/vla/data/new/squatandup_gr00t
22
+ dataset_type: physical_embodiment
23
+ embodiment_tag: new_embodiment
24
+ mix_ratio: 1.0
25
+ val_dataset_path: null
26
+ - !!python/object:gr00t.configs.data.data_config.SingleDatasetConfig
27
+ dataset_paths:
28
+ - /home/opuser/wtx/vla/data/new/up_gr00t
29
+ dataset_type: physical_embodiment
30
+ embodiment_tag: new_embodiment
31
+ mix_ratio: 1.0
32
+ val_dataset_path: null
33
+ download_cache: false
34
+ episode_sampling_rate: 0.1
35
+ image_crop_size:
36
+ - 244
37
+ - 244
38
+ image_target_size:
39
+ - 224
40
+ - 224
41
+ mock_dataset_mode: false
42
+ modality_configs:
43
+ new_embodiment:
44
+ action: !!python/object:gr00t.data.types.ModalityConfig
45
+ action_configs:
46
+ - !!python/object:gr00t.data.types.ActionConfig
47
+ format: &id001 !!python/object/apply:gr00t.data.types.ActionFormat
48
+ - default
49
+ rep: &id002 !!python/object/apply:gr00t.data.types.ActionRepresentation
50
+ - relative
51
+ state_key: null
52
+ type: &id003 !!python/object/apply:gr00t.data.types.ActionType
53
+ - non_eef
54
+ - !!python/object:gr00t.data.types.ActionConfig
55
+ format: *id001
56
+ rep: *id002
57
+ state_key: null
58
+ type: *id003
59
+ - !!python/object:gr00t.data.types.ActionConfig
60
+ format: *id001
61
+ rep: &id004 !!python/object/apply:gr00t.data.types.ActionRepresentation
62
+ - absolute
63
+ state_key: null
64
+ type: *id003
65
+ - !!python/object:gr00t.data.types.ActionConfig
66
+ format: *id001
67
+ rep: *id004
68
+ state_key: null
69
+ type: *id003
70
+ - !!python/object:gr00t.data.types.ActionConfig
71
+ format: *id001
72
+ rep: *id004
73
+ state_key: null
74
+ type: *id003
75
+ - !!python/object:gr00t.data.types.ActionConfig
76
+ format: *id001
77
+ rep: *id004
78
+ state_key: null
79
+ type: *id003
80
+ - !!python/object:gr00t.data.types.ActionConfig
81
+ format: *id001
82
+ rep: *id004
83
+ state_key: null
84
+ type: *id003
85
+ delta_indices:
86
+ - 0
87
+ - 1
88
+ - 2
89
+ - 3
90
+ - 4
91
+ - 5
92
+ - 6
93
+ - 7
94
+ - 8
95
+ - 9
96
+ - 10
97
+ - 11
98
+ - 12
99
+ - 13
100
+ - 14
101
+ - 15
102
+ mean_std_embedding_keys: null
103
+ modality_keys:
104
+ - left_arm
105
+ - right_arm
106
+ - left_hand
107
+ - right_hand
108
+ - navigate_command
109
+ - base_height_command
110
+ - waist
111
+ sin_cos_embedding_keys: null
112
+ language: !!python/object:gr00t.data.types.ModalityConfig
113
+ action_configs: null
114
+ delta_indices:
115
+ - 0
116
+ mean_std_embedding_keys: null
117
+ modality_keys:
118
+ - annotation.human.action.task_description
119
+ sin_cos_embedding_keys: null
120
+ state: !!python/object:gr00t.data.types.ModalityConfig
121
+ action_configs: null
122
+ delta_indices:
123
+ - 0
124
+ mean_std_embedding_keys: []
125
+ modality_keys:
126
+ - left_arm
127
+ - right_arm
128
+ - left_hand
129
+ - right_hand
130
+ - left_leg
131
+ - right_leg
132
+ sin_cos_embedding_keys:
133
+ - left_arm
134
+ - right_arm
135
+ - left_hand
136
+ - right_hand
137
+ - left_leg
138
+ - right_leg
139
+ video: !!python/object:gr00t.data.types.ModalityConfig
140
+ action_configs: null
141
+ delta_indices:
142
+ - 0
143
+ mean_std_embedding_keys: null
144
+ modality_keys:
145
+ - ego_view
146
+ - wrist_left
147
+ - wrist_right
148
+ sin_cos_embedding_keys: null
149
+ mode: single_turn
150
+ multiprocessing_context: fork
151
+ num_shards_per_epoch: 1000
152
+ override_pretraining_statistics: false
153
+ random_chop: 0.0
154
+ seed: 42
155
+ shard_size: 1024
156
+ shuffle: true
157
+ subsample_ratio: 1.0
158
+ video_backend: torchcodec
159
+ load_config_path: null
160
+ model: !!python/object:gr00t.configs.model.gr00t_n1d6.Gr00tN1d6Config
161
+ _attn_implementation_autoset: false
162
+ _attn_implementation_internal: null
163
+ _commit_hash: null
164
+ _name_or_path: ''
165
+ add_cross_attention: false
166
+ architectures: null
167
+ backbone_model_type: eagle
168
+ backbone_trainable_params_fp32: true
169
+ bad_words_ids: null
170
+ begin_suppress_tokens: null
171
+ bos_token_id: null
172
+ chunk_size_feed_forward: 0
173
+ color_jitter_params:
174
+ brightness: 0.2
175
+ contrast: 0.2
176
+ hue: 0.08
177
+ saturation: 0.2
178
+ cross_attention_hidden_size: null
179
+ decoder_start_token_id: null
180
+ diffusion_model_cfg:
181
+ attention_head_dim: 48
182
+ dropout: 0.2
183
+ final_dropout: true
184
+ interleave_self_attention: true
185
+ norm_type: ada_norm
186
+ num_attention_heads: 32
187
+ num_layers: 32
188
+ output_dim: 1024
189
+ positional_embeddings: null
190
+ diversity_penalty: 0.0
191
+ do_sample: false
192
+ eagle_collator: true
193
+ early_stopping: false
194
+ encoder_no_repeat_ngram_size: 0
195
+ eos_token_id: null
196
+ exponential_decay_length_penalty: null
197
+ finetuning_task: null
198
+ forced_bos_token_id: null
199
+ forced_eos_token_id: null
200
+ id2label:
201
+ 0: LABEL_0
202
+ 1: LABEL_1
203
+ is_decoder: false
204
+ is_encoder_decoder: false
205
+ label2id:
206
+ LABEL_0: 0
207
+ LABEL_1: 1
208
+ length_penalty: 1.0
209
+ load_bf16: false
210
+ max_length: 20
211
+ min_length: 0
212
+ model_name: nvidia/Eagle-Block2A-2B-v2
213
+ no_repeat_ngram_size: 0
214
+ num_beam_groups: 1
215
+ num_beams: 1
216
+ num_return_sequences: 1
217
+ output_attentions: false
218
+ output_hidden_states: false
219
+ output_scores: false
220
+ pad_token_id: null
221
+ prefix: null
222
+ problem_type: null
223
+ pruned_heads: {}
224
+ random_rotation_angle: 5
225
+ remove_invalid_values: false
226
+ repetition_penalty: 1.0
227
+ reproject_vision: false
228
+ return_dict: true
229
+ return_dict_in_generate: false
230
+ sep_token_id: null
231
+ state_dropout_prob: 0.0
232
+ suppress_tokens: null
233
+ task_specific_params: null
234
+ temperature: 1.0
235
+ tf_legacy_loss: false
236
+ tie_encoder_decoder: false
237
+ tie_word_embeddings: true
238
+ tokenizer_class: null
239
+ top_k: 50
240
+ top_p: 1.0
241
+ torch_dtype: null
242
+ torchscript: false
243
+ transformers_version: null
244
+ tune_diffusion_model: true
245
+ tune_llm: false
246
+ tune_projector: true
247
+ tune_visual: false
248
+ typical_p: 1.0
249
+ use_bfloat16: false
250
+ use_relative_action: true
251
+ training: !!python/object:gr00t.configs.training.training_config.TrainingConfig
252
+ add_rl_callback: false
253
+ assert_loss_less_than: null
254
+ batch_size: null
255
+ bf16: true
256
+ dataloader_num_workers: 2
257
+ ddp_bucket_cap_mb: 100
258
+ deepspeed_stage: 2
259
+ enable_open_loop_eval: false
260
+ enable_profiling: false
261
+ eval_batch_size: 2
262
+ eval_bf16: true
263
+ eval_set_split_ratio: 0.1
264
+ eval_steps: 500
265
+ eval_strategy: 'no'
266
+ experiment_name: null
267
+ fp16: false
268
+ global_batch_size: 96
269
+ gradient_accumulation_steps: 1
270
+ gradient_checkpointing: false
271
+ learning_rate: 0.0001
272
+ logging_steps: 10
273
+ lr_scheduler_type: cosine
274
+ max_concurrent_uploads: 2
275
+ max_grad_norm: 1.0
276
+ max_retries: 3
277
+ max_steps: 20000
278
+ num_gpus: 2
279
+ open_loop_eval_plot_indices: null
280
+ open_loop_eval_steps_per_traj: 100
281
+ open_loop_eval_traj_ids:
282
+ - 0
283
+ optim: adamw_torch
284
+ output_dir: ./save/test
285
+ remove_unused_columns: false
286
+ save_best_eval_metric_greater_is_better: true
287
+ save_best_eval_metric_name: ''
288
+ save_steps: 1000
289
+ save_total_limit: 5
290
+ save_vl_model: false
291
+ start_from_checkpoint: ../models/GR00T-N1.6-G1-PnPAppleToPlate/
292
+ tf32: true
293
+ transformers_access_token: null
294
+ transformers_cache_dir: null
295
+ transformers_local_files_only: false
296
+ transformers_trust_remote_code: true
297
+ upload_checkpoints: false
298
+ upload_every: 1000
299
+ upload_last_n_checkpoints: 5
300
+ use_ddp: false
301
+ use_wandb: true
302
+ wandb_project: finetune-gr00t-n1d6
303
+ warmup_ratio: 0.05
304
+ warmup_steps: 0
305
+ weight_decay: 1.0e-05
experiment_cfg/dataset_statistics.json ADDED
@@ -0,0 +1,2398 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "new_embodiment": {
3
+ "state": {
4
+ "left_arm": {
5
+ "min": [
6
+ -0.011289140209555626,
7
+ -0.18929164111614227,
8
+ -0.17192569375038147,
9
+ -0.4075475335121155,
10
+ -0.2102392613887787,
11
+ -0.6168137192726135,
12
+ -0.3702109456062317
13
+ ],
14
+ "max": [
15
+ 0.4650239050388336,
16
+ 0.0822349488735199,
17
+ 0.14088654518127441,
18
+ 0.33636125922203064,
19
+ 0.235897496342659,
20
+ 0.17326313257217407,
21
+ 0.13865652680397034
22
+ ],
23
+ "mean": [
24
+ 0.19130800068378448,
25
+ -0.030624560825526715,
26
+ -0.016826458415016534,
27
+ -0.033245627582073216,
28
+ -0.009227584581822159,
29
+ -0.03419217579066754,
30
+ -0.06839065849781037
31
+ ],
32
+ "std": [
33
+ 0.11796806496850198,
34
+ 0.04606841335968739,
35
+ 0.0636215380512804,
36
+ 0.1509182574371581,
37
+ 0.0857767481262272,
38
+ 0.1980914269640867,
39
+ 0.08929451679199753
40
+ ],
41
+ "q01": [
42
+ 0.03888953112065792,
43
+ -0.1448526906967163,
44
+ -0.165178582072258,
45
+ -0.3712506741285324,
46
+ -0.20844019144773485,
47
+ -0.572006106376648,
48
+ -0.33626349329948424
49
+ ],
50
+ "q99": [
51
+ 0.43300133407115937,
52
+ 0.0608543787896633,
53
+ 0.11066233366727829,
54
+ 0.24378718703985197,
55
+ 0.21668677031993866,
56
+ 0.16165857017040253,
57
+ 0.1128089502453804
58
+ ]
59
+ },
60
+ "right_arm": {
61
+ "min": [
62
+ -0.8936996459960938,
63
+ -0.40124306082725525,
64
+ -0.334096223115921,
65
+ -0.7009453773498535,
66
+ -0.6440802216529846,
67
+ -0.6765852570533752,
68
+ -0.4219137728214264
69
+ ],
70
+ "max": [
71
+ 0.5915054082870483,
72
+ 0.15196077525615692,
73
+ 0.2820966839790344,
74
+ 0.9750485420227051,
75
+ 0.4333256185054779,
76
+ 0.4209703803062439,
77
+ 0.42796531319618225
78
+ ],
79
+ "mean": [
80
+ 0.04414856582880021,
81
+ -0.039402940124273304,
82
+ 0.02765313573181629,
83
+ 0.0030030488967895536,
84
+ -0.1231249049305916,
85
+ 0.011324192583560946,
86
+ 0.004370036162436008
87
+ ],
88
+ "std": [
89
+ 0.2889469780941603,
90
+ 0.11182052314797031,
91
+ 0.11124749730967896,
92
+ 0.2925461096898047,
93
+ 0.1332219540129755,
94
+ 0.24104207171018366,
95
+ 0.10479299808683346
96
+ ],
97
+ "q01": [
98
+ -0.7506175482273102,
99
+ -0.39057492196559906,
100
+ -0.25775383472442626,
101
+ -0.5885431468486786,
102
+ -0.5196446216106415,
103
+ -0.6235537135601044,
104
+ -0.32685558140277865
105
+ ],
106
+ "q99": [
107
+ 0.5253757214546204,
108
+ 0.14831757545471191,
109
+ 0.2747608947753906,
110
+ 0.7634351658821106,
111
+ 0.2827685016393662,
112
+ 0.40120503306388855,
113
+ 0.3053036081790924
114
+ ]
115
+ },
116
+ "left_hand": {
117
+ "min": [
118
+ -0.4308507740497589,
119
+ -0.07498734444379807,
120
+ -7.149366865633056e-05,
121
+ -1.1343876123428345,
122
+ -1.7291572093963623,
123
+ -0.9262771606445312,
124
+ -1.7393150329589844
125
+ ],
126
+ "max": [
127
+ 0.4561558961868286,
128
+ 0.8283541202545166,
129
+ 1.7212395668029785,
130
+ -0.5737205147743225,
131
+ -1.698238492012024,
132
+ -0.4994395077228546,
133
+ -1.3350472450256348
134
+ ],
135
+ "mean": [
136
+ -0.03712950199842453,
137
+ 0.3074806332588196,
138
+ 1.1448044300079347,
139
+ -0.9231053590774537,
140
+ -1.728281021118164,
141
+ -0.7174015879631043,
142
+ -1.7059168100357056
143
+ ],
144
+ "std": [
145
+ 0.09748336171476871,
146
+ 0.15427202150631328,
147
+ 0.274658451937663,
148
+ 0.05865520572900224,
149
+ 0.0005646438281986904,
150
+ 0.047736643729687146,
151
+ 0.03906569018793539
152
+ ],
153
+ "q01": [
154
+ -0.4308471977710724,
155
+ -0.048106592148542404,
156
+ 0.0007971279555931687,
157
+ -1.0903623485565186,
158
+ -1.7291456460952759,
159
+ -0.9106875658035278,
160
+ -1.7393051385879517
161
+ ],
162
+ "q99": [
163
+ 0.41194939613342285,
164
+ 0.8228176236152649,
165
+ 1.702892849445343,
166
+ -0.576155914068222,
167
+ -1.7277528047561646,
168
+ -0.5658304691314697,
169
+ -1.455558454990387
170
+ ]
171
+ },
172
+ "right_hand": {
173
+ "min": [
174
+ -0.5703738331794739,
175
+ -0.8952012062072754,
176
+ -1.532240390777588,
177
+ 0.3799384534358978,
178
+ 1.5859814882278442,
179
+ 0.35724613070487976,
180
+ 0.015858886763453484
181
+ ],
182
+ "max": [
183
+ 0.42615073919296265,
184
+ -0.13926155865192413,
185
+ -0.026440581306815147,
186
+ 1.1064085960388184,
187
+ 1.7162147760391235,
188
+ 0.8596940636634827,
189
+ 1.7120684385299683
190
+ ],
191
+ "mean": [
192
+ -0.13272999529726803,
193
+ -0.43299270272254947,
194
+ -1.0125190734863283,
195
+ 0.8015245676040651,
196
+ 1.7134080171585084,
197
+ 0.5877324700355531,
198
+ 1.296524953842163
199
+ ],
200
+ "std": [
201
+ 0.18466377169158915,
202
+ 0.10091533133867281,
203
+ 0.24365559376935036,
204
+ 0.11026378122504048,
205
+ 0.0069701885688247785,
206
+ 0.062232877237592195,
207
+ 0.7004376526967906
208
+ ],
209
+ "q01": [
210
+ -0.5532291948795318,
211
+ -0.8328991675376892,
212
+ -1.4858814477920532,
213
+ 0.47850008606910704,
214
+ 1.6556979274749757,
215
+ 0.4564783811569214,
216
+ 0.015995755791664124
217
+ ],
218
+ "q99": [
219
+ 0.4223690330982208,
220
+ -0.16838766634464264,
221
+ -0.02651035785675049,
222
+ 1.074120774269104,
223
+ 1.716203212738037,
224
+ 0.7812296414375303,
225
+ 1.7061939239501953
226
+ ]
227
+ },
228
+ "left_leg": {
229
+ "min": [
230
+ -1.422214150428772,
231
+ -0.07056652009487152,
232
+ -0.2589189112186432,
233
+ 0.6974243521690369,
234
+ -0.8536531329154968,
235
+ -0.13676193356513977
236
+ ],
237
+ "max": [
238
+ -0.28842952847480774,
239
+ 0.08445075154304504,
240
+ 0.02804262936115265,
241
+ 1.9131978750228882,
242
+ -0.3375410735607147,
243
+ 0.027291197329759598
244
+ ],
245
+ "mean": [
246
+ -1.0596442937850954,
247
+ 0.002535492181777954,
248
+ -0.07085499209351838,
249
+ 1.5717456579208375,
250
+ -0.6964917659759522,
251
+ -0.03794607711024583
252
+ ],
253
+ "std": [
254
+ 0.40223161067712854,
255
+ 0.03260776146699287,
256
+ 0.0784275233882657,
257
+ 0.47068857015535376,
258
+ 0.17706840196832876,
259
+ 0.04464015894489976
260
+ ],
261
+ "q01": [
262
+ -1.4216517210006714,
263
+ -0.0668594092130661,
264
+ -0.2586912512779236,
265
+ 0.7006627917289734,
266
+ -0.8510374426841736,
267
+ -0.13286696255207062
268
+ ],
269
+ "q99": [
270
+ -0.29943767189979553,
271
+ 0.08303046196699143,
272
+ 0.02635524980723858,
273
+ 1.9129592180252075,
274
+ -0.33904141187667847,
275
+ 0.024954118449240923
276
+ ]
277
+ },
278
+ "right_leg": {
279
+ "min": [
280
+ -1.4187724590301514,
281
+ -0.0656202882528305,
282
+ 0.00026783792418427765,
283
+ 0.6745510101318359,
284
+ -0.8212981820106506,
285
+ 0.013007540255784988
286
+ ],
287
+ "max": [
288
+ -0.3165123462677002,
289
+ 0.06480216234922409,
290
+ 0.18107181787490845,
291
+ 1.9028350114822388,
292
+ -0.2781081795692444,
293
+ 0.12130604684352875
294
+ ],
295
+ "mean": [
296
+ -1.073228693008423,
297
+ -0.02168194577097893,
298
+ 0.07015937007963657,
299
+ 1.5623790979385377,
300
+ -0.6487995862960816,
301
+ 0.05324371419847013
302
+ ],
303
+ "std": [
304
+ 0.394134622213972,
305
+ 0.03250279958078939,
306
+ 0.04818224696063722,
307
+ 0.4701026346197213,
308
+ 0.18021373812735458,
309
+ 0.029904964425521728
310
+ ],
311
+ "q01": [
312
+ -1.407522473335266,
313
+ -0.06545274674892426,
314
+ 0.0009642164804972708,
315
+ 0.6817948222160339,
316
+ -0.8203311705589295,
317
+ 0.013861786108464003
318
+ ],
319
+ "q99": [
320
+ -0.3197129964828491,
321
+ 0.06012352555990219,
322
+ 0.17919695377349854,
323
+ 1.9026389122009277,
324
+ -0.2990111708641052,
325
+ 0.11856115609407425
326
+ ]
327
+ }
328
+ },
329
+ "action": {
330
+ "left_arm": {
331
+ "min": [
332
+ -0.043507836759090424,
333
+ -0.19300206005573273,
334
+ -0.17000995576381683,
335
+ -0.457133024930954,
336
+ -0.2558232545852661,
337
+ -0.7767539620399475,
338
+ -0.3854255974292755
339
+ ],
340
+ "max": [
341
+ 0.46476179361343384,
342
+ 0.09925973415374756,
343
+ 0.1419534981250763,
344
+ 0.3048163950443268,
345
+ 0.27031639218330383,
346
+ 0.1420191526412964,
347
+ 0.17619135975837708
348
+ ],
349
+ "mean": [
350
+ 0.1696782998740673,
351
+ -0.023585575632750988,
352
+ -0.01505371122621,
353
+ -0.07738002762198448,
354
+ -0.014382923254743218,
355
+ -0.10339359939098357,
356
+ -0.06676231212913991
357
+ ],
358
+ "std": [
359
+ 0.12803843788956662,
360
+ 0.04662123028997118,
361
+ 0.06390597882701712,
362
+ 0.15199993519216853,
363
+ 0.08522864821570098,
364
+ 0.241558620952415,
365
+ 0.0932142691085183
366
+ ],
367
+ "q01": [
368
+ -0.00044505414320155967,
369
+ -0.14284758090972902,
370
+ -0.16365637511014938,
371
+ -0.416116259098053,
372
+ -0.23396779596805573,
373
+ -0.710577826499939,
374
+ -0.34575263142585755
375
+ ],
376
+ "q99": [
377
+ 0.4313181138038635,
378
+ 0.07442810654640193,
379
+ 0.11147458210587498,
380
+ 0.19673391342163074,
381
+ 0.2185818526148796,
382
+ 0.13099999725818634,
383
+ 0.11929280713200568
384
+ ]
385
+ },
386
+ "right_arm": {
387
+ "min": [
388
+ -1.0275291204452515,
389
+ -0.4643804132938385,
390
+ -0.3515032231807709,
391
+ -0.7469795942306519,
392
+ -0.6372695565223694,
393
+ -0.6936403512954712,
394
+ -0.4810679852962494
395
+ ],
396
+ "max": [
397
+ 0.6011592745780945,
398
+ 0.1821075826883316,
399
+ 0.2879699170589447,
400
+ 0.9328665733337402,
401
+ 0.4629268944263458,
402
+ 0.32991257309913635,
403
+ 0.4385724365711212
404
+ ],
405
+ "mean": [
406
+ 0.00891136899590492,
407
+ -0.05151632176712156,
408
+ 0.021579982340335847,
409
+ -0.04002549201250076,
410
+ -0.11145458705723287,
411
+ -0.05795883238315584,
412
+ -0.010265008732676505
413
+ ],
414
+ "std": [
415
+ 0.3140306216330988,
416
+ 0.12338726576862097,
417
+ 0.11440506640083221,
418
+ 0.2951592361914636,
419
+ 0.13178699409892775,
420
+ 0.24463186503015064,
421
+ 0.12069990363573485
422
+ ],
423
+ "q01": [
424
+ -0.8609240639209748,
425
+ -0.44051639556884764,
426
+ -0.2632049125432968,
427
+ -0.6346961855888367,
428
+ -0.5051181244850159,
429
+ -0.646638994216919,
430
+ -0.36353427231311797
431
+ ],
432
+ "q99": [
433
+ 0.5331923937797547,
434
+ 0.16028301090002037,
435
+ 0.2741725987195968,
436
+ 0.7285317516326905,
437
+ 0.29524138689041146,
438
+ 0.2957756376266479,
439
+ 0.31068606793880466
440
+ ]
441
+ },
442
+ "left_hand": {
443
+ "min": [
444
+ -0.4885875880718231,
445
+ -0.08734771609306335,
446
+ -0.0002891957410611212,
447
+ -1.1437904834747314,
448
+ -1.7463293075561523,
449
+ -0.9773809909820557,
450
+ -1.7463293075561523
451
+ ],
452
+ "max": [
453
+ 0.49287089705467224,
454
+ 0.8828208446502686,
455
+ 1.7463287115097046,
456
+ -0.5561106204986572,
457
+ -1.679382562637329,
458
+ -0.4723009169101715,
459
+ -1.3175609111785889
460
+ ],
461
+ "mean": [
462
+ -0.035645636916160586,
463
+ 0.30426059365272523,
464
+ 1.1468801975250247,
465
+ -0.9229173660278321,
466
+ -1.7390141725540165,
467
+ -0.7169671654701233,
468
+ -1.7152442932128908
469
+ ],
470
+ "std": [
471
+ 0.10558863806405731,
472
+ 0.15556354053314278,
473
+ 0.274966213378277,
474
+ 0.059852234904666776,
475
+ 0.008721348223316,
476
+ 0.0483313217931266,
477
+ 0.04397918845464365
478
+ ],
479
+ "q01": [
480
+ -0.4792321288585663,
481
+ -0.06071115307509899,
482
+ 0.0005660237569827586,
483
+ -1.0848853015899658,
484
+ -1.7463293075561523,
485
+ -0.9110012412071228,
486
+ -1.7463293075561523
487
+ ],
488
+ "q99": [
489
+ 0.46538531184196474,
490
+ 0.818757187128067,
491
+ 1.7278532934188842,
492
+ -0.5726673984527587,
493
+ -1.7283095121383667,
494
+ -0.5693804585933687,
495
+ -1.4647188830375673
496
+ ]
497
+ },
498
+ "right_hand": {
499
+ "min": [
500
+ -0.7485748529434204,
501
+ -0.9209996461868286,
502
+ -1.5628033876419067,
503
+ 0.3053888976573944,
504
+ 1.452519416809082,
505
+ 0.297732949256897,
506
+ -0.000501211907248944
507
+ ],
508
+ "max": [
509
+ 0.4749326705932617,
510
+ -0.09852339327335358,
511
+ 0.00017406446568202227,
512
+ 1.1213138103485107,
513
+ 1.7463293075561523,
514
+ 0.9700140953063965,
515
+ 1.7463293075561523
516
+ ],
517
+ "mean": [
518
+ -0.13885343677829953,
519
+ -0.4348726630210877,
520
+ -1.01420636177063,
521
+ 0.8012608528137208,
522
+ 1.7315913438796997,
523
+ 0.5789052844047546,
524
+ 1.3044527173042297
525
+ ],
526
+ "std": [
527
+ 0.21074555852641977,
528
+ 0.10747303052138377,
529
+ 0.25150596300844263,
530
+ 0.11569333127690982,
531
+ 0.01825667187158041,
532
+ 0.07030335456707495,
533
+ 0.7244846008214193
534
+ ],
535
+ "q01": [
536
+ -0.6408199524879455,
537
+ -0.9006346189975738,
538
+ -1.4945218634605408,
539
+ 0.41831121981143954,
540
+ 1.6595230221748352,
541
+ 0.4047985315322876,
542
+ -0.00029744467115961015
543
+ ],
544
+ "q99": [
545
+ 0.45639603078365326,
546
+ -0.1524571916460991,
547
+ -0.0003798419167287625,
548
+ 1.0795005273818967,
549
+ 1.7463293075561523,
550
+ 0.7881050503253937,
551
+ 1.7463293075561523
552
+ ]
553
+ },
554
+ "navigate_command": {
555
+ "min": [
556
+ 0.0,
557
+ 0.0,
558
+ 0.0
559
+ ],
560
+ "max": [
561
+ 0.0,
562
+ 0.0,
563
+ 0.0
564
+ ],
565
+ "mean": [
566
+ 0.0,
567
+ 0.0,
568
+ 0.0
569
+ ],
570
+ "std": [
571
+ 0.0,
572
+ 0.0,
573
+ 0.0
574
+ ],
575
+ "q01": [
576
+ 0.0,
577
+ 0.0,
578
+ 0.0
579
+ ],
580
+ "q99": [
581
+ 0.0,
582
+ 0.0,
583
+ 0.0
584
+ ]
585
+ },
586
+ "base_height_command": {
587
+ "min": [
588
+ 0.5
589
+ ],
590
+ "max": [
591
+ 0.7400000095367432
592
+ ],
593
+ "mean": [
594
+ 0.5754452347755432
595
+ ],
596
+ "std": [
597
+ 0.09933651543404387
598
+ ],
599
+ "q01": [
600
+ 0.5
601
+ ],
602
+ "q99": [
603
+ 0.7400000095367432
604
+ ]
605
+ },
606
+ "waist": {
607
+ "min": [
608
+ 0.0,
609
+ 0.0,
610
+ 0.0
611
+ ],
612
+ "max": [
613
+ 0.0,
614
+ 0.0,
615
+ 0.0
616
+ ],
617
+ "mean": [
618
+ 0.0,
619
+ 0.0,
620
+ 0.0
621
+ ],
622
+ "std": [
623
+ 0.0,
624
+ 0.0,
625
+ 0.0
626
+ ],
627
+ "q01": [
628
+ 0.0,
629
+ 0.0,
630
+ 0.0
631
+ ],
632
+ "q99": [
633
+ 0.0,
634
+ 0.0,
635
+ 0.0
636
+ ]
637
+ }
638
+ },
639
+ "relative_action": {
640
+ "left_arm": {
641
+ "min": [
642
+ [
643
+ -0.15515196323394775,
644
+ -0.045736201107501984,
645
+ -0.024492042139172554,
646
+ -0.10552060604095459,
647
+ -0.15750135481357574,
648
+ -0.25853198766708374,
649
+ -0.09323412925004959
650
+ ],
651
+ [
652
+ -0.18419429659843445,
653
+ -0.05562318116426468,
654
+ -0.03031637705862522,
655
+ -0.12201627343893051,
656
+ -0.17836607992649078,
657
+ -0.30837374925613403,
658
+ -0.09634796530008316
659
+ ],
660
+ [
661
+ -0.19664421677589417,
662
+ -0.058419905602931976,
663
+ -0.03524252772331238,
664
+ -0.13793230056762695,
665
+ -0.17960046231746674,
666
+ -0.33252033591270447,
667
+ -0.0994604229927063
668
+ ],
669
+ [
670
+ -0.19699177145957947,
671
+ -0.059031106531620026,
672
+ -0.03758798912167549,
673
+ -0.1468992680311203,
674
+ -0.1793607771396637,
675
+ -0.3230270743370056,
676
+ -0.10582402348518372
677
+ ],
678
+ [
679
+ -0.19705168902873993,
680
+ -0.059378646314144135,
681
+ -0.039457447826862335,
682
+ -0.15880872309207916,
683
+ -0.1793607771396637,
684
+ -0.3155611455440521,
685
+ -0.11980853229761124
686
+ ],
687
+ [
688
+ -0.1971116065979004,
689
+ -0.059678249061107635,
690
+ -0.04326734319329262,
691
+ -0.16800417006015778,
692
+ -0.17934879660606384,
693
+ -0.3084644377231598,
694
+ -0.13472649455070496
695
+ ],
696
+ [
697
+ -0.19714756309986115,
698
+ -0.06220733001828194,
699
+ -0.046173516660928726,
700
+ -0.17263169586658478,
701
+ -0.1793607771396637,
702
+ -0.31981441378593445,
703
+ -0.1477247178554535
704
+ ],
705
+ [
706
+ -0.1971355676651001,
707
+ -0.06931373476982117,
708
+ -0.05177014693617821,
709
+ -0.17713414132595062,
710
+ -0.17934879660606384,
711
+ -0.33190205693244934,
712
+ -0.15818646550178528
713
+ ],
714
+ [
715
+ -0.19764932990074158,
716
+ -0.07576124370098114,
717
+ -0.05683555454015732,
718
+ -0.1878727674484253,
719
+ -0.17934879660606384,
720
+ -0.3419594466686249,
721
+ -0.1691160798072815
722
+ ],
723
+ [
724
+ -0.19891291856765747,
725
+ -0.07885689288377762,
726
+ -0.061009373515844345,
727
+ -0.1966828852891922,
728
+ -0.1793847382068634,
729
+ -0.3514649569988251,
730
+ -0.18021441996097565
731
+ ],
732
+ [
733
+ -0.20028606057167053,
734
+ -0.08187691122293472,
735
+ -0.06441289186477661,
736
+ -0.2035556584596634,
737
+ -0.17939673364162445,
738
+ -0.36611804366111755,
739
+ -0.18831488490104675
740
+ ],
741
+ [
742
+ -0.20100006461143494,
743
+ -0.08386436104774475,
744
+ -0.06752326339483261,
745
+ -0.20997919142246246,
746
+ -0.1880127191543579,
747
+ -0.38052210211753845,
748
+ -0.19379886984825134
749
+ ],
750
+ [
751
+ -0.20134761929512024,
752
+ -0.08490698784589767,
753
+ -0.06970439106225967,
754
+ -0.21427622437477112,
755
+ -0.19453883171081543,
756
+ -0.3942354917526245,
757
+ -0.200724795460701
758
+ ],
759
+ [
760
+ -0.2014075368642807,
761
+ -0.08538635820150375,
762
+ -0.07367339730262756,
763
+ -0.2224951982498169,
764
+ -0.19935956597328186,
765
+ -0.4063231348991394,
766
+ -0.20607827603816986
767
+ ],
768
+ [
769
+ -0.20146745443344116,
770
+ -0.08548223227262497,
771
+ -0.07784721255302429,
772
+ -0.2324540913105011,
773
+ -0.20202352106571198,
774
+ -0.41638052463531494,
775
+ -0.20973870158195496
776
+ ],
777
+ [
778
+ -0.20150341093540192,
779
+ -0.08556612581014633,
780
+ -0.08201441168785095,
781
+ -0.241264209151268,
782
+ -0.20417222380638123,
783
+ -0.4251384735107422,
784
+ -0.21286801993846893
785
+ ]
786
+ ],
787
+ "max": [
788
+ [
789
+ 0.015977757051587105,
790
+ 0.04404575005173683,
791
+ 0.03901921212673187,
792
+ 0.10091377049684525,
793
+ 0.08788624405860901,
794
+ 0.016215423122048378,
795
+ 0.10296215116977692
796
+ ],
797
+ [
798
+ 0.02142711542546749,
799
+ 0.051759835332632065,
800
+ 0.051609497517347336,
801
+ 0.1366765797138214,
802
+ 0.09392031282186508,
803
+ 0.030708149075508118,
804
+ 0.10341467708349228
805
+ ],
806
+ [
807
+ 0.026808014139533043,
808
+ 0.05809210240840912,
809
+ 0.05343788117170334,
810
+ 0.15200598537921906,
811
+ 0.10163187980651855,
812
+ 0.04287641495466232,
813
+ 0.10454358160495758
814
+ ],
815
+ [
816
+ 0.031116759404540062,
817
+ 0.06432388722896576,
818
+ 0.05431273207068443,
819
+ 0.1322161704301834,
820
+ 0.10834864526987076,
821
+ 0.051765017211437225,
822
+ 0.10622887313365936
823
+ ],
824
+ [
825
+ 0.03384915739297867,
826
+ 0.07279147207736969,
827
+ 0.05241922289133072,
828
+ 0.11756332218647003,
829
+ 0.11264035105705261,
830
+ 0.0635506734251976,
831
+ 0.10709062218666077
832
+ ],
833
+ [
834
+ 0.036437757313251495,
835
+ 0.08065453171730042,
836
+ 0.052275411784648895,
837
+ 0.11934897303581238,
838
+ 0.11375512182712555,
839
+ 0.0757189393043518,
840
+ 0.10738974809646606
841
+ ],
842
+ [
843
+ 0.038808662444353104,
844
+ 0.08584894239902496,
845
+ 0.052143584936857224,
846
+ 0.12053541094064713,
847
+ 0.1148640587925911,
848
+ 0.08740021288394928,
849
+ 0.10774257034063339
850
+ ],
851
+ [
852
+ 0.0408271886408329,
853
+ 0.09017525613307953,
854
+ 0.05222747474908829,
855
+ 0.12393893301486969,
856
+ 0.11744704842567444,
857
+ 0.09852923452854156,
858
+ 0.10797266662120819
859
+ ],
860
+ [
861
+ 0.04284054413437843,
862
+ 0.09323757886886597,
863
+ 0.053877439349889755,
864
+ 0.12637172639369965,
865
+ 0.11899301409721375,
866
+ 0.10976564884185791,
867
+ 0.10800334066152573
868
+ ],
869
+ [
870
+ 0.044674474745988846,
871
+ 0.09445846080780029,
872
+ 0.05536110699176788,
873
+ 0.12682713568210602,
874
+ 0.12020821869373322,
875
+ 0.11865425109863281,
876
+ 0.10801101475954056
877
+ ],
878
+ [
879
+ 0.04695464298129082,
880
+ 0.09523697197437286,
881
+ 0.05654754489660263,
882
+ 0.12709078192710876,
883
+ 0.1205090582370758,
884
+ 0.12692444026470184,
885
+ 0.1101171150803566
886
+ ],
887
+ [
888
+ 0.05064055323600769,
889
+ 0.0952242910861969,
890
+ 0.05768439918756485,
891
+ 0.12719863653182983,
892
+ 0.12062890827655792,
893
+ 0.14295460283756256,
894
+ 0.11454012989997864
895
+ ],
896
+ [
897
+ 0.053056422621011734,
898
+ 0.09705071896314621,
899
+ 0.05849326401948929,
900
+ 0.1311093121767044,
901
+ 0.12070954591035843,
902
+ 0.15608549118041992,
903
+ 0.12060114741325378
904
+ ],
905
+ [
906
+ 0.05454246699810028,
907
+ 0.09841693192720413,
908
+ 0.05857622250914574,
909
+ 0.1408339887857437,
910
+ 0.12073350697755814,
911
+ 0.16915448009967804,
912
+ 0.12555661797523499
913
+ ],
914
+ [
915
+ 0.0553046353161335,
916
+ 0.09922627359628677,
917
+ 0.058597978204488754,
918
+ 0.14853587746620178,
919
+ 0.12078144401311874,
920
+ 0.1792045384645462,
921
+ 0.12993615865707397
922
+ ],
923
+ [
924
+ 0.0578119158744812,
925
+ 0.09952587634325027,
926
+ 0.058577150106430054,
927
+ 0.15937454998493195,
928
+ 0.12015275657176971,
929
+ 0.18646536767482758,
930
+ 0.13356173038482666
931
+ ]
932
+ ],
933
+ "mean": [
934
+ [
935
+ -0.021402371861040593,
936
+ 0.007016860181465745,
937
+ 0.0017488324316218497,
938
+ -0.04416413977742195,
939
+ -0.005079981824383141,
940
+ -0.06916978061199189,
941
+ 0.0016838881652802232
942
+ ],
943
+ [
944
+ -0.021368065476417543,
945
+ 0.007029336644336581,
946
+ 0.0017471882281824946,
947
+ -0.0441769689321518,
948
+ -0.005100805591791869,
949
+ -0.06917818263173103,
950
+ 0.0016575533198192722
951
+ ],
952
+ [
953
+ -0.021334410272538663,
954
+ 0.007041676715016365,
955
+ 0.0017458805814385414,
956
+ -0.044189384579658514,
957
+ -0.005122201563790441,
958
+ -0.06918637081980705,
959
+ 0.0016304589342325932
960
+ ],
961
+ [
962
+ -0.02130136825144291,
963
+ 0.007053840905427933,
964
+ 0.0017448439029976726,
965
+ -0.04420130476355552,
966
+ -0.005143963219597937,
967
+ -0.06919488236308098,
968
+ 0.0016027647070586686
969
+ ],
970
+ [
971
+ -0.021268879622220994,
972
+ 0.007065710611641407,
973
+ 0.00174398438539356,
974
+ -0.044212906062603,
975
+ -0.0051660287193954005,
976
+ -0.06920353248715401,
977
+ 0.0015745056327432397
978
+ ],
979
+ [
980
+ -0.021236796118319034,
981
+ 0.007077315077185631,
982
+ 0.0017431867076084019,
983
+ -0.04422404021024705,
984
+ -0.005188274057582022,
985
+ -0.06921244636178017,
986
+ 0.0015456811990588906
987
+ ],
988
+ [
989
+ -0.0212050361558795,
990
+ 0.007088677026331425,
991
+ 0.0017423689598217607,
992
+ -0.044234873354434975,
993
+ -0.005210548732429743,
994
+ -0.06922157108783722,
995
+ 0.0015163469128310681
996
+ ],
997
+ [
998
+ -0.021173332259058954,
999
+ 0.007099926518276334,
1000
+ 0.0017415070440620186,
1001
+ -0.0442459836602211,
1002
+ -0.005233019636943938,
1003
+ -0.06923108398914338,
1004
+ 0.0014864533208310605
1005
+ ],
1006
+ [
1007
+ -0.021141689084470273,
1008
+ 0.007111105415970087,
1009
+ 0.001740648946724832,
1010
+ -0.04425747245550156,
1011
+ -0.005255755176767708,
1012
+ -0.06924024298787117,
1013
+ 0.0014559679664671423
1014
+ ],
1015
+ [
1016
+ -0.02111002951860428,
1017
+ 0.007122210739180446,
1018
+ 0.00173980207182467,
1019
+ -0.044269248843193054,
1020
+ -0.005278446385636926,
1021
+ -0.06924975737929344,
1022
+ 0.0014251190237700938
1023
+ ],
1024
+ [
1025
+ -0.0210784412920475,
1026
+ 0.007133308099582791,
1027
+ 0.0017389464424923064,
1028
+ -0.04428143575787545,
1029
+ -0.0053012786898762,
1030
+ -0.06925870105624199,
1031
+ 0.001393885212019086
1032
+ ],
1033
+ [
1034
+ -0.021046884916722776,
1035
+ 0.007144404482096434,
1036
+ 0.0017380359116941691,
1037
+ -0.044293709099292755,
1038
+ -0.005324012087658049,
1039
+ -0.0692678950726986,
1040
+ 0.0013625514693558216
1041
+ ],
1042
+ [
1043
+ -0.021015271544456482,
1044
+ 0.007155424822121859,
1045
+ 0.0017369574401527644,
1046
+ -0.04430628940463066,
1047
+ -0.005346387391909958,
1048
+ -0.06927687674760818,
1049
+ 0.001331423781812191
1050
+ ],
1051
+ [
1052
+ -0.020983435213565826,
1053
+ 0.007166293263435364,
1054
+ 0.0017356475349515677,
1055
+ -0.04431931748986244,
1056
+ -0.005368281574919821,
1057
+ -0.06928601041436196,
1058
+ 0.0013006654102355243
1059
+ ],
1060
+ [
1061
+ -0.020951600931584834,
1062
+ 0.007176961842924357,
1063
+ 0.001734083564952016,
1064
+ -0.04433249309659004,
1065
+ -0.005389722296968104,
1066
+ -0.06929543614387512,
1067
+ 0.001270345645025373
1068
+ ],
1069
+ [
1070
+ -0.02091988679021597,
1071
+ 0.007187485601752997,
1072
+ 0.0017323867650702598,
1073
+ -0.04434554725885391,
1074
+ -0.005410949653014542,
1075
+ -0.06930534169077873,
1076
+ 0.0012403813190758227
1077
+ ]
1078
+ ],
1079
+ "std": [
1080
+ [
1081
+ 0.012273319832679926,
1082
+ 0.004792123389772546,
1083
+ 0.0030629948425422164,
1084
+ 0.007962466087738,
1085
+ 0.0188870695854604,
1086
+ 0.05276113561431864,
1087
+ 0.020519022787874937
1088
+ ],
1089
+ [
1090
+ 0.01252153806021515,
1091
+ 0.005350438755626391,
1092
+ 0.003435064834485828,
1093
+ 0.009172896657510952,
1094
+ 0.019126292577808984,
1095
+ 0.053030882190167265,
1096
+ 0.020655243013669094
1097
+ ],
1098
+ [
1099
+ 0.012791952525106445,
1100
+ 0.005909996646578049,
1101
+ 0.003810785061145414,
1102
+ 0.010355404048512736,
1103
+ 0.019375741407152505,
1104
+ 0.05331453488746271,
1105
+ 0.020803847618059663
1106
+ ],
1107
+ [
1108
+ 0.013078449505367155,
1109
+ 0.006452405545210825,
1110
+ 0.004179990655314982,
1111
+ 0.011489935909975086,
1112
+ 0.01962937045963947,
1113
+ 0.05360755780341504,
1114
+ 0.020961930686369406
1115
+ ],
1116
+ [
1117
+ 0.013377984664266079,
1118
+ 0.006969789090336009,
1119
+ 0.004539178421112893,
1120
+ 0.01257738748810594,
1121
+ 0.019884147419194417,
1122
+ 0.05390695489366628,
1123
+ 0.021127320350195357
1124
+ ],
1125
+ [
1126
+ 0.013688189982906206,
1127
+ 0.0074598205342514936,
1128
+ 0.004887214786150608,
1129
+ 0.013619318953561093,
1130
+ 0.02013809096964683,
1131
+ 0.054210225914114694,
1132
+ 0.02129829149154329
1133
+ ],
1134
+ [
1135
+ 0.014006825917739059,
1136
+ 0.007922757861353248,
1137
+ 0.005224418357530658,
1138
+ 0.014617080869127561,
1139
+ 0.020390362705341735,
1140
+ 0.05451553600810269,
1141
+ 0.02147351687459061
1142
+ ],
1143
+ [
1144
+ 0.01433181329642198,
1145
+ 0.008360113095242394,
1146
+ 0.005551402836393504,
1147
+ 0.015573151899356104,
1148
+ 0.02064040487350716,
1149
+ 0.054821497729958445,
1150
+ 0.021652210881399805
1151
+ ],
1152
+ [
1153
+ 0.014661087324725224,
1154
+ 0.008773910272573335,
1155
+ 0.005868315953589183,
1156
+ 0.016488027405602193,
1157
+ 0.020887151862210514,
1158
+ 0.05512624945632845,
1159
+ 0.021833420208201684
1160
+ ],
1161
+ [
1162
+ 0.014992968906860098,
1163
+ 0.009166447667377632,
1164
+ 0.006175156264237689,
1165
+ 0.017364618480518082,
1166
+ 0.021129904783352783,
1167
+ 0.05542966838809374,
1168
+ 0.022016449929376375
1169
+ ],
1170
+ [
1171
+ 0.015326514360498433,
1172
+ 0.009539896101054789,
1173
+ 0.00647201137721252,
1174
+ 0.018206403222399865,
1175
+ 0.021368382557532623,
1176
+ 0.05573005636716217,
1177
+ 0.0222003585857611
1178
+ ],
1179
+ [
1180
+ 0.015660820174437182,
1181
+ 0.009896239640522388,
1182
+ 0.006758953073386748,
1183
+ 0.019016733609354953,
1184
+ 0.0216021971783099,
1185
+ 0.05602749188658284,
1186
+ 0.02238440948172053
1187
+ ],
1188
+ [
1189
+ 0.015995198988259435,
1190
+ 0.010237036542283114,
1191
+ 0.007036269749665774,
1192
+ 0.019798116775747,
1193
+ 0.021831338430420455,
1194
+ 0.0563209740667644,
1195
+ 0.022567674313631488
1196
+ ],
1197
+ [
1198
+ 0.016329123058304185,
1199
+ 0.01056316893917735,
1200
+ 0.007304446760275671,
1201
+ 0.020552256987126895,
1202
+ 0.022055744446104215,
1203
+ 0.056610439569026956,
1204
+ 0.022749615877705326
1205
+ ],
1206
+ [
1207
+ 0.016661927242001676,
1208
+ 0.0108755773082198,
1209
+ 0.007563938410885428,
1210
+ 0.021280890840779176,
1211
+ 0.022275929167205832,
1212
+ 0.05689602926733075,
1213
+ 0.022930184326651865
1214
+ ],
1215
+ [
1216
+ 0.01699328441277866,
1217
+ 0.011175034823533889,
1218
+ 0.007815005959072195,
1219
+ 0.021985668110932152,
1220
+ 0.022492188108221147,
1221
+ 0.057178159879313684,
1222
+ 0.02310899004158941
1223
+ ]
1224
+ ],
1225
+ "q01": [
1226
+ [
1227
+ -0.04622314259409904,
1228
+ -0.008307285904884339,
1229
+ -0.008490394130349159,
1230
+ -0.0676896670460701,
1231
+ -0.07445713311433792,
1232
+ -0.19362182378768922,
1233
+ -0.061928598135709764
1234
+ ],
1235
+ [
1236
+ -0.046612609177827835,
1237
+ -0.011650921180844306,
1238
+ -0.010291783660650254,
1239
+ -0.07346388190984726,
1240
+ -0.07535391837358475,
1241
+ -0.19773479521274567,
1242
+ -0.06232737243175507
1243
+ ],
1244
+ [
1245
+ -0.04743372246623039,
1246
+ -0.014996273964643479,
1247
+ -0.011952423937618733,
1248
+ -0.07862176477909089,
1249
+ -0.07632588148117066,
1250
+ -0.20148021459579468,
1251
+ -0.0628684788942337
1252
+ ],
1253
+ [
1254
+ -0.04855104118585587,
1255
+ -0.0177967519313097,
1256
+ -0.013546783439815045,
1257
+ -0.08359089076519012,
1258
+ -0.07716200590133666,
1259
+ -0.20549540758132934,
1260
+ -0.06333057880401612
1261
+ ],
1262
+ [
1263
+ -0.0501094363629818,
1264
+ -0.020702863186597826,
1265
+ -0.01516138829290867,
1266
+ -0.08846967458724976,
1267
+ -0.07789466947317124,
1268
+ -0.20938783168792724,
1269
+ -0.06382771521806717
1270
+ ],
1271
+ [
1272
+ -0.05136882558465004,
1273
+ -0.02320220932364464,
1274
+ -0.016573313996195792,
1275
+ -0.09259165406227111,
1276
+ -0.07890617012977601,
1277
+ -0.21290286481380463,
1278
+ -0.06448370844125748
1279
+ ],
1280
+ [
1281
+ -0.052903211414813994,
1282
+ -0.025262738391757012,
1283
+ -0.017913958728313445,
1284
+ -0.09684110671281815,
1285
+ -0.08008257806301117,
1286
+ -0.21620362639427185,
1287
+ -0.06511226683855056
1288
+ ],
1289
+ [
1290
+ -0.05428747460246086,
1291
+ -0.027835426777601244,
1292
+ -0.019191644936800003,
1293
+ -0.1010166883468628,
1294
+ -0.08070138275623322,
1295
+ -0.21999299585819243,
1296
+ -0.06565656781196594
1297
+ ],
1298
+ [
1299
+ -0.055708788335323334,
1300
+ -0.030057060346007348,
1301
+ -0.02054811894893646,
1302
+ -0.10428876161575318,
1303
+ -0.08152741551399231,
1304
+ -0.2225034373998642,
1305
+ -0.06588516235351563
1306
+ ],
1307
+ [
1308
+ -0.057639194279909135,
1309
+ -0.03201180726289749,
1310
+ -0.022005736231803893,
1311
+ -0.10758236765861512,
1312
+ -0.08232917964458465,
1313
+ -0.2261549198627472,
1314
+ -0.0668011498451233
1315
+ ],
1316
+ [
1317
+ -0.0590645532310009,
1318
+ -0.034173800349235534,
1319
+ -0.023271598368883133,
1320
+ -0.11097691774368286,
1321
+ -0.0830073583126068,
1322
+ -0.22886166214942932,
1323
+ -0.06737253874540329
1324
+ ],
1325
+ [
1326
+ -0.06030508399009705,
1327
+ -0.03593111291527748,
1328
+ -0.02421952731907368,
1329
+ -0.11398636490106583,
1330
+ -0.08369743198156357,
1331
+ -0.2312383097410202,
1332
+ -0.06778757721185684
1333
+ ],
1334
+ [
1335
+ -0.06210161298513413,
1336
+ -0.03801637142896652,
1337
+ -0.02529083549976349,
1338
+ -0.11655550122261048,
1339
+ -0.08422239661216736,
1340
+ -0.23488296568393707,
1341
+ -0.06788509011268616
1342
+ ],
1343
+ [
1344
+ -0.06320643812417984,
1345
+ -0.03934221312403679,
1346
+ -0.0261855249106884,
1347
+ -0.11943628251552582,
1348
+ -0.0850162598490715,
1349
+ -0.2378564405441284,
1350
+ -0.0679780986905098
1351
+ ],
1352
+ [
1353
+ -0.064977488219738,
1354
+ -0.04063037306070328,
1355
+ -0.02722482807934284,
1356
+ -0.12161165028810501,
1357
+ -0.08582942545413971,
1358
+ -0.24111946940422058,
1359
+ -0.07087400406599045
1360
+ ],
1361
+ [
1362
+ -0.06639553010463714,
1363
+ -0.04221493750810623,
1364
+ -0.028230831772089005,
1365
+ -0.12370074927806854,
1366
+ -0.08680418074131012,
1367
+ -0.24414193391799927,
1368
+ -0.07360379338264465
1369
+ ]
1370
+ ],
1371
+ "q99": [
1372
+ [
1373
+ 0.004656263776123502,
1374
+ 0.02436448931694029,
1375
+ 0.012156174294650478,
1376
+ -0.020165615007281326,
1377
+ 0.05461462959647178,
1378
+ -0.010802201386541123,
1379
+ 0.07455390423536297
1380
+ ],
1381
+ [
1382
+ 0.0061347206495701995,
1383
+ 0.027462484985589976,
1384
+ 0.0132566840201616,
1385
+ -0.01396050214767462,
1386
+ 0.05510353460907929,
1387
+ -0.006891649439930913,
1388
+ 0.07488894909620285
1389
+ ],
1390
+ [
1391
+ 0.007568276859819833,
1392
+ 0.030595035627484314,
1393
+ 0.01467669893056151,
1394
+ -0.007382741030305783,
1395
+ 0.055912096500396545,
1396
+ -0.0024326939228922076,
1397
+ 0.07538597434759138
1398
+ ],
1399
+ [
1400
+ 0.008978409208357307,
1401
+ 0.0338817764818667,
1402
+ 0.01600337348878383,
1403
+ -0.0016752185812221143,
1404
+ 0.05657489240169484,
1405
+ 0.001060677256900832,
1406
+ 0.07566628962755194
1407
+ ],
1408
+ [
1409
+ 0.01023503120988604,
1410
+ 0.03642069160938259,
1411
+ 0.017350260913372037,
1412
+ 0.003946564402431037,
1413
+ 0.05737654566764822,
1414
+ 0.005060778502374913,
1415
+ 0.0760581329464911
1416
+ ],
1417
+ [
1418
+ 0.011601876951754055,
1419
+ 0.03897130548954006,
1420
+ 0.01881953127682199,
1421
+ 0.008261921443045125,
1422
+ 0.05812256291508671,
1423
+ 0.008765426408499494,
1424
+ 0.07639338612556455
1425
+ ],
1426
+ [
1427
+ 0.013003568723797784,
1428
+ 0.04092910945415467,
1429
+ 0.020135950669646224,
1430
+ 0.01292175862938164,
1431
+ 0.05934927597641904,
1432
+ 0.0121136478893459,
1433
+ 0.07654993385076521
1434
+ ],
1435
+ [
1436
+ 0.014437343217432471,
1437
+ 0.04305482417345034,
1438
+ 0.021616475954651753,
1439
+ 0.016878136768936878,
1440
+ 0.05997981607913957,
1441
+ 0.015435918681323537,
1442
+ 0.0768541258573532
1443
+ ],
1444
+ [
1445
+ 0.015775382667779822,
1446
+ 0.04493375435471532,
1447
+ 0.022830811887979502,
1448
+ 0.021722924262285226,
1449
+ 0.06104373887181273,
1450
+ 0.01801807787269355,
1451
+ 0.07715219050645807
1452
+ ],
1453
+ [
1454
+ 0.017329863533377645,
1455
+ 0.04692360147833819,
1456
+ 0.024125670641660674,
1457
+ 0.024902458265423547,
1458
+ 0.06203017443418499,
1459
+ 0.020264575481414814,
1460
+ 0.0773990827798843
1461
+ ],
1462
+ [
1463
+ 0.018666266351938233,
1464
+ 0.048278795778751354,
1465
+ 0.025639460161328256,
1466
+ 0.028584808409213562,
1467
+ 0.06278873711824409,
1468
+ 0.021760680302977606,
1469
+ 0.07787250339984869
1470
+ ],
1471
+ [
1472
+ 0.02000120505690571,
1473
+ 0.05004169970750797,
1474
+ 0.027209311127662554,
1475
+ 0.03209126487374243,
1476
+ 0.06339012384414656,
1477
+ 0.02494045548141008,
1478
+ 0.07832653790712349
1479
+ ],
1480
+ [
1481
+ 0.021443105861544465,
1482
+ 0.05132340997457504,
1483
+ 0.028269902020692815,
1484
+ 0.035937565267085914,
1485
+ 0.06374859243631363,
1486
+ 0.02682734336704017,
1487
+ 0.07869139432907092
1488
+ ],
1489
+ [
1490
+ 0.02305323384702202,
1491
+ 0.052831251472234504,
1492
+ 0.029321596175432136,
1493
+ 0.0393026396632187,
1494
+ 0.0641001376509666,
1495
+ 0.02886457357555628,
1496
+ 0.07899091005325252
1497
+ ],
1498
+ [
1499
+ 0.024395828917622528,
1500
+ 0.054699641019105905,
1501
+ 0.030233986824750897,
1502
+ 0.042118883281946094,
1503
+ 0.06462896525859814,
1504
+ 0.030322075113654222,
1505
+ 0.07985360413789709
1506
+ ],
1507
+ [
1508
+ 0.02560890540480611,
1509
+ 0.056384813785552754,
1510
+ 0.03138738974928851,
1511
+ 0.045284829139709444,
1512
+ 0.06523517310619335,
1513
+ 0.03281958527863028,
1514
+ 0.0802537137269973
1515
+ ]
1516
+ ]
1517
+ },
1518
+ "right_arm": {
1519
+ "min": [
1520
+ [
1521
+ -0.20403392612934113,
1522
+ -0.09692332148551941,
1523
+ -0.10710472613573074,
1524
+ -0.21470068395137787,
1525
+ -0.11741770058870316,
1526
+ -0.30272069573402405,
1527
+ -0.2446182668209076
1528
+ ],
1529
+ [
1530
+ -0.24915452301502228,
1531
+ -0.12140203267335892,
1532
+ -0.12297184020280838,
1533
+ -0.27966636419296265,
1534
+ -0.16135965287685394,
1535
+ -0.33457380533218384,
1536
+ -0.2506905794143677
1537
+ ],
1538
+ [
1539
+ -0.2839292883872986,
1540
+ -0.14284181594848633,
1541
+ -0.14709141850471497,
1542
+ -0.3372139036655426,
1543
+ -0.1897263079881668,
1544
+ -0.3682600259780884,
1545
+ -0.25585681200027466
1546
+ ],
1547
+ [
1548
+ -0.2932383120059967,
1549
+ -0.1601230651140213,
1550
+ -0.17002476751804352,
1551
+ -0.39521753787994385,
1552
+ -0.21874012053012848,
1553
+ -0.3951215445995331,
1554
+ -0.26058146357536316
1555
+ ],
1556
+ [
1557
+ -0.3043132424354553,
1558
+ -0.17609803378582,
1559
+ -0.19097542762756348,
1560
+ -0.45854291319847107,
1561
+ -0.24675680696964264,
1562
+ -0.40929481387138367,
1563
+ -0.26509329676628113
1564
+ ],
1565
+ [
1566
+ -0.31218695640563965,
1567
+ -0.1912798434495926,
1568
+ -0.2118159830570221,
1569
+ -0.5165465474128723,
1570
+ -0.27338576316833496,
1571
+ -0.4320828318595886,
1572
+ -0.26879316568374634
1573
+ ],
1574
+ [
1575
+ -0.3172306418418884,
1576
+ -0.2010589838027954,
1577
+ -0.22858333587646484,
1578
+ -0.5737951993942261,
1579
+ -0.3072512745857239,
1580
+ -0.4462561011314392,
1581
+ -0.272482305765152
1582
+ ],
1583
+ [
1584
+ -0.3362959325313568,
1585
+ -0.2091546654701233,
1586
+ -0.24548879265785217,
1587
+ -0.6275085210800171,
1588
+ -0.3394458591938019,
1589
+ -0.4525761008262634,
1590
+ -0.2765922248363495
1591
+ ],
1592
+ [
1593
+ -0.3560487926006317,
1594
+ -0.2143545001745224,
1595
+ -0.25719738006591797,
1596
+ -0.6733214259147644,
1597
+ -0.36781254410743713,
1598
+ -0.45484408736228943,
1599
+ -0.2909817397594452
1600
+ ],
1601
+ [
1602
+ -0.3746042251586914,
1603
+ -0.2193051278591156,
1604
+ -0.2673355042934418,
1605
+ -0.7023897171020508,
1606
+ -0.3968263566493988,
1607
+ -0.4566555321216583,
1608
+ -0.30680474638938904
1609
+ ],
1610
+ [
1611
+ -0.3877256214618683,
1612
+ -0.22685401141643524,
1613
+ -0.27599555253982544,
1614
+ -0.7348843812942505,
1615
+ -0.42345529794692993,
1616
+ -0.45726144313812256,
1617
+ -0.3226124048233032
1618
+ ],
1619
+ [
1620
+ -0.39788001775741577,
1621
+ -0.24052082002162933,
1622
+ -0.2852342426776886,
1623
+ -0.7647486925125122,
1624
+ -0.4444118142127991,
1625
+ -0.45726144313812256,
1626
+ -0.338179349899292
1627
+ ],
1628
+ [
1629
+ -0.4202839136123657,
1630
+ -0.2533271312713623,
1631
+ -0.2950189709663391,
1632
+ -0.7963362336158752,
1633
+ -0.4599958062171936,
1634
+ -0.46302181482315063,
1635
+ -0.34944838285446167
1636
+ ],
1637
+ [
1638
+ -0.44196557998657227,
1639
+ -0.2639065980911255,
1640
+ -0.3039972484111786,
1641
+ -0.8263116478919983,
1642
+ -0.46898049116134644,
1643
+ -0.47352665662765503,
1644
+ -0.35764604806900024
1645
+ ],
1646
+ [
1647
+ -0.46166762709617615,
1648
+ -0.2736857235431671,
1649
+ -0.31263232231140137,
1650
+ -0.8578991889953613,
1651
+ -0.4759763479232788,
1652
+ -0.4830750823020935,
1653
+ -0.36483684182167053
1654
+ ],
1655
+ [
1656
+ -0.4771503508090973,
1657
+ -0.27945393323898315,
1658
+ -0.3201043903827667,
1659
+ -0.8855108618736267,
1660
+ -0.4804013967514038,
1661
+ -0.49076923727989197,
1662
+ -0.37043488025665283
1663
+ ]
1664
+ ],
1665
+ "max": [
1666
+ [
1667
+ 0.12350016832351685,
1668
+ 0.06404267996549606,
1669
+ 0.07273585349321365,
1670
+ 0.1184077188372612,
1671
+ 0.15608231723308563,
1672
+ 0.11892182379961014,
1673
+ 0.1293548047542572
1674
+ ],
1675
+ [
1676
+ 0.1699630171060562,
1677
+ 0.07625115662813187,
1678
+ 0.10068810731172562,
1679
+ 0.17637698352336884,
1680
+ 0.20187558233737946,
1681
+ 0.16417747735977173,
1682
+ 0.13748396933078766
1683
+ ],
1684
+ [
1685
+ 0.21331338584423065,
1686
+ 0.08941559493541718,
1687
+ 0.1296539604663849,
1688
+ 0.21652717888355255,
1689
+ 0.23955243825912476,
1690
+ 0.20892369747161865,
1691
+ 0.16490450501441956
1692
+ ],
1693
+ [
1694
+ 0.25367626547813416,
1695
+ 0.10549355298280716,
1696
+ 0.15352654457092285,
1697
+ 0.2334277331829071,
1698
+ 0.27515116333961487,
1699
+ 0.2396731972694397,
1700
+ 0.1927846074104309
1701
+ ],
1702
+ [
1703
+ 0.29456642270088196,
1704
+ 0.11876009404659271,
1705
+ 0.17697104811668396,
1706
+ 0.25220227241516113,
1707
+ 0.3075924813747406,
1708
+ 0.28266218304634094,
1709
+ 0.21957558393478394
1710
+ ],
1711
+ [
1712
+ 0.33867692947387695,
1713
+ 0.12887607514858246,
1714
+ 0.19897224009037018,
1715
+ 0.26609882712364197,
1716
+ 0.35071811079978943,
1717
+ 0.313411682844162,
1718
+ 0.25380638241767883
1719
+ ],
1720
+ [
1721
+ 0.3845165967941284,
1722
+ 0.141583651304245,
1723
+ 0.2196330428123474,
1724
+ 0.27627819776535034,
1725
+ 0.38631683588027954,
1726
+ 0.3387223780155182,
1727
+ 0.2825891673564911
1728
+ ],
1729
+ [
1730
+ 0.42928966879844666,
1731
+ 0.15548282861709595,
1732
+ 0.23999927937984467,
1733
+ 0.3089504837989807,
1734
+ 0.41552239656448364,
1735
+ 0.3518727123737335,
1736
+ 0.31386619806289673
1737
+ ],
1738
+ [
1739
+ 0.4748417139053345,
1740
+ 0.16741718351840973,
1741
+ 0.25922197103500366,
1742
+ 0.3317282497882843,
1743
+ 0.4465799331665039,
1744
+ 0.3786901831626892,
1745
+ 0.3448449373245239
1746
+ ],
1747
+ [
1748
+ 0.5184642672538757,
1749
+ 0.17874227464199066,
1750
+ 0.27426770329475403,
1751
+ 0.3595302402973175,
1752
+ 0.482178658246994,
1753
+ 0.40916940569877625,
1754
+ 0.3698248565196991
1755
+ ],
1756
+ [
1757
+ 0.5618146657943726,
1758
+ 0.18768036365509033,
1759
+ 0.29019954800605774,
1760
+ 0.38116365671157837,
1761
+ 0.49781808257102966,
1762
+ 0.4361214339733124,
1763
+ 0.3887125551700592
1764
+ ],
1765
+ [
1766
+ 0.6021894812583923,
1767
+ 0.20052620768547058,
1768
+ 0.30705904960632324,
1769
+ 0.3944658935070038,
1770
+ 0.517340362071991,
1771
+ 0.460033655166626,
1772
+ 0.4070548415184021
1773
+ ],
1774
+ [
1775
+ 0.6377010941505432,
1776
+ 0.21067684888839722,
1777
+ 0.32628175616264343,
1778
+ 0.4203190505504608,
1779
+ 0.5374139547348022,
1780
+ 0.4898488223552704,
1781
+ 0.42289096117019653
1782
+ ],
1783
+ [
1784
+ 0.6686084270477295,
1785
+ 0.22056347131729126,
1786
+ 0.34208276867866516,
1787
+ 0.44628316164016724,
1788
+ 0.5499571561813354,
1789
+ 0.5224915146827698,
1790
+ 0.43606019020080566
1791
+ ],
1792
+ [
1793
+ 0.6988206505775452,
1794
+ 0.23028269410133362,
1795
+ 0.35506168007850647,
1796
+ 0.4720154404640198,
1797
+ 0.5587895512580872,
1798
+ 0.5494435429573059,
1799
+ 0.44888511300086975
1800
+ ],
1801
+ [
1802
+ 0.7303032279014587,
1803
+ 0.2397981584072113,
1804
+ 0.36694711446762085,
1805
+ 0.49030041694641113,
1806
+ 0.5675882697105408,
1807
+ 0.5707417130470276,
1808
+ 0.4600929319858551
1809
+ ]
1810
+ ],
1811
+ "mean": [
1812
+ [
1813
+ -0.03582767434418201,
1814
+ -0.01250010058283806,
1815
+ -0.006254769070073962,
1816
+ -0.04283755645155907,
1817
+ 0.011654662992805244,
1818
+ -0.06937566921114921,
1819
+ -0.015052688948344441
1820
+ ],
1821
+ [
1822
+ -0.03578390441834927,
1823
+ -0.012542361952364446,
1824
+ -0.0062423154246062035,
1825
+ -0.04285788163542748,
1826
+ 0.011666610371321441,
1827
+ -0.06939591988921166,
1828
+ -0.015043737448286267
1829
+ ],
1830
+ [
1831
+ -0.03573635257780552,
1832
+ -0.01258277464658022,
1833
+ -0.006227188417688013,
1834
+ -0.04288287088274956,
1835
+ 0.011680311243981124,
1836
+ -0.06941136717796326,
1837
+ -0.01503378775669262
1838
+ ],
1839
+ [
1840
+ -0.03568549714982509,
1841
+ -0.01262184213846922,
1842
+ -0.00620954162441194,
1843
+ -0.04291084408760071,
1844
+ 0.011695204209536316,
1845
+ -0.06942299082875251,
1846
+ -0.015022652118932459
1847
+ ],
1848
+ [
1849
+ -0.03563262037932873,
1850
+ -0.01265946812927723,
1851
+ -0.006189606757834555,
1852
+ -0.04294107779860497,
1853
+ 0.01171085210517049,
1854
+ -0.06943125389516354,
1855
+ -0.015011084533762188
1856
+ ],
1857
+ [
1858
+ -0.03557736463844776,
1859
+ -0.012695579789578915,
1860
+ -0.006167190847918392,
1861
+ -0.042973884195089344,
1862
+ 0.011726862099021674,
1863
+ -0.06943602152168751,
1864
+ -0.014999265305232257
1865
+ ],
1866
+ [
1867
+ -0.03552045486867428,
1868
+ -0.012730473838746549,
1869
+ -0.006142669962719084,
1870
+ -0.04300840273499489,
1871
+ 0.011743289511650802,
1872
+ -0.06943741589784622,
1873
+ -0.014987065352033827
1874
+ ],
1875
+ [
1876
+ -0.03546259365975857,
1877
+ -0.01276437770575285,
1878
+ -0.006116464780643582,
1879
+ -0.043044844269752504,
1880
+ 0.011759879160672428,
1881
+ -0.06943630464375018,
1882
+ -0.014974830939900133
1883
+ ],
1884
+ [
1885
+ -0.03540427349507809,
1886
+ -0.01279783919453621,
1887
+ -0.006089133163914085,
1888
+ -0.04308263361454011,
1889
+ 0.01177668469026685,
1890
+ -0.06943314969539642,
1891
+ -0.014962080947589131
1892
+ ],
1893
+ [
1894
+ -0.035345045104622844,
1895
+ -0.01283137295395136,
1896
+ -0.0060606808867305515,
1897
+ -0.04312264546751977,
1898
+ 0.011793670896440745,
1899
+ -0.06942722275853157,
1900
+ -0.01494853486074135
1901
+ ],
1902
+ [
1903
+ -0.03528533913195133,
1904
+ -0.012865255214273931,
1905
+ -0.006031542224809527,
1906
+ -0.04316473379731179,
1907
+ 0.011810935195535423,
1908
+ -0.06941850557923317,
1909
+ -0.01493413456482813
1910
+ ],
1911
+ [
1912
+ -0.035225605592131615,
1913
+ -0.012899211049079895,
1914
+ -0.006001975247636438,
1915
+ -0.04320825710892678,
1916
+ 0.011828238423913718,
1917
+ -0.06940734796226024,
1918
+ -0.014919213473331186
1919
+ ],
1920
+ [
1921
+ -0.03516596220433712,
1922
+ -0.012933146581053734,
1923
+ -0.005972224799916148,
1924
+ -0.04325307160615922,
1925
+ 0.01184533489868045,
1926
+ -0.0693942677229643,
1927
+ -0.014904340438079092
1928
+ ],
1929
+ [
1930
+ -0.03510625846683979,
1931
+ -0.012966645881533624,
1932
+ -0.005942378705367446,
1933
+ -0.04329852163791657,
1934
+ 0.011862090695649388,
1935
+ -0.06937944032251835,
1936
+ -0.014889814041089269
1937
+ ],
1938
+ [
1939
+ -0.035046673193573956,
1940
+ -0.012999662384390832,
1941
+ -0.005912481574341656,
1942
+ -0.043344754725694656,
1943
+ 0.011878096032887697,
1944
+ -0.06936340630054474,
1945
+ -0.0148758374969475
1946
+ ],
1947
+ [
1948
+ -0.03498690091073513,
1949
+ -0.013032052479684354,
1950
+ -0.005882559390738606,
1951
+ -0.04339146614074708,
1952
+ 0.011893658433109522,
1953
+ -0.06934643350541592,
1954
+ -0.01486258626682684
1955
+ ]
1956
+ ],
1957
+ "std": [
1958
+ [
1959
+ 0.03477682269960117,
1960
+ 0.015967660510470008,
1961
+ 0.010577178771683977,
1962
+ 0.02231690727304746,
1963
+ 0.01935560026237989,
1964
+ 0.0576732118931693,
1965
+ 0.03563473533558927
1966
+ ],
1967
+ [
1968
+ 0.0402545290160407,
1969
+ 0.017339416104566404,
1970
+ 0.01287462087613381,
1971
+ 0.030206701695104456,
1972
+ 0.022297172598633532,
1973
+ 0.05962623132719113,
1974
+ 0.037159835477380464
1975
+ ],
1976
+ [
1977
+ 0.046308560253102184,
1978
+ 0.01883499141332048,
1979
+ 0.015284263155614583,
1980
+ 0.03800328744266963,
1981
+ 0.025373324934527198,
1982
+ 0.06183712406101282,
1983
+ 0.03886313018131102
1984
+ ],
1985
+ [
1986
+ 0.0526984600775004,
1987
+ 0.020395950302051376,
1988
+ 0.01772775198437461,
1989
+ 0.045654071152752024,
1990
+ 0.028457234691831656,
1991
+ 0.06422891046778192,
1992
+ 0.040695450735886554
1993
+ ],
1994
+ [
1995
+ 0.05928247803708872,
1996
+ 0.021986449636457534,
1997
+ 0.020167337112689282,
1998
+ 0.053158958190728786,
1999
+ 0.03149235271762214,
2000
+ 0.06675081682347149,
2001
+ 0.04262057254830419
2002
+ ],
2003
+ [
2004
+ 0.06597666174511642,
2005
+ 0.023585526790273224,
2006
+ 0.022584420896651424,
2007
+ 0.06052770912023523,
2008
+ 0.03445633581295417,
2009
+ 0.06936386172204474,
2010
+ 0.04461212189796079
2011
+ ],
2012
+ [
2013
+ 0.07272609886915568,
2014
+ 0.02518105664030897,
2015
+ 0.024970121580976348,
2016
+ 0.0677737637673727,
2017
+ 0.03734779831136719,
2018
+ 0.07204227208648767,
2019
+ 0.04665268175138044
2020
+ ],
2021
+ [
2022
+ 0.07949533204648127,
2023
+ 0.026765374357720594,
2024
+ 0.027319122290795986,
2025
+ 0.07491097554848253,
2026
+ 0.040172372646544295,
2027
+ 0.07477201076046988,
2028
+ 0.04873007539466011
2029
+ ],
2030
+ [
2031
+ 0.08626017202323243,
2032
+ 0.028333838964225113,
2033
+ 0.029627855457307783,
2034
+ 0.0819458342167998,
2035
+ 0.0429380687375086,
2036
+ 0.07753870103753109,
2037
+ 0.0508352897664202
2038
+ ],
2039
+ [
2040
+ 0.09300440949119305,
2041
+ 0.029882558811346372,
2042
+ 0.03189502536624179,
2043
+ 0.08888437966692056,
2044
+ 0.04565307788147162,
2045
+ 0.08033362017048974,
2046
+ 0.052962051492794905
2047
+ ],
2048
+ [
2049
+ 0.09971600508426079,
2050
+ 0.031409536984290255,
2051
+ 0.03411895714899439,
2052
+ 0.0957283314382795,
2053
+ 0.0483230207379554,
2054
+ 0.08314767953630582,
2055
+ 0.05510450316873073
2056
+ ],
2057
+ [
2058
+ 0.10638608864893025,
2059
+ 0.03291359053372234,
2060
+ 0.03629900164484744,
2061
+ 0.10247692674828164,
2062
+ 0.05095210451946226,
2063
+ 0.08597084745876729,
2064
+ 0.05725632336065036
2065
+ ],
2066
+ [
2067
+ 0.11300806822580514,
2068
+ 0.03439386491603201,
2069
+ 0.038435083320970155,
2070
+ 0.10912999123787509,
2071
+ 0.0535429616048032,
2072
+ 0.08879412637908995,
2073
+ 0.059411233502171375
2074
+ ],
2075
+ [
2076
+ 0.11957788510031694,
2077
+ 0.03584994147560861,
2078
+ 0.040526982681765765,
2079
+ 0.11568887701241336,
2080
+ 0.05609643960843335,
2081
+ 0.0916099753823453,
2082
+ 0.06156389000573656
2083
+ ],
2084
+ [
2085
+ 0.12609203269111857,
2086
+ 0.0372818918218203,
2087
+ 0.04257587540253166,
2088
+ 0.12215468170860881,
2089
+ 0.05861307257134494,
2090
+ 0.09441320913342521,
2091
+ 0.06370945664483509
2092
+ ],
2093
+ [
2094
+ 0.13254695047371112,
2095
+ 0.038689639706803294,
2096
+ 0.044583043936732694,
2097
+ 0.1285279826904193,
2098
+ 0.061093671578013965,
2099
+ 0.09719896844726462,
2100
+ 0.06584454676349252
2101
+ ]
2102
+ ],
2103
+ "q01": [
2104
+ [
2105
+ -0.13750962615013124,
2106
+ -0.06643416166305542,
2107
+ -0.05379039436578751,
2108
+ -0.16610072314739227,
2109
+ -0.05582443341612816,
2110
+ -0.22091783165931703,
2111
+ -0.1768096750974655
2112
+ ],
2113
+ [
2114
+ -0.1442956429719925,
2115
+ -0.07424459218978882,
2116
+ -0.0657055938243866,
2117
+ -0.21224747031927108,
2118
+ -0.07909185022115707,
2119
+ -0.2304303228855133,
2120
+ -0.17945939362049104
2121
+ ],
2122
+ [
2123
+ -0.15486243665218352,
2124
+ -0.08444386839866638,
2125
+ -0.07653697550296784,
2126
+ -0.25782069861888884,
2127
+ -0.10174585908651353,
2128
+ -0.24095106601715088,
2129
+ -0.1816268652677536
2130
+ ],
2131
+ [
2132
+ -0.1672323939204216,
2133
+ -0.09366734936833382,
2134
+ -0.0881002850830555,
2135
+ -0.3017538303136826,
2136
+ -0.1235863657295704,
2137
+ -0.25071354269981383,
2138
+ -0.18356355488300324
2139
+ ],
2140
+ [
2141
+ -0.18318656593561172,
2142
+ -0.10348658129572869,
2143
+ -0.09799629881978035,
2144
+ -0.3396466678380966,
2145
+ -0.14394661366939546,
2146
+ -0.2615929090976715,
2147
+ -0.18613846361637115
2148
+ ],
2149
+ [
2150
+ -0.2008179810643196,
2151
+ -0.11147361397743225,
2152
+ -0.1096699756383896,
2153
+ -0.3813889110088348,
2154
+ -0.16273667722940446,
2155
+ -0.27112489581108096,
2156
+ -0.1886428999900818
2157
+ ],
2158
+ [
2159
+ -0.22175236612558366,
2160
+ -0.12001352936029434,
2161
+ -0.1190910667181015,
2162
+ -0.41556587398052214,
2163
+ -0.1803624963760376,
2164
+ -0.28135055661201475,
2165
+ -0.1904868507385254
2166
+ ],
2167
+ [
2168
+ -0.2403111895918846,
2169
+ -0.12846219629049302,
2170
+ -0.12763472348451615,
2171
+ -0.4540656328201294,
2172
+ -0.19633709281682968,
2173
+ -0.29061655402183534,
2174
+ -0.19292436003684998
2175
+ ],
2176
+ [
2177
+ -0.2593554621934891,
2178
+ -0.1365390294790268,
2179
+ -0.1356007942557335,
2180
+ -0.48881450057029724,
2181
+ -0.21455552399158478,
2182
+ -0.29843022108078004,
2183
+ -0.19563140332698822
2184
+ ],
2185
+ [
2186
+ -0.27707220017910006,
2187
+ -0.14462393015623093,
2188
+ -0.1427755644917488,
2189
+ -0.5257772397994995,
2190
+ -0.22815347015857695,
2191
+ -0.3061085331439972,
2192
+ -0.19757799208164215
2193
+ ],
2194
+ [
2195
+ -0.29522701680660246,
2196
+ -0.152439386844635,
2197
+ -0.15019092381000518,
2198
+ -0.5610815978050232,
2199
+ -0.24187681674957276,
2200
+ -0.3141219937801361,
2201
+ -0.20047558784484865
2202
+ ],
2203
+ [
2204
+ -0.31237027764320374,
2205
+ -0.1595780423283577,
2206
+ -0.1589815002679825,
2207
+ -0.5931087756156922,
2208
+ -0.2566365534067154,
2209
+ -0.32205559134483336,
2210
+ -0.2030619376897812
2211
+ ],
2212
+ [
2213
+ -0.3293491894006729,
2214
+ -0.16627603381872177,
2215
+ -0.1662128287553787,
2216
+ -0.6243962502479553,
2217
+ -0.26918759644031526,
2218
+ -0.3302214515209198,
2219
+ -0.20550915956497193
2220
+ ],
2221
+ [
2222
+ -0.3432493054866791,
2223
+ -0.17293286204338074,
2224
+ -0.17499921411275865,
2225
+ -0.6579160118103028,
2226
+ -0.28232413351535796,
2227
+ -0.33827576994895936,
2228
+ -0.2141080316901207
2229
+ ],
2230
+ [
2231
+ -0.3580256187915802,
2232
+ -0.17955820739269257,
2233
+ -0.18397489190101624,
2234
+ -0.6864277935028076,
2235
+ -0.2989607441425324,
2236
+ -0.34603124976158145,
2237
+ -0.22192867696285248
2238
+ ],
2239
+ [
2240
+ -0.37304157376289365,
2241
+ -0.18637359738349915,
2242
+ -0.19211902767419814,
2243
+ -0.7176893746852875,
2244
+ -0.3133934587240219,
2245
+ -0.35450040459632876,
2246
+ -0.23084656566381453
2247
+ ]
2248
+ ],
2249
+ "q99": [
2250
+ [
2251
+ 0.06876460745930675,
2252
+ 0.020636646971106322,
2253
+ 0.03304188787937161,
2254
+ 0.030265976786613476,
2255
+ 0.08688223853707315,
2256
+ 0.060906679257750514,
2257
+ 0.0528251446783543
2258
+ ],
2259
+ [
2260
+ 0.1062502343952656,
2261
+ 0.0313371342420578,
2262
+ 0.045918569713830945,
2263
+ 0.05503048159182072,
2264
+ 0.11584696710109718,
2265
+ 0.09195318818092355,
2266
+ 0.0739240574836731
2267
+ ],
2268
+ [
2269
+ 0.14319816350936895,
2270
+ 0.043995615169405936,
2271
+ 0.05877865627408015,
2272
+ 0.07896375671029095,
2273
+ 0.1437885639071465,
2274
+ 0.12012365907430662,
2275
+ 0.09624448224902156
2276
+ ],
2277
+ [
2278
+ 0.17890654146671295,
2279
+ 0.05613635055720807,
2280
+ 0.07135939329862585,
2281
+ 0.10097489759325982,
2282
+ 0.1661786872148514,
2283
+ 0.1514884126186371,
2284
+ 0.11589529380202296
2285
+ ],
2286
+ [
2287
+ 0.21317545413970954,
2288
+ 0.06842131793498997,
2289
+ 0.08289320051669984,
2290
+ 0.12239758342504506,
2291
+ 0.1899145030975342,
2292
+ 0.17617294430732727,
2293
+ 0.132357497215271
2294
+ ],
2295
+ [
2296
+ 0.2453216093778611,
2297
+ 0.07987486168742182,
2298
+ 0.09538901537656753,
2299
+ 0.1439731976389885,
2300
+ 0.21187280625104907,
2301
+ 0.2013280785083772,
2302
+ 0.1521772193908692
2303
+ ],
2304
+ [
2305
+ 0.27716145157814026,
2306
+ 0.09236870273947716,
2307
+ 0.10861561179161051,
2308
+ 0.16238527655601503,
2309
+ 0.23231437116861364,
2310
+ 0.22694210410118124,
2311
+ 0.1705306005477906
2312
+ ],
2313
+ [
2314
+ 0.30980274319648743,
2315
+ 0.10261451140046124,
2316
+ 0.12044742822647093,
2317
+ 0.17999265134334588,
2318
+ 0.25345147967338566,
2319
+ 0.24835097044706347,
2320
+ 0.18992119938135155
2321
+ ],
2322
+ [
2323
+ 0.3391019552946091,
2324
+ 0.11362516194581988,
2325
+ 0.13288157522678337,
2326
+ 0.19921493858098988,
2327
+ 0.27053190648555764,
2328
+ 0.27057337403297443,
2329
+ 0.20635072708129898
2330
+ ],
2331
+ [
2332
+ 0.3723010545969011,
2333
+ 0.12229597300291063,
2334
+ 0.14326481103897093,
2335
+ 0.2173730051517487,
2336
+ 0.29011084198951737,
2337
+ 0.2935291838645937,
2338
+ 0.2212876501679422
2339
+ ],
2340
+ [
2341
+ 0.4017260801792145,
2342
+ 0.13222214728593829,
2343
+ 0.1537876594066618,
2344
+ 0.2360604000091554,
2345
+ 0.31263809740543386,
2346
+ 0.3140127611160278,
2347
+ 0.238391069173813
2348
+ ],
2349
+ [
2350
+ 0.4283363085985184,
2351
+ 0.14138811290264136,
2352
+ 0.16369911432266163,
2353
+ 0.2567266005277634,
2354
+ 0.33296488821506526,
2355
+ 0.33412302494049073,
2356
+ 0.2544368982315066
2357
+ ],
2358
+ [
2359
+ 0.45506512820720674,
2360
+ 0.15006588369607937,
2361
+ 0.1723968386650068,
2362
+ 0.2756039023399355,
2363
+ 0.3488368517160416,
2364
+ 0.34996506810188294,
2365
+ 0.27001377463340764
2366
+ ],
2367
+ [
2368
+ 0.48286862015724186,
2369
+ 0.15912629574537288,
2370
+ 0.1819396793842311,
2371
+ 0.29650268077850345,
2372
+ 0.3609114360809328,
2373
+ 0.36849267482757575,
2374
+ 0.286020663380623
2375
+ ],
2376
+ [
2377
+ 0.5130151581764222,
2378
+ 0.16787215113639847,
2379
+ 0.19066066682338698,
2380
+ 0.3159227859973909,
2381
+ 0.37714088082313546,
2382
+ 0.3841358405351639,
2383
+ 0.3024951797723771
2384
+ ],
2385
+ [
2386
+ 0.5401777374744415,
2387
+ 0.17374392241239553,
2388
+ 0.20006704270839593,
2389
+ 0.3340842992067339,
2390
+ 0.39159274637699143,
2391
+ 0.39957653164863594,
2392
+ 0.3164052081108096
2393
+ ]
2394
+ ]
2395
+ }
2396
+ }
2397
+ }
2398
+ }
experiment_cfg/final_model_config.json ADDED
@@ -0,0 +1,53 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "model_type": "Gr00tN1d6",
3
+ "model_dtype": "bfloat16",
4
+ "model_name": "nvidia/Eagle-Block2A-2B-v2",
5
+ "backbone_model_type": "eagle",
6
+ "model_revision": null,
7
+ "tune_top_llm_layers": 4,
8
+ "backbone_embedding_dim": 2048,
9
+ "tune_llm": false,
10
+ "tune_visual": false,
11
+ "select_layer": 16,
12
+ "reproject_vision": false,
13
+ "use_flash_attention": true,
14
+ "load_bf16": true,
15
+ "collator_overwrite_image_inputs": false,
16
+ "eagle_collator": true,
17
+ "backbone_trainable_params_fp32": true,
18
+ "apply_sincos_state_encoding": true,
19
+ "use_relative_action": true,
20
+ "max_state_dim": 128,
21
+ "max_action_dim": 128,
22
+ "action_horizon": 50,
23
+ "hidden_size": 1024,
24
+ "input_embedding_dim": 1536,
25
+ "add_pos_embed": true,
26
+ "attn_dropout": 0.2,
27
+ "use_vlln": true,
28
+ "max_seq_len": 1024,
29
+ "use_alternate_vl_dit": true,
30
+ "attend_text_every_n_blocks": 2,
31
+ "diffusion_model_cfg": {
32
+ "attention_head_dim": 48,
33
+ "dropout": 0.2,
34
+ "final_dropout": true,
35
+ "interleave_self_attention": true,
36
+ "norm_type": "ada_norm",
37
+ "num_attention_heads": 32,
38
+ "num_layers": 32,
39
+ "output_dim": 1024,
40
+ "positional_embeddings": null
41
+ },
42
+ "num_inference_timesteps": 4,
43
+ "noise_beta_alpha": 1.5,
44
+ "noise_beta_beta": 1.0,
45
+ "noise_s": 0.999,
46
+ "num_timestep_buckets": 1000,
47
+ "tune_projector": true,
48
+ "tune_diffusion_model": true,
49
+ "tune_vlln": true,
50
+ "state_dropout_prob": 0.0,
51
+ "state_additive_noise_scale": 0.0,
52
+ "max_num_embodiments": 32
53
+ }
experiment_cfg/final_processor_config.json ADDED
The diff for this file is too large to render. See raw diff
 
global_step20000/bf16_zero_pp_rank_0_mp_rank_00_optim_states.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:50ee5f9c100b3edb0262c10ef02b1679914799ef4409b3e5adbbb416cc173baf
3
+ size 9719841825
global_step20000/bf16_zero_pp_rank_1_mp_rank_00_optim_states.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:175fbb0bb9276ab10521c9d5faf597ea74d1d024f35e155eb086da230560a318
3
+ size 9719835873
global_step20000/mp_rank_00_model_states.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:aa01a165f53c22f3663ee9f40f3cb2a8ef2ac089ac9fd814ce1153489ad32fff
3
+ size 9907202435
latest ADDED
@@ -0,0 +1 @@
 
 
1
+ global_step20000
model-00001-of-00002.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:ba9ec5f00fd628a12f6fcfd7434d99ae69fbcbd1f2611f50c44df3f200f4b3f0
3
+ size 4991091456
model-00002-of-00002.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:debc61e42293ae34f1ad3a5a5c6c9f96cc3bb53a70047f4969b13ad6f7fe6b79
3
+ size 1582283096
model.safetensors.index.json ADDED
The diff for this file is too large to render. See raw diff
 
processor_config.json ADDED
@@ -0,0 +1,641 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "processor_class": "Gr00tN1d6Processor",
3
+ "processor_kwargs": {
4
+ "modality_configs": {
5
+ "behavior_r1_pro": {
6
+ "video": {
7
+ "delta_indices": [
8
+ 0
9
+ ],
10
+ "modality_keys": [
11
+ "observation.images.rgb.head_256_256",
12
+ "observation.images.rgb.left_wrist_256_256",
13
+ "observation.images.rgb.right_wrist_256_256"
14
+ ],
15
+ "sin_cos_embedding_keys": null,
16
+ "mean_std_embedding_keys": null,
17
+ "action_configs": null
18
+ },
19
+ "state": {
20
+ "delta_indices": [
21
+ 0
22
+ ],
23
+ "modality_keys": [
24
+ "robot_pos",
25
+ "robot_ori_cos",
26
+ "robot_ori_sin",
27
+ "robot_2d_ori",
28
+ "robot_2d_ori_cos",
29
+ "robot_2d_ori_sin",
30
+ "robot_lin_vel",
31
+ "robot_ang_vel",
32
+ "arm_left_qpos",
33
+ "arm_left_qpos_sin",
34
+ "arm_left_qpos_cos",
35
+ "eef_left_pos",
36
+ "eef_left_quat",
37
+ "gripper_left_qpos",
38
+ "arm_right_qpos",
39
+ "arm_right_qpos_sin",
40
+ "arm_right_qpos_cos",
41
+ "eef_right_pos",
42
+ "eef_right_quat",
43
+ "gripper_right_qpos",
44
+ "trunk_qpos"
45
+ ],
46
+ "sin_cos_embedding_keys": null,
47
+ "mean_std_embedding_keys": null,
48
+ "action_configs": null
49
+ },
50
+ "action": {
51
+ "delta_indices": [
52
+ 0,
53
+ 1,
54
+ 2,
55
+ 3,
56
+ 4,
57
+ 5,
58
+ 6,
59
+ 7,
60
+ 8,
61
+ 9,
62
+ 10,
63
+ 11,
64
+ 12,
65
+ 13,
66
+ 14,
67
+ 15,
68
+ 16,
69
+ 17,
70
+ 18,
71
+ 19,
72
+ 20,
73
+ 21,
74
+ 22,
75
+ 23,
76
+ 24,
77
+ 25,
78
+ 26,
79
+ 27,
80
+ 28,
81
+ 29,
82
+ 30,
83
+ 31
84
+ ],
85
+ "modality_keys": [
86
+ "base",
87
+ "torso",
88
+ "left_arm",
89
+ "left_gripper",
90
+ "right_arm",
91
+ "right_gripper"
92
+ ],
93
+ "sin_cos_embedding_keys": null,
94
+ "mean_std_embedding_keys": null,
95
+ "action_configs": [
96
+ {
97
+ "rep": "ABSOLUTE",
98
+ "type": "NON_EEF",
99
+ "format": "DEFAULT",
100
+ "state_key": null
101
+ },
102
+ {
103
+ "rep": "RELATIVE",
104
+ "type": "NON_EEF",
105
+ "format": "DEFAULT",
106
+ "state_key": "trunk_qpos"
107
+ },
108
+ {
109
+ "rep": "RELATIVE",
110
+ "type": "NON_EEF",
111
+ "format": "DEFAULT",
112
+ "state_key": "arm_left_qpos"
113
+ },
114
+ {
115
+ "rep": "ABSOLUTE",
116
+ "type": "NON_EEF",
117
+ "format": "DEFAULT",
118
+ "state_key": null
119
+ },
120
+ {
121
+ "rep": "RELATIVE",
122
+ "type": "NON_EEF",
123
+ "format": "DEFAULT",
124
+ "state_key": "arm_right_qpos"
125
+ },
126
+ {
127
+ "rep": "ABSOLUTE",
128
+ "type": "NON_EEF",
129
+ "format": "DEFAULT",
130
+ "state_key": null
131
+ }
132
+ ]
133
+ },
134
+ "language": {
135
+ "delta_indices": [
136
+ 0
137
+ ],
138
+ "modality_keys": [
139
+ "annotation.human.coarse_action"
140
+ ],
141
+ "sin_cos_embedding_keys": null,
142
+ "mean_std_embedding_keys": null,
143
+ "action_configs": null
144
+ }
145
+ },
146
+ "gr1": {
147
+ "video": {
148
+ "delta_indices": [
149
+ 0
150
+ ],
151
+ "modality_keys": [
152
+ "ego_view_bg_crop_pad_res256_freq20"
153
+ ],
154
+ "sin_cos_embedding_keys": null,
155
+ "mean_std_embedding_keys": null,
156
+ "action_configs": null
157
+ },
158
+ "state": {
159
+ "delta_indices": [
160
+ 0
161
+ ],
162
+ "modality_keys": [
163
+ "left_arm",
164
+ "right_arm",
165
+ "left_hand",
166
+ "right_hand",
167
+ "waist"
168
+ ],
169
+ "sin_cos_embedding_keys": [
170
+ "left_arm",
171
+ "right_arm",
172
+ "left_hand",
173
+ "right_hand",
174
+ "waist"
175
+ ],
176
+ "mean_std_embedding_keys": null,
177
+ "action_configs": null
178
+ },
179
+ "action": {
180
+ "delta_indices": [
181
+ 0,
182
+ 1,
183
+ 2,
184
+ 3,
185
+ 4,
186
+ 5,
187
+ 6,
188
+ 7,
189
+ 8,
190
+ 9,
191
+ 10,
192
+ 11,
193
+ 12,
194
+ 13,
195
+ 14,
196
+ 15
197
+ ],
198
+ "modality_keys": [
199
+ "left_arm",
200
+ "right_arm",
201
+ "left_hand",
202
+ "right_hand",
203
+ "waist"
204
+ ],
205
+ "sin_cos_embedding_keys": null,
206
+ "mean_std_embedding_keys": null,
207
+ "action_configs": [
208
+ {
209
+ "rep": "RELATIVE",
210
+ "type": "NON_EEF",
211
+ "format": "DEFAULT",
212
+ "state_key": null
213
+ },
214
+ {
215
+ "rep": "RELATIVE",
216
+ "type": "NON_EEF",
217
+ "format": "DEFAULT",
218
+ "state_key": null
219
+ },
220
+ {
221
+ "rep": "RELATIVE",
222
+ "type": "NON_EEF",
223
+ "format": "DEFAULT",
224
+ "state_key": null
225
+ },
226
+ {
227
+ "rep": "RELATIVE",
228
+ "type": "NON_EEF",
229
+ "format": "DEFAULT",
230
+ "state_key": null
231
+ },
232
+ {
233
+ "rep": "ABSOLUTE",
234
+ "type": "NON_EEF",
235
+ "format": "DEFAULT",
236
+ "state_key": null
237
+ }
238
+ ]
239
+ },
240
+ "language": {
241
+ "delta_indices": [
242
+ 0
243
+ ],
244
+ "modality_keys": [
245
+ "task"
246
+ ],
247
+ "sin_cos_embedding_keys": null,
248
+ "mean_std_embedding_keys": null,
249
+ "action_configs": null
250
+ },
251
+ "rl_info": {
252
+ "delta_indices": [
253
+ 0
254
+ ],
255
+ "modality_keys": [],
256
+ "sin_cos_embedding_keys": null,
257
+ "mean_std_embedding_keys": null,
258
+ "action_configs": null
259
+ }
260
+ },
261
+ "robocasa_panda_omron": {
262
+ "video": {
263
+ "delta_indices": [
264
+ 0
265
+ ],
266
+ "modality_keys": [
267
+ "res256_image_side_0",
268
+ "res256_image_side_1",
269
+ "res256_image_wrist_0"
270
+ ],
271
+ "sin_cos_embedding_keys": null,
272
+ "mean_std_embedding_keys": null,
273
+ "action_configs": null
274
+ },
275
+ "state": {
276
+ "delta_indices": [
277
+ 0
278
+ ],
279
+ "modality_keys": [
280
+ "end_effector_position_relative",
281
+ "end_effector_rotation_relative",
282
+ "gripper_qpos",
283
+ "base_position",
284
+ "base_rotation"
285
+ ],
286
+ "sin_cos_embedding_keys": null,
287
+ "mean_std_embedding_keys": null,
288
+ "action_configs": null
289
+ },
290
+ "action": {
291
+ "delta_indices": [
292
+ 0,
293
+ 1,
294
+ 2,
295
+ 3,
296
+ 4,
297
+ 5,
298
+ 6,
299
+ 7,
300
+ 8,
301
+ 9,
302
+ 10,
303
+ 11,
304
+ 12,
305
+ 13,
306
+ 14,
307
+ 15
308
+ ],
309
+ "modality_keys": [
310
+ "end_effector_position",
311
+ "end_effector_rotation",
312
+ "gripper_close",
313
+ "base_motion",
314
+ "control_mode"
315
+ ],
316
+ "sin_cos_embedding_keys": null,
317
+ "mean_std_embedding_keys": null,
318
+ "action_configs": [
319
+ {
320
+ "rep": "ABSOLUTE",
321
+ "type": "NON_EEF",
322
+ "format": "DEFAULT",
323
+ "state_key": null
324
+ },
325
+ {
326
+ "rep": "ABSOLUTE",
327
+ "type": "NON_EEF",
328
+ "format": "DEFAULT",
329
+ "state_key": null
330
+ },
331
+ {
332
+ "rep": "ABSOLUTE",
333
+ "type": "NON_EEF",
334
+ "format": "DEFAULT",
335
+ "state_key": null
336
+ },
337
+ {
338
+ "rep": "ABSOLUTE",
339
+ "type": "NON_EEF",
340
+ "format": "DEFAULT",
341
+ "state_key": null
342
+ },
343
+ {
344
+ "rep": "ABSOLUTE",
345
+ "type": "NON_EEF",
346
+ "format": "DEFAULT",
347
+ "state_key": null
348
+ }
349
+ ]
350
+ },
351
+ "language": {
352
+ "delta_indices": [
353
+ 0
354
+ ],
355
+ "modality_keys": [
356
+ "annotation.human.action.task_description"
357
+ ],
358
+ "sin_cos_embedding_keys": null,
359
+ "mean_std_embedding_keys": null,
360
+ "action_configs": null
361
+ }
362
+ },
363
+ "unitree_g1": {
364
+ "video": {
365
+ "delta_indices": [
366
+ 0
367
+ ],
368
+ "modality_keys": [
369
+ "ego_view"
370
+ ],
371
+ "sin_cos_embedding_keys": null,
372
+ "mean_std_embedding_keys": null,
373
+ "action_configs": null
374
+ },
375
+ "state": {
376
+ "delta_indices": [
377
+ 0
378
+ ],
379
+ "modality_keys": [
380
+ "left_leg",
381
+ "right_leg",
382
+ "waist",
383
+ "left_arm",
384
+ "right_arm",
385
+ "left_hand",
386
+ "right_hand"
387
+ ],
388
+ "sin_cos_embedding_keys": null,
389
+ "mean_std_embedding_keys": null,
390
+ "action_configs": null
391
+ },
392
+ "action": {
393
+ "delta_indices": [
394
+ 0,
395
+ 1,
396
+ 2,
397
+ 3,
398
+ 4,
399
+ 5,
400
+ 6,
401
+ 7,
402
+ 8,
403
+ 9,
404
+ 10,
405
+ 11,
406
+ 12,
407
+ 13,
408
+ 14,
409
+ 15,
410
+ 16,
411
+ 17,
412
+ 18,
413
+ 19,
414
+ 20,
415
+ 21,
416
+ 22,
417
+ 23,
418
+ 24,
419
+ 25,
420
+ 26,
421
+ 27,
422
+ 28,
423
+ 29
424
+ ],
425
+ "modality_keys": [
426
+ "left_arm",
427
+ "right_arm",
428
+ "left_hand",
429
+ "right_hand",
430
+ "waist",
431
+ "base_height_command",
432
+ "navigate_command"
433
+ ],
434
+ "sin_cos_embedding_keys": null,
435
+ "mean_std_embedding_keys": null,
436
+ "action_configs": [
437
+ {
438
+ "rep": "RELATIVE",
439
+ "type": "NON_EEF",
440
+ "format": "DEFAULT",
441
+ "state_key": null
442
+ },
443
+ {
444
+ "rep": "RELATIVE",
445
+ "type": "NON_EEF",
446
+ "format": "DEFAULT",
447
+ "state_key": null
448
+ },
449
+ {
450
+ "rep": "ABSOLUTE",
451
+ "type": "NON_EEF",
452
+ "format": "DEFAULT",
453
+ "state_key": null
454
+ },
455
+ {
456
+ "rep": "ABSOLUTE",
457
+ "type": "NON_EEF",
458
+ "format": "DEFAULT",
459
+ "state_key": null
460
+ },
461
+ {
462
+ "rep": "ABSOLUTE",
463
+ "type": "NON_EEF",
464
+ "format": "DEFAULT",
465
+ "state_key": null
466
+ },
467
+ {
468
+ "rep": "ABSOLUTE",
469
+ "type": "NON_EEF",
470
+ "format": "DEFAULT",
471
+ "state_key": null
472
+ },
473
+ {
474
+ "rep": "ABSOLUTE",
475
+ "type": "NON_EEF",
476
+ "format": "DEFAULT",
477
+ "state_key": null
478
+ }
479
+ ]
480
+ },
481
+ "language": {
482
+ "delta_indices": [
483
+ 0
484
+ ],
485
+ "modality_keys": [
486
+ "annotation.human.task_description"
487
+ ],
488
+ "sin_cos_embedding_keys": null,
489
+ "mean_std_embedding_keys": null,
490
+ "action_configs": null
491
+ }
492
+ },
493
+ "new_embodiment": {
494
+ "video": {
495
+ "delta_indices": [
496
+ 0
497
+ ],
498
+ "modality_keys": [
499
+ "ego_view",
500
+ "wrist_left",
501
+ "wrist_right"
502
+ ],
503
+ "sin_cos_embedding_keys": null,
504
+ "mean_std_embedding_keys": null,
505
+ "action_configs": null
506
+ },
507
+ "state": {
508
+ "delta_indices": [
509
+ 0
510
+ ],
511
+ "modality_keys": [
512
+ "left_arm",
513
+ "right_arm",
514
+ "left_hand",
515
+ "right_hand",
516
+ "left_leg",
517
+ "right_leg"
518
+ ],
519
+ "sin_cos_embedding_keys": [
520
+ "left_arm",
521
+ "right_arm",
522
+ "left_hand",
523
+ "right_hand",
524
+ "left_leg",
525
+ "right_leg"
526
+ ],
527
+ "mean_std_embedding_keys": [],
528
+ "action_configs": null
529
+ },
530
+ "action": {
531
+ "delta_indices": [
532
+ 0,
533
+ 1,
534
+ 2,
535
+ 3,
536
+ 4,
537
+ 5,
538
+ 6,
539
+ 7,
540
+ 8,
541
+ 9,
542
+ 10,
543
+ 11,
544
+ 12,
545
+ 13,
546
+ 14,
547
+ 15
548
+ ],
549
+ "modality_keys": [
550
+ "left_arm",
551
+ "right_arm",
552
+ "left_hand",
553
+ "right_hand",
554
+ "navigate_command",
555
+ "base_height_command",
556
+ "waist"
557
+ ],
558
+ "sin_cos_embedding_keys": null,
559
+ "mean_std_embedding_keys": null,
560
+ "action_configs": [
561
+ {
562
+ "rep": "RELATIVE",
563
+ "type": "NON_EEF",
564
+ "format": "DEFAULT",
565
+ "state_key": null
566
+ },
567
+ {
568
+ "rep": "RELATIVE",
569
+ "type": "NON_EEF",
570
+ "format": "DEFAULT",
571
+ "state_key": null
572
+ },
573
+ {
574
+ "rep": "ABSOLUTE",
575
+ "type": "NON_EEF",
576
+ "format": "DEFAULT",
577
+ "state_key": null
578
+ },
579
+ {
580
+ "rep": "ABSOLUTE",
581
+ "type": "NON_EEF",
582
+ "format": "DEFAULT",
583
+ "state_key": null
584
+ },
585
+ {
586
+ "rep": "ABSOLUTE",
587
+ "type": "NON_EEF",
588
+ "format": "DEFAULT",
589
+ "state_key": null
590
+ },
591
+ {
592
+ "rep": "ABSOLUTE",
593
+ "type": "NON_EEF",
594
+ "format": "DEFAULT",
595
+ "state_key": null
596
+ },
597
+ {
598
+ "rep": "ABSOLUTE",
599
+ "type": "NON_EEF",
600
+ "format": "DEFAULT",
601
+ "state_key": null
602
+ }
603
+ ]
604
+ },
605
+ "language": {
606
+ "delta_indices": [
607
+ 0
608
+ ],
609
+ "modality_keys": [
610
+ "annotation.human.action.task_description"
611
+ ],
612
+ "sin_cos_embedding_keys": null,
613
+ "mean_std_embedding_keys": null,
614
+ "action_configs": null
615
+ }
616
+ }
617
+ },
618
+ "image_crop_size": null,
619
+ "image_target_size": null,
620
+ "use_albumentations": true,
621
+ "random_rotation_angle": 5,
622
+ "color_jitter_params": {
623
+ "brightness": 0.2,
624
+ "contrast": 0.2,
625
+ "saturation": 0.2,
626
+ "hue": 0.08
627
+ },
628
+ "shortest_image_edge": 256,
629
+ "crop_fraction": 0.95,
630
+ "model_name": "nvidia/Eagle-Block2A-2B-v2",
631
+ "model_type": "eagle",
632
+ "formalize_language": true,
633
+ "max_state_dim": 128,
634
+ "max_action_dim": 128,
635
+ "max_action_horizon": 50,
636
+ "use_percentiles": false,
637
+ "clip_outliers": true,
638
+ "apply_sincos_state_encoding": true,
639
+ "use_relative_action": true
640
+ }
641
+ }
rng_state_0.pth ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:3a2d386a33a3a3e726b5af06a1adef33f3da8e695771b1410c8be86d10f29b3b
3
+ size 14917
rng_state_1.pth ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e4d3c79f81b9d18e67f3cc1809d5633c90ae75a4916c495e3f9cb6a8f5483ace
3
+ size 14917
scheduler.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7fa802a80def971b73ec74284a6aa44d0b2ea101bd38ed41a3b1c1a0b4001f00
3
+ size 1465
statistics.json ADDED
The diff for this file is too large to render. See raw diff
 
trainer_state.json ADDED
The diff for this file is too large to render. See raw diff
 
training_args.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:4c169e0847598e0a12fabee3506ec74d73bcc8552adeb6d6d56715a410fb13f4
3
+ size 7633
wandb_config.json ADDED
@@ -0,0 +1 @@
 
 
1
+ {"project": "finetune-gr00t-n1d6", "run_id": "test"}
zero_to_fp32.py ADDED
@@ -0,0 +1,760 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ #!/usr/bin/env python
2
+
3
+ # Copyright (c) Microsoft Corporation.
4
+ # SPDX-License-Identifier: Apache-2.0
5
+
6
+ # DeepSpeed Team
7
+
8
+ # This script extracts fp32 consolidated weights from a zero 1, 2 and 3 DeepSpeed checkpoints. It gets
9
+ # copied into the top level checkpoint dir, so the user can easily do the conversion at any point in
10
+ # the future. Once extracted, the weights don't require DeepSpeed and can be used in any
11
+ # application.
12
+ #
13
+ # example:
14
+ # python zero_to_fp32.py . output_dir/
15
+ # or
16
+ # python zero_to_fp32.py . output_dir/ --safe_serialization
17
+
18
+ import argparse
19
+ import torch
20
+ import glob
21
+ import math
22
+ import os
23
+ import re
24
+ import gc
25
+ import json
26
+ import numpy as np
27
+ from tqdm import tqdm
28
+ from collections import OrderedDict
29
+ from dataclasses import dataclass
30
+
31
+ # while this script doesn't use deepspeed to recover data, since the checkpoints are pickled with
32
+ # DeepSpeed data structures it has to be available in the current python environment.
33
+ from deepspeed.utils import logger
34
+ from deepspeed.checkpoint.constants import (DS_VERSION, OPTIMIZER_STATE_DICT, SINGLE_PARTITION_OF_FP32_GROUPS,
35
+ FP32_FLAT_GROUPS, ZERO_STAGE, PARTITION_COUNT, PARAM_SHAPES, BUFFER_NAMES,
36
+ FROZEN_PARAM_SHAPES, FROZEN_PARAM_FRAGMENTS)
37
+
38
+
39
+ @dataclass
40
+ class zero_model_state:
41
+ buffers: dict()
42
+ param_shapes: dict()
43
+ shared_params: list
44
+ ds_version: int
45
+ frozen_param_shapes: dict()
46
+ frozen_param_fragments: dict()
47
+
48
+
49
+ debug = 0
50
+
51
+ # load to cpu
52
+ device = torch.device('cpu')
53
+
54
+
55
+ def atoi(text):
56
+ return int(text) if text.isdigit() else text
57
+
58
+
59
+ def natural_keys(text):
60
+ '''
61
+ alist.sort(key=natural_keys) sorts in human order
62
+ http://nedbatchelder.com/blog/200712/human_sorting.html
63
+ (See Toothy's implementation in the comments)
64
+ '''
65
+ return [atoi(c) for c in re.split(r'(\d+)', text)]
66
+
67
+
68
+ def get_model_state_file(checkpoint_dir, zero_stage):
69
+ if not os.path.isdir(checkpoint_dir):
70
+ raise FileNotFoundError(f"Directory '{checkpoint_dir}' doesn't exist")
71
+
72
+ # there should be only one file
73
+ if zero_stage <= 2:
74
+ file = os.path.join(checkpoint_dir, "mp_rank_00_model_states.pt")
75
+ elif zero_stage == 3:
76
+ file = os.path.join(checkpoint_dir, "zero_pp_rank_0_mp_rank_00_model_states.pt")
77
+
78
+ if not os.path.exists(file):
79
+ raise FileNotFoundError(f"can't find model states file at '{file}'")
80
+
81
+ return file
82
+
83
+
84
+ def get_checkpoint_files(checkpoint_dir, glob_pattern):
85
+ # XXX: need to test that this simple glob rule works for multi-node setup too
86
+ ckpt_files = sorted(glob.glob(os.path.join(checkpoint_dir, glob_pattern)), key=natural_keys)
87
+
88
+ if len(ckpt_files) == 0:
89
+ raise FileNotFoundError(f"can't find {glob_pattern} files in directory '{checkpoint_dir}'")
90
+
91
+ return ckpt_files
92
+
93
+
94
+ def get_optim_files(checkpoint_dir):
95
+ return get_checkpoint_files(checkpoint_dir, "*_optim_states.pt")
96
+
97
+
98
+ def get_model_state_files(checkpoint_dir):
99
+ return get_checkpoint_files(checkpoint_dir, "*_model_states.pt")
100
+
101
+
102
+ def parse_model_states(files):
103
+ zero_model_states = []
104
+ for file in files:
105
+ state_dict = torch.load(file, map_location=device, weights_only=False)
106
+
107
+ if BUFFER_NAMES not in state_dict:
108
+ raise ValueError(f"{file} is not a model state checkpoint")
109
+ buffer_names = state_dict[BUFFER_NAMES]
110
+ if debug:
111
+ print("Found buffers:", buffer_names)
112
+
113
+ # recover just the buffers while restoring them to fp32 if they were saved in fp16
114
+ buffers = {k: v.float() for k, v in state_dict["module"].items() if k in buffer_names}
115
+ param_shapes = state_dict[PARAM_SHAPES]
116
+
117
+ # collect parameters that are included in param_shapes
118
+ param_names = []
119
+ for s in param_shapes:
120
+ for name in s.keys():
121
+ param_names.append(name)
122
+
123
+ # update with frozen parameters
124
+ frozen_param_shapes = state_dict.get(FROZEN_PARAM_SHAPES, None)
125
+ if frozen_param_shapes is not None:
126
+ if debug:
127
+ print(f"Found frozen_param_shapes: {frozen_param_shapes}")
128
+ param_names += list(frozen_param_shapes.keys())
129
+
130
+ # handle shared params
131
+ shared_params = [[k, v] for k, v in state_dict["shared_params"].items()]
132
+
133
+ ds_version = state_dict.get(DS_VERSION, None)
134
+
135
+ frozen_param_fragments = state_dict.get(FROZEN_PARAM_FRAGMENTS, None)
136
+
137
+ z_model_state = zero_model_state(buffers=buffers,
138
+ param_shapes=param_shapes,
139
+ shared_params=shared_params,
140
+ ds_version=ds_version,
141
+ frozen_param_shapes=frozen_param_shapes,
142
+ frozen_param_fragments=frozen_param_fragments)
143
+ zero_model_states.append(z_model_state)
144
+
145
+ return zero_model_states
146
+
147
+
148
+ def parse_optim_states(files, ds_checkpoint_dir):
149
+ total_files = len(files)
150
+ state_dicts = []
151
+ for f in tqdm(files, desc='Loading checkpoint shards'):
152
+ state_dict = torch.load(f, map_location=device, mmap=True, weights_only=False)
153
+ # immediately discard the potentially huge 2 optimizer states as we only care for fp32 master weights
154
+ # and also handle the case where it was already removed by another helper script
155
+ state_dict["optimizer_state_dict"].pop("optimizer_state_dict", None)
156
+ state_dicts.append(state_dict)
157
+
158
+ if ZERO_STAGE not in state_dicts[0][OPTIMIZER_STATE_DICT]:
159
+ raise ValueError(f"{files[0]} is not a zero checkpoint")
160
+ zero_stage = state_dicts[0][OPTIMIZER_STATE_DICT][ZERO_STAGE]
161
+ world_size = state_dicts[0][OPTIMIZER_STATE_DICT][PARTITION_COUNT]
162
+
163
+ # For ZeRO-2 each param group can have different partition_count as data parallelism for expert
164
+ # parameters can be different from data parallelism for non-expert parameters. So we can just
165
+ # use the max of the partition_count to get the dp world_size.
166
+
167
+ if type(world_size) is list:
168
+ world_size = max(world_size)
169
+
170
+ if world_size != total_files:
171
+ raise ValueError(
172
+ f"Expected {world_size} of '*_optim_states.pt' under '{ds_checkpoint_dir}' but found {total_files} files. "
173
+ "Possibly due to an overwrite of an old checkpoint, or a checkpoint didn't get saved by one or more processes."
174
+ )
175
+
176
+ # the groups are named differently in each stage
177
+ if zero_stage <= 2:
178
+ fp32_groups_key = SINGLE_PARTITION_OF_FP32_GROUPS
179
+ elif zero_stage == 3:
180
+ fp32_groups_key = FP32_FLAT_GROUPS
181
+ else:
182
+ raise ValueError(f"unknown zero stage {zero_stage}")
183
+
184
+ fp32_flat_groups = [state_dicts[i][OPTIMIZER_STATE_DICT][fp32_groups_key] for i in range(len(state_dicts))]
185
+ return zero_stage, world_size, fp32_flat_groups
186
+
187
+
188
+ def _get_fp32_state_dict_from_zero_checkpoint(ds_checkpoint_dir, exclude_frozen_parameters):
189
+ """
190
+ Returns fp32 state_dict reconstructed from ds checkpoint
191
+
192
+ Args:
193
+ - ``ds_checkpoint_dir``: path to the deepspeed checkpoint folder (where the optimizer files are)
194
+
195
+ """
196
+ print(f"Processing zero checkpoint '{ds_checkpoint_dir}'")
197
+
198
+ optim_files = get_optim_files(ds_checkpoint_dir)
199
+ zero_stage, world_size, fp32_flat_groups = parse_optim_states(optim_files, ds_checkpoint_dir)
200
+ print(f"Detected checkpoint of type zero stage {zero_stage}, world_size: {world_size}")
201
+
202
+ model_files = get_model_state_files(ds_checkpoint_dir)
203
+
204
+ zero_model_states = parse_model_states(model_files)
205
+ print(f'Parsing checkpoint created by deepspeed=={zero_model_states[0].ds_version}')
206
+
207
+ if zero_stage <= 2:
208
+ return _get_fp32_state_dict_from_zero2_checkpoint(world_size, fp32_flat_groups, zero_model_states,
209
+ exclude_frozen_parameters)
210
+ elif zero_stage == 3:
211
+ return _get_fp32_state_dict_from_zero3_checkpoint(world_size, fp32_flat_groups, zero_model_states,
212
+ exclude_frozen_parameters)
213
+
214
+
215
+ def _zero2_merge_frozen_params(state_dict, zero_model_states):
216
+ if zero_model_states[0].frozen_param_shapes is None or len(zero_model_states[0].frozen_param_shapes) == 0:
217
+ return
218
+
219
+ frozen_param_shapes = zero_model_states[0].frozen_param_shapes
220
+ frozen_param_fragments = zero_model_states[0].frozen_param_fragments
221
+
222
+ if debug:
223
+ num_elem = sum(s.numel() for s in frozen_param_shapes.values())
224
+ print(f'rank 0: {FROZEN_PARAM_SHAPES}.numel = {num_elem}')
225
+
226
+ wanted_params = len(frozen_param_shapes)
227
+ wanted_numel = sum(s.numel() for s in frozen_param_shapes.values())
228
+ avail_numel = sum([p.numel() for p in frozen_param_fragments.values()])
229
+ print(f'Frozen params: Have {avail_numel} numels to process.')
230
+ print(f'Frozen params: Need {wanted_numel} numels in {wanted_params} params')
231
+
232
+ total_params = 0
233
+ total_numel = 0
234
+ for name, shape in frozen_param_shapes.items():
235
+ total_params += 1
236
+ unpartitioned_numel = shape.numel()
237
+ total_numel += unpartitioned_numel
238
+
239
+ state_dict[name] = frozen_param_fragments[name]
240
+
241
+ if debug:
242
+ print(f"{name} full shape: {shape} unpartitioned numel {unpartitioned_numel} ")
243
+
244
+ print(f"Reconstructed Frozen fp32 state dict with {total_params} params {total_numel} elements")
245
+
246
+
247
+ def _has_callable(obj, fn):
248
+ attr = getattr(obj, fn, None)
249
+ return callable(attr)
250
+
251
+
252
+ def _zero2_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states):
253
+ param_shapes = zero_model_states[0].param_shapes
254
+
255
+ # Reconstruction protocol:
256
+ #
257
+ # XXX: document this
258
+
259
+ if debug:
260
+ for i in range(world_size):
261
+ for j in range(len(fp32_flat_groups[0])):
262
+ print(f"{FP32_FLAT_GROUPS}[{i}][{j}].shape={fp32_flat_groups[i][j].shape}")
263
+
264
+ # XXX: memory usage doubles here (zero2)
265
+ num_param_groups = len(fp32_flat_groups[0])
266
+ merged_single_partition_of_fp32_groups = []
267
+ for i in range(num_param_groups):
268
+ merged_partitions = [sd[i] for sd in fp32_flat_groups]
269
+ full_single_fp32_vector = torch.cat(merged_partitions, 0)
270
+ merged_single_partition_of_fp32_groups.append(full_single_fp32_vector)
271
+ avail_numel = sum(
272
+ [full_single_fp32_vector.numel() for full_single_fp32_vector in merged_single_partition_of_fp32_groups])
273
+
274
+ if debug:
275
+ wanted_params = sum([len(shapes) for shapes in param_shapes])
276
+ wanted_numel = sum([sum(shape.numel() for shape in shapes.values()) for shapes in param_shapes])
277
+ # not asserting if there is a mismatch due to possible padding
278
+ print(f"Have {avail_numel} numels to process.")
279
+ print(f"Need {wanted_numel} numels in {wanted_params} params.")
280
+
281
+ # params
282
+ # XXX: for huge models that can't fit into the host's RAM we will have to recode this to support
283
+ # out-of-core computing solution
284
+ total_numel = 0
285
+ total_params = 0
286
+ for shapes, full_single_fp32_vector in zip(param_shapes, merged_single_partition_of_fp32_groups):
287
+ offset = 0
288
+ avail_numel = full_single_fp32_vector.numel()
289
+ for name, shape in shapes.items():
290
+
291
+ unpartitioned_numel = shape.numel() if _has_callable(shape, 'numel') else math.prod(shape)
292
+ total_numel += unpartitioned_numel
293
+ total_params += 1
294
+
295
+ if debug:
296
+ print(f"{name} full shape: {shape} unpartitioned numel {unpartitioned_numel} ")
297
+ state_dict[name] = full_single_fp32_vector.narrow(0, offset, unpartitioned_numel).view(shape)
298
+ offset += unpartitioned_numel
299
+
300
+ # Z2 started to align to 2*world_size to improve nccl performance. Therefore both offset and
301
+ # avail_numel can differ by anywhere between 0..2*world_size. Due to two unrelated complex
302
+ # paddings performed in the code it's almost impossible to predict the exact numbers w/o the
303
+ # live optimizer object, so we are checking that the numbers are within the right range
304
+ align_to = 2 * world_size
305
+
306
+ def zero2_align(x):
307
+ return align_to * math.ceil(x / align_to)
308
+
309
+ if debug:
310
+ print(f"original offset={offset}, avail_numel={avail_numel}")
311
+
312
+ offset = zero2_align(offset)
313
+ avail_numel = zero2_align(avail_numel)
314
+
315
+ if debug:
316
+ print(f"aligned offset={offset}, avail_numel={avail_numel}")
317
+
318
+ # Sanity check
319
+ if offset != avail_numel:
320
+ raise ValueError(f"consumed {offset} numels out of {avail_numel} - something is wrong")
321
+
322
+ print(f"Reconstructed fp32 state dict with {total_params} params {total_numel} elements")
323
+
324
+
325
+ def _get_fp32_state_dict_from_zero2_checkpoint(world_size, fp32_flat_groups, zero_model_states,
326
+ exclude_frozen_parameters):
327
+ state_dict = OrderedDict()
328
+
329
+ # buffers
330
+ buffers = zero_model_states[0].buffers
331
+ state_dict.update(buffers)
332
+ if debug:
333
+ print(f"added {len(buffers)} buffers")
334
+
335
+ if not exclude_frozen_parameters:
336
+ _zero2_merge_frozen_params(state_dict, zero_model_states)
337
+
338
+ _zero2_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states)
339
+
340
+ # recover shared parameters
341
+ for pair in zero_model_states[0].shared_params:
342
+ if pair[1] in state_dict:
343
+ state_dict[pair[0]] = state_dict[pair[1]]
344
+
345
+ return state_dict
346
+
347
+
348
+ def zero3_partitioned_param_info(unpartitioned_numel, world_size):
349
+ remainder = unpartitioned_numel % world_size
350
+ padding_numel = (world_size - remainder) if remainder else 0
351
+ partitioned_numel = math.ceil(unpartitioned_numel / world_size)
352
+ return partitioned_numel, padding_numel
353
+
354
+
355
+ def _zero3_merge_frozen_params(state_dict, world_size, zero_model_states):
356
+ if zero_model_states[0].frozen_param_shapes is None or len(zero_model_states[0].frozen_param_shapes) == 0:
357
+ return
358
+
359
+ if debug:
360
+ for i in range(world_size):
361
+ num_elem = sum(s.numel() for s in zero_model_states[i].frozen_param_fragments.values())
362
+ print(f'rank {i}: {FROZEN_PARAM_SHAPES}.numel = {num_elem}')
363
+
364
+ frozen_param_shapes = zero_model_states[0].frozen_param_shapes
365
+ wanted_params = len(frozen_param_shapes)
366
+ wanted_numel = sum(s.numel() for s in frozen_param_shapes.values())
367
+ avail_numel = sum([p.numel() for p in zero_model_states[0].frozen_param_fragments.values()]) * world_size
368
+ print(f'Frozen params: Have {avail_numel} numels to process.')
369
+ print(f'Frozen params: Need {wanted_numel} numels in {wanted_params} params')
370
+
371
+ total_params = 0
372
+ total_numel = 0
373
+ for name, shape in zero_model_states[0].frozen_param_shapes.items():
374
+ total_params += 1
375
+ unpartitioned_numel = shape.numel()
376
+ total_numel += unpartitioned_numel
377
+
378
+ param_frags = tuple(model_state.frozen_param_fragments[name] for model_state in zero_model_states)
379
+ state_dict[name] = torch.cat(param_frags, 0).narrow(0, 0, unpartitioned_numel).view(shape)
380
+
381
+ partitioned_numel, partitioned_padding_numel = zero3_partitioned_param_info(unpartitioned_numel, world_size)
382
+
383
+ if debug:
384
+ print(
385
+ f"Frozen params: {total_params} {name} full shape: {shape} partition0 numel={partitioned_numel} partitioned_padding_numel={partitioned_padding_numel}"
386
+ )
387
+
388
+ print(f"Reconstructed Frozen fp32 state dict with {total_params} params {total_numel} elements")
389
+
390
+
391
+ class GatheredTensor:
392
+ """
393
+ A pseudo tensor that collects partitioned weights.
394
+ It is more memory efficient when there are multiple groups.
395
+ """
396
+
397
+ def __init__(self, flat_groups, flat_groups_offset, offset, partitioned_numel, shape):
398
+ self.flat_groups = flat_groups
399
+ self.flat_groups_offset = flat_groups_offset
400
+ self.offset = offset
401
+ self.partitioned_numel = partitioned_numel
402
+ self.shape = shape
403
+ self.dtype = self.flat_groups[0][0].dtype
404
+
405
+ def contiguous(self):
406
+ """
407
+ Merge partitioned weights from flat_groups into a single tensor.
408
+ """
409
+ end_idx = self.offset + self.partitioned_numel
410
+ world_size = len(self.flat_groups)
411
+ pad_flat_param_chunks = []
412
+
413
+ for rank_i in range(world_size):
414
+ # for each rank, we need to collect weights from related group/groups
415
+ flat_groups_at_rank_i = self.flat_groups[rank_i]
416
+ start_group_id = None
417
+ end_group_id = None
418
+ for group_id in range(len(self.flat_groups_offset)):
419
+ if self.flat_groups_offset[group_id] <= self.offset < self.flat_groups_offset[group_id + 1]:
420
+ start_group_id = group_id
421
+ if self.flat_groups_offset[group_id] < end_idx <= self.flat_groups_offset[group_id + 1]:
422
+ end_group_id = group_id
423
+ break
424
+ # collect weights from related group/groups
425
+ for group_id in range(start_group_id, end_group_id + 1):
426
+ flat_tensor = flat_groups_at_rank_i[group_id]
427
+ start_offset = self.offset - self.flat_groups_offset[group_id]
428
+ end_offset = min(end_idx, self.flat_groups_offset[group_id + 1]) - self.flat_groups_offset[group_id]
429
+ pad_flat_param_chunks.append(flat_tensor[start_offset:end_offset])
430
+
431
+ # collect weights from all ranks
432
+ pad_flat_param = torch.cat(pad_flat_param_chunks, dim=0)
433
+ param = pad_flat_param[:self.shape.numel()].view(self.shape).contiguous()
434
+ return param
435
+
436
+
437
+ def _zero3_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states):
438
+ param_shapes = zero_model_states[0].param_shapes
439
+ avail_numel = sum([flat_group.numel() for flat_group in fp32_flat_groups[0]]) * world_size
440
+
441
+ # Reconstruction protocol: For zero3 we need to zip the partitions together at boundary of each
442
+ # param, re-consolidating each param, while dealing with padding if any
443
+
444
+ # merge list of dicts, preserving order
445
+ param_shapes = {k: v for d in param_shapes for k, v in d.items()}
446
+
447
+ if debug:
448
+ for i in range(world_size):
449
+ print(f"{FP32_FLAT_GROUPS}[{i}].shape={fp32_flat_groups[i].shape}")
450
+
451
+ wanted_params = len(param_shapes)
452
+ wanted_numel = sum(shape.numel() for shape in param_shapes.values())
453
+ # not asserting if there is a mismatch due to possible padding
454
+ avail_numel = fp32_flat_groups[0].numel() * world_size
455
+ print(f"Trainable params: Have {avail_numel} numels to process.")
456
+ print(f"Trainable params: Need {wanted_numel} numels in {wanted_params} params.")
457
+
458
+ # params
459
+ # XXX: for huge models that can't fit into the host's RAM we will have to recode this to support
460
+ # out-of-core computing solution
461
+ offset = 0
462
+ total_numel = 0
463
+ total_params = 0
464
+ flat_groups_offset = [0] + list(np.cumsum([flat_tensor.numel() for flat_tensor in fp32_flat_groups[0]]))
465
+ for name, shape in tqdm(param_shapes.items(), desc='Gathering sharded weights'):
466
+ unpartitioned_numel = shape.numel()
467
+ total_numel += unpartitioned_numel
468
+ total_params += 1
469
+ partitioned_numel, partitioned_padding_numel = zero3_partitioned_param_info(unpartitioned_numel, world_size)
470
+
471
+ if debug:
472
+ print(
473
+ f"Trainable params: {total_params} {name} full shape: {shape} partition0 numel={partitioned_numel} partitioned_padding_numel={partitioned_padding_numel}"
474
+ )
475
+
476
+ # memory efficient tensor
477
+ tensor = GatheredTensor(fp32_flat_groups, flat_groups_offset, offset, partitioned_numel, shape)
478
+ state_dict[name] = tensor
479
+ offset += partitioned_numel
480
+
481
+ offset *= world_size
482
+
483
+ # Sanity check
484
+ if offset != avail_numel:
485
+ raise ValueError(f"consumed {offset} numels out of {avail_numel} - something is wrong")
486
+
487
+ print(f"Reconstructed Trainable fp32 state dict with {total_params} params {total_numel} elements")
488
+
489
+
490
+ def _get_fp32_state_dict_from_zero3_checkpoint(world_size, fp32_flat_groups, zero_model_states,
491
+ exclude_frozen_parameters):
492
+ state_dict = OrderedDict()
493
+
494
+ # buffers
495
+ buffers = zero_model_states[0].buffers
496
+ state_dict.update(buffers)
497
+ if debug:
498
+ print(f"added {len(buffers)} buffers")
499
+
500
+ if not exclude_frozen_parameters:
501
+ _zero3_merge_frozen_params(state_dict, world_size, zero_model_states)
502
+
503
+ _zero3_merge_trainable_params(state_dict, world_size, fp32_flat_groups, zero_model_states)
504
+
505
+ # recover shared parameters
506
+ for pair in zero_model_states[0].shared_params:
507
+ if pair[1] in state_dict:
508
+ state_dict[pair[0]] = state_dict[pair[1]]
509
+
510
+ return state_dict
511
+
512
+
513
+ def to_torch_tensor(state_dict, return_empty_tensor=False):
514
+ """
515
+ Convert state_dict of GatheredTensor to torch tensor
516
+ """
517
+ torch_state_dict = {}
518
+ converted_tensors = {}
519
+ for name, tensor in state_dict.items():
520
+ tensor_id = id(tensor)
521
+ if tensor_id in converted_tensors: # shared tensors
522
+ shared_tensor = torch_state_dict[converted_tensors[tensor_id]]
523
+ torch_state_dict[name] = shared_tensor
524
+ else:
525
+ converted_tensors[tensor_id] = name
526
+ if return_empty_tensor:
527
+ torch_state_dict[name] = torch.empty(tensor.shape, dtype=tensor.dtype)
528
+ else:
529
+ torch_state_dict[name] = tensor.contiguous()
530
+ return torch_state_dict
531
+
532
+
533
+ def get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir,
534
+ tag=None,
535
+ exclude_frozen_parameters=False,
536
+ lazy_mode=False):
537
+ """
538
+ Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated state_dict that can be loaded with
539
+ ``load_state_dict()`` and used for training without DeepSpeed or shared with others, for example
540
+ via a model hub.
541
+
542
+ Args:
543
+ - ``checkpoint_dir``: path to the desired checkpoint folder
544
+ - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in 'latest' file. e.g., ``global_step14``
545
+ - ``exclude_frozen_parameters``: exclude frozen parameters
546
+ - ``lazy_mode``: get state_dict in lazy mode. It returns a dict of pesduo tensor instead of torch tensor, which is more memory efficient.
547
+ Convert the pesduo tensor to torch tensor by ``.contiguous()``
548
+
549
+ Returns:
550
+ - pytorch ``state_dict``
551
+
552
+ A typical usage might be ::
553
+
554
+ from deepspeed.utils.zero_to_fp32 import get_fp32_state_dict_from_zero_checkpoint
555
+ # do the training and checkpoint saving
556
+ state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir) # already on cpu
557
+ model = model.cpu() # move to cpu
558
+ model.load_state_dict(state_dict)
559
+ # submit to model hub or save the model to share with others
560
+
561
+ In this example the ``model`` will no longer be usable in the deepspeed context of the same
562
+ application. i.e. you will need to re-initialize the deepspeed engine, since
563
+ ``model.load_state_dict(state_dict)`` will remove all the deepspeed magic from it.
564
+
565
+ If you want it all done for you, use ``load_state_dict_from_zero_checkpoint`` instead.
566
+
567
+ Note: the above usage may not work if your application doesn't have sufficient free CPU memory.
568
+ You may need to use the offline approach using the ``zero_to_fp32.py`` script that is saved with
569
+ the checkpoint. Or you can load state_dict in lazy mode ::
570
+
571
+ from deepspeed.utils.zero_to_fp32 import get_fp32_state_dict_from_zero_checkpoint
572
+ state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, lazy_mode=True) # not on cpu
573
+ for name, lazy_tensor in state_dict.item():
574
+ tensor = lazy_tensor.contiguous() # to cpu
575
+ print(name, tensor)
576
+ # del tensor to release memory if it no longer in use
577
+ """
578
+ if tag is None:
579
+ latest_path = os.path.join(checkpoint_dir, 'latest')
580
+ if os.path.isfile(latest_path):
581
+ with open(latest_path, 'r') as fd:
582
+ tag = fd.read().strip()
583
+ else:
584
+ raise ValueError(f"Unable to find 'latest' file at {latest_path}")
585
+
586
+ ds_checkpoint_dir = os.path.join(checkpoint_dir, tag)
587
+
588
+ if not os.path.isdir(ds_checkpoint_dir):
589
+ raise FileNotFoundError(f"Directory '{ds_checkpoint_dir}' doesn't exist")
590
+
591
+ state_dict = _get_fp32_state_dict_from_zero_checkpoint(ds_checkpoint_dir, exclude_frozen_parameters)
592
+ if lazy_mode:
593
+ return state_dict
594
+ else:
595
+ return to_torch_tensor(state_dict)
596
+
597
+
598
+ def convert_zero_checkpoint_to_fp32_state_dict(checkpoint_dir,
599
+ output_dir,
600
+ max_shard_size="5GB",
601
+ safe_serialization=False,
602
+ tag=None,
603
+ exclude_frozen_parameters=False):
604
+ """
605
+ Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated ``state_dict`` file that can be
606
+ loaded with ``torch.load(file)`` + ``load_state_dict()`` and used for training without DeepSpeed.
607
+
608
+ Args:
609
+ - ``checkpoint_dir``: path to the desired checkpoint folder. (one that contains the tag-folder, like ``global_step14``)
610
+ - ``output_dir``: directory to the pytorch fp32 state_dict output files
611
+ - ``max_shard_size``: the maximum size for a checkpoint before being sharded, default value is 5GB
612
+ - ``safe_serialization``: whether to save the model using `safetensors` or the traditional PyTorch way (that uses `pickle`).
613
+ - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in the file named ``latest`` in the checkpoint folder, e.g., ``global_step14``
614
+ - ``exclude_frozen_parameters``: exclude frozen parameters
615
+ """
616
+
617
+ # Dependency pre-check
618
+ if safe_serialization:
619
+ try:
620
+ from safetensors.torch import save_file
621
+ except ImportError:
622
+ print('If you want to use `safe_serialization`, please `pip install safetensors`')
623
+ raise
624
+ if max_shard_size is not None:
625
+ try:
626
+ from huggingface_hub import split_torch_state_dict_into_shards
627
+ except ImportError:
628
+ print('If you want to use `max_shard_size`, please `pip install huggingface_hub`')
629
+ raise
630
+
631
+ # Convert zero checkpoint to state_dict
632
+ state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir,
633
+ tag,
634
+ exclude_frozen_parameters,
635
+ lazy_mode=True)
636
+
637
+ # Shard the model if it is too big.
638
+ weights_name = "model.safetensors" if safe_serialization else "pytorch_model.bin"
639
+ if max_shard_size is not None:
640
+ filename_pattern = weights_name.replace(".bin", "{suffix}.bin").replace(".safetensors", "{suffix}.safetensors")
641
+ # an memory-efficient approach for sharding
642
+ empty_state_dict = to_torch_tensor(state_dict, return_empty_tensor=True)
643
+ state_dict_split = split_torch_state_dict_into_shards(empty_state_dict,
644
+ filename_pattern=filename_pattern,
645
+ max_shard_size=max_shard_size)
646
+ else:
647
+ from collections import namedtuple
648
+ StateDictSplit = namedtuple("StateDictSplit", ["is_sharded", "filename_to_tensors"])
649
+ state_dict_split = StateDictSplit(is_sharded=False,
650
+ filename_to_tensors={weights_name: list(state_dict.keys())})
651
+
652
+ # Save the model by shard
653
+ os.makedirs(output_dir, exist_ok=True)
654
+ filename_to_tensors = state_dict_split.filename_to_tensors.items()
655
+ for shard_file, tensors in tqdm(filename_to_tensors, desc="Saving checkpoint shards"):
656
+ shard_state_dict = {tensor_name: state_dict[tensor_name] for tensor_name in tensors}
657
+ shard_state_dict = to_torch_tensor(shard_state_dict)
658
+ output_path = os.path.join(output_dir, shard_file)
659
+ if safe_serialization:
660
+ save_file(shard_state_dict, output_path, metadata={"format": "pt"})
661
+ else:
662
+ torch.save(shard_state_dict, output_path)
663
+ # release the memory of current shard
664
+ for tensor_name in list(shard_state_dict.keys()):
665
+ del state_dict[tensor_name]
666
+ del shard_state_dict[tensor_name]
667
+ del shard_state_dict
668
+ gc.collect()
669
+
670
+ # Save index if sharded
671
+ if state_dict_split.is_sharded:
672
+ index = {
673
+ "metadata": state_dict_split.metadata,
674
+ "weight_map": state_dict_split.tensor_to_filename,
675
+ }
676
+ save_index_file = "model.safetensors.index.json" if safe_serialization else "pytorch_model.bin.index.json"
677
+ save_index_file = os.path.join(output_dir, save_index_file)
678
+ with open(save_index_file, "w", encoding="utf-8") as f:
679
+ content = json.dumps(index, indent=2, sort_keys=True) + "\n"
680
+ f.write(content)
681
+
682
+
683
def load_state_dict_from_zero_checkpoint(model, checkpoint_dir, tag=None):
    """
    1. Put the provided model to cpu
    2. Convert ZeRO 2 or 3 checkpoint into a single fp32 consolidated ``state_dict``
    3. Load it into the provided model

    Args:
        - ``model``: the model object to update
        - ``checkpoint_dir``: path to the desired checkpoint folder. (one that contains the tag-folder, like ``global_step14``)
        - ``tag``: checkpoint tag used as a unique identifier for checkpoint. If not provided will attempt to load tag in the file named ``latest`` in the checkpoint folder, e.g., ``global_step14``

    Returns:
        - ``model``: modified model

    Make sure you have plenty of CPU memory available before you call this function. If you don't
    have enough use the ``zero_to_fp32.py`` utility to do the conversion. You will find it
    conveniently placed for you in the checkpoint folder.

    A typical usage might be ::

        from deepspeed.utils.zero_to_fp32 import load_state_dict_from_zero_checkpoint
        model = load_state_dict_from_zero_checkpoint(trainer.model, checkpoint_dir)
        # submit to model hub or save the model to share with others

    Note, that once this was run, the ``model`` will no longer be usable in the deepspeed context
    of the same application. i.e. you will need to re-initialize the deepspeed engine, since
    ``model.load_state_dict(state_dict)`` will remove all the deepspeed magic from it.

    """
    logger.info("Extracting fp32 weights")
    state_dict = get_fp32_state_dict_from_zero_checkpoint(checkpoint_dir, tag)

    logger.info("Overwriting model with fp32 weights")
    # Move to CPU first so the (potentially huge) fp32 state_dict is loaded
    # without touching GPU memory.
    model = model.cpu()
    # strict=False: the consolidated checkpoint may legitimately omit keys
    # (e.g. frozen parameters excluded at save time), so missing keys are
    # tolerated rather than raised on.
    model.load_state_dict(state_dict, strict=False)

    return model
722
if __name__ == "__main__":
    # Command-line entry point: consolidate a DeepSpeed ZeRO checkpoint into
    # fp32 state_dict file(s) under the requested output directory.
    cli = argparse.ArgumentParser()
    cli.add_argument("checkpoint_dir",
                     type=str,
                     help="path to the desired checkpoint folder, e.g., path/checkpoint-12")
    cli.add_argument("output_dir",
                     type=str,
                     help="directory to the pytorch fp32 state_dict output files"
                     "(e.g. path/checkpoint-12-output/)")
    cli.add_argument(
        "--max_shard_size",
        type=str,
        default="5GB",
        help="The maximum size for a checkpoint before being sharded. Checkpoints shard will then be each of size"
        "lower than this size. If expressed as a string, needs to be digits followed by a unit (like `5MB`"
        "We default it to 5GB in order for models to be able to run easily on free-tier google colab instances"
        "without CPU OOM issues.")
    cli.add_argument(
        "--safe_serialization",
        default=False,
        action='store_true',
        help="Whether to save the model using `safetensors` or the traditional PyTorch way (that uses `pickle`).")
    cli.add_argument("-t",
                     "--tag",
                     type=str,
                     default=None,
                     help="checkpoint tag used as a unique identifier for checkpoint. e.g., global_step1")
    cli.add_argument("--exclude_frozen_parameters", action='store_true', help="exclude frozen parameters")
    cli.add_argument("-d", "--debug", action='store_true', help="enable debug")
    args = cli.parse_args()

    # Rebind the module-level `debug` flag so helper functions honour -d/--debug.
    debug = args.debug

    convert_zero_checkpoint_to_fp32_state_dict(args.checkpoint_dir,
                                               args.output_dir,
                                               max_shard_size=args.max_shard_size,
                                               safe_serialization=args.safe_serialization,
                                               tag=args.tag,
                                               exclude_frozen_parameters=args.exclude_frozen_parameters)