thomas0829 committed on
Commit
6ef0749
·
verified ·
1 Parent(s): 562b520

Upload DataProcessorPipeline

Browse files
policy_postprocessor.json ADDED
@@ -0,0 +1,32 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "name": "policy_postprocessor",
3
+ "steps": [
4
+ {
5
+ "registry_name": "unnormalizer_processor",
6
+ "config": {
7
+ "eps": 1e-08,
8
+ "features": {
9
+ "action": {
10
+ "type": "ACTION",
11
+ "shape": [
12
+ 14
13
+ ]
14
+ }
15
+ },
16
+ "norm_map": {
17
+ "VISUAL": "IDENTITY",
18
+ "STATE": "QUANTILES",
19
+ "ACTION": "QUANTILES"
20
+ }
21
+ },
22
+ "state_file": "policy_postprocessor_step_0_unnormalizer_processor.safetensors"
23
+ },
24
+ {
25
+ "registry_name": "device_processor",
26
+ "config": {
27
+ "device": "cpu",
28
+ "float_dtype": null
29
+ }
30
+ }
31
+ ]
32
+ }
policy_postprocessor_step_0_unnormalizer_processor.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:816f260bf258a10da64c96038713e819ac6ed2653a4a497762c32ab092a293a1
3
+ size 9400
policy_preprocessor.json ADDED
@@ -0,0 +1,87 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "name": "policy_preprocessor",
3
+ "steps": [
4
+ {
5
+ "registry_name": "rename_observations_processor",
6
+ "config": {
7
+ "rename_map": {}
8
+ }
9
+ },
10
+ {
11
+ "registry_name": "to_batch_processor",
12
+ "config": {}
13
+ },
14
+ {
15
+ "registry_name": "normalizer_processor",
16
+ "config": {
17
+ "eps": 1e-08,
18
+ "features": {
19
+ "observation.state": {
20
+ "type": "STATE",
21
+ "shape": [
22
+ 14
23
+ ]
24
+ },
25
+ "observation.images.left": {
26
+ "type": "VISUAL",
27
+ "shape": [
28
+ 3,
29
+ 224,
30
+ 224
31
+ ]
32
+ },
33
+ "observation.images.top": {
34
+ "type": "VISUAL",
35
+ "shape": [
36
+ 3,
37
+ 224,
38
+ 224
39
+ ]
40
+ },
41
+ "observation.images.right": {
42
+ "type": "VISUAL",
43
+ "shape": [
44
+ 3,
45
+ 224,
46
+ 224
47
+ ]
48
+ },
49
+ "action": {
50
+ "type": "ACTION",
51
+ "shape": [
52
+ 14
53
+ ]
54
+ }
55
+ },
56
+ "norm_map": {
57
+ "VISUAL": "IDENTITY",
58
+ "STATE": "QUANTILES",
59
+ "ACTION": "QUANTILES"
60
+ }
61
+ },
62
+ "state_file": "policy_preprocessor_step_2_normalizer_processor.safetensors"
63
+ },
64
+ {
65
+ "registry_name": "pi05_prepare_state_tokenizer_processor_step",
66
+ "config": {}
67
+ },
68
+ {
69
+ "registry_name": "tokenizer_processor",
70
+ "config": {
71
+ "max_length": 200,
72
+ "task_key": "task",
73
+ "padding_side": "right",
74
+ "padding": "max_length",
75
+ "truncation": true,
76
+ "tokenizer_name": "google/paligemma-3b-pt-224"
77
+ }
78
+ },
79
+ {
80
+ "registry_name": "device_processor",
81
+ "config": {
82
+ "device": "cuda",
83
+ "float_dtype": null
84
+ }
85
+ }
86
+ ]
87
+ }
policy_preprocessor_step_2_normalizer_processor.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:816f260bf258a10da64c96038713e819ac6ed2653a4a497762c32ab092a293a1
3
+ size 9400
train_config.json ADDED
@@ -0,0 +1,244 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "dataset": {
3
+ "repo_id": "thomas0829/put_the_dolls_on_the_cloth",
4
+ "repo_ids": null,
5
+ "root": null,
6
+ "episodes": null,
7
+ "image_transforms": {
8
+ "enable": true,
9
+ "max_num_transforms": 3,
10
+ "random_order": false,
11
+ "tfs": {
12
+ "brightness": {
13
+ "weight": 1.0,
14
+ "type": "ColorJitter",
15
+ "kwargs": {
16
+ "brightness": [
17
+ 0.8,
18
+ 1.2
19
+ ]
20
+ }
21
+ },
22
+ "contrast": {
23
+ "weight": 1.0,
24
+ "type": "ColorJitter",
25
+ "kwargs": {
26
+ "contrast": [
27
+ 0.8,
28
+ 1.2
29
+ ]
30
+ }
31
+ },
32
+ "saturation": {
33
+ "weight": 1.0,
34
+ "type": "ColorJitter",
35
+ "kwargs": {
36
+ "saturation": [
37
+ 0.5,
38
+ 1.5
39
+ ]
40
+ }
41
+ },
42
+ "hue": {
43
+ "weight": 1.0,
44
+ "type": "ColorJitter",
45
+ "kwargs": {
46
+ "hue": [
47
+ -0.05,
48
+ 0.05
49
+ ]
50
+ }
51
+ },
52
+ "sharpness": {
53
+ "weight": 1.0,
54
+ "type": "SharpnessJitter",
55
+ "kwargs": {
56
+ "sharpness": [
57
+ 0.5,
58
+ 1.5
59
+ ]
60
+ }
61
+ },
62
+ "affine": {
63
+ "weight": 1.0,
64
+ "type": "RandomAffine",
65
+ "kwargs": {
66
+ "degrees": [
67
+ -5.0,
68
+ 5.0
69
+ ],
70
+ "translate": [
71
+ 0.05,
72
+ 0.05
73
+ ]
74
+ }
75
+ }
76
+ }
77
+ },
78
+ "revision": null,
79
+ "use_imagenet_stats": true,
80
+ "video_backend": "torchcodec",
81
+ "force_cache_sync": false,
82
+ "use_annotated_tasks": false
83
+ },
84
+ "num_datasets": 100,
85
+ "env": null,
86
+ "policy": {
87
+ "type": "pi05",
88
+ "n_obs_steps": 1,
89
+ "normalization_mapping": {
90
+ "VISUAL": "IDENTITY",
91
+ "STATE": "QUANTILES",
92
+ "ACTION": "QUANTILES"
93
+ },
94
+ "input_features": {
95
+ "observation.state": {
96
+ "type": "STATE",
97
+ "shape": [
98
+ 14
99
+ ]
100
+ },
101
+ "observation.images.left": {
102
+ "type": "VISUAL",
103
+ "shape": [
104
+ 3,
105
+ 224,
106
+ 224
107
+ ]
108
+ },
109
+ "observation.images.top": {
110
+ "type": "VISUAL",
111
+ "shape": [
112
+ 3,
113
+ 224,
114
+ 224
115
+ ]
116
+ },
117
+ "observation.images.right": {
118
+ "type": "VISUAL",
119
+ "shape": [
120
+ 3,
121
+ 224,
122
+ 224
123
+ ]
124
+ }
125
+ },
126
+ "output_features": {
127
+ "action": {
128
+ "type": "ACTION",
129
+ "shape": [
130
+ 14
131
+ ]
132
+ }
133
+ },
134
+ "device": "cuda",
135
+ "use_amp": false,
136
+ "compiled": false,
137
+ "push_to_hub": true,
138
+ "repo_id": "thomas0829/finetune_pi05_test",
139
+ "private": false,
140
+ "tags": null,
141
+ "license": null,
142
+ "pretrained_path": "thomas0829/pi05-pytorch-base",
143
+ "paligemma_variant": "gemma_2b",
144
+ "action_expert_variant": "gemma_300m",
145
+ "dtype": "bfloat16",
146
+ "chunk_size": 50,
147
+ "n_action_steps": 50,
148
+ "max_state_dim": 32,
149
+ "max_action_dim": 32,
150
+ "num_inference_steps": 10,
151
+ "time_sampling_beta_alpha": 1.5,
152
+ "time_sampling_beta_beta": 1.0,
153
+ "time_sampling_scale": 0.999,
154
+ "time_sampling_offset": 0.001,
155
+ "min_period": 0.004,
156
+ "max_period": 4.0,
157
+ "rtc_config": null,
158
+ "image_resolution": [
159
+ 224,
160
+ 224
161
+ ],
162
+ "empty_cameras": 0,
163
+ "tokenizer_max_length": 200,
164
+ "gradient_checkpointing": true,
165
+ "compile_model": false,
166
+ "compile_mode": "max-autotune",
167
+ "attention_implementation": "eager",
168
+ "use_lora": false,
169
+ "lora_rank": 16,
170
+ "lora_alpha": 32.0,
171
+ "lora_dropout": 0.1,
172
+ "lora_target_modules": null,
173
+ "optimizer_lr": 0.0001,
174
+ "optimizer_betas": [
175
+ 0.9,
176
+ 0.95
177
+ ],
178
+ "optimizer_eps": 1e-08,
179
+ "optimizer_weight_decay": 0.01,
180
+ "optimizer_grad_clip_norm": 1.0,
181
+ "scheduler_warmup_steps": 1000,
182
+ "scheduler_decay_steps": 1000000,
183
+ "scheduler_decay_lr": 1e-05
184
+ },
185
+ "compile": false,
186
+ "strict": true,
187
+ "loss_threshold": 3.0,
188
+ "output_dir": "outputs/train/2026-02-02/15-23-50_pi05_training",
189
+ "job_name": "pi05_training",
190
+ "resume": false,
191
+ "resume_scheduler": true,
192
+ "seed": 3407,
193
+ "num_workers": 4,
194
+ "batch_size": 1,
195
+ "gradient_accumulation_steps": 2,
196
+ "steps": 10,
197
+ "eval_freq": 20000,
198
+ "log_freq": 5,
199
+ "save_checkpoint": true,
200
+ "push_to_hub": false,
201
+ "repo_id": null,
202
+ "save_freq": 10,
203
+ "use_policy_training_preset": true,
204
+ "optimizer": {
205
+ "type": "adamw",
206
+ "lr": 0.0001,
207
+ "weight_decay": 0.01,
208
+ "grad_clip_norm": 1.0,
209
+ "betas": [
210
+ 0.9,
211
+ 0.95
212
+ ],
213
+ "eps": 1e-08
214
+ },
215
+ "scheduler": {
216
+ "type": "cosine_decay_with_warmup",
217
+ "num_warmup_steps": 1000,
218
+ "num_decay_steps": 1000000,
219
+ "peak_lr": 0.0001,
220
+ "decay_lr": 1e-05
221
+ },
222
+ "eval": {
223
+ "n_episodes": 50,
224
+ "batch_size": 50,
225
+ "use_async_envs": false
226
+ },
227
+ "wandb": {
228
+ "enable": true,
229
+ "disable_artifact": true,
230
+ "project": "yam-pi05-finetune",
231
+ "entity": null,
232
+ "notes": "Full fine-tuning of pi05 on put_the_dolls_on_the_cloth dataset",
233
+ "run_id": null,
234
+ "mode": null
235
+ },
236
+ "test_dataloader": false,
237
+ "num_epochs": 1,
238
+ "ddp_timeout_s": 6000,
239
+ "rename_map": {
240
+ "observation.images.front_camera": "observation.images.top",
241
+ "observation.images.left_camera": "observation.images.left",
242
+ "observation.images.right_camera": "observation.images.right"
243
+ }
244
+ }