nnh-pbbb committed
Commit 7c8ebdc · verified · 1 Parent(s): eecddb8

Upload folder using huggingface_hub
015000/pretrained_model/config.json ADDED
@@ -0,0 +1,108 @@
+ {
+   "type": "pi05",
+   "n_obs_steps": 1,
+   "input_features": {
+     "observation.state": {
+       "type": "STATE",
+       "shape": [
+         20
+       ]
+     },
+     "observation.state_eepose": {
+       "type": "STATE",
+       "shape": [
+         14
+       ]
+     },
+     "observation.images.cam_high": {
+       "type": "VISUAL",
+       "shape": [
+         3,
+         360,
+         480
+       ]
+     },
+     "observation.images.cam_left_wrist": {
+       "type": "VISUAL",
+       "shape": [
+         3,
+         360,
+         480
+       ]
+     },
+     "observation.images.cam_right_wrist": {
+       "type": "VISUAL",
+       "shape": [
+         3,
+         360,
+         480
+       ]
+     }
+   },
+   "output_features": {
+     "action": {
+       "type": "ACTION",
+       "shape": [
+         20
+       ]
+     },
+     "action_eepose": {
+       "type": "ACTION",
+       "shape": [
+         14
+       ]
+     }
+   },
+   "device": "cpu",
+   "use_amp": false,
+   "push_to_hub": false,
+   "repo_id": null,
+   "private": null,
+   "tags": null,
+   "license": null,
+   "pretrained_path": "checkpoints/pi05_base",
+   "paligemma_variant": "gemma_2b",
+   "action_expert_variant": "gemma_300m",
+   "dtype": "bfloat16",
+   "chunk_size": 50,
+   "n_action_steps": 50,
+   "max_state_dim": 32,
+   "max_action_dim": 32,
+   "state_cond": true,
+   "num_inference_steps": 10,
+   "time_sampling_beta_alpha": 1.5,
+   "time_sampling_beta_beta": 1.0,
+   "time_sampling_scale": 0.999,
+   "time_sampling_offset": 0.001,
+   "min_period": 0.004,
+   "max_period": 4.0,
+   "image_resolution": [
+     224,
+     224
+   ],
+   "empty_cameras": 0,
+   "tokenizer_max_length": 200,
+   "normalization_mapping": {
+     "VISUAL": "IDENTITY",
+     "STATE": "MEAN_STD",
+     "ACTION": "MEAN_STD"
+   },
+   "gradient_checkpointing": false,
+   "compile_model": false,
+   "compile_mode": "max-autotune",
+   "fuse_qkv": false,
+   "fuse_gate_up": false,
+   "optimizer_lr": 2.5e-05,
+   "optimizer_betas": [
+     0.9,
+     0.95
+   ],
+   "optimizer_eps": 1e-08,
+   "optimizer_weight_decay": 0.01,
+   "optimizer_grad_clip_norm": 1.0,
+   "scheduler_warmup_steps": 1000,
+   "scheduler_decay_steps": 30000,
+   "scheduler_decay_lr": 2.5e-06,
+   "vlm_config": {},
+   "action_expert_config": {}
+ }
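The policy's native 20-dim joint state (and 14-dim end-effector pose) sit below the declared `max_state_dim`/`max_action_dim` of 32, the padded width used by the pi0/pi05 family. A minimal sketch of that padding against the real config keys above; the padding helper itself is illustrative, not this repository's code:

```python
import json

import numpy as np

# Read the config shipped in this commit and zero-pad a stand-in state
# vector from its native 20 dims up to max_state_dim (32). The padding
# convention is an assumption based on the pi0/pi05 family; the keys are
# taken verbatim from config.json above.
with open("015000/pretrained_model/config.json") as f:
    cfg = json.load(f)

state_dim = cfg["input_features"]["observation.state"]["shape"][0]  # 20
max_state_dim = cfg["max_state_dim"]                                # 32

state = np.zeros(state_dim, dtype=np.float32)            # stand-in observation
padded = np.pad(state, (0, max_state_dim - state_dim))   # zeros fill dims 20..31
assert padded.shape == (max_state_dim,)
```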
015000/pretrained_model/lora_adapters/README.md ADDED
@@ -0,0 +1,203 @@
+ ---
+ library_name: peft
+ tags:
+ - lora
+ ---
+
+ # Model Card for Model ID
+
+ <!-- Provide a quick summary of what the model is/does. -->
+
+
+
+ ## Model Details
+
+ ### Model Description
+
+ <!-- Provide a longer summary of what this model is. -->
+
+
+
+ - **Developed by:** [More Information Needed]
+ - **Funded by [optional]:** [More Information Needed]
+ - **Shared by [optional]:** [More Information Needed]
+ - **Model type:** [More Information Needed]
+ - **Language(s) (NLP):** [More Information Needed]
+ - **License:** [More Information Needed]
+ - **Finetuned from model [optional]:** [More Information Needed]
+
+ ### Model Sources [optional]
+
+ <!-- Provide the basic links for the model. -->
+
+ - **Repository:** [More Information Needed]
+ - **Paper [optional]:** [More Information Needed]
+ - **Demo [optional]:** [More Information Needed]
+
+ ## Uses
+
+ <!-- Address questions around how the model is intended to be used, including the foreseeable users of the model and those affected by the model. -->
+
+ ### Direct Use
+
+ <!-- This section is for the model use without fine-tuning or plugging into a larger ecosystem/app. -->
+
+ [More Information Needed]
+
+ ### Downstream Use [optional]
+
+ <!-- This section is for the model use when fine-tuned for a task, or when plugged into a larger ecosystem/app -->
+
+ [More Information Needed]
+
+ ### Out-of-Scope Use
+
+ <!-- This section addresses misuse, malicious use, and uses that the model will not work well for. -->
+
+ [More Information Needed]
+
+ ## Bias, Risks, and Limitations
+
+ <!-- This section is meant to convey both technical and sociotechnical limitations. -->
+
+ [More Information Needed]
+
+ ### Recommendations
+
+ <!-- This section is meant to convey recommendations with respect to the bias, risk, and technical limitations. -->
+
+ Users (both direct and downstream) should be made aware of the risks, biases and limitations of the model. More information needed for further recommendations.
+
+ ## How to Get Started with the Model
+
+ Use the code below to get started with the model.
+
+ [More Information Needed]
+
+ ## Training Details
+
+ ### Training Data
+
+ <!-- This should link to a Dataset Card, perhaps with a short stub of information on what the training data is all about as well as documentation related to data pre-processing or additional filtering. -->
+
+ [More Information Needed]
+
+ ### Training Procedure
+
+ <!-- This relates heavily to the Technical Specifications. Content here should link to that section when it is relevant to the training procedure. -->
+
+ #### Preprocessing [optional]
+
+ [More Information Needed]
+
+
+ #### Training Hyperparameters
+
+ - **Training regime:** [More Information Needed] <!--fp32, fp16 mixed precision, bf16 mixed precision, bf16 non-mixed precision, fp16 non-mixed precision, fp8 mixed precision -->
+
+ #### Speeds, Sizes, Times [optional]
+
+ <!-- This section provides information about throughput, start/end time, checkpoint size if relevant, etc. -->
+
+ [More Information Needed]
+
+ ## Evaluation
+
+ <!-- This section describes the evaluation protocols and provides the results. -->
+
+ ### Testing Data, Factors & Metrics
+
+ #### Testing Data
+
+ <!-- This should link to a Dataset Card if possible. -->
+
+ [More Information Needed]
+
+ #### Factors
+
+ <!-- These are the things the evaluation is disaggregating by, e.g., subpopulations or domains. -->
+
+ [More Information Needed]
+
+ #### Metrics
+
+ <!-- These are the evaluation metrics being used, ideally with a description of why. -->
+
+ [More Information Needed]
+
+ ### Results
+
+ [More Information Needed]
+
+ #### Summary
+
+
+
+ ## Model Examination [optional]
+
+ <!-- Relevant interpretability work for the model goes here -->
+
+ [More Information Needed]
+
+ ## Environmental Impact
+
+ <!-- Total emissions (in grams of CO2eq) and additional considerations, such as electricity usage, go here. Edit the suggested text below accordingly -->
+
+ Carbon emissions can be estimated using the [Machine Learning Impact calculator](https://mlco2.github.io/impact#compute) presented in [Lacoste et al. (2019)](https://arxiv.org/abs/1910.09700).
+
+ - **Hardware Type:** [More Information Needed]
+ - **Hours used:** [More Information Needed]
+ - **Cloud Provider:** [More Information Needed]
+ - **Compute Region:** [More Information Needed]
+ - **Carbon Emitted:** [More Information Needed]
+
+ ## Technical Specifications [optional]
+
+ ### Model Architecture and Objective
+
+ [More Information Needed]
+
+ ### Compute Infrastructure
+
+ [More Information Needed]
+
+ #### Hardware
+
+ [More Information Needed]
+
+ #### Software
+
+ [More Information Needed]
+
+ ## Citation [optional]
+
+ <!-- If there is a paper or blog post introducing the model, the APA and Bibtex information for that should go in this section. -->
+
+ **BibTeX:**
+
+ [More Information Needed]
+
+ **APA:**
+
+ [More Information Needed]
+
+ ## Glossary [optional]
+
+ <!-- If relevant, include terms and calculations in this section that can help readers understand the model or model card. -->
+
+ [More Information Needed]
+
+ ## More Information [optional]
+
+ [More Information Needed]
+
+ ## Model Card Authors [optional]
+
+ [More Information Needed]
+
+ ## Model Card Contact
+
+ [More Information Needed]
+ ### Framework versions
+
+ - PEFT 0.18.0
015000/pretrained_model/lora_adapters/adapter_config.json ADDED
@@ -0,0 +1,131 @@
+ {
+   "alora_invocation_tokens": null,
+   "alpha_pattern": {},
+   "arrow_config": null,
+   "auto_mapping": null,
+   "base_model_name_or_path": null,
+   "bias": "none",
+   "corda_config": null,
+   "ensure_weight_tying": false,
+   "eva_config": null,
+   "exclude_modules": null,
+   "fan_in_fan_out": false,
+   "inference_mode": true,
+   "init_lora_weights": true,
+   "layer_replication": null,
+   "layers_pattern": null,
+   "layers_to_transform": null,
+   "loftq_config": {},
+   "lora_alpha": 16,
+   "lora_bias": false,
+   "lora_dropout": 0.0,
+   "megatron_config": null,
+   "megatron_core": "megatron.core",
+   "modules_to_save": [
+     "vlm.model.language_model.layers.7.input_layernorm",
+     "action_expert.model.layers.15.input_layernorm.dense",
+     "vlm.model.language_model.layers.13.input_layernorm",
+     "action_expert.model.layers.2.post_attention_layernorm.dense",
+     "action_expert.model.layers.17.input_layernorm.dense",
+     "vlm.model.language_model.layers.12.input_layernorm",
+     "action_expert.model.layers.9.post_attention_layernorm.dense",
+     "vlm.model.language_model.layers.8.post_attention_layernorm",
+     "action_expert.model.layers.6.input_layernorm.dense",
+     "vlm.model.language_model.layers.11.post_attention_layernorm",
+     "action_expert.model.layers.17.post_attention_layernorm.dense",
+     "action_expert.model.layers.8.input_layernorm.dense",
+     "action_expert.model.layers.8.post_attention_layernorm.dense",
+     "vlm.model.language_model.layers.17.post_attention_layernorm",
+     "action_expert.model.layers.7.input_layernorm.dense",
+     "action_expert.model.layers.16.post_attention_layernorm.dense",
+     "vlm.model.language_model.layers.1.post_attention_layernorm",
+     "action_expert.model.layers.1.input_layernorm.dense",
+     "suffix_embedder.state_mlp_out",
+     "vlm.model.language_model.layers.4.input_layernorm",
+     "action_expert.model.layers.5.input_layernorm.dense",
+     "vlm.model.language_model.layers.1.input_layernorm",
+     "suffix_embedder.time_mlp_in",
+     "vlm.model.vision_tower.vision_model.embeddings.patch_embedding",
+     "action_expert.model.layers.16.input_layernorm.dense",
+     "vlm.model.language_model.layers.13.post_attention_layernorm",
+     "vlm.model.language_model.layers.14.input_layernorm",
+     "action_expert.model.layers.4.post_attention_layernorm.dense",
+     "suffix_embedder.state_proj",
+     "action_expert.model.layers.10.post_attention_layernorm.dense",
+     "vlm.model.language_model.layers.10.input_layernorm",
+     "vlm.model.language_model.layers.2.post_attention_layernorm",
+     "vlm.model.language_model.layers.5.input_layernorm",
+     "action_expert.model.layers.11.post_attention_layernorm.dense",
+     "action_expert.model.layers.12.input_layernorm.dense",
+     "action_expert.model.layers.14.input_layernorm.dense",
+     "vlm.model.language_model.layers.14.post_attention_layernorm",
+     "vlm.model.language_model.layers.2.input_layernorm",
+     "action_expert.model.layers.11.input_layernorm.dense",
+     "action_expert.model.layers.14.post_attention_layernorm.dense",
+     "vlm.model.language_model.layers.16.post_attention_layernorm",
+     "vlm.model.language_model.layers.6.post_attention_layernorm",
+     "vlm.model.vision_tower.vision_model.embeddings.position_embedding",
+     "vlm.model.language_model.layers.3.post_attention_layernorm",
+     "vlm.model.language_model.layers.9.input_layernorm",
+     "action_expert.model.layers.0.post_attention_layernorm.dense",
+     "suffix_embedder.time_mlp_out",
+     "action_expert.model.layers.2.input_layernorm.dense",
+     "vlm.model.language_model.layers.0.post_attention_layernorm",
+     "vlm.model.language_model.layers.15.post_attention_layernorm",
+     "vlm.model.language_model.layers.4.post_attention_layernorm",
+     "vlm.model.language_model.layers.10.post_attention_layernorm",
+     "action_expert.model.layers.5.post_attention_layernorm.dense",
+     "action_expert.model.layers.1.post_attention_layernorm.dense",
+     "suffix_embedder.state_mlp_in",
+     "vlm.model.language_model.layers.8.input_layernorm",
+     "action_expert.model.layers.4.input_layernorm.dense",
+     "action_expert.model.layers.13.input_layernorm.dense",
+     "vlm.model.language_model.layers.5.post_attention_layernorm",
+     "action_expert.model.layers.12.post_attention_layernorm.dense",
+     "action_out_proj",
+     "action_expert.model.layers.6.post_attention_layernorm.dense",
+     "action_expert.model.layers.7.post_attention_layernorm.dense",
+     "vlm.model.language_model.layers.16.input_layernorm",
+     "vlm.model.language_model.layers.17.input_layernorm",
+     "vlm.model.language_model.layers.9.post_attention_layernorm",
+     "vlm.model.language_model.layers.7.post_attention_layernorm",
+     "action_expert.model.layers.3.post_attention_layernorm.dense",
+     "vlm.model.language_model.layers.11.input_layernorm",
+     "vlm.model.language_model.layers.0.input_layernorm",
+     "action_expert.model.layers.0.input_layernorm.dense",
+     "vlm.model.language_model.layers.3.input_layernorm",
+     "action_expert.model.layers.13.post_attention_layernorm.dense",
+     "suffix_embedder.action_in_proj",
+     "action_expert.model.layers.3.input_layernorm.dense",
+     "vlm.model.language_model.layers.12.post_attention_layernorm",
+     "vlm.model.language_model.layers.15.input_layernorm",
+     "action_expert.model.layers.10.input_layernorm.dense",
+     "action_expert.model.layers.9.input_layernorm.dense",
+     "action_expert.model.layers.15.post_attention_layernorm.dense",
+     "vlm.model.language_model.layers.6.input_layernorm"
+   ],
+   "peft_type": "LORA",
+   "peft_version": "0.18.0",
+   "qalora_group_size": 16,
+   "r": 16,
+   "rank_pattern": {},
+   "revision": null,
+   "target_modules": [
+     "q_proj",
+     "k_proj",
+     "o_proj",
+     "fc1",
+     "gate_proj",
+     "up_proj",
+     "fc2",
+     "v_proj",
+     "down_proj",
+     "out_proj"
+   ],
+   "target_parameters": null,
+   "task_type": "FEATURE_EXTRACTION",
+   "trainable_token_indices": null,
+   "use_dora": false,
+   "use_qalora": false,
+   "use_rslora": false
+ }
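The adapter pairs rank-16 LoRA (alpha 16, no dropout) on the attention and MLP projections with fully trained copies of the layernorms, suffix-embedder projections, and `action_out_proj` listed under `modules_to_save`. A minimal sketch of inspecting and attaching it with PEFT; `build_pi05_policy` is a hypothetical stand-in for this repository's own model constructor:

```python
from peft import PeftConfig, PeftModel

adapter_dir = "015000/pretrained_model/lora_adapters"

# Inspect the adapter configuration shipped above (r=16, lora_alpha=16,
# ten targeted projection modules).
cfg = PeftConfig.from_pretrained(adapter_dir)
print(cfg.r, cfg.lora_alpha, sorted(cfg.target_modules))

# Attaching the adapter requires the base pi05 policy to be built first;
# `build_pi05_policy` is hypothetical, so substitute the repo's own loader.
# policy = build_pi05_policy("015000/pretrained_model")
# policy = PeftModel.from_pretrained(policy, adapter_dir)
# policy = policy.merge_and_unload()  # fold LoRA into the base weights
```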
015000/pretrained_model/lora_adapters/adapter_model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ed0cf71fb10f01cf8a6d7fa0c7f5bc1864304d15bf9f31a6fc35498ca2eb82f6
+ size 598447496
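In Git this entry is only a three-line LFS pointer stub; the ~598 MB safetensors payload is resolved by `git lfs` (or `huggingface_hub`) from the sha256 oid. A small sketch of parsing such a stub:

```python
def parse_lfs_pointer(path: str) -> dict:
    """Parse a Git LFS pointer stub into its version/oid/size fields."""
    fields = {}
    with open(path) as f:
        for line in f:
            key, _, value = line.strip().partition(" ")
            fields[key] = value
    fields["size"] = int(fields["size"])  # byte count of the real payload
    return fields

ptr = parse_lfs_pointer(
    "015000/pretrained_model/lora_adapters/adapter_model.safetensors"
)
print(ptr["oid"], ptr["size"])  # sha256:ed0cf71f... 598447496
```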
015000/pretrained_model/model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d5b3c1f01147d044e247e5cfee4d13e73a858fc31fce743c2984f50aded3ad69
+ size 7481487056
015000/pretrained_model/train_config.json ADDED
@@ -0,0 +1,274 @@
+ {
+   "dataset": {
+     "repo_id": "data/pickup0102_eepose",
+     "root": "/data/LumosVLA/vlash/eepose/exp_4in1_towelfold0203_picup1230_ee6d",
+     "episodes": null,
+     "image_transforms": {
+       "enable": false,
+       "max_num_transforms": 3,
+       "random_order": false,
+       "tfs": {
+         "brightness": {
+           "weight": 1.0,
+           "type": "ColorJitter",
+           "kwargs": {
+             "brightness": [
+               0.8,
+               1.2
+             ]
+           }
+         },
+         "contrast": {
+           "weight": 1.0,
+           "type": "ColorJitter",
+           "kwargs": {
+             "contrast": [
+               0.8,
+               1.2
+             ]
+           }
+         },
+         "saturation": {
+           "weight": 1.0,
+           "type": "ColorJitter",
+           "kwargs": {
+             "saturation": [
+               0.5,
+               1.5
+             ]
+           }
+         },
+         "hue": {
+           "weight": 1.0,
+           "type": "ColorJitter",
+           "kwargs": {
+             "hue": [
+               -0.05,
+               0.05
+             ]
+           }
+         },
+         "sharpness": {
+           "weight": 1.0,
+           "type": "SharpnessJitter",
+           "kwargs": {
+             "sharpness": [
+               0.5,
+               1.5
+             ]
+           }
+         },
+         "affine": {
+           "weight": 1.0,
+           "type": "RandomAffine",
+           "kwargs": {
+             "degrees": [
+               -5.0,
+               5.0
+             ],
+             "translate": [
+               0.05,
+               0.05
+             ]
+           }
+         }
+       }
+     },
+     "revision": null,
+     "use_imagenet_stats": false,
+     "video_backend": "torchcodec",
+     "streaming": false
+   },
+   "env": null,
+   "policy": {
+     "type": "pi05",
+     "n_obs_steps": 1,
+     "input_features": {
+       "observation.state": {
+         "type": "STATE",
+         "shape": [
+           20
+         ]
+       },
+       "observation.state_eepose": {
+         "type": "STATE",
+         "shape": [
+           14
+         ]
+       },
+       "observation.images.cam_high": {
+         "type": "VISUAL",
+         "shape": [
+           3,
+           360,
+           480
+         ]
+       },
+       "observation.images.cam_left_wrist": {
+         "type": "VISUAL",
+         "shape": [
+           3,
+           360,
+           480
+         ]
+       },
+       "observation.images.cam_right_wrist": {
+         "type": "VISUAL",
+         "shape": [
+           3,
+           360,
+           480
+         ]
+       }
+     },
+     "output_features": {
+       "action": {
+         "type": "ACTION",
+         "shape": [
+           20
+         ]
+       },
+       "action_eepose": {
+         "type": "ACTION",
+         "shape": [
+           14
+         ]
+       }
+     },
+     "device": "cuda",
+     "use_amp": false,
+     "push_to_hub": false,
+     "repo_id": null,
+     "private": null,
+     "tags": null,
+     "license": null,
+     "pretrained_path": "checkpoints/pi05_base",
+     "paligemma_variant": "gemma_2b",
+     "action_expert_variant": "gemma_300m",
+     "dtype": "bfloat16",
+     "chunk_size": 50,
+     "n_action_steps": 50,
+     "max_state_dim": 32,
+     "max_action_dim": 32,
+     "state_cond": true,
+     "num_inference_steps": 10,
+     "time_sampling_beta_alpha": 1.5,
+     "time_sampling_beta_beta": 1.0,
+     "time_sampling_scale": 0.999,
+     "time_sampling_offset": 0.001,
+     "min_period": 0.004,
+     "max_period": 4.0,
+     "image_resolution": [
+       224,
+       224
+     ],
+     "empty_cameras": 0,
+     "tokenizer_max_length": 200,
+     "normalization_mapping": {
+       "VISUAL": "IDENTITY",
+       "STATE": "MEAN_STD",
+       "ACTION": "MEAN_STD"
+     },
+     "gradient_checkpointing": false,
+     "compile_model": false,
+     "compile_mode": "max-autotune",
+     "fuse_qkv": false,
+     "fuse_gate_up": false,
+     "optimizer_lr": 2.5e-05,
+     "optimizer_betas": [
+       0.9,
+       0.95
+     ],
+     "optimizer_eps": 1e-08,
+     "optimizer_weight_decay": 0.01,
+     "optimizer_grad_clip_norm": 1.0,
+     "scheduler_warmup_steps": 1000,
+     "scheduler_decay_steps": 30000,
+     "scheduler_decay_lr": 2.5e-06,
+     "vlm_config": {},
+     "action_expert_config": {}
+   },
+   "output_dir": "outputs/train/pi05_async_eepose",
+   "job_name": "pi05_async_lora",
+   "resume": false,
+   "seed": 1000,
+   "num_workers": 1,
+   "batch_size": 4,
+   "steps": 50000,
+   "eval_freq": 20000,
+   "log_freq": 50,
+   "save_checkpoint": true,
+   "save_freq": 5000,
+   "use_policy_training_preset": false,
+   "optimizer": {
+     "type": "adamw",
+     "lr": 0.0001,
+     "weight_decay": 1e-10,
+     "grad_clip_norm": 10.0,
+     "betas": [
+       0.9,
+       0.95
+     ],
+     "eps": 1e-08
+   },
+   "scheduler": {
+     "type": "cosine_decay_with_warmup",
+     "num_warmup_steps": 1000,
+     "num_decay_steps": 50000,
+     "peak_lr": 5e-05,
+     "decay_lr": 2.5e-06
+   },
+   "eval": {
+     "n_episodes": 50,
+     "batch_size": 50,
+     "use_async_envs": false
+   },
+   "wandb": {
+     "enable": false,
+     "disable_artifact": true,
+     "project": "vlash",
+     "entity": null,
+     "notes": null,
+     "run_id": null,
+     "mode": null
+   },
+   "checkpoint_path": null,
+   "rename_map": {},
+   "max_delay_steps": 8,
+   "grad_accum_steps": 4,
+   "shared_observation": false,
+   "lora": {
+     "enable": true,
+     "backend": "peft",
+     "r": 16,
+     "alpha": 16,
+     "dropout": 0.0,
+     "extra_trainable_modules": [
+       "action_in_proj",
+       "action_out_proj",
+       "time_mlp_in",
+       "time_mlp_out",
+       "state_proj",
+       "state_mlp_in",
+       "state_mlp_out",
+       "embeddings",
+       "input_layernorm",
+       "post_attention_layernorm"
+     ],
+     "target_modules": [
+       "q_proj",
+       "k_proj",
+       "v_proj",
+       "o_proj",
+       "gate_proj",
+       "up_proj",
+       "down_proj",
+       "out_proj",
+       "fc1",
+       "fc2"
+     ],
+     "use_qlora": false,
+     "qlora_quant_type": "nf4",
+     "qlora_compute_dtype": "bfloat16"
+   }
+ }
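Since `use_policy_training_preset` is false, the explicit `optimizer` block (lr 1e-4, weight decay 1e-10, grad clip 10.0) overrides the policy's built-in preset, and `batch_size` 4 with `grad_accum_steps` 4 gives an effective optimization batch of 16. A minimal, self-contained sketch of that accumulation pattern, with a dummy linear model standing in for the pi05 policy (not this repository's training loop):

```python
import torch
from torch import nn

# Gradient accumulation as implied by batch_size=4, grad_accum_steps=4
# (effective batch 16) and grad_clip_norm=10.0. The tiny model and random
# batches are illustrative stand-ins.
policy = nn.Linear(20, 20)
optimizer = torch.optim.AdamW(policy.parameters(), lr=1e-4,
                              weight_decay=1e-10, betas=(0.9, 0.95))
grad_accum_steps = 4

optimizer.zero_grad()
for i in range(8):                      # two optimizer steps' worth of batches
    batch = torch.randn(4, 20)          # batch_size = 4
    loss = policy(batch).pow(2).mean() / grad_accum_steps  # scale to average grads
    loss.backward()
    if (i + 1) % grad_accum_steps == 0:
        torch.nn.utils.clip_grad_norm_(policy.parameters(), max_norm=10.0)
        optimizer.step()
        optimizer.zero_grad()
```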
015000/training_state/optimizer_param_groups.json ADDED
@@ -0,0 +1,1804 @@
+ [
+   {
+     "lr": 8.041979948389248e-05,
+     "betas": [
+       0.9,
+       0.95
+     ],
+     "eps": 1e-08,
+     "weight_decay": 1e-10,
+     "amsgrad": false,
+     "maximize": false,
+     "foreach": null,
+     "capturable": false,
+     "differentiable": false,
+     "fused": null,
+     "decoupled_weight_decay": true,
+     "initial_lr": 0.0001,
+     "params": [0, 1, 2, ..., 1782]
+   }
+ ]
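The single param group records the AdamW hyperparameters plus the live learning rate (8.04e-05 at step 15000); `params` holds the integer indices 0 through 1782, one per trainable tensor, into the optimizer's parameter list. That indexing is what keeps the groups JSON-serializable while the tensor state lives in optimizer_state.safetensors. A minimal sketch of that round trip on a dummy model:

```python
import json

import torch
from torch import nn

# Param groups serialize as plain metadata because "params" stores integer
# indices into the optimizer's parameter list, not tensors. A two-parameter
# model stands in for the 1783-tensor pi05 trainable set.
model = nn.Linear(4, 4)
opt = torch.optim.AdamW(model.parameters(), lr=1e-4, weight_decay=1e-10,
                        betas=(0.9, 0.95), eps=1e-08)

groups = opt.state_dict()["param_groups"]
print(json.dumps(groups, indent=2))  # here "params" is [0, 1]: weight, bias

# Restoring pairs the saved groups with the (separately stored) tensor state.
opt.load_state_dict({"state": {}, "param_groups": groups})
```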
015000/training_state/optimizer_state.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d1b47dc13fcc8bf9eb4e7d84461f45ea0a567cc977f42b20317a2f32109c0ef0
+ size 1086429180
015000/training_state/rng_state.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:dcd58de9a87f8a3fcc9c4671c9cb995d70ecdc41851f49de868c2488a777675d
+ size 15708
015000/training_state/scheduler_state.json ADDED
@@ -0,0 +1,14 @@
+ {
+   "base_lrs": [
+     0.0001
+   ],
+   "last_epoch": 15000,
+   "_step_count": 15001,
+   "_get_lr_called_within_step": false,
+   "_last_lr": [
+     8.041979948389248e-05
+   ],
+   "lr_lambdas": [
+     null
+   ]
+ }
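The stored `_last_lr` is exactly what a linear-warmup-then-cosine-decay lambda produces when the decay floor is the train_config's `decay_lr / peak_lr` ratio and the multiplier is applied to the base lr of 1e-4. The lambda below is reconstructed from those numbers rather than copied from the training code, so treat its exact form as an inference; it does reproduce the checkpointed value at step 15000:

```python
import math

base_lr = 1e-4            # base_lrs[0] above
num_warmup_steps = 1000   # from train_config.json
num_decay_steps = 50000
alpha = 2.5e-06 / 5e-05   # decay_lr / peak_lr = 0.05, the decay floor

def lr_lambda(step: int) -> float:
    if step < num_warmup_steps:
        return step / num_warmup_steps        # simplified warmup ramp (assumed)
    s = min(step, num_decay_steps)
    cosine = 0.5 * (1 + math.cos(math.pi * s / num_decay_steps))
    return (1 - alpha) * cosine + alpha

print(base_lr * lr_lambda(15000))  # 8.041979948389248e-05, matching _last_lr
```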
015000/training_state/training_step.json ADDED
@@ -0,0 +1,3 @@
+ {
+   "step": 15000
+ }
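Together with scheduler_state.json, this lets a resume verify it is restarting from a consistent step-15000 checkpoint. A small illustrative sanity check; the file names are from this commit, but the check itself is not the repository's code:

```python
import json

root = "015000/training_state"
step = json.load(open(f"{root}/training_step.json"))["step"]
sched = json.load(open(f"{root}/scheduler_state.json"))

assert step == sched["last_epoch"] == 15000
assert sched["_step_count"] == step + 1  # LambdaLR steps once at construction
```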