aliangdw committed
Commit 01f02fa · 0 Parent(s)

Duplicate from aliangdw/rfm_qwen4b_pref_prog_succ_8frames_all_discrete_10bins_part2

.gitattributes ADDED
@@ -0,0 +1,35 @@
+ *.7z filter=lfs diff=lfs merge=lfs -text
+ *.arrow filter=lfs diff=lfs merge=lfs -text
+ *.bin filter=lfs diff=lfs merge=lfs -text
+ *.bz2 filter=lfs diff=lfs merge=lfs -text
+ *.ckpt filter=lfs diff=lfs merge=lfs -text
+ *.ftz filter=lfs diff=lfs merge=lfs -text
+ *.gz filter=lfs diff=lfs merge=lfs -text
+ *.h5 filter=lfs diff=lfs merge=lfs -text
+ *.joblib filter=lfs diff=lfs merge=lfs -text
+ *.lfs.* filter=lfs diff=lfs merge=lfs -text
+ *.mlmodel filter=lfs diff=lfs merge=lfs -text
+ *.model filter=lfs diff=lfs merge=lfs -text
+ *.msgpack filter=lfs diff=lfs merge=lfs -text
+ *.npy filter=lfs diff=lfs merge=lfs -text
+ *.npz filter=lfs diff=lfs merge=lfs -text
+ *.onnx filter=lfs diff=lfs merge=lfs -text
+ *.ot filter=lfs diff=lfs merge=lfs -text
+ *.parquet filter=lfs diff=lfs merge=lfs -text
+ *.pb filter=lfs diff=lfs merge=lfs -text
+ *.pickle filter=lfs diff=lfs merge=lfs -text
+ *.pkl filter=lfs diff=lfs merge=lfs -text
+ *.pt filter=lfs diff=lfs merge=lfs -text
+ *.pth filter=lfs diff=lfs merge=lfs -text
+ *.rar filter=lfs diff=lfs merge=lfs -text
+ *.safetensors filter=lfs diff=lfs merge=lfs -text
+ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
+ *.tar.* filter=lfs diff=lfs merge=lfs -text
+ *.tar filter=lfs diff=lfs merge=lfs -text
+ *.tflite filter=lfs diff=lfs merge=lfs -text
+ *.tgz filter=lfs diff=lfs merge=lfs -text
+ *.wasm filter=lfs diff=lfs merge=lfs -text
+ *.xz filter=lfs diff=lfs merge=lfs -text
+ *.zip filter=lfs diff=lfs merge=lfs -text
+ *.zst filter=lfs diff=lfs merge=lfs -text
+ *tfevents* filter=lfs diff=lfs merge=lfs -text
README.md ADDED
@@ -0,0 +1,27 @@
+ ---
+ license: apache-2.0
+ base_model: Qwen/Qwen3-VL-4B-Instruct
+ tags:
+ - reward_model
+ - rfm
+ - preference_comparisons
+ library_name: transformers
+ ---
+
+ # aliangdw/rfm_qwen4b_pref_prog_succ_8frames_all_discrete_10bins_part2
+
+ ## Model Details
+
+ - **Base Model**: Qwen/Qwen3-VL-4B-Instruct
+ - **Model Type**: qwen3_vl
+
+ ## Training Run
+
+ - **Wandb Run**: [ant_rfm_qwen4b_4gpu_bs16_pref_prog_succ_8_frames_all_discrete_10_bins_part2](https://wandb.ai/clvr/rfm/runs/wydywqsb)
+ - **Wandb ID**: `wydywqsb`
+ - **Project**: rfm
+ - **Notes**: all run with prog_token per frame, qwen 4b, discrete progress, 10 bins
+
+ ## Citation
+
+ If you use this model, please cite:
config.json ADDED
@@ -0,0 +1,68 @@
+ {
+   "architectures": [
+     "RFM"
+   ],
+   "dtype": "bfloat16",
+   "image_token_id": 151655,
+   "model_type": "qwen3_vl",
+   "pad_token_id": 151654,
+   "text_config": {
+     "attention_bias": false,
+     "attention_dropout": 0.0,
+     "bos_token_id": 151643,
+     "dtype": "bfloat16",
+     "eos_token_id": 151645,
+     "head_dim": 128,
+     "hidden_act": "silu",
+     "hidden_size": 2560,
+     "initializer_range": 0.02,
+     "intermediate_size": 9728,
+     "max_position_embeddings": 262144,
+     "model_type": "qwen3_vl_text",
+     "num_attention_heads": 32,
+     "num_hidden_layers": 36,
+     "num_key_value_heads": 8,
+     "rms_norm_eps": 1e-06,
+     "rope_scaling": {
+       "mrope_interleaved": true,
+       "mrope_section": [
+         24,
+         20,
+         20
+       ],
+       "rope_type": "default"
+     },
+     "rope_theta": 5000000,
+     "tie_word_embeddings": true,
+     "use_cache": true,
+     "vocab_size": 151674
+   },
+   "tie_word_embeddings": true,
+   "transformers_version": "4.57.2",
+   "unsloth_fixed": true,
+   "unsloth_version": "2025.10.11",
+   "video_token_id": 151656,
+   "vision_config": {
+     "deepstack_visual_indexes": [
+       5,
+       11,
+       17
+     ],
+     "depth": 24,
+     "dtype": "bfloat16",
+     "hidden_act": "gelu_pytorch_tanh",
+     "hidden_size": 1024,
+     "in_channels": 3,
+     "initializer_range": 0.02,
+     "intermediate_size": 4096,
+     "model_type": "qwen3_vl",
+     "num_heads": 16,
+     "num_position_embeddings": 2304,
+     "out_hidden_size": 2560,
+     "patch_size": 16,
+     "spatial_merge_size": 2,
+     "temporal_patch_size": 2
+   },
+   "vision_end_token_id": 151653,
+   "vision_start_token_id": 151652
+ }
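
For orientation (not part of the commit), a minimal sketch of inspecting this configuration with the `transformers` Auto classes; it assumes the standard Hugging Face API and that any custom code the `RFM` architecture needs ships with the repository, which is why `trust_remote_code` is enabled in config.yaml below:

```python
from transformers import AutoConfig, AutoProcessor

# Repository id taken from the README above.
repo_id = "aliangdw/rfm_qwen4b_pref_prog_succ_8frames_all_discrete_10bins_part2"

# Load the configuration shown above; trust_remote_code lets transformers pick up
# any custom RFM config/modeling code bundled with the repository, if present.
config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
processor = AutoProcessor.from_pretrained(repo_id, trust_remote_code=True)

print(config.model_type)                # "qwen3_vl"
print(config.text_config.hidden_size)   # 2560, per config.json
print(config.vision_config.patch_size)  # 16
```

Exact attribute names depend on the transformers version and on whether the custom RFM config class mirrors the layout shown in config.json.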
config.yaml ADDED
@@ -0,0 +1,198 @@
+ custom_eval:
+   comparisons_per_task: 5
+   confusion_matrix:
+   - mw
+   custom_eval_random_seed: 42
+   eval_types:
+   - reward_alignment
+   - policy_ranking
+   max_comparisons: null
+   num_examples_per_quality_pr: 5
+   num_partial_successes: 5
+   pad_frames: true
+   policy_ranking:
+   - rfm-1m-ood
+   policy_ranking_max_tasks: 100
+   quality_preference:
+   - mw
+   reward_alignment:
+   - rfm-1m-id
+   - rfm-1m-ood
+   reward_alignment_max_trajectories: 10
+   similarity_score:
+   - aliangdw_metaworld_metaworld_eval
+   subsample_n_frames: null
+   use_frame_steps: true
+ data:
+   data_source_weights:
+     metaworld_train: 1.0
+     molmoact_dataset_household: 1.0
+     molmoact_dataset_tabletop: 1.0
+     oxe_droid: 1.0
+     roboarena: 1.0
+   dataloader_num_workers: 8
+   dataloader_persistent_workers: true
+   dataloader_pin_memory: true
+   dataset_preference_ratio: 0.7
+   dataset_success_cutoff_file: rfm/data/dataset_success_cutoff.txt
+   dataset_type: strategy_first
+   eval_datasets:
+   - mw
+   eval_subset_size: null
+   load_embeddings: false
+   max_frames: 8
+   max_frames_after_preprocessing: 64
+   max_success: 1.0
+   max_trajectories: -1
+   min_frames_per_trajectory: 5
+   min_success: 0.5
+   partial_success_threshold: 0.2
+   predict_last_frame_partial_progress: false
+   preference_strategy_ratio:
+   - 1.0
+   - 1.0
+   - 1.0
+   - 1.0
+   progress_discrete_bins: 10
+   progress_loss_type: discrete
+   progress_pred_type: absolute_wrt_total_frames
+   progress_strategy_ratio:
+   - 1.0
+   - 1.0
+   - 1.0
+   - 1.0
+   resized_height: null
+   resized_width: null
+   sample_type_ratio:
+   - 1.0
+   - 0.0
+   - 0.0
+   seed: 42
+   shuffle: true
+   shuffle_progress_frames: false
+   similarity_strategy_ratio:
+   - 1.0
+   - 1.0
+   - 1.0
+   train_datasets:
+   - rfm-1m-id
+   traj_same_source_prob: 0.5
+   use_data_source_balance: false
+   use_multi_image: true
+   use_per_frame_progress_token: true
+ debug: false
+ logging:
+   log_level: debug
+   log_to:
+   - wandb
+   save_best:
+     greater_is_better:
+     - true
+     - true
+     - true
+     - true
+     - true
+     hub_private: false
+     hub_save_every: 1000
+     hub_token: null
+     keep_top_k: 5
+     metric_names:
+     - eval_p_rank/kendall_last_utd_so101_clean_top
+     - eval_p_rank/kendall_last_usc_xarm
+     - eval_p_rank/kendall_last_usc_franka
+     - eval_p_rank/kendall_last_rfm_new_mit_franka_nowrist
+     - eval_p_rank/kendall_last_usc_trossen
+     save_every: 250
+     upload_to_hub: false
+   save_model: true
+   save_processor: true
+   wandb_entity: clvr
+   wandb_mode: null
+   wandb_notes: all run with prog_token per frame, qwen 4b, discrete progress, 10 bins
+   wandb_project: rfm
+ loss:
+   predict_last_frame_progress: false
+   progress_discrete_bins: 10
+   progress_loss_type: discrete
+   success_positive_weight: 1.0
+ mode: train
+ model:
+   average_temporal_patches: true
+   base_model_id: Qwen/Qwen3-VL-4B-Instruct
+   frame_pooling: mean
+   frame_pooling_attn_temperature: 1.0
+   model_type: default
+   peft_vision_encoder: false
+   progress_discrete_bins: 10
+   progress_loss_type: discrete
+   quantization: false
+   rewind: null
+   torch_dtype: bfloat16
+   train_language_model: true
+   train_preference_head: true
+   train_progress_head: true
+   train_similarity_head: false
+   train_success_head: true
+   train_vision_encoder: false
+   trust_remote_code: true
+   use_multi_image: true
+   use_peft: false
+   use_per_frame_progress_token: true
+   use_unsloth: true
+ peft:
+   bias: none
+   lora_alpha: 64
+   lora_dropout: 0.05
+   peft_vision_encoder: false
+   r: 32
+   target_modules:
+   - q_proj
+   - k_proj
+   - v_proj
+   - o_proj
+   - gate_proj
+   - up_proj
+   - down_proj
+ trainer_cls: rfm_heads
+ training:
+   beta: 0.1
+   bf16: true
+   custom_eval_steps: 250
+   dataloader_num_workers: 8
+   dataloader_persistent_workers: true
+   dataloader_pin_memory: true
+   ddp_bucket_cap_mb: 25
+   ddp_find_unused_parameters: false
+   do_eval: true
+   eval_steps: 250
+   evaluation_strategy: steps
+   exp_name: ant_rfm_qwen4b_4gpu_bs16_pref_prog_succ_8_frames_all_discrete_10_bins_part2
+   fp16: false
+   gradient_accumulation_steps: 1
+   gradient_checkpointing: true
+   learning_rate: 2.0e-05
+   logging_steps: 1
+   lr_scheduler_type: cosine
+   max_grad_norm: 10.0
+   max_seq_length: 1024
+   max_steps: 15000
+   num_gpus: 2
+   num_train_epochs: -1
+   output_dir: ./logs
+   overwrite_output_dir: true
+   per_device_eval_batch_size: 16
+   per_device_train_batch_size: 16
+   predict_pref_progress: true
+   predict_pref_sim: false
+   predict_sim_progress: false
+   prediction_loss_only: true
+   remove_unused_columns: false
+   resume_from_checkpoint: /gpfs/home/jessezha/scrubbed_storage/reward_fm/logs/ant_rfm_qwen4b_4gpu_bs16_pref_prog_succ_8_frames_all_discrete_10_bins/ckpt-avg-5metrics=0.6973_step=3000
+   run_default_eval: false
+   save_steps: 200
+   save_strategy: 'no'
+   vision_encoder_lr: 5.0e-06
+   vision_encoder_num_layers: 3
+   warmup_ratio: 0.1
+   warmup_steps: 0
+   weight_decay: 0.01
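
As a reading aid only (this is an assumption about what `progress_pred_type: absolute_wrt_total_frames`, `progress_discrete_bins: 10`, and `progress_loss_type: discrete` above plausibly mean, not code from the repository), discretizing per-frame progress into 10 bins could look like:

```python
# Hypothetical illustration: map a frame's absolute progress in [0, 1] to one of
# 10 discrete bins, matching progress_discrete_bins: 10 in the config above.
def progress_to_bin(frame_idx: int, total_frames: int, num_bins: int = 10) -> int:
    progress = (frame_idx + 1) / total_frames  # absolute progress w.r.t. total frames
    return min(int(progress * num_bins), num_bins - 1)

# With max_frames: 8 as above, the 8 frames of a full trajectory fall into bins:
print([progress_to_bin(i, total_frames=8) for i in range(8)])  # [1, 2, 3, 5, 6, 7, 8, 9]
```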
dataset_random_state.json ADDED
@@ -0,0 +1,1266 @@
1
+ {
2
+ "dataset": [
3
+ 3,
4
+ [
5
+ 2147483648,
6
+ 3564348608,
7
+ 1266698288,
8
+ 4212342371,
9
+ 3595291661,
10
+ 3180588708,
11
+ 3037210256,
12
+ 946923017,
13
+ 2565409715,
14
+ 2900535780,
15
+ 924383152,
16
+ 4180157270,
17
+ 4230508198,
18
+ 2039675917,
19
+ 3755350407,
20
+ 2362848650,
21
+ 2818100609,
22
+ 2097423432,
23
+ 524478045,
24
+ 540883378,
25
+ 281170210,
26
+ 1485176884,
27
+ 1493190386,
28
+ 1773214509,
29
+ 380915208,
30
+ 3667698522,
31
+ 2648371337,
32
+ 2961234806,
33
+ 3857480267,
34
+ 1582950522,
35
+ 246289694,
36
+ 3322185604,
37
+ 1944574775,
38
+ 302623699,
39
+ 169865066,
40
+ 1143540808,
41
+ 3733177770,
42
+ 513116636,
43
+ 1411153081,
44
+ 3205493053,
45
+ 768926902,
46
+ 549624109,
47
+ 1470655403,
48
+ 59539609,
49
+ 3678480009,
50
+ 3087139671,
51
+ 1176835859,
52
+ 2078491503,
53
+ 2299934332,
54
+ 1592059249,
55
+ 1062716176,
56
+ 2654193596,
57
+ 3531838733,
58
+ 2661260596,
59
+ 3881209635,
60
+ 2106865768,
61
+ 4154287292,
62
+ 2082185616,
63
+ 2301197011,
64
+ 2177349827,
65
+ 3082181756,
66
+ 1787663536,
67
+ 3714670796,
68
+ 3018262113,
69
+ 1670056238,
70
+ 1856738750,
71
+ 99824592,
72
+ 2279837081,
73
+ 1414647942,
74
+ 3416675731,
75
+ 3458782472,
76
+ 3997022236,
77
+ 468762002,
78
+ 2666158583,
79
+ 953353270,
80
+ 1788980658,
81
+ 3802061067,
82
+ 407586584,
83
+ 1844776834,
84
+ 1906917274,
85
+ 3154715663,
86
+ 3028370222,
87
+ 4156024188,
88
+ 3996363428,
89
+ 80495456,
90
+ 2659800972,
91
+ 2005649973,
92
+ 3818358673,
93
+ 3952623596,
94
+ 2506862371,
95
+ 3282302532,
96
+ 263923435,
97
+ 3384662671,
98
+ 3292439172,
99
+ 3119957588,
100
+ 1224426111,
101
+ 899864150,
102
+ 215262826,
103
+ 1619647231,
104
+ 3347694949,
105
+ 3497868538,
106
+ 2029552053,
107
+ 2992804824,
108
+ 4080010250,
109
+ 2023513186,
110
+ 1885979437,
111
+ 3564622190,
112
+ 3775424270,
113
+ 2297810139,
114
+ 3549449169,
115
+ 2664856277,
116
+ 3274801974,
117
+ 2794883969,
118
+ 980412666,
119
+ 2980215653,
120
+ 2794389321,
121
+ 2816521934,
122
+ 1266970739,
123
+ 542306338,
124
+ 3646225311,
125
+ 3598997630,
126
+ 2111980720,
127
+ 2949252482,
128
+ 2489027658,
129
+ 352815024,
130
+ 11610683,
131
+ 1386663624,
132
+ 2004196796,
133
+ 1161461546,
134
+ 1921293780,
135
+ 2463949525,
136
+ 1647009713,
137
+ 3550093655,
138
+ 2563894064,
139
+ 3486310554,
140
+ 1506105865,
141
+ 243092931,
142
+ 2659437476,
143
+ 4200687059,
144
+ 2284345122,
145
+ 1974438610,
146
+ 3591096528,
147
+ 967119212,
148
+ 3362401375,
149
+ 140678365,
150
+ 311602112,
151
+ 2361740275,
152
+ 2139598582,
153
+ 3632873481,
154
+ 2762232439,
155
+ 4156482318,
156
+ 381637792,
157
+ 3253346525,
158
+ 2492118775,
159
+ 1502434558,
160
+ 3164497290,
161
+ 3550998357,
162
+ 2412448305,
163
+ 2223955385,
164
+ 4122879535,
165
+ 350121793,
166
+ 1835149778,
167
+ 2175117867,
168
+ 989674750,
169
+ 3178241202,
170
+ 3553093569,
171
+ 3470650311,
172
+ 2829698151,
173
+ 3209427769,
174
+ 1779174943,
175
+ 275388428,
176
+ 4044574515,
177
+ 715447260,
178
+ 3180940440,
179
+ 4020772289,
180
+ 1322708567,
181
+ 3189868792,
182
+ 4250485633,
183
+ 716970023,
184
+ 2307550151,
185
+ 1074996711,
186
+ 1217573599,
187
+ 197006094,
188
+ 2178394212,
189
+ 1255233746,
190
+ 4164251484,
191
+ 1405608772,
192
+ 2808160475,
193
+ 1304736088,
194
+ 1796071066,
195
+ 2761748078,
196
+ 3570739698,
197
+ 1616118556,
198
+ 2232868135,
199
+ 3567541936,
200
+ 3470600401,
201
+ 3031621994,
202
+ 3351764214,
203
+ 1359785149,
204
+ 2617497797,
205
+ 3340028190,
206
+ 356162828,
207
+ 2083806068,
208
+ 2503635608,
209
+ 4024838996,
210
+ 2577080371,
211
+ 2897993505,
212
+ 3120733934,
213
+ 905794891,
214
+ 2506078507,
215
+ 4211618666,
216
+ 3777871979,
217
+ 809751414,
218
+ 4080874167,
219
+ 1562977008,
220
+ 3917373055,
221
+ 2132779194,
222
+ 4014249473,
223
+ 4067327082,
224
+ 2582869847,
225
+ 1780081876,
226
+ 1842619106,
227
+ 3381761227,
228
+ 921004274,
229
+ 1393256920,
230
+ 1883566732,
231
+ 2702071861,
232
+ 865327389,
233
+ 1622085203,
234
+ 3021825820,
235
+ 2687061406,
236
+ 1748902923,
237
+ 689023977,
238
+ 308399650,
239
+ 2377287978,
240
+ 1646969411,
241
+ 1051806316,
242
+ 4277884230,
243
+ 2041056290,
244
+ 101134519,
245
+ 2032472116,
246
+ 4112521069,
247
+ 151202901,
248
+ 2773743461,
249
+ 551348559,
250
+ 3476836808,
251
+ 510935951,
252
+ 625057077,
253
+ 3757450756,
254
+ 2977698135,
255
+ 3027776859,
256
+ 2616998041,
257
+ 2773430005,
258
+ 544190486,
259
+ 2241368212,
260
+ 1141105829,
261
+ 1452816309,
262
+ 4199229235,
263
+ 3218013033,
264
+ 4229475816,
265
+ 1659576351,
266
+ 3020348754,
267
+ 1193400518,
268
+ 3208584597,
269
+ 1151197733,
270
+ 2597187966,
271
+ 503065140,
272
+ 2421841572,
273
+ 1437291709,
274
+ 1909275895,
275
+ 2872630545,
276
+ 793588217,
277
+ 3792934707,
278
+ 1784451785,
279
+ 2921385648,
280
+ 1669902526,
281
+ 4189978976,
282
+ 1196986251,
283
+ 434805516,
284
+ 1907541826,
285
+ 2624415034,
286
+ 1687778718,
287
+ 650746582,
288
+ 1949153382,
289
+ 4148493093,
290
+ 841300520,
291
+ 1164202054,
292
+ 4203468658,
293
+ 4106300911,
294
+ 850346789,
295
+ 1715730760,
296
+ 3114661489,
297
+ 2866524548,
298
+ 1360448945,
299
+ 3601318775,
300
+ 1743078223,
301
+ 2413855408,
302
+ 1211895622,
303
+ 325117146,
304
+ 2721152875,
305
+ 1284334485,
306
+ 2446538832,
307
+ 739014618,
308
+ 2237045115,
309
+ 842553465,
310
+ 2538598293,
311
+ 746460793,
312
+ 4010387366,
313
+ 2002655192,
314
+ 4193733112,
315
+ 1194380773,
316
+ 3918217378,
317
+ 1447487475,
318
+ 5659228,
319
+ 3408847694,
320
+ 4190318700,
321
+ 1862549564,
322
+ 781683719,
323
+ 1194618118,
324
+ 755053413,
325
+ 3436011942,
326
+ 2885435303,
327
+ 3081151348,
328
+ 2017642831,
329
+ 1053816502,
330
+ 1086627485,
331
+ 2157296554,
332
+ 110650022,
333
+ 965352898,
334
+ 1003174194,
335
+ 1288956241,
336
+ 4057404871,
337
+ 2965068465,
338
+ 2897064481,
339
+ 2457377317,
340
+ 1879872545,
341
+ 358455290,
342
+ 375086701,
343
+ 3015902095,
344
+ 1676249984,
345
+ 924455526,
346
+ 2084169389,
347
+ 1989014644,
348
+ 1993749926,
349
+ 2009424973,
350
+ 2113340508,
351
+ 3980883273,
352
+ 2915977458,
353
+ 203328382,
354
+ 3020815229,
355
+ 2415050113,
356
+ 4103009585,
357
+ 3700885489,
358
+ 2916647550,
359
+ 1523006503,
360
+ 174302338,
361
+ 2476909338,
362
+ 1969322490,
363
+ 4285741984,
364
+ 1528449097,
365
+ 3355315515,
366
+ 4217241278,
367
+ 599579127,
368
+ 2572243673,
369
+ 3035856735,
370
+ 1539140489,
371
+ 1782314913,
372
+ 4238644287,
373
+ 1746424142,
374
+ 1978148312,
375
+ 2380746849,
376
+ 184941882,
377
+ 1106717981,
378
+ 1720750349,
379
+ 981701307,
380
+ 3953154731,
381
+ 3257809181,
382
+ 2892339376,
383
+ 3339778166,
384
+ 3676936849,
385
+ 87425948,
386
+ 3029257381,
387
+ 2037942523,
388
+ 3807628706,
389
+ 2861474706,
390
+ 1058852346,
391
+ 1322765211,
392
+ 2686046342,
393
+ 2689342655,
394
+ 2303436168,
395
+ 2571627181,
396
+ 1986057734,
397
+ 1183564308,
398
+ 2829677523,
399
+ 1295563975,
400
+ 503126586,
401
+ 2025890348,
402
+ 4179277821,
403
+ 1735262467,
404
+ 981331774,
405
+ 1613447066,
406
+ 1011606109,
407
+ 2000062246,
408
+ 3581448390,
409
+ 3477731384,
410
+ 3641307373,
411
+ 3508544379,
412
+ 2327233491,
413
+ 3931944343,
414
+ 4189052882,
415
+ 2990416380,
416
+ 422406169,
417
+ 202291313,
418
+ 2531006461,
419
+ 4277024116,
420
+ 3815144003,
421
+ 821314585,
422
+ 1344175168,
423
+ 3562834071,
424
+ 1339615445,
425
+ 1831545190,
426
+ 3115548822,
427
+ 743512780,
428
+ 4006999448,
429
+ 3720181735,
430
+ 1012033521,
431
+ 919931041,
432
+ 2628967879,
433
+ 1151876565,
434
+ 1268107129,
435
+ 3674829936,
436
+ 834977846,
437
+ 743987006,
438
+ 3947536548,
439
+ 3706529695,
440
+ 4121073678,
441
+ 2507605742,
442
+ 1595636918,
443
+ 2708047833,
444
+ 2427507331,
445
+ 3868216331,
446
+ 3254240010,
447
+ 2097683411,
448
+ 3279710596,
449
+ 3686819053,
450
+ 1843541720,
451
+ 1683793619,
452
+ 3245287285,
453
+ 3571828776,
454
+ 3733296431,
455
+ 3806747478,
456
+ 1390930605,
457
+ 3860422228,
458
+ 114397037,
459
+ 1931519825,
460
+ 2770684378,
461
+ 1556101783,
462
+ 1436111731,
463
+ 4031950081,
464
+ 562876656,
465
+ 1775895782,
466
+ 612364620,
467
+ 1313509772,
468
+ 4283410242,
469
+ 3252958463,
470
+ 2176555836,
471
+ 3933073367,
472
+ 3013277102,
473
+ 1444071961,
474
+ 3120949516,
475
+ 2824578890,
476
+ 325676929,
477
+ 943677134,
478
+ 1800649256,
479
+ 1721927060,
480
+ 347498719,
481
+ 1435221321,
482
+ 2623572981,
483
+ 1408548470,
484
+ 4145586315,
485
+ 2901889237,
486
+ 1849377952,
487
+ 1239144551,
488
+ 3382598266,
489
+ 2992893897,
490
+ 3738297588,
491
+ 611280106,
492
+ 3897415338,
493
+ 2370299241,
494
+ 1772308583,
495
+ 3697465753,
496
+ 354508058,
497
+ 2702360134,
498
+ 591308331,
499
+ 3524072501,
500
+ 976616000,
501
+ 2563717192,
502
+ 3078266097,
503
+ 1376594703,
504
+ 4209795919,
505
+ 2454412767,
506
+ 2712206031,
507
+ 2963860163,
508
+ 3734324882,
509
+ 2248653800,
510
+ 324872786,
511
+ 3789837448,
512
+ 3779000146,
513
+ 527733939,
514
+ 2844165793,
515
+ 576499681,
516
+ 1618787435,
517
+ 2638888650,
518
+ 57511068,
519
+ 2804627518,
520
+ 2993670030,
521
+ 481402236,
522
+ 2810124845,
523
+ 1416045214,
524
+ 1723694191,
525
+ 1214944572,
526
+ 3188123783,
527
+ 1139185907,
528
+ 3851015362,
529
+ 1719652470,
530
+ 1661343029,
531
+ 3644307578,
532
+ 3564178709,
533
+ 1256656955,
534
+ 46631590,
535
+ 4231317929,
536
+ 3098958589,
537
+ 1834956625,
538
+ 2206185428,
539
+ 3695688374,
540
+ 3647957317,
541
+ 1064098871,
542
+ 1739100906,
543
+ 2579568980,
544
+ 27974051,
545
+ 2617466775,
546
+ 964075233,
547
+ 907049942,
548
+ 4164146575,
549
+ 3377168066,
550
+ 2524828266,
551
+ 1083546008,
552
+ 2992960953,
553
+ 2260789066,
554
+ 1543742095,
555
+ 2843842831,
556
+ 1375722284,
557
+ 3574521313,
558
+ 110842534,
559
+ 2310998251,
560
+ 3076511734,
561
+ 783145600,
562
+ 1287776608,
563
+ 3087144146,
564
+ 305559823,
565
+ 2356293719,
566
+ 3228441476,
567
+ 1678938122,
568
+ 3775814061,
569
+ 1620283952,
570
+ 2512027726,
571
+ 1031432407,
572
+ 962295099,
573
+ 3877418501,
574
+ 968669928,
575
+ 304126693,
576
+ 3711291137,
577
+ 3847527101,
578
+ 494066767,
579
+ 4050229756,
580
+ 4169448589,
581
+ 671763915,
582
+ 1095747781,
583
+ 4006132710,
584
+ 394725957,
585
+ 200521654,
586
+ 2715998750,
587
+ 1477567673,
588
+ 895171901,
589
+ 3370105999,
590
+ 2684157455,
591
+ 4153990023,
592
+ 3966076501,
593
+ 2043374409,
594
+ 144443759,
595
+ 6764556,
596
+ 1611650045,
597
+ 1480956755,
598
+ 1388276468,
599
+ 4136518438,
600
+ 1538041336,
601
+ 266773992,
602
+ 1623357516,
603
+ 2267298390,
604
+ 3183919402,
605
+ 1084292424,
606
+ 2796136160,
607
+ 2413448816,
608
+ 2850375199,
609
+ 3510894040,
610
+ 2644778623,
611
+ 3317288284,
612
+ 3697317540,
613
+ 1465776787,
614
+ 1843489446,
615
+ 1416711171,
616
+ 744701117,
617
+ 1286781349,
618
+ 3748640476,
619
+ 861982119,
620
+ 2377742909,
621
+ 1171768136,
622
+ 2701877439,
623
+ 3839724288,
624
+ 2869791015,
625
+ 2386067954,
626
+ 2629214347,
627
+ 955801623,
628
+ 3831079317,
629
+ 624
630
+ ],
631
+ null
632
+ ],
633
+ "pref_sampler": [
634
+ 3,
635
+ [
636
+ 2147483648,
637
+ 3564348608,
638
+ 1266698288,
639
+ 4212342371,
640
+ 3595291661,
641
+ 3180588708,
642
+ 3037210256,
643
+ 946923017,
644
+ 2565409715,
645
+ 2900535780,
646
+ 924383152,
647
+ 4180157270,
648
+ 4230508198,
649
+ 2039675917,
650
+ 3755350407,
651
+ 2362848650,
652
+ 2818100609,
653
+ 2097423432,
654
+ 524478045,
655
+ 540883378,
656
+ 281170210,
657
+ 1485176884,
658
+ 1493190386,
659
+ 1773214509,
660
+ 380915208,
661
+ 3667698522,
662
+ 2648371337,
663
+ 2961234806,
664
+ 3857480267,
665
+ 1582950522,
666
+ 246289694,
667
+ 3322185604,
668
+ 1944574775,
669
+ 302623699,
670
+ 169865066,
671
+ 1143540808,
672
+ 3733177770,
673
+ 513116636,
674
+ 1411153081,
675
+ 3205493053,
676
+ 768926902,
677
+ 549624109,
678
+ 1470655403,
679
+ 59539609,
680
+ 3678480009,
681
+ 3087139671,
682
+ 1176835859,
683
+ 2078491503,
684
+ 2299934332,
685
+ 1592059249,
686
+ 1062716176,
687
+ 2654193596,
688
+ 3531838733,
689
+ 2661260596,
690
+ 3881209635,
691
+ 2106865768,
692
+ 4154287292,
693
+ 2082185616,
694
+ 2301197011,
695
+ 2177349827,
696
+ 3082181756,
697
+ 1787663536,
698
+ 3714670796,
699
+ 3018262113,
700
+ 1670056238,
701
+ 1856738750,
702
+ 99824592,
703
+ 2279837081,
704
+ 1414647942,
705
+ 3416675731,
706
+ 3458782472,
707
+ 3997022236,
708
+ 468762002,
709
+ 2666158583,
710
+ 953353270,
711
+ 1788980658,
712
+ 3802061067,
713
+ 407586584,
714
+ 1844776834,
715
+ 1906917274,
716
+ 3154715663,
717
+ 3028370222,
718
+ 4156024188,
719
+ 3996363428,
720
+ 80495456,
721
+ 2659800972,
722
+ 2005649973,
723
+ 3818358673,
724
+ 3952623596,
725
+ 2506862371,
726
+ 3282302532,
727
+ 263923435,
728
+ 3384662671,
729
+ 3292439172,
730
+ 3119957588,
731
+ 1224426111,
732
+ 899864150,
733
+ 215262826,
734
+ 1619647231,
735
+ 3347694949,
736
+ 3497868538,
737
+ 2029552053,
738
+ 2992804824,
739
+ 4080010250,
740
+ 2023513186,
741
+ 1885979437,
742
+ 3564622190,
743
+ 3775424270,
744
+ 2297810139,
745
+ 3549449169,
746
+ 2664856277,
747
+ 3274801974,
748
+ 2794883969,
749
+ 980412666,
750
+ 2980215653,
751
+ 2794389321,
752
+ 2816521934,
753
+ 1266970739,
754
+ 542306338,
755
+ 3646225311,
756
+ 3598997630,
757
+ 2111980720,
758
+ 2949252482,
759
+ 2489027658,
760
+ 352815024,
761
+ 11610683,
762
+ 1386663624,
763
+ 2004196796,
764
+ 1161461546,
765
+ 1921293780,
766
+ 2463949525,
767
+ 1647009713,
768
+ 3550093655,
769
+ 2563894064,
770
+ 3486310554,
771
+ 1506105865,
772
+ 243092931,
773
+ 2659437476,
774
+ 4200687059,
775
+ 2284345122,
776
+ 1974438610,
777
+ 3591096528,
778
+ 967119212,
779
+ 3362401375,
780
+ 140678365,
781
+ 311602112,
782
+ 2361740275,
783
+ 2139598582,
784
+ 3632873481,
785
+ 2762232439,
786
+ 4156482318,
787
+ 381637792,
788
+ 3253346525,
789
+ 2492118775,
790
+ 1502434558,
791
+ 3164497290,
792
+ 3550998357,
793
+ 2412448305,
794
+ 2223955385,
795
+ 4122879535,
796
+ 350121793,
797
+ 1835149778,
798
+ 2175117867,
799
+ 989674750,
800
+ 3178241202,
801
+ 3553093569,
802
+ 3470650311,
803
+ 2829698151,
804
+ 3209427769,
805
+ 1779174943,
806
+ 275388428,
807
+ 4044574515,
808
+ 715447260,
809
+ 3180940440,
810
+ 4020772289,
811
+ 1322708567,
812
+ 3189868792,
813
+ 4250485633,
814
+ 716970023,
815
+ 2307550151,
816
+ 1074996711,
817
+ 1217573599,
818
+ 197006094,
819
+ 2178394212,
820
+ 1255233746,
821
+ 4164251484,
822
+ 1405608772,
823
+ 2808160475,
824
+ 1304736088,
825
+ 1796071066,
826
+ 2761748078,
827
+ 3570739698,
828
+ 1616118556,
829
+ 2232868135,
830
+ 3567541936,
831
+ 3470600401,
832
+ 3031621994,
833
+ 3351764214,
834
+ 1359785149,
835
+ 2617497797,
836
+ 3340028190,
837
+ 356162828,
838
+ 2083806068,
839
+ 2503635608,
840
+ 4024838996,
841
+ 2577080371,
842
+ 2897993505,
843
+ 3120733934,
844
+ 905794891,
845
+ 2506078507,
846
+ 4211618666,
847
+ 3777871979,
848
+ 809751414,
849
+ 4080874167,
850
+ 1562977008,
851
+ 3917373055,
852
+ 2132779194,
853
+ 4014249473,
854
+ 4067327082,
855
+ 2582869847,
856
+ 1780081876,
857
+ 1842619106,
858
+ 3381761227,
859
+ 921004274,
860
+ 1393256920,
861
+ 1883566732,
862
+ 2702071861,
863
+ 865327389,
864
+ 1622085203,
865
+ 3021825820,
866
+ 2687061406,
867
+ 1748902923,
868
+ 689023977,
869
+ 308399650,
870
+ 2377287978,
871
+ 1646969411,
872
+ 1051806316,
873
+ 4277884230,
874
+ 2041056290,
875
+ 101134519,
876
+ 2032472116,
877
+ 4112521069,
878
+ 151202901,
879
+ 2773743461,
880
+ 551348559,
881
+ 3476836808,
882
+ 510935951,
883
+ 625057077,
884
+ 3757450756,
885
+ 2977698135,
886
+ 3027776859,
887
+ 2616998041,
888
+ 2773430005,
889
+ 544190486,
890
+ 2241368212,
891
+ 1141105829,
892
+ 1452816309,
893
+ 4199229235,
894
+ 3218013033,
895
+ 4229475816,
896
+ 1659576351,
897
+ 3020348754,
898
+ 1193400518,
899
+ 3208584597,
900
+ 1151197733,
901
+ 2597187966,
902
+ 503065140,
903
+ 2421841572,
904
+ 1437291709,
905
+ 1909275895,
906
+ 2872630545,
907
+ 793588217,
908
+ 3792934707,
909
+ 1784451785,
910
+ 2921385648,
911
+ 1669902526,
912
+ 4189978976,
913
+ 1196986251,
914
+ 434805516,
915
+ 1907541826,
916
+ 2624415034,
917
+ 1687778718,
918
+ 650746582,
919
+ 1949153382,
920
+ 4148493093,
921
+ 841300520,
922
+ 1164202054,
923
+ 4203468658,
924
+ 4106300911,
925
+ 850346789,
926
+ 1715730760,
927
+ 3114661489,
928
+ 2866524548,
929
+ 1360448945,
930
+ 3601318775,
931
+ 1743078223,
932
+ 2413855408,
933
+ 1211895622,
934
+ 325117146,
935
+ 2721152875,
936
+ 1284334485,
937
+ 2446538832,
938
+ 739014618,
939
+ 2237045115,
940
+ 842553465,
941
+ 2538598293,
942
+ 746460793,
943
+ 4010387366,
944
+ 2002655192,
945
+ 4193733112,
946
+ 1194380773,
947
+ 3918217378,
948
+ 1447487475,
949
+ 5659228,
950
+ 3408847694,
951
+ 4190318700,
952
+ 1862549564,
953
+ 781683719,
954
+ 1194618118,
955
+ 755053413,
956
+ 3436011942,
957
+ 2885435303,
958
+ 3081151348,
959
+ 2017642831,
960
+ 1053816502,
961
+ 1086627485,
962
+ 2157296554,
963
+ 110650022,
964
+ 965352898,
965
+ 1003174194,
966
+ 1288956241,
967
+ 4057404871,
968
+ 2965068465,
969
+ 2897064481,
970
+ 2457377317,
971
+ 1879872545,
972
+ 358455290,
973
+ 375086701,
974
+ 3015902095,
975
+ 1676249984,
976
+ 924455526,
977
+ 2084169389,
978
+ 1989014644,
979
+ 1993749926,
980
+ 2009424973,
981
+ 2113340508,
982
+ 3980883273,
983
+ 2915977458,
984
+ 203328382,
985
+ 3020815229,
986
+ 2415050113,
987
+ 4103009585,
988
+ 3700885489,
989
+ 2916647550,
990
+ 1523006503,
991
+ 174302338,
992
+ 2476909338,
993
+ 1969322490,
994
+ 4285741984,
995
+ 1528449097,
996
+ 3355315515,
997
+ 4217241278,
998
+ 599579127,
999
+ 2572243673,
1000
+ 3035856735,
1001
+ 1539140489,
1002
+ 1782314913,
1003
+ 4238644287,
1004
+ 1746424142,
1005
+ 1978148312,
1006
+ 2380746849,
1007
+ 184941882,
1008
+ 1106717981,
1009
+ 1720750349,
1010
+ 981701307,
1011
+ 3953154731,
1012
+ 3257809181,
1013
+ 2892339376,
1014
+ 3339778166,
1015
+ 3676936849,
1016
+ 87425948,
1017
+ 3029257381,
1018
+ 2037942523,
1019
+ 3807628706,
1020
+ 2861474706,
1021
+ 1058852346,
1022
+ 1322765211,
1023
+ 2686046342,
1024
+ 2689342655,
1025
+ 2303436168,
1026
+ 2571627181,
1027
+ 1986057734,
1028
+ 1183564308,
1029
+ 2829677523,
1030
+ 1295563975,
1031
+ 503126586,
1032
+ 2025890348,
1033
+ 4179277821,
1034
+ 1735262467,
1035
+ 981331774,
1036
+ 1613447066,
1037
+ 1011606109,
1038
+ 2000062246,
1039
+ 3581448390,
1040
+ 3477731384,
1041
+ 3641307373,
1042
+ 3508544379,
1043
+ 2327233491,
1044
+ 3931944343,
1045
+ 4189052882,
1046
+ 2990416380,
1047
+ 422406169,
1048
+ 202291313,
1049
+ 2531006461,
1050
+ 4277024116,
1051
+ 3815144003,
1052
+ 821314585,
1053
+ 1344175168,
1054
+ 3562834071,
1055
+ 1339615445,
1056
+ 1831545190,
1057
+ 3115548822,
1058
+ 743512780,
1059
+ 4006999448,
1060
+ 3720181735,
1061
+ 1012033521,
1062
+ 919931041,
1063
+ 2628967879,
1064
+ 1151876565,
1065
+ 1268107129,
1066
+ 3674829936,
1067
+ 834977846,
1068
+ 743987006,
1069
+ 3947536548,
1070
+ 3706529695,
1071
+ 4121073678,
1072
+ 2507605742,
1073
+ 1595636918,
1074
+ 2708047833,
1075
+ 2427507331,
1076
+ 3868216331,
1077
+ 3254240010,
1078
+ 2097683411,
1079
+ 3279710596,
1080
+ 3686819053,
1081
+ 1843541720,
1082
+ 1683793619,
1083
+ 3245287285,
1084
+ 3571828776,
1085
+ 3733296431,
1086
+ 3806747478,
1087
+ 1390930605,
1088
+ 3860422228,
1089
+ 114397037,
1090
+ 1931519825,
1091
+ 2770684378,
1092
+ 1556101783,
1093
+ 1436111731,
1094
+ 4031950081,
1095
+ 562876656,
1096
+ 1775895782,
1097
+ 612364620,
1098
+ 1313509772,
1099
+ 4283410242,
1100
+ 3252958463,
1101
+ 2176555836,
1102
+ 3933073367,
1103
+ 3013277102,
1104
+ 1444071961,
1105
+ 3120949516,
1106
+ 2824578890,
1107
+ 325676929,
1108
+ 943677134,
1109
+ 1800649256,
1110
+ 1721927060,
1111
+ 347498719,
1112
+ 1435221321,
1113
+ 2623572981,
1114
+ 1408548470,
1115
+ 4145586315,
1116
+ 2901889237,
1117
+ 1849377952,
1118
+ 1239144551,
1119
+ 3382598266,
1120
+ 2992893897,
1121
+ 3738297588,
1122
+ 611280106,
1123
+ 3897415338,
1124
+ 2370299241,
1125
+ 1772308583,
1126
+ 3697465753,
1127
+ 354508058,
1128
+ 2702360134,
1129
+ 591308331,
1130
+ 3524072501,
1131
+ 976616000,
1132
+ 2563717192,
1133
+ 3078266097,
1134
+ 1376594703,
1135
+ 4209795919,
1136
+ 2454412767,
1137
+ 2712206031,
1138
+ 2963860163,
1139
+ 3734324882,
1140
+ 2248653800,
1141
+ 324872786,
1142
+ 3789837448,
1143
+ 3779000146,
1144
+ 527733939,
1145
+ 2844165793,
1146
+ 576499681,
1147
+ 1618787435,
1148
+ 2638888650,
1149
+ 57511068,
1150
+ 2804627518,
1151
+ 2993670030,
1152
+ 481402236,
1153
+ 2810124845,
1154
+ 1416045214,
1155
+ 1723694191,
1156
+ 1214944572,
1157
+ 3188123783,
1158
+ 1139185907,
1159
+ 3851015362,
1160
+ 1719652470,
1161
+ 1661343029,
1162
+ 3644307578,
1163
+ 3564178709,
1164
+ 1256656955,
1165
+ 46631590,
1166
+ 4231317929,
1167
+ 3098958589,
1168
+ 1834956625,
1169
+ 2206185428,
1170
+ 3695688374,
1171
+ 3647957317,
1172
+ 1064098871,
1173
+ 1739100906,
1174
+ 2579568980,
1175
+ 27974051,
1176
+ 2617466775,
1177
+ 964075233,
1178
+ 907049942,
1179
+ 4164146575,
1180
+ 3377168066,
1181
+ 2524828266,
1182
+ 1083546008,
1183
+ 2992960953,
1184
+ 2260789066,
1185
+ 1543742095,
1186
+ 2843842831,
1187
+ 1375722284,
1188
+ 3574521313,
1189
+ 110842534,
1190
+ 2310998251,
1191
+ 3076511734,
1192
+ 783145600,
1193
+ 1287776608,
1194
+ 3087144146,
1195
+ 305559823,
1196
+ 2356293719,
1197
+ 3228441476,
1198
+ 1678938122,
1199
+ 3775814061,
1200
+ 1620283952,
1201
+ 2512027726,
1202
+ 1031432407,
1203
+ 962295099,
1204
+ 3877418501,
1205
+ 968669928,
1206
+ 304126693,
1207
+ 3711291137,
1208
+ 3847527101,
1209
+ 494066767,
1210
+ 4050229756,
1211
+ 4169448589,
1212
+ 671763915,
1213
+ 1095747781,
1214
+ 4006132710,
1215
+ 394725957,
1216
+ 200521654,
1217
+ 2715998750,
1218
+ 1477567673,
1219
+ 895171901,
1220
+ 3370105999,
1221
+ 2684157455,
1222
+ 4153990023,
1223
+ 3966076501,
1224
+ 2043374409,
1225
+ 144443759,
1226
+ 6764556,
1227
+ 1611650045,
1228
+ 1480956755,
1229
+ 1388276468,
1230
+ 4136518438,
1231
+ 1538041336,
1232
+ 266773992,
1233
+ 1623357516,
1234
+ 2267298390,
1235
+ 3183919402,
1236
+ 1084292424,
1237
+ 2796136160,
1238
+ 2413448816,
1239
+ 2850375199,
1240
+ 3510894040,
1241
+ 2644778623,
1242
+ 3317288284,
1243
+ 3697317540,
1244
+ 1465776787,
1245
+ 1843489446,
1246
+ 1416711171,
1247
+ 744701117,
1248
+ 1286781349,
1249
+ 3748640476,
1250
+ 861982119,
1251
+ 2377742909,
1252
+ 1171768136,
1253
+ 2701877439,
1254
+ 3839724288,
1255
+ 2869791015,
1256
+ 2386067954,
1257
+ 2629214347,
1258
+ 955801623,
1259
+ 3831079317,
1260
+ 624
1261
+ ],
1262
+ null
1263
+ ],
1264
+ "progress_sampler": null,
1265
+ "similarity_sampler": null
1266
+ }
metrics.json ADDED
@@ -0,0 +1,369 @@
1
+ {
2
+ "step": 4500,
3
+ "metrics": {
4
+ "eval_rew_align/success_auprc_racer_val": 0.5972598636691593,
5
+ "eval_rew_align/positive_success_acc_racer_val": 0.5238095238095238,
6
+ "eval_rew_align/negative_success_acc_racer_val": 0.9725363489499192,
7
+ "eval_rew_align/loss_racer_val": 1.5039077520370483,
8
+ "eval_rew_align/pearson_racer_val": 0.8166853465988891,
9
+ "eval_rew_align/success_auprc_oxe_bc_z_eval": 0.054588487359398905,
10
+ "eval_rew_align/positive_success_acc_oxe_bc_z_eval": 0.7,
11
+ "eval_rew_align/negative_success_acc_oxe_bc_z_eval": 0.9432314410480349,
12
+ "eval_rew_align/loss_oxe_bc_z_eval": 1.7649718403816224,
13
+ "eval_rew_align/pearson_oxe_bc_z_eval": 0.5611694184881661,
14
+ "eval_rew_align/success_auprc_oxe_berkeley_cable_eval": 0.12320737550700828,
15
+ "eval_rew_align/positive_success_acc_oxe_berkeley_cable_eval": 0.7,
16
+ "eval_rew_align/negative_success_acc_oxe_berkeley_cable_eval": 0.9396299902629016,
17
+ "eval_rew_align/loss_oxe_berkeley_cable_eval": 1.6676030993461608,
18
+ "eval_rew_align/pearson_oxe_berkeley_cable_eval": 0.7626281468321523,
19
+ "eval_rew_align/success_auprc_oxe_bridge_v2_eval": 0.2226129586383097,
20
+ "eval_rew_align/positive_success_acc_oxe_bridge_v2_eval": 0.7,
21
+ "eval_rew_align/negative_success_acc_oxe_bridge_v2_eval": 0.9700440528634361,
22
+ "eval_rew_align/loss_oxe_bridge_v2_eval": 1.5779191851615906,
23
+ "eval_rew_align/pearson_oxe_bridge_v2_eval": 0.8196023502220793,
24
+ "eval_rew_align/success_auprc_oxe_jaco_eval": 0.05703350629550197,
25
+ "eval_rew_align/positive_success_acc_oxe_jaco_eval": 0.8,
26
+ "eval_rew_align/negative_success_acc_oxe_jaco_eval": 0.9796816087138668,
27
+ "eval_rew_align/loss_oxe_jaco_eval": 1.701886808872223,
28
+ "eval_rew_align/pearson_oxe_jaco_eval": 0.7369627561402344,
29
+ "eval_rew_align/success_auprc_oxe_toto_eval": 0.10819046102805713,
30
+ "eval_rew_align/positive_success_acc_oxe_toto_eval": 1.0,
31
+ "eval_rew_align/negative_success_acc_oxe_toto_eval": 0.9452054794520548,
32
+ "eval_rew_align/loss_oxe_toto_eval": 1.5248035669326783,
33
+ "eval_rew_align/pearson_oxe_toto_eval": 0.9275399402861348,
34
+ "eval_rew_align/success_auprc_oxe_viola_eval": 0.3924038961069135,
35
+ "eval_rew_align/positive_success_acc_oxe_viola_eval": 1.0,
36
+ "eval_rew_align/negative_success_acc_oxe_viola_eval": 0.9430528375733855,
37
+ "eval_rew_align/loss_oxe_viola_eval": 1.5757618188858031,
38
+ "eval_rew_align/pearson_oxe_viola_eval": 0.8978344352364431,
39
+ "eval_rew_align/success_auprc_mw_eval": 0.14365004363589842,
40
+ "eval_rew_align/positive_success_acc_mw_eval": 0.8,
41
+ "eval_rew_align/negative_success_acc_mw_eval": 0.9627450980392157,
42
+ "eval_rew_align/loss_mw_eval": 1.7702434301376342,
43
+ "eval_rew_align/pearson_mw_eval": 0.7687541228936258,
44
+ "eval_rew_align/success_auprc_libero_90": 0.1795092166845774,
45
+ "eval_rew_align/positive_success_acc_libero_90": 0.9,
46
+ "eval_rew_align/negative_success_acc_libero_90": 0.9682352941176471,
47
+ "eval_rew_align/loss_libero_90": 1.5339298248291016,
48
+ "eval_rew_align/pearson_libero_90": 0.8980980150621931,
49
+ "eval_rew_align/success_auprc_usc_trossen": 0.2819898652527857,
50
+ "eval_rew_align/positive_success_acc_usc_trossen": 0.5,
51
+ "eval_rew_align/negative_success_acc_usc_trossen": 0.98,
52
+ "eval_rew_align/loss_usc_trossen": 1.5562334299087524,
53
+ "eval_rew_align/pearson_usc_trossen": 0.7085253582776633,
54
+ "eval_p_rank/kendall_last_usc_trossen": 0.8333333333333333,
55
+ "eval_p_rank/kendall_rewind_last_usc_trossen": 1.0,
56
+ "eval_p_rank/avg_succ_subopt_diff_last_usc_trossen": 0.14124762515227,
57
+ "eval_p_rank/min_succ_subopt_diff_last_usc_trossen": 0.040902674198150635,
58
+ "eval_p_rank/max_succ_subopt_diff_last_usc_trossen": 0.2803109735250473,
59
+ "eval_p_rank/avg_subopt_fail_diff_last_usc_trossen": 0.19397936016321182,
60
+ "eval_p_rank/min_subopt_fail_diff_last_usc_trossen": 0.026902765035629272,
61
+ "eval_p_rank/max_subopt_fail_diff_last_usc_trossen": 0.3610559552907944,
62
+ "eval_p_rank/avg_succ_fail_diff_last_usc_trossen": 0.28165244973368114,
63
+ "eval_p_rank/min_succ_fail_diff_last_usc_trossen": 0.06780543923377991,
64
+ "eval_p_rank/max_succ_fail_diff_last_usc_trossen": 0.46358518302440643,
65
+ "eval_p_rank/ranking_acc_last_usc_trossen": 0.8809523809523809,
66
+ "eval_p_rank/ranking_acc_all_pairs_last_usc_trossen": 0.8809523809523809,
67
+ "eval_p_rank/ranking_acc_suboptimal_vs_successful_last_usc_trossen": 0.9375,
68
+ "eval_p_rank/ranking_acc_failure_vs_suboptimal_last_usc_trossen": 0.75,
69
+ "eval_p_rank/ranking_acc_failure_vs_successful_last_usc_trossen": 0.8888888888888888,
70
+ "eval_p_rank/kendall_avg_usc_trossen": 0.8333333333333333,
71
+ "eval_p_rank/kendall_rewind_avg_usc_trossen": 1.0,
72
+ "eval_p_rank/avg_succ_subopt_diff_avg_usc_trossen": 0.14124762515227,
73
+ "eval_p_rank/min_succ_subopt_diff_avg_usc_trossen": 0.040902674198150635,
74
+ "eval_p_rank/max_succ_subopt_diff_avg_usc_trossen": 0.2803109735250473,
75
+ "eval_p_rank/avg_subopt_fail_diff_avg_usc_trossen": 0.19397936016321182,
76
+ "eval_p_rank/min_subopt_fail_diff_avg_usc_trossen": 0.026902765035629272,
77
+ "eval_p_rank/max_subopt_fail_diff_avg_usc_trossen": 0.3610559552907944,
78
+ "eval_p_rank/avg_succ_fail_diff_avg_usc_trossen": 0.28165244973368114,
79
+ "eval_p_rank/min_succ_fail_diff_avg_usc_trossen": 0.06780543923377991,
80
+ "eval_p_rank/max_succ_fail_diff_avg_usc_trossen": 0.46358518302440643,
81
+ "eval_p_rank/ranking_acc_avg_usc_trossen": 0.8809523809523809,
82
+ "eval_p_rank/ranking_acc_all_pairs_avg_usc_trossen": 0.8809523809523809,
83
+ "eval_p_rank/ranking_acc_suboptimal_vs_successful_avg_usc_trossen": 0.9375,
84
+ "eval_p_rank/ranking_acc_failure_vs_suboptimal_avg_usc_trossen": 0.75,
85
+ "eval_p_rank/ranking_acc_failure_vs_successful_avg_usc_trossen": 0.8888888888888888,
86
+ "eval_p_rank/kendall_sum_usc_trossen": 0.8333333333333333,
87
+ "eval_p_rank/kendall_rewind_sum_usc_trossen": 1.0,
88
+ "eval_p_rank/avg_succ_subopt_diff_sum_usc_trossen": 0.14124762515227,
89
+ "eval_p_rank/min_succ_subopt_diff_sum_usc_trossen": 0.040902674198150635,
90
+ "eval_p_rank/max_succ_subopt_diff_sum_usc_trossen": 0.2803109735250473,
91
+ "eval_p_rank/avg_subopt_fail_diff_sum_usc_trossen": 0.19397936016321182,
92
+ "eval_p_rank/min_subopt_fail_diff_sum_usc_trossen": 0.026902765035629272,
93
+ "eval_p_rank/max_subopt_fail_diff_sum_usc_trossen": 0.3610559552907944,
94
+ "eval_p_rank/avg_succ_fail_diff_sum_usc_trossen": 0.28165244973368114,
95
+ "eval_p_rank/min_succ_fail_diff_sum_usc_trossen": 0.06780543923377991,
96
+ "eval_p_rank/max_succ_fail_diff_sum_usc_trossen": 0.46358518302440643,
97
+ "eval_p_rank/ranking_acc_sum_usc_trossen": 0.8809523809523809,
98
+ "eval_p_rank/ranking_acc_all_pairs_sum_usc_trossen": 0.8809523809523809,
99
+ "eval_p_rank/ranking_acc_suboptimal_vs_successful_sum_usc_trossen": 0.9375,
100
+ "eval_p_rank/ranking_acc_failure_vs_suboptimal_sum_usc_trossen": 0.75,
101
+ "eval_p_rank/ranking_acc_failure_vs_successful_sum_usc_trossen": 0.8888888888888888,
102
+ "eval_rew_align/success_auprc_rfm_new_mit_franka_nowrist": 0.13878492377635082,
103
+ "eval_rew_align/positive_success_acc_rfm_new_mit_franka_nowrist": 0.9,
104
+ "eval_rew_align/negative_success_acc_rfm_new_mit_franka_nowrist": 0.9635294117647059,
105
+ "eval_rew_align/loss_rfm_new_mit_franka_nowrist": 1.3595333456993104,
106
+ "eval_rew_align/pearson_rfm_new_mit_franka_nowrist": 0.9332205211882452,
107
+ "eval_p_rank/kendall_last_rfm_new_mit_franka_nowrist": 0.46904761904761905,
108
+ "eval_p_rank/kendall_rewind_last_rfm_new_mit_franka_nowrist": 0.8095238095238095,
109
+ "eval_p_rank/avg_succ_subopt_diff_last_rfm_new_mit_franka_nowrist": 0.1011232117811839,
110
+ "eval_p_rank/min_succ_subopt_diff_last_rfm_new_mit_franka_nowrist": 0.022794996698697445,
111
+ "eval_p_rank/max_succ_subopt_diff_last_rfm_new_mit_franka_nowrist": 0.21488183736801147,
112
+ "eval_p_rank/avg_subopt_fail_diff_last_rfm_new_mit_franka_nowrist": 0.14513030257962997,
113
+ "eval_p_rank/min_subopt_fail_diff_last_rfm_new_mit_franka_nowrist": -0.14316336512565614,
114
+ "eval_p_rank/max_subopt_fail_diff_last_rfm_new_mit_franka_nowrist": 0.34648392796516414,
115
+ "eval_p_rank/avg_succ_fail_diff_last_rfm_new_mit_franka_nowrist": 0.24625351436081383,
116
+ "eval_p_rank/min_succ_fail_diff_last_rfm_new_mit_franka_nowrist": -0.00810291568438215,
117
+ "eval_p_rank/max_succ_fail_diff_last_rfm_new_mit_franka_nowrist": 0.49043338249127066,
118
+ "eval_p_rank/ranking_acc_last_rfm_new_mit_franka_nowrist": 0.7598684210526315,
119
+ "eval_p_rank/ranking_acc_all_pairs_last_rfm_new_mit_franka_nowrist": 0.7598684210526315,
120
+ "eval_p_rank/ranking_acc_failure_vs_successful_last_rfm_new_mit_franka_nowrist": 0.8482142857142857,
121
+ "eval_p_rank/ranking_acc_failure_vs_suboptimal_last_rfm_new_mit_franka_nowrist": 0.7523809523809524,
122
+ "eval_p_rank/ranking_acc_suboptimal_vs_successful_last_rfm_new_mit_franka_nowrist": 0.6551724137931034,
123
+ "eval_p_rank/kendall_avg_rfm_new_mit_franka_nowrist": 0.46904761904761905,
124
+ "eval_p_rank/kendall_rewind_avg_rfm_new_mit_franka_nowrist": 0.8095238095238095,
125
+ "eval_p_rank/avg_succ_subopt_diff_avg_rfm_new_mit_franka_nowrist": 0.1011232117811839,
126
+ "eval_p_rank/min_succ_subopt_diff_avg_rfm_new_mit_franka_nowrist": 0.022794996698697445,
127
+ "eval_p_rank/max_succ_subopt_diff_avg_rfm_new_mit_franka_nowrist": 0.21488183736801147,
128
+ "eval_p_rank/avg_subopt_fail_diff_avg_rfm_new_mit_franka_nowrist": 0.14513030257962997,
129
+ "eval_p_rank/min_subopt_fail_diff_avg_rfm_new_mit_franka_nowrist": -0.14316336512565614,
130
+ "eval_p_rank/max_subopt_fail_diff_avg_rfm_new_mit_franka_nowrist": 0.34648392796516414,
131
+ "eval_p_rank/avg_succ_fail_diff_avg_rfm_new_mit_franka_nowrist": 0.24625351436081383,
132
+ "eval_p_rank/min_succ_fail_diff_avg_rfm_new_mit_franka_nowrist": -0.00810291568438215,
133
+ "eval_p_rank/max_succ_fail_diff_avg_rfm_new_mit_franka_nowrist": 0.49043338249127066,
134
+ "eval_p_rank/ranking_acc_avg_rfm_new_mit_franka_nowrist": 0.7598684210526315,
135
+ "eval_p_rank/ranking_acc_all_pairs_avg_rfm_new_mit_franka_nowrist": 0.7598684210526315,
136
+ "eval_p_rank/ranking_acc_failure_vs_successful_avg_rfm_new_mit_franka_nowrist": 0.8482142857142857,
137
+ "eval_p_rank/ranking_acc_failure_vs_suboptimal_avg_rfm_new_mit_franka_nowrist": 0.7523809523809524,
138
+ "eval_p_rank/ranking_acc_suboptimal_vs_successful_avg_rfm_new_mit_franka_nowrist": 0.6551724137931034,
139
+ "eval_p_rank/kendall_sum_rfm_new_mit_franka_nowrist": 0.46904761904761905,
140
+ "eval_p_rank/kendall_rewind_sum_rfm_new_mit_franka_nowrist": 0.8095238095238095,
141
+ "eval_p_rank/avg_succ_subopt_diff_sum_rfm_new_mit_franka_nowrist": 0.1011232117811839,
142
+ "eval_p_rank/min_succ_subopt_diff_sum_rfm_new_mit_franka_nowrist": 0.022794996698697445,
143
+ "eval_p_rank/max_succ_subopt_diff_sum_rfm_new_mit_franka_nowrist": 0.21488183736801147,
144
+ "eval_p_rank/avg_subopt_fail_diff_sum_rfm_new_mit_franka_nowrist": 0.14513030257962997,
145
+ "eval_p_rank/min_subopt_fail_diff_sum_rfm_new_mit_franka_nowrist": -0.14316336512565614,
146
+ "eval_p_rank/max_subopt_fail_diff_sum_rfm_new_mit_franka_nowrist": 0.34648392796516414,
147
+ "eval_p_rank/avg_succ_fail_diff_sum_rfm_new_mit_franka_nowrist": 0.24625351436081383,
148
+ "eval_p_rank/min_succ_fail_diff_sum_rfm_new_mit_franka_nowrist": -0.00810291568438215,
149
+ "eval_p_rank/max_succ_fail_diff_sum_rfm_new_mit_franka_nowrist": 0.49043338249127066,
150
+ "eval_p_rank/ranking_acc_sum_rfm_new_mit_franka_nowrist": 0.7598684210526315,
151
+ "eval_p_rank/ranking_acc_all_pairs_sum_rfm_new_mit_franka_nowrist": 0.7598684210526315,
152
+ "eval_p_rank/ranking_acc_failure_vs_successful_sum_rfm_new_mit_franka_nowrist": 0.8482142857142857,
153
+ "eval_p_rank/ranking_acc_failure_vs_suboptimal_sum_rfm_new_mit_franka_nowrist": 0.7523809523809524,
154
+ "eval_p_rank/ranking_acc_suboptimal_vs_successful_sum_rfm_new_mit_franka_nowrist": 0.6551724137931034,
155
+ "eval_rew_align/success_auprc_utd_so101_clean_top": 0.1594673014952464,
156
+ "eval_rew_align/positive_success_acc_utd_so101_clean_top": 0.8,
157
+ "eval_rew_align/negative_success_acc_utd_so101_clean_top": 0.9796078431372549,
158
+ "eval_rew_align/loss_utd_so101_clean_top": 1.422999668121338,
159
+ "eval_rew_align/pearson_utd_so101_clean_top": 0.9214771733077172,
160
+ "eval_p_rank/kendall_last_utd_so101_clean_top": 0.7333333333333333,
161
+ "eval_p_rank/kendall_rewind_last_utd_so101_clean_top": 0.7333333333333333,
162
+ "eval_p_rank/avg_succ_subopt_diff_last_utd_so101_clean_top": 0.1281689941883087,
163
+ "eval_p_rank/min_succ_subopt_diff_last_utd_so101_clean_top": -0.6224770694971085,
164
+ "eval_p_rank/max_succ_subopt_diff_last_utd_so101_clean_top": 0.4432547390460968,
165
+ "eval_p_rank/avg_subopt_fail_diff_last_utd_so101_clean_top": 0.2357720375061035,
166
+ "eval_p_rank/min_subopt_fail_diff_last_utd_so101_clean_top": -0.012576103210449219,
167
+ "eval_p_rank/max_subopt_fail_diff_last_utd_so101_clean_top": 0.5894219428300858,
168
+ "eval_p_rank/avg_succ_fail_diff_last_utd_so101_clean_top": 0.3639410316944122,
169
+ "eval_p_rank/min_succ_fail_diff_last_utd_so101_clean_top": -0.033055126667022705,
170
+ "eval_p_rank/max_succ_fail_diff_last_utd_so101_clean_top": 0.6650743782520294,
171
+ "eval_p_rank/ranking_acc_last_utd_so101_clean_top": 0.8666666666666667,
172
+ "eval_p_rank/ranking_acc_all_pairs_last_utd_so101_clean_top": 0.8666666666666667,
173
+ "eval_p_rank/ranking_acc_failure_vs_successful_last_utd_so101_clean_top": 0.9,
174
+ "eval_p_rank/ranking_acc_suboptimal_vs_successful_last_utd_so101_clean_top": 0.8,
175
+ "eval_p_rank/ranking_acc_failure_vs_suboptimal_last_utd_so101_clean_top": 0.9,
176
+ "eval_p_rank/kendall_avg_utd_so101_clean_top": 0.7333333333333333,
177
+ "eval_p_rank/kendall_rewind_avg_utd_so101_clean_top": 0.7333333333333333,
178
+ "eval_p_rank/avg_succ_subopt_diff_avg_utd_so101_clean_top": 0.1281689941883087,
179
+ "eval_p_rank/min_succ_subopt_diff_avg_utd_so101_clean_top": -0.6224770694971085,
180
+ "eval_p_rank/max_succ_subopt_diff_avg_utd_so101_clean_top": 0.4432547390460968,
181
+ "eval_p_rank/avg_subopt_fail_diff_avg_utd_so101_clean_top": 0.2357720375061035,
182
+ "eval_p_rank/min_subopt_fail_diff_avg_utd_so101_clean_top": -0.012576103210449219,
183
+ "eval_p_rank/max_subopt_fail_diff_avg_utd_so101_clean_top": 0.5894219428300858,
184
+ "eval_p_rank/avg_succ_fail_diff_avg_utd_so101_clean_top": 0.3639410316944122,
185
+ "eval_p_rank/min_succ_fail_diff_avg_utd_so101_clean_top": -0.033055126667022705,
186
+ "eval_p_rank/max_succ_fail_diff_avg_utd_so101_clean_top": 0.6650743782520294,
187
+ "eval_p_rank/ranking_acc_avg_utd_so101_clean_top": 0.8666666666666667,
188
+ "eval_p_rank/ranking_acc_all_pairs_avg_utd_so101_clean_top": 0.8666666666666667,
189
+ "eval_p_rank/ranking_acc_failure_vs_successful_avg_utd_so101_clean_top": 0.9,
190
+ "eval_p_rank/ranking_acc_suboptimal_vs_successful_avg_utd_so101_clean_top": 0.8,
191
+ "eval_p_rank/ranking_acc_failure_vs_suboptimal_avg_utd_so101_clean_top": 0.9,
192
+ "eval_p_rank/kendall_sum_utd_so101_clean_top": 0.7333333333333333,
193
+ "eval_p_rank/kendall_rewind_sum_utd_so101_clean_top": 0.7333333333333333,
194
+ "eval_p_rank/avg_succ_subopt_diff_sum_utd_so101_clean_top": 0.1281689941883087,
195
+ "eval_p_rank/min_succ_subopt_diff_sum_utd_so101_clean_top": -0.6224770694971085,
196
+ "eval_p_rank/max_succ_subopt_diff_sum_utd_so101_clean_top": 0.4432547390460968,
197
+ "eval_p_rank/avg_subopt_fail_diff_sum_utd_so101_clean_top": 0.2357720375061035,
198
+ "eval_p_rank/min_subopt_fail_diff_sum_utd_so101_clean_top": -0.012576103210449219,
199
+ "eval_p_rank/max_subopt_fail_diff_sum_utd_so101_clean_top": 0.5894219428300858,
200
+ "eval_p_rank/avg_succ_fail_diff_sum_utd_so101_clean_top": 0.3639410316944122,
201
+ "eval_p_rank/min_succ_fail_diff_sum_utd_so101_clean_top": -0.033055126667022705,
202
+ "eval_p_rank/max_succ_fail_diff_sum_utd_so101_clean_top": 0.6650743782520294,
203
+ "eval_p_rank/ranking_acc_sum_utd_so101_clean_top": 0.8666666666666667,
204
+ "eval_p_rank/ranking_acc_all_pairs_sum_utd_so101_clean_top": 0.8666666666666667,
205
+ "eval_p_rank/ranking_acc_failure_vs_successful_sum_utd_so101_clean_top": 0.9,
206
+ "eval_p_rank/ranking_acc_suboptimal_vs_successful_sum_utd_so101_clean_top": 0.8,
207
+ "eval_p_rank/ranking_acc_failure_vs_suboptimal_sum_utd_so101_clean_top": 0.9,
208
+ "eval_rew_align/success_auprc_usc_xarm": 0.3298253598253598,
209
+ "eval_rew_align/positive_success_acc_usc_xarm": 1.0,
210
+ "eval_rew_align/negative_success_acc_usc_xarm": 0.971764705882353,
211
+ "eval_rew_align/loss_usc_xarm": 1.459894859790802,
212
+ "eval_rew_align/pearson_usc_xarm": 0.9290145264370201,
213
+ "eval_p_rank/kendall_last_usc_xarm": 0.75,
214
+ "eval_p_rank/kendall_rewind_last_usc_xarm": 0.8888888888888888,
215
+ "eval_p_rank/avg_succ_subopt_diff_last_usc_xarm": 0.11288829644521077,
216
+ "eval_p_rank/min_succ_subopt_diff_last_usc_xarm": 0.0029833614826202393,
217
+ "eval_p_rank/max_succ_subopt_diff_last_usc_xarm": 0.217641681432724,
218
+ "eval_p_rank/avg_subopt_fail_diff_last_usc_xarm": 0.11646403868993123,
219
+ "eval_p_rank/min_subopt_fail_diff_last_usc_xarm": -0.03846535086631775,
220
+ "eval_p_rank/max_subopt_fail_diff_last_usc_xarm": 0.34273654222488403,
221
+ "eval_p_rank/avg_succ_fail_diff_last_usc_xarm": 0.229352335135142,
222
+ "eval_p_rank/min_succ_fail_diff_last_usc_xarm": 0.0956188440322876,
223
+ "eval_p_rank/max_succ_fail_diff_last_usc_xarm": 0.35525771975517273,
224
+ "eval_p_rank/ranking_acc_last_usc_xarm": 0.875,
225
+ "eval_p_rank/ranking_acc_all_pairs_last_usc_xarm": 0.875,
226
+ "eval_p_rank/ranking_acc_failure_vs_suboptimal_last_usc_xarm": 0.7083333333333334,
227
+ "eval_p_rank/ranking_acc_failure_vs_successful_last_usc_xarm": 1.0,
228
+ "eval_p_rank/ranking_acc_suboptimal_vs_successful_last_usc_xarm": 0.9166666666666666,
229
+ "eval_p_rank/kendall_avg_usc_xarm": 0.75,
230
+ "eval_p_rank/kendall_rewind_avg_usc_xarm": 0.8888888888888888,
231
+ "eval_p_rank/avg_succ_subopt_diff_avg_usc_xarm": 0.11288829644521077,
232
+ "eval_p_rank/min_succ_subopt_diff_avg_usc_xarm": 0.0029833614826202393,
233
+ "eval_p_rank/max_succ_subopt_diff_avg_usc_xarm": 0.217641681432724,
234
+ "eval_p_rank/avg_subopt_fail_diff_avg_usc_xarm": 0.11646403868993123,
235
+ "eval_p_rank/min_subopt_fail_diff_avg_usc_xarm": -0.03846535086631775,
236
+ "eval_p_rank/max_subopt_fail_diff_avg_usc_xarm": 0.34273654222488403,
237
+ "eval_p_rank/avg_succ_fail_diff_avg_usc_xarm": 0.229352335135142,
238
+ "eval_p_rank/min_succ_fail_diff_avg_usc_xarm": 0.0956188440322876,
239
+ "eval_p_rank/max_succ_fail_diff_avg_usc_xarm": 0.35525771975517273,
240
+ "eval_p_rank/ranking_acc_avg_usc_xarm": 0.875,
241
+ "eval_p_rank/ranking_acc_all_pairs_avg_usc_xarm": 0.875,
242
+ "eval_p_rank/ranking_acc_failure_vs_suboptimal_avg_usc_xarm": 0.7083333333333334,
243
+ "eval_p_rank/ranking_acc_failure_vs_successful_avg_usc_xarm": 1.0,
244
+ "eval_p_rank/ranking_acc_suboptimal_vs_successful_avg_usc_xarm": 0.9166666666666666,
245
+ "eval_p_rank/kendall_sum_usc_xarm": 0.75,
246
+ "eval_p_rank/kendall_rewind_sum_usc_xarm": 0.8888888888888888,
247
+ "eval_p_rank/avg_succ_subopt_diff_sum_usc_xarm": 0.11288829644521077,
248
+ "eval_p_rank/min_succ_subopt_diff_sum_usc_xarm": 0.0029833614826202393,
249
+ "eval_p_rank/max_succ_subopt_diff_sum_usc_xarm": 0.217641681432724,
250
+ "eval_p_rank/avg_subopt_fail_diff_sum_usc_xarm": 0.11646403868993123,
251
+ "eval_p_rank/min_subopt_fail_diff_sum_usc_xarm": -0.03846535086631775,
252
+ "eval_p_rank/max_subopt_fail_diff_sum_usc_xarm": 0.34273654222488403,
253
+ "eval_p_rank/avg_succ_fail_diff_sum_usc_xarm": 0.229352335135142,
254
+ "eval_p_rank/min_succ_fail_diff_sum_usc_xarm": 0.0956188440322876,
255
+ "eval_p_rank/max_succ_fail_diff_sum_usc_xarm": 0.35525771975517273,
256
+ "eval_p_rank/ranking_acc_sum_usc_xarm": 0.875,
257
+ "eval_p_rank/ranking_acc_all_pairs_sum_usc_xarm": 0.875,
258
+ "eval_p_rank/ranking_acc_failure_vs_suboptimal_sum_usc_xarm": 0.7083333333333334,
259
+ "eval_p_rank/ranking_acc_failure_vs_successful_sum_usc_xarm": 1.0,
260
+ "eval_p_rank/ranking_acc_suboptimal_vs_successful_sum_usc_xarm": 0.9166666666666666,
261
+ "eval_rew_align/success_auprc_usc_franka": 0.178648534454372,
262
+ "eval_rew_align/positive_success_acc_usc_franka": 0.75,
263
+ "eval_rew_align/negative_success_acc_usc_franka": 0.9436274509803921,
264
+ "eval_rew_align/loss_usc_franka": 1.4907499551773071,
265
+ "eval_rew_align/pearson_usc_franka": 0.9115594502071923,
266
+ "eval_p_rank/kendall_last_usc_franka": 0.7916666666666666,
267
+ "eval_p_rank/kendall_rewind_last_usc_franka": 0.8333333333333334,
268
+ "eval_p_rank/avg_succ_subopt_diff_last_usc_franka": 0.06161930412054062,
269
+ "eval_p_rank/min_succ_subopt_diff_last_usc_franka": -0.010589927434921265,
270
+ "eval_p_rank/max_succ_subopt_diff_last_usc_franka": 0.17146822810173035,
271
+ "eval_p_rank/avg_subopt_fail_diff_last_usc_franka": 0.18651490285992622,
272
+ "eval_p_rank/min_subopt_fail_diff_last_usc_franka": 0.0136566162109375,
273
+ "eval_p_rank/max_subopt_fail_diff_last_usc_franka": 0.3522116541862488,
274
+ "eval_p_rank/avg_succ_fail_diff_last_usc_franka": 0.24813420698046684,
275
+ "eval_p_rank/min_succ_fail_diff_last_usc_franka": 0.032290756702423096,
276
+ "eval_p_rank/max_succ_fail_diff_last_usc_franka": 0.4191764295101166,
277
+ "eval_p_rank/ranking_acc_last_usc_franka": 0.8958333333333334,
278
+ "eval_p_rank/ranking_acc_all_pairs_last_usc_franka": 0.8958333333333334,
279
+ "eval_p_rank/ranking_acc_suboptimal_vs_successful_last_usc_franka": 0.8125,
280
+ "eval_p_rank/ranking_acc_failure_vs_suboptimal_last_usc_franka": 0.9375,
281
+ "eval_p_rank/ranking_acc_failure_vs_successful_last_usc_franka": 0.9375,
282
+ "eval_p_rank/kendall_avg_usc_franka": 0.7916666666666666,
283
+ "eval_p_rank/kendall_rewind_avg_usc_franka": 0.8333333333333334,
284
+ "eval_p_rank/avg_succ_subopt_diff_avg_usc_franka": 0.06161930412054062,
285
+ "eval_p_rank/min_succ_subopt_diff_avg_usc_franka": -0.010589927434921265,
286
+ "eval_p_rank/max_succ_subopt_diff_avg_usc_franka": 0.17146822810173035,
287
+ "eval_p_rank/avg_subopt_fail_diff_avg_usc_franka": 0.18651490285992622,
288
+ "eval_p_rank/min_subopt_fail_diff_avg_usc_franka": 0.0136566162109375,
289
+ "eval_p_rank/max_subopt_fail_diff_avg_usc_franka": 0.3522116541862488,
290
+ "eval_p_rank/avg_succ_fail_diff_avg_usc_franka": 0.24813420698046684,
291
+ "eval_p_rank/min_succ_fail_diff_avg_usc_franka": 0.032290756702423096,
292
+ "eval_p_rank/max_succ_fail_diff_avg_usc_franka": 0.4191764295101166,
293
+ "eval_p_rank/ranking_acc_avg_usc_franka": 0.8958333333333334,
294
+ "eval_p_rank/ranking_acc_all_pairs_avg_usc_franka": 0.8958333333333334,
295
+ "eval_p_rank/ranking_acc_suboptimal_vs_successful_avg_usc_franka": 0.8125,
296
+ "eval_p_rank/ranking_acc_failure_vs_suboptimal_avg_usc_franka": 0.9375,
297
+ "eval_p_rank/ranking_acc_failure_vs_successful_avg_usc_franka": 0.9375,
298
+ "eval_p_rank/kendall_sum_usc_franka": 0.7916666666666666,
299
+ "eval_p_rank/kendall_rewind_sum_usc_franka": 0.8333333333333334,
300
+ "eval_p_rank/avg_succ_subopt_diff_sum_usc_franka": 0.06161930412054062,
301
+ "eval_p_rank/min_succ_subopt_diff_sum_usc_franka": -0.010589927434921265,
302
+ "eval_p_rank/max_succ_subopt_diff_sum_usc_franka": 0.17146822810173035,
303
+ "eval_p_rank/avg_subopt_fail_diff_sum_usc_franka": 0.18651490285992622,
304
+ "eval_p_rank/min_subopt_fail_diff_sum_usc_franka": 0.0136566162109375,
305
+ "eval_p_rank/max_subopt_fail_diff_sum_usc_franka": 0.3522116541862488,
306
+ "eval_p_rank/avg_succ_fail_diff_sum_usc_franka": 0.24813420698046684,
307
+ "eval_p_rank/min_succ_fail_diff_sum_usc_franka": 0.032290756702423096,
308
+ "eval_p_rank/max_succ_fail_diff_sum_usc_franka": 0.4191764295101166,
309
+ "eval_p_rank/ranking_acc_sum_usc_franka": 0.8958333333333334,
310
+ "eval_p_rank/ranking_acc_all_pairs_sum_usc_franka": 0.8958333333333334,
311
+ "eval_p_rank/ranking_acc_suboptimal_vs_successful_sum_usc_franka": 0.8125,
312
+ "eval_p_rank/ranking_acc_failure_vs_suboptimal_sum_usc_franka": 0.9375,
313
+ "eval_p_rank/ranking_acc_failure_vs_successful_sum_usc_franka": 0.9375,
314
+ "eval_rew_align/success_auprc_jesbu1_usc_koch_p_ranking_rfm_usc_koch_p_ranking_all": 0.08961776352588778,
315
+ "eval_rew_align/positive_success_acc_jesbu1_usc_koch_p_ranking_rfm_usc_koch_p_ranking_all": 0.9,
316
+ "eval_rew_align/negative_success_acc_jesbu1_usc_koch_p_ranking_rfm_usc_koch_p_ranking_all": 0.9552941176470588,
317
+ "eval_rew_align/loss_jesbu1_usc_koch_p_ranking_rfm_usc_koch_p_ranking_all": 1.5833105087280273,
318
+ "eval_rew_align/pearson_jesbu1_usc_koch_p_ranking_rfm_usc_koch_p_ranking_all": 0.8841339237987327,
319
+ "eval_p_rank/kendall_last_jesbu1_usc_koch_p_ranking_rfm_usc_koch_p_ranking_all": 0.3093333333333333,
320
+ "eval_p_rank/kendall_rewind_last_jesbu1_usc_koch_p_ranking_rfm_usc_koch_p_ranking_all": 0.5333333333333333,
321
+ "eval_p_rank/avg_succ_subopt_diff_last_jesbu1_usc_koch_p_ranking_rfm_usc_koch_p_ranking_all": -0.02896499633789066,
322
+ "eval_p_rank/min_succ_subopt_diff_last_jesbu1_usc_koch_p_ranking_rfm_usc_koch_p_ranking_all": -0.23819030523300178,
323
+ "eval_p_rank/max_succ_subopt_diff_last_jesbu1_usc_koch_p_ranking_rfm_usc_koch_p_ranking_all": 0.0715120196342468,
324
+ "eval_p_rank/avg_subopt_fail_diff_last_jesbu1_usc_koch_p_ranking_rfm_usc_koch_p_ranking_all": 0.20831456266343595,
325
+ "eval_p_rank/min_subopt_fail_diff_last_jesbu1_usc_koch_p_ranking_rfm_usc_koch_p_ranking_all": -0.16911142468452456,
326
+ "eval_p_rank/max_subopt_fail_diff_last_jesbu1_usc_koch_p_ranking_rfm_usc_koch_p_ranking_all": 0.4413899302482605,
327
+ "eval_p_rank/avg_succ_fail_diff_last_jesbu1_usc_koch_p_ranking_rfm_usc_koch_p_ranking_all": 0.17934956632554527,
328
+ "eval_p_rank/min_succ_fail_diff_last_jesbu1_usc_koch_p_ranking_rfm_usc_koch_p_ranking_all": -0.09759940505027775,
329
+ "eval_p_rank/max_succ_fail_diff_last_jesbu1_usc_koch_p_ranking_rfm_usc_koch_p_ranking_all": 0.4535657167434692,
330
+ "eval_p_rank/ranking_acc_last_jesbu1_usc_koch_p_ranking_rfm_usc_koch_p_ranking_all": 0.6546666666666666,
331
+ "eval_p_rank/ranking_acc_all_pairs_last_jesbu1_usc_koch_p_ranking_rfm_usc_koch_p_ranking_all": 0.6546666666666666,
332
+ "eval_p_rank/ranking_acc_failure_vs_suboptimal_last_jesbu1_usc_koch_p_ranking_rfm_usc_koch_p_ranking_all": 0.76,
333
+ "eval_p_rank/ranking_acc_failure_vs_successful_last_jesbu1_usc_koch_p_ranking_rfm_usc_koch_p_ranking_all": 0.744,
334
+ "eval_p_rank/ranking_acc_suboptimal_vs_successful_last_jesbu1_usc_koch_p_ranking_rfm_usc_koch_p_ranking_all": 0.46,
335
+ "eval_p_rank/kendall_avg_jesbu1_usc_koch_p_ranking_rfm_usc_koch_p_ranking_all": 0.3093333333333333,
336
+ "eval_p_rank/kendall_rewind_avg_jesbu1_usc_koch_p_ranking_rfm_usc_koch_p_ranking_all": 0.5333333333333333,
337
+ "eval_p_rank/avg_succ_subopt_diff_avg_jesbu1_usc_koch_p_ranking_rfm_usc_koch_p_ranking_all": -0.02896499633789066,
338
+ "eval_p_rank/min_succ_subopt_diff_avg_jesbu1_usc_koch_p_ranking_rfm_usc_koch_p_ranking_all": -0.23819030523300178,
339
+ "eval_p_rank/max_succ_subopt_diff_avg_jesbu1_usc_koch_p_ranking_rfm_usc_koch_p_ranking_all": 0.0715120196342468,
340
+ "eval_p_rank/avg_subopt_fail_diff_avg_jesbu1_usc_koch_p_ranking_rfm_usc_koch_p_ranking_all": 0.20831456266343595,
341
+ "eval_p_rank/min_subopt_fail_diff_avg_jesbu1_usc_koch_p_ranking_rfm_usc_koch_p_ranking_all": -0.16911142468452456,
342
+ "eval_p_rank/max_subopt_fail_diff_avg_jesbu1_usc_koch_p_ranking_rfm_usc_koch_p_ranking_all": 0.4413899302482605,
343
+ "eval_p_rank/avg_succ_fail_diff_avg_jesbu1_usc_koch_p_ranking_rfm_usc_koch_p_ranking_all": 0.17934956632554527,
344
+ "eval_p_rank/min_succ_fail_diff_avg_jesbu1_usc_koch_p_ranking_rfm_usc_koch_p_ranking_all": -0.09759940505027775,
345
+ "eval_p_rank/max_succ_fail_diff_avg_jesbu1_usc_koch_p_ranking_rfm_usc_koch_p_ranking_all": 0.4535657167434692,
346
+ "eval_p_rank/ranking_acc_avg_jesbu1_usc_koch_p_ranking_rfm_usc_koch_p_ranking_all": 0.6546666666666666,
347
+ "eval_p_rank/ranking_acc_all_pairs_avg_jesbu1_usc_koch_p_ranking_rfm_usc_koch_p_ranking_all": 0.6546666666666666,
348
+ "eval_p_rank/ranking_acc_failure_vs_suboptimal_avg_jesbu1_usc_koch_p_ranking_rfm_usc_koch_p_ranking_all": 0.76,
349
+ "eval_p_rank/ranking_acc_failure_vs_successful_avg_jesbu1_usc_koch_p_ranking_rfm_usc_koch_p_ranking_all": 0.744,
350
+ "eval_p_rank/ranking_acc_suboptimal_vs_successful_avg_jesbu1_usc_koch_p_ranking_rfm_usc_koch_p_ranking_all": 0.46,
351
+ "eval_p_rank/kendall_sum_jesbu1_usc_koch_p_ranking_rfm_usc_koch_p_ranking_all": 0.3093333333333333,
352
+ "eval_p_rank/kendall_rewind_sum_jesbu1_usc_koch_p_ranking_rfm_usc_koch_p_ranking_all": 0.5333333333333333,
353
+ "eval_p_rank/avg_succ_subopt_diff_sum_jesbu1_usc_koch_p_ranking_rfm_usc_koch_p_ranking_all": -0.02896499633789066,
354
+ "eval_p_rank/min_succ_subopt_diff_sum_jesbu1_usc_koch_p_ranking_rfm_usc_koch_p_ranking_all": -0.23819030523300178,
355
+ "eval_p_rank/max_succ_subopt_diff_sum_jesbu1_usc_koch_p_ranking_rfm_usc_koch_p_ranking_all": 0.0715120196342468,
356
+ "eval_p_rank/avg_subopt_fail_diff_sum_jesbu1_usc_koch_p_ranking_rfm_usc_koch_p_ranking_all": 0.20831456266343595,
357
+ "eval_p_rank/min_subopt_fail_diff_sum_jesbu1_usc_koch_p_ranking_rfm_usc_koch_p_ranking_all": -0.16911142468452456,
358
+ "eval_p_rank/max_subopt_fail_diff_sum_jesbu1_usc_koch_p_ranking_rfm_usc_koch_p_ranking_all": 0.4413899302482605,
359
+ "eval_p_rank/avg_succ_fail_diff_sum_jesbu1_usc_koch_p_ranking_rfm_usc_koch_p_ranking_all": 0.17934956632554527,
360
+ "eval_p_rank/min_succ_fail_diff_sum_jesbu1_usc_koch_p_ranking_rfm_usc_koch_p_ranking_all": -0.09759940505027775,
361
+ "eval_p_rank/max_succ_fail_diff_sum_jesbu1_usc_koch_p_ranking_rfm_usc_koch_p_ranking_all": 0.4535657167434692,
362
+ "eval_p_rank/ranking_acc_sum_jesbu1_usc_koch_p_ranking_rfm_usc_koch_p_ranking_all": 0.6546666666666666,
363
+ "eval_p_rank/ranking_acc_all_pairs_sum_jesbu1_usc_koch_p_ranking_rfm_usc_koch_p_ranking_all": 0.6546666666666666,
364
+ "eval_p_rank/ranking_acc_failure_vs_suboptimal_sum_jesbu1_usc_koch_p_ranking_rfm_usc_koch_p_ranking_all": 0.76,
365
+ "eval_p_rank/ranking_acc_failure_vs_successful_sum_jesbu1_usc_koch_p_ranking_rfm_usc_koch_p_ranking_all": 0.744,
366
+ "eval_p_rank/ranking_acc_suboptimal_vs_successful_sum_jesbu1_usc_koch_p_ranking_rfm_usc_koch_p_ranking_all": 0.46,
367
+ "time/custom_evaluations": 227.5345072869677
368
+ }
369
+ }
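
The evaluation block above is raw trainer output. As a minimal sketch (the filename `eval_log.json` and the nesting of the metrics dict are assumptions — this diff does not show which JSON file the block belongs to), the `eval_p_rank/*` entries can be grouped by their aggregation suffix (`_last`, `_avg`, `_sum`) and by eval split to make the repeated values easier to compare:

```python
import json
from collections import defaultdict

# Hypothetical path: point this at whatever log file the trainer actually wrote.
with open("eval_log.json") as f:
    data = json.load(f)

def iter_metrics(obj):
    """Yield (name, value) pairs from a possibly nested dict of metrics."""
    for key, val in obj.items():
        if isinstance(val, dict):
            yield from iter_metrics(val)
        elif isinstance(val, (int, float)):
            yield key, val

# Metric names follow "eval_p_rank/<metric>_<agg>_<split>" with <agg> in
# {last, avg, sum}; group them so each (split, agg) pair is easy to inspect.
grouped = defaultdict(dict)
for name, value in iter_metrics(data):
    if not name.startswith("eval_p_rank/"):
        continue
    body = name.split("/", 1)[1]
    for agg in ("_last_", "_avg_", "_sum_"):
        if agg in body:
            metric, split = body.split(agg, 1)
            grouped[(split, agg.strip("_"))][metric] = value
            break

for (split, agg), vals in sorted(grouped.items()):
    print(f"{split} [{agg}] ranking_acc={vals.get('ranking_acc')} "
          f"kendall={vals.get('kendall')}")
```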
model-00001-of-00002.safetensors ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:2cfa81761595f6b10bd079736eb32bdeeeb0b49224619411bd511d9d1f1171a4
3
+ size 4965629698
model-00002-of-00002.safetensors ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a25be9713ccc3cb14738be76ef6665742093d70323ec026ea70e8fa843a342be
3
+ size 3935034144
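
The two entries above are git-lfs pointer files: the repository stores only the sha256 oid and byte size of each weight shard, not the weights themselves. As a rough sketch (the repo id is a placeholder, not taken from this commit), the shards can be fetched with `huggingface_hub` and checked against the hashes recorded in the pointers; a matching digest plus matching byte count confirms the shard downloaded intact.

```python
import hashlib
import os
from huggingface_hub import hf_hub_download

# Placeholder repo id; substitute the actual model repository.
REPO_ID = "<namespace>/<model-repo>"

# sha256 oids and sizes copied from the LFS pointer files in this commit.
EXPECTED = {
    "model-00001-of-00002.safetensors": (
        "2cfa81761595f6b10bd079736eb32bdeeeb0b49224619411bd511d9d1f1171a4",
        4965629698,
    ),
    "model-00002-of-00002.safetensors": (
        "a25be9713ccc3cb14738be76ef6665742093d70323ec026ea70e8fa843a342be",
        3935034144,
    ),
}

for filename, (oid, size) in EXPECTED.items():
    path = hf_hub_download(repo_id=REPO_ID, filename=filename)
    # Hash in chunks: each shard is several GB, so avoid loading it whole.
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    assert h.hexdigest() == oid, f"hash mismatch for {filename}"
    assert os.path.getsize(path) == size, f"size mismatch for {filename}"
    print(filename, "verified:", size, "bytes")
```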
model.safetensors.index.json ADDED
@@ -0,0 +1,746 @@
1
+ {
2
+ "metadata": {
3
+ "total_parameters": 4450286861,
4
+ "total_size": 8900573722
5
+ },
6
+ "weight_map": {
7
+ "frame_pool_attn.weight": "model-00002-of-00002.safetensors",
8
+ "model.language_model.embed_tokens.weight": "model-00001-of-00002.safetensors",
9
+ "model.language_model.layers.0.input_layernorm.weight": "model-00001-of-00002.safetensors",
10
+ "model.language_model.layers.0.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
11
+ "model.language_model.layers.0.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
12
+ "model.language_model.layers.0.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
13
+ "model.language_model.layers.0.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
14
+ "model.language_model.layers.0.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
15
+ "model.language_model.layers.0.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
16
+ "model.language_model.layers.0.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
17
+ "model.language_model.layers.0.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
18
+ "model.language_model.layers.0.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
19
+ "model.language_model.layers.0.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
20
+ "model.language_model.layers.1.input_layernorm.weight": "model-00001-of-00002.safetensors",
21
+ "model.language_model.layers.1.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
22
+ "model.language_model.layers.1.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
23
+ "model.language_model.layers.1.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
24
+ "model.language_model.layers.1.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
25
+ "model.language_model.layers.1.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
26
+ "model.language_model.layers.1.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
27
+ "model.language_model.layers.1.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
28
+ "model.language_model.layers.1.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
29
+ "model.language_model.layers.1.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
30
+ "model.language_model.layers.1.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
31
+ "model.language_model.layers.10.input_layernorm.weight": "model-00001-of-00002.safetensors",
32
+ "model.language_model.layers.10.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
33
+ "model.language_model.layers.10.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
34
+ "model.language_model.layers.10.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
35
+ "model.language_model.layers.10.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
36
+ "model.language_model.layers.10.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
37
+ "model.language_model.layers.10.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
38
+ "model.language_model.layers.10.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
39
+ "model.language_model.layers.10.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
40
+ "model.language_model.layers.10.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
41
+ "model.language_model.layers.10.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
42
+ "model.language_model.layers.11.input_layernorm.weight": "model-00001-of-00002.safetensors",
43
+ "model.language_model.layers.11.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
44
+ "model.language_model.layers.11.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
45
+ "model.language_model.layers.11.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
46
+ "model.language_model.layers.11.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
47
+ "model.language_model.layers.11.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
48
+ "model.language_model.layers.11.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
49
+ "model.language_model.layers.11.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
50
+ "model.language_model.layers.11.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
51
+ "model.language_model.layers.11.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
52
+ "model.language_model.layers.11.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
53
+ "model.language_model.layers.12.input_layernorm.weight": "model-00001-of-00002.safetensors",
54
+ "model.language_model.layers.12.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
55
+ "model.language_model.layers.12.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
56
+ "model.language_model.layers.12.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
57
+ "model.language_model.layers.12.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
58
+ "model.language_model.layers.12.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
59
+ "model.language_model.layers.12.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
60
+ "model.language_model.layers.12.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
61
+ "model.language_model.layers.12.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
62
+ "model.language_model.layers.12.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
63
+ "model.language_model.layers.12.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
64
+ "model.language_model.layers.13.input_layernorm.weight": "model-00001-of-00002.safetensors",
65
+ "model.language_model.layers.13.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
66
+ "model.language_model.layers.13.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
67
+ "model.language_model.layers.13.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
68
+ "model.language_model.layers.13.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
69
+ "model.language_model.layers.13.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
70
+ "model.language_model.layers.13.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
71
+ "model.language_model.layers.13.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
72
+ "model.language_model.layers.13.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
73
+ "model.language_model.layers.13.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
74
+ "model.language_model.layers.13.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
75
+ "model.language_model.layers.14.input_layernorm.weight": "model-00001-of-00002.safetensors",
76
+ "model.language_model.layers.14.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
77
+ "model.language_model.layers.14.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
78
+ "model.language_model.layers.14.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
79
+ "model.language_model.layers.14.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
80
+ "model.language_model.layers.14.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
81
+ "model.language_model.layers.14.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
82
+ "model.language_model.layers.14.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
83
+ "model.language_model.layers.14.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
84
+ "model.language_model.layers.14.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
85
+ "model.language_model.layers.14.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
86
+ "model.language_model.layers.15.input_layernorm.weight": "model-00001-of-00002.safetensors",
87
+ "model.language_model.layers.15.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
88
+ "model.language_model.layers.15.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
89
+ "model.language_model.layers.15.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
90
+ "model.language_model.layers.15.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
91
+ "model.language_model.layers.15.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
92
+ "model.language_model.layers.15.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
93
+ "model.language_model.layers.15.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
94
+ "model.language_model.layers.15.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
95
+ "model.language_model.layers.15.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
96
+ "model.language_model.layers.15.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
97
+ "model.language_model.layers.16.input_layernorm.weight": "model-00002-of-00002.safetensors",
98
+ "model.language_model.layers.16.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
99
+ "model.language_model.layers.16.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
100
+ "model.language_model.layers.16.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
101
+ "model.language_model.layers.16.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
102
+ "model.language_model.layers.16.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
103
+ "model.language_model.layers.16.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
104
+ "model.language_model.layers.16.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
105
+ "model.language_model.layers.16.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
106
+ "model.language_model.layers.16.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
107
+ "model.language_model.layers.16.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
108
+ "model.language_model.layers.17.input_layernorm.weight": "model-00002-of-00002.safetensors",
109
+ "model.language_model.layers.17.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
110
+ "model.language_model.layers.17.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
111
+ "model.language_model.layers.17.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
112
+ "model.language_model.layers.17.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
113
+ "model.language_model.layers.17.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
114
+ "model.language_model.layers.17.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
115
+ "model.language_model.layers.17.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
116
+ "model.language_model.layers.17.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
117
+ "model.language_model.layers.17.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
118
+ "model.language_model.layers.17.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
119
+ "model.language_model.layers.18.input_layernorm.weight": "model-00002-of-00002.safetensors",
120
+ "model.language_model.layers.18.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
121
+ "model.language_model.layers.18.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
122
+ "model.language_model.layers.18.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
123
+ "model.language_model.layers.18.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
124
+ "model.language_model.layers.18.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
125
+ "model.language_model.layers.18.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
126
+ "model.language_model.layers.18.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
127
+ "model.language_model.layers.18.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
128
+ "model.language_model.layers.18.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
129
+ "model.language_model.layers.18.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
130
+ "model.language_model.layers.19.input_layernorm.weight": "model-00002-of-00002.safetensors",
131
+ "model.language_model.layers.19.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
132
+ "model.language_model.layers.19.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
133
+ "model.language_model.layers.19.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
134
+ "model.language_model.layers.19.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
135
+ "model.language_model.layers.19.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
136
+ "model.language_model.layers.19.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
137
+ "model.language_model.layers.19.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
138
+ "model.language_model.layers.19.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
139
+ "model.language_model.layers.19.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
140
+ "model.language_model.layers.19.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
141
+ "model.language_model.layers.2.input_layernorm.weight": "model-00001-of-00002.safetensors",
142
+ "model.language_model.layers.2.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
143
+ "model.language_model.layers.2.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
144
+ "model.language_model.layers.2.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
145
+ "model.language_model.layers.2.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
146
+ "model.language_model.layers.2.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
147
+ "model.language_model.layers.2.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
148
+ "model.language_model.layers.2.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
149
+ "model.language_model.layers.2.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
150
+ "model.language_model.layers.2.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
151
+ "model.language_model.layers.2.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
152
+ "model.language_model.layers.20.input_layernorm.weight": "model-00002-of-00002.safetensors",
153
+ "model.language_model.layers.20.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
154
+ "model.language_model.layers.20.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
155
+ "model.language_model.layers.20.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
156
+ "model.language_model.layers.20.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
157
+ "model.language_model.layers.20.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
158
+ "model.language_model.layers.20.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
159
+ "model.language_model.layers.20.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
160
+ "model.language_model.layers.20.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
161
+ "model.language_model.layers.20.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
162
+ "model.language_model.layers.20.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
163
+ "model.language_model.layers.21.input_layernorm.weight": "model-00002-of-00002.safetensors",
164
+ "model.language_model.layers.21.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
165
+ "model.language_model.layers.21.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
166
+ "model.language_model.layers.21.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
167
+ "model.language_model.layers.21.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
168
+ "model.language_model.layers.21.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
169
+ "model.language_model.layers.21.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
170
+ "model.language_model.layers.21.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
171
+ "model.language_model.layers.21.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
172
+ "model.language_model.layers.21.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
173
+ "model.language_model.layers.21.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
174
+ "model.language_model.layers.22.input_layernorm.weight": "model-00002-of-00002.safetensors",
175
+ "model.language_model.layers.22.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
176
+ "model.language_model.layers.22.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
177
+ "model.language_model.layers.22.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
178
+ "model.language_model.layers.22.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
179
+ "model.language_model.layers.22.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
180
+ "model.language_model.layers.22.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
181
+ "model.language_model.layers.22.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
182
+ "model.language_model.layers.22.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
183
+ "model.language_model.layers.22.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
184
+ "model.language_model.layers.22.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
185
+ "model.language_model.layers.23.input_layernorm.weight": "model-00002-of-00002.safetensors",
186
+ "model.language_model.layers.23.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
187
+ "model.language_model.layers.23.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
188
+ "model.language_model.layers.23.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
189
+ "model.language_model.layers.23.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
190
+ "model.language_model.layers.23.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
191
+ "model.language_model.layers.23.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
192
+ "model.language_model.layers.23.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
193
+ "model.language_model.layers.23.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
194
+ "model.language_model.layers.23.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
195
+ "model.language_model.layers.23.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
196
+ "model.language_model.layers.24.input_layernorm.weight": "model-00002-of-00002.safetensors",
197
+ "model.language_model.layers.24.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
198
+ "model.language_model.layers.24.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
199
+ "model.language_model.layers.24.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
200
+ "model.language_model.layers.24.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
201
+ "model.language_model.layers.24.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
202
+ "model.language_model.layers.24.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
203
+ "model.language_model.layers.24.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
204
+ "model.language_model.layers.24.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
205
+ "model.language_model.layers.24.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
206
+ "model.language_model.layers.24.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
207
+ "model.language_model.layers.25.input_layernorm.weight": "model-00002-of-00002.safetensors",
208
+ "model.language_model.layers.25.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
209
+ "model.language_model.layers.25.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
210
+ "model.language_model.layers.25.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
211
+ "model.language_model.layers.25.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
212
+ "model.language_model.layers.25.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
213
+ "model.language_model.layers.25.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
214
+ "model.language_model.layers.25.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
215
+ "model.language_model.layers.25.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
216
+ "model.language_model.layers.25.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
217
+ "model.language_model.layers.25.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
218
+ "model.language_model.layers.26.input_layernorm.weight": "model-00002-of-00002.safetensors",
219
+ "model.language_model.layers.26.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
220
+ "model.language_model.layers.26.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
221
+ "model.language_model.layers.26.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
222
+ "model.language_model.layers.26.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
223
+ "model.language_model.layers.26.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
224
+ "model.language_model.layers.26.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
225
+ "model.language_model.layers.26.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
226
+ "model.language_model.layers.26.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
227
+ "model.language_model.layers.26.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
228
+ "model.language_model.layers.26.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
229
+ "model.language_model.layers.27.input_layernorm.weight": "model-00002-of-00002.safetensors",
230
+ "model.language_model.layers.27.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
231
+ "model.language_model.layers.27.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
232
+ "model.language_model.layers.27.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
233
+ "model.language_model.layers.27.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
234
+ "model.language_model.layers.27.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
235
+ "model.language_model.layers.27.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
236
+ "model.language_model.layers.27.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
237
+ "model.language_model.layers.27.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
238
+ "model.language_model.layers.27.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
239
+ "model.language_model.layers.27.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
240
+ "model.language_model.layers.28.input_layernorm.weight": "model-00002-of-00002.safetensors",
241
+ "model.language_model.layers.28.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
242
+ "model.language_model.layers.28.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
243
+ "model.language_model.layers.28.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
244
+ "model.language_model.layers.28.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
245
+ "model.language_model.layers.28.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
246
+ "model.language_model.layers.28.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
247
+ "model.language_model.layers.28.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
248
+ "model.language_model.layers.28.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
249
+ "model.language_model.layers.28.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
250
+ "model.language_model.layers.28.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
251
+ "model.language_model.layers.29.input_layernorm.weight": "model-00002-of-00002.safetensors",
252
+ "model.language_model.layers.29.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
253
+ "model.language_model.layers.29.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
254
+ "model.language_model.layers.29.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
255
+ "model.language_model.layers.29.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
256
+ "model.language_model.layers.29.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
257
+ "model.language_model.layers.29.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
258
+ "model.language_model.layers.29.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
259
+ "model.language_model.layers.29.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
260
+ "model.language_model.layers.29.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
261
+ "model.language_model.layers.29.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
262
+ "model.language_model.layers.3.input_layernorm.weight": "model-00001-of-00002.safetensors",
263
+ "model.language_model.layers.3.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
264
+ "model.language_model.layers.3.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
265
+ "model.language_model.layers.3.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
266
+ "model.language_model.layers.3.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
267
+ "model.language_model.layers.3.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
268
+ "model.language_model.layers.3.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
269
+ "model.language_model.layers.3.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
270
+ "model.language_model.layers.3.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
271
+ "model.language_model.layers.3.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
272
+ "model.language_model.layers.3.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
273
+ "model.language_model.layers.30.input_layernorm.weight": "model-00002-of-00002.safetensors",
274
+ "model.language_model.layers.30.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
275
+ "model.language_model.layers.30.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
276
+ "model.language_model.layers.30.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
277
+ "model.language_model.layers.30.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
278
+ "model.language_model.layers.30.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
279
+ "model.language_model.layers.30.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
280
+ "model.language_model.layers.30.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
281
+ "model.language_model.layers.30.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
282
+ "model.language_model.layers.30.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
283
+ "model.language_model.layers.30.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
284
+ "model.language_model.layers.31.input_layernorm.weight": "model-00002-of-00002.safetensors",
285
+ "model.language_model.layers.31.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
286
+ "model.language_model.layers.31.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
287
+ "model.language_model.layers.31.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
288
+ "model.language_model.layers.31.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
289
+ "model.language_model.layers.31.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
290
+ "model.language_model.layers.31.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
291
+ "model.language_model.layers.31.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
292
+ "model.language_model.layers.31.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
293
+ "model.language_model.layers.31.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
294
+ "model.language_model.layers.31.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
295
+ "model.language_model.layers.32.input_layernorm.weight": "model-00002-of-00002.safetensors",
296
+ "model.language_model.layers.32.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
297
+ "model.language_model.layers.32.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
298
+ "model.language_model.layers.32.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
299
+ "model.language_model.layers.32.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
300
+ "model.language_model.layers.32.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
301
+ "model.language_model.layers.32.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
302
+ "model.language_model.layers.32.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
303
+ "model.language_model.layers.32.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
304
+ "model.language_model.layers.32.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
305
+ "model.language_model.layers.32.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
306
+ "model.language_model.layers.33.input_layernorm.weight": "model-00002-of-00002.safetensors",
307
+ "model.language_model.layers.33.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
308
+ "model.language_model.layers.33.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
309
+ "model.language_model.layers.33.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
310
+ "model.language_model.layers.33.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
311
+ "model.language_model.layers.33.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
312
+ "model.language_model.layers.33.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
313
+ "model.language_model.layers.33.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
314
+ "model.language_model.layers.33.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
315
+ "model.language_model.layers.33.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
316
+ "model.language_model.layers.33.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
317
+ "model.language_model.layers.34.input_layernorm.weight": "model-00002-of-00002.safetensors",
318
+ "model.language_model.layers.34.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
319
+ "model.language_model.layers.34.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
320
+ "model.language_model.layers.34.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
321
+ "model.language_model.layers.34.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
322
+ "model.language_model.layers.34.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
323
+ "model.language_model.layers.34.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
324
+ "model.language_model.layers.34.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
325
+ "model.language_model.layers.34.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
326
+ "model.language_model.layers.34.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
327
+ "model.language_model.layers.34.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
328
+ "model.language_model.layers.35.input_layernorm.weight": "model-00002-of-00002.safetensors",
329
+ "model.language_model.layers.35.mlp.down_proj.weight": "model-00002-of-00002.safetensors",
330
+ "model.language_model.layers.35.mlp.gate_proj.weight": "model-00002-of-00002.safetensors",
331
+ "model.language_model.layers.35.mlp.up_proj.weight": "model-00002-of-00002.safetensors",
332
+ "model.language_model.layers.35.post_attention_layernorm.weight": "model-00002-of-00002.safetensors",
333
+ "model.language_model.layers.35.self_attn.k_norm.weight": "model-00002-of-00002.safetensors",
334
+ "model.language_model.layers.35.self_attn.k_proj.weight": "model-00002-of-00002.safetensors",
335
+ "model.language_model.layers.35.self_attn.o_proj.weight": "model-00002-of-00002.safetensors",
336
+ "model.language_model.layers.35.self_attn.q_norm.weight": "model-00002-of-00002.safetensors",
337
+ "model.language_model.layers.35.self_attn.q_proj.weight": "model-00002-of-00002.safetensors",
338
+ "model.language_model.layers.35.self_attn.v_proj.weight": "model-00002-of-00002.safetensors",
339
+ "model.language_model.layers.4.input_layernorm.weight": "model-00001-of-00002.safetensors",
340
+ "model.language_model.layers.4.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
341
+ "model.language_model.layers.4.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
342
+ "model.language_model.layers.4.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
343
+ "model.language_model.layers.4.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
344
+ "model.language_model.layers.4.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
345
+ "model.language_model.layers.4.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
346
+ "model.language_model.layers.4.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
347
+ "model.language_model.layers.4.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
348
+ "model.language_model.layers.4.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
349
+ "model.language_model.layers.4.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
350
+ "model.language_model.layers.5.input_layernorm.weight": "model-00001-of-00002.safetensors",
351
+ "model.language_model.layers.5.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
352
+ "model.language_model.layers.5.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
353
+ "model.language_model.layers.5.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
354
+ "model.language_model.layers.5.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
355
+ "model.language_model.layers.5.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
356
+ "model.language_model.layers.5.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
357
+ "model.language_model.layers.5.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
358
+ "model.language_model.layers.5.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
359
+ "model.language_model.layers.5.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
360
+ "model.language_model.layers.5.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
361
+ "model.language_model.layers.6.input_layernorm.weight": "model-00001-of-00002.safetensors",
362
+ "model.language_model.layers.6.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
363
+ "model.language_model.layers.6.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
364
+ "model.language_model.layers.6.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
365
+ "model.language_model.layers.6.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
366
+ "model.language_model.layers.6.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
367
+ "model.language_model.layers.6.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
368
+ "model.language_model.layers.6.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
369
+ "model.language_model.layers.6.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
370
+ "model.language_model.layers.6.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
371
+ "model.language_model.layers.6.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
372
+ "model.language_model.layers.7.input_layernorm.weight": "model-00001-of-00002.safetensors",
373
+ "model.language_model.layers.7.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
374
+ "model.language_model.layers.7.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
375
+ "model.language_model.layers.7.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
376
+ "model.language_model.layers.7.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
377
+ "model.language_model.layers.7.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
378
+ "model.language_model.layers.7.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
379
+ "model.language_model.layers.7.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
380
+ "model.language_model.layers.7.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
381
+ "model.language_model.layers.7.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
382
+ "model.language_model.layers.7.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
383
+ "model.language_model.layers.8.input_layernorm.weight": "model-00001-of-00002.safetensors",
384
+ "model.language_model.layers.8.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
385
+ "model.language_model.layers.8.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
386
+ "model.language_model.layers.8.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
387
+ "model.language_model.layers.8.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
388
+ "model.language_model.layers.8.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
389
+ "model.language_model.layers.8.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
390
+ "model.language_model.layers.8.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
391
+ "model.language_model.layers.8.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
392
+ "model.language_model.layers.8.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
393
+ "model.language_model.layers.8.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
394
+ "model.language_model.layers.9.input_layernorm.weight": "model-00001-of-00002.safetensors",
395
+ "model.language_model.layers.9.mlp.down_proj.weight": "model-00001-of-00002.safetensors",
396
+ "model.language_model.layers.9.mlp.gate_proj.weight": "model-00001-of-00002.safetensors",
397
+ "model.language_model.layers.9.mlp.up_proj.weight": "model-00001-of-00002.safetensors",
398
+ "model.language_model.layers.9.post_attention_layernorm.weight": "model-00001-of-00002.safetensors",
399
+ "model.language_model.layers.9.self_attn.k_norm.weight": "model-00001-of-00002.safetensors",
400
+ "model.language_model.layers.9.self_attn.k_proj.weight": "model-00001-of-00002.safetensors",
401
+ "model.language_model.layers.9.self_attn.o_proj.weight": "model-00001-of-00002.safetensors",
402
+ "model.language_model.layers.9.self_attn.q_norm.weight": "model-00001-of-00002.safetensors",
403
+ "model.language_model.layers.9.self_attn.q_proj.weight": "model-00001-of-00002.safetensors",
404
+ "model.language_model.layers.9.self_attn.v_proj.weight": "model-00001-of-00002.safetensors",
405
+ "model.language_model.norm.weight": "model-00002-of-00002.safetensors",
406
+ "model.visual.blocks.0.attn.proj.bias": "model-00001-of-00002.safetensors",
407
+ "model.visual.blocks.0.attn.proj.weight": "model-00001-of-00002.safetensors",
408
+ "model.visual.blocks.0.attn.qkv.bias": "model-00001-of-00002.safetensors",
409
+ "model.visual.blocks.0.attn.qkv.weight": "model-00001-of-00002.safetensors",
410
+ "model.visual.blocks.0.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
411
+ "model.visual.blocks.0.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
412
+ "model.visual.blocks.0.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
413
+ "model.visual.blocks.0.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
414
+ "model.visual.blocks.0.norm1.bias": "model-00001-of-00002.safetensors",
415
+ "model.visual.blocks.0.norm1.weight": "model-00001-of-00002.safetensors",
416
+ "model.visual.blocks.0.norm2.bias": "model-00001-of-00002.safetensors",
417
+ "model.visual.blocks.0.norm2.weight": "model-00001-of-00002.safetensors",
418
+ "model.visual.blocks.1.attn.proj.bias": "model-00001-of-00002.safetensors",
419
+ "model.visual.blocks.1.attn.proj.weight": "model-00001-of-00002.safetensors",
420
+ "model.visual.blocks.1.attn.qkv.bias": "model-00001-of-00002.safetensors",
421
+ "model.visual.blocks.1.attn.qkv.weight": "model-00001-of-00002.safetensors",
422
+ "model.visual.blocks.1.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
423
+ "model.visual.blocks.1.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
424
+ "model.visual.blocks.1.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
425
+ "model.visual.blocks.1.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
426
+ "model.visual.blocks.1.norm1.bias": "model-00001-of-00002.safetensors",
427
+ "model.visual.blocks.1.norm1.weight": "model-00001-of-00002.safetensors",
428
+ "model.visual.blocks.1.norm2.bias": "model-00001-of-00002.safetensors",
429
+ "model.visual.blocks.1.norm2.weight": "model-00001-of-00002.safetensors",
430
+ "model.visual.blocks.10.attn.proj.bias": "model-00001-of-00002.safetensors",
431
+ "model.visual.blocks.10.attn.proj.weight": "model-00001-of-00002.safetensors",
432
+ "model.visual.blocks.10.attn.qkv.bias": "model-00001-of-00002.safetensors",
433
+ "model.visual.blocks.10.attn.qkv.weight": "model-00001-of-00002.safetensors",
434
+ "model.visual.blocks.10.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
435
+ "model.visual.blocks.10.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
436
+ "model.visual.blocks.10.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
437
+ "model.visual.blocks.10.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
438
+ "model.visual.blocks.10.norm1.bias": "model-00001-of-00002.safetensors",
439
+ "model.visual.blocks.10.norm1.weight": "model-00001-of-00002.safetensors",
440
+ "model.visual.blocks.10.norm2.bias": "model-00001-of-00002.safetensors",
441
+ "model.visual.blocks.10.norm2.weight": "model-00001-of-00002.safetensors",
442
+ "model.visual.blocks.11.attn.proj.bias": "model-00001-of-00002.safetensors",
443
+ "model.visual.blocks.11.attn.proj.weight": "model-00001-of-00002.safetensors",
444
+ "model.visual.blocks.11.attn.qkv.bias": "model-00001-of-00002.safetensors",
445
+ "model.visual.blocks.11.attn.qkv.weight": "model-00001-of-00002.safetensors",
446
+ "model.visual.blocks.11.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
447
+ "model.visual.blocks.11.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
448
+ "model.visual.blocks.11.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
449
+ "model.visual.blocks.11.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
450
+ "model.visual.blocks.11.norm1.bias": "model-00001-of-00002.safetensors",
451
+ "model.visual.blocks.11.norm1.weight": "model-00001-of-00002.safetensors",
452
+ "model.visual.blocks.11.norm2.bias": "model-00001-of-00002.safetensors",
453
+ "model.visual.blocks.11.norm2.weight": "model-00001-of-00002.safetensors",
454
+ "model.visual.blocks.12.attn.proj.bias": "model-00001-of-00002.safetensors",
455
+ "model.visual.blocks.12.attn.proj.weight": "model-00001-of-00002.safetensors",
456
+ "model.visual.blocks.12.attn.qkv.bias": "model-00001-of-00002.safetensors",
457
+ "model.visual.blocks.12.attn.qkv.weight": "model-00001-of-00002.safetensors",
458
+ "model.visual.blocks.12.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
459
+ "model.visual.blocks.12.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
460
+ "model.visual.blocks.12.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
461
+ "model.visual.blocks.12.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
462
+ "model.visual.blocks.12.norm1.bias": "model-00001-of-00002.safetensors",
463
+ "model.visual.blocks.12.norm1.weight": "model-00001-of-00002.safetensors",
464
+ "model.visual.blocks.12.norm2.bias": "model-00001-of-00002.safetensors",
465
+ "model.visual.blocks.12.norm2.weight": "model-00001-of-00002.safetensors",
466
+ "model.visual.blocks.13.attn.proj.bias": "model-00001-of-00002.safetensors",
467
+ "model.visual.blocks.13.attn.proj.weight": "model-00001-of-00002.safetensors",
468
+ "model.visual.blocks.13.attn.qkv.bias": "model-00001-of-00002.safetensors",
469
+ "model.visual.blocks.13.attn.qkv.weight": "model-00001-of-00002.safetensors",
470
+ "model.visual.blocks.13.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
471
+ "model.visual.blocks.13.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
472
+ "model.visual.blocks.13.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
473
+ "model.visual.blocks.13.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
474
+ "model.visual.blocks.13.norm1.bias": "model-00001-of-00002.safetensors",
475
+ "model.visual.blocks.13.norm1.weight": "model-00001-of-00002.safetensors",
476
+ "model.visual.blocks.13.norm2.bias": "model-00001-of-00002.safetensors",
477
+ "model.visual.blocks.13.norm2.weight": "model-00001-of-00002.safetensors",
478
+ "model.visual.blocks.14.attn.proj.bias": "model-00001-of-00002.safetensors",
479
+ "model.visual.blocks.14.attn.proj.weight": "model-00001-of-00002.safetensors",
480
+ "model.visual.blocks.14.attn.qkv.bias": "model-00001-of-00002.safetensors",
481
+ "model.visual.blocks.14.attn.qkv.weight": "model-00001-of-00002.safetensors",
482
+ "model.visual.blocks.14.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
483
+ "model.visual.blocks.14.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
484
+ "model.visual.blocks.14.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
485
+ "model.visual.blocks.14.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
486
+ "model.visual.blocks.14.norm1.bias": "model-00001-of-00002.safetensors",
487
+ "model.visual.blocks.14.norm1.weight": "model-00001-of-00002.safetensors",
488
+ "model.visual.blocks.14.norm2.bias": "model-00001-of-00002.safetensors",
489
+ "model.visual.blocks.14.norm2.weight": "model-00001-of-00002.safetensors",
490
+ "model.visual.blocks.15.attn.proj.bias": "model-00001-of-00002.safetensors",
491
+ "model.visual.blocks.15.attn.proj.weight": "model-00001-of-00002.safetensors",
492
+ "model.visual.blocks.15.attn.qkv.bias": "model-00001-of-00002.safetensors",
493
+ "model.visual.blocks.15.attn.qkv.weight": "model-00001-of-00002.safetensors",
494
+ "model.visual.blocks.15.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
495
+ "model.visual.blocks.15.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
496
+ "model.visual.blocks.15.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
497
+ "model.visual.blocks.15.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
498
+ "model.visual.blocks.15.norm1.bias": "model-00001-of-00002.safetensors",
499
+ "model.visual.blocks.15.norm1.weight": "model-00001-of-00002.safetensors",
500
+ "model.visual.blocks.15.norm2.bias": "model-00001-of-00002.safetensors",
501
+ "model.visual.blocks.15.norm2.weight": "model-00001-of-00002.safetensors",
502
+ "model.visual.blocks.16.attn.proj.bias": "model-00001-of-00002.safetensors",
503
+ "model.visual.blocks.16.attn.proj.weight": "model-00001-of-00002.safetensors",
504
+ "model.visual.blocks.16.attn.qkv.bias": "model-00001-of-00002.safetensors",
505
+ "model.visual.blocks.16.attn.qkv.weight": "model-00001-of-00002.safetensors",
506
+ "model.visual.blocks.16.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
507
+ "model.visual.blocks.16.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
508
+ "model.visual.blocks.16.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
509
+ "model.visual.blocks.16.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
510
+ "model.visual.blocks.16.norm1.bias": "model-00001-of-00002.safetensors",
511
+ "model.visual.blocks.16.norm1.weight": "model-00001-of-00002.safetensors",
512
+ "model.visual.blocks.16.norm2.bias": "model-00001-of-00002.safetensors",
513
+ "model.visual.blocks.16.norm2.weight": "model-00001-of-00002.safetensors",
514
+ "model.visual.blocks.17.attn.proj.bias": "model-00001-of-00002.safetensors",
515
+ "model.visual.blocks.17.attn.proj.weight": "model-00001-of-00002.safetensors",
516
+ "model.visual.blocks.17.attn.qkv.bias": "model-00001-of-00002.safetensors",
517
+ "model.visual.blocks.17.attn.qkv.weight": "model-00001-of-00002.safetensors",
518
+ "model.visual.blocks.17.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
519
+ "model.visual.blocks.17.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
520
+ "model.visual.blocks.17.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
521
+ "model.visual.blocks.17.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
522
+ "model.visual.blocks.17.norm1.bias": "model-00001-of-00002.safetensors",
523
+ "model.visual.blocks.17.norm1.weight": "model-00001-of-00002.safetensors",
524
+ "model.visual.blocks.17.norm2.bias": "model-00001-of-00002.safetensors",
525
+ "model.visual.blocks.17.norm2.weight": "model-00001-of-00002.safetensors",
526
+ "model.visual.blocks.18.attn.proj.bias": "model-00001-of-00002.safetensors",
527
+ "model.visual.blocks.18.attn.proj.weight": "model-00001-of-00002.safetensors",
528
+ "model.visual.blocks.18.attn.qkv.bias": "model-00001-of-00002.safetensors",
529
+ "model.visual.blocks.18.attn.qkv.weight": "model-00001-of-00002.safetensors",
530
+ "model.visual.blocks.18.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
531
+ "model.visual.blocks.18.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
532
+ "model.visual.blocks.18.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
533
+ "model.visual.blocks.18.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
534
+ "model.visual.blocks.18.norm1.bias": "model-00001-of-00002.safetensors",
535
+ "model.visual.blocks.18.norm1.weight": "model-00001-of-00002.safetensors",
536
+ "model.visual.blocks.18.norm2.bias": "model-00001-of-00002.safetensors",
537
+ "model.visual.blocks.18.norm2.weight": "model-00001-of-00002.safetensors",
538
+ "model.visual.blocks.19.attn.proj.bias": "model-00001-of-00002.safetensors",
539
+ "model.visual.blocks.19.attn.proj.weight": "model-00001-of-00002.safetensors",
540
+ "model.visual.blocks.19.attn.qkv.bias": "model-00001-of-00002.safetensors",
541
+ "model.visual.blocks.19.attn.qkv.weight": "model-00001-of-00002.safetensors",
542
+ "model.visual.blocks.19.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
543
+ "model.visual.blocks.19.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
544
+ "model.visual.blocks.19.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
545
+ "model.visual.blocks.19.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
546
+ "model.visual.blocks.19.norm1.bias": "model-00001-of-00002.safetensors",
547
+ "model.visual.blocks.19.norm1.weight": "model-00001-of-00002.safetensors",
548
+ "model.visual.blocks.19.norm2.bias": "model-00001-of-00002.safetensors",
549
+ "model.visual.blocks.19.norm2.weight": "model-00001-of-00002.safetensors",
550
+ "model.visual.blocks.2.attn.proj.bias": "model-00001-of-00002.safetensors",
551
+ "model.visual.blocks.2.attn.proj.weight": "model-00001-of-00002.safetensors",
552
+ "model.visual.blocks.2.attn.qkv.bias": "model-00001-of-00002.safetensors",
553
+ "model.visual.blocks.2.attn.qkv.weight": "model-00001-of-00002.safetensors",
554
+ "model.visual.blocks.2.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
555
+ "model.visual.blocks.2.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
556
+ "model.visual.blocks.2.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
557
+ "model.visual.blocks.2.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
558
+ "model.visual.blocks.2.norm1.bias": "model-00001-of-00002.safetensors",
559
+ "model.visual.blocks.2.norm1.weight": "model-00001-of-00002.safetensors",
560
+ "model.visual.blocks.2.norm2.bias": "model-00001-of-00002.safetensors",
561
+ "model.visual.blocks.2.norm2.weight": "model-00001-of-00002.safetensors",
562
+ "model.visual.blocks.20.attn.proj.bias": "model-00001-of-00002.safetensors",
563
+ "model.visual.blocks.20.attn.proj.weight": "model-00001-of-00002.safetensors",
564
+ "model.visual.blocks.20.attn.qkv.bias": "model-00001-of-00002.safetensors",
565
+ "model.visual.blocks.20.attn.qkv.weight": "model-00001-of-00002.safetensors",
566
+ "model.visual.blocks.20.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
567
+ "model.visual.blocks.20.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
568
+ "model.visual.blocks.20.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
569
+ "model.visual.blocks.20.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
570
+ "model.visual.blocks.20.norm1.bias": "model-00001-of-00002.safetensors",
571
+ "model.visual.blocks.20.norm1.weight": "model-00001-of-00002.safetensors",
572
+ "model.visual.blocks.20.norm2.bias": "model-00001-of-00002.safetensors",
573
+ "model.visual.blocks.20.norm2.weight": "model-00001-of-00002.safetensors",
574
+ "model.visual.blocks.21.attn.proj.bias": "model-00001-of-00002.safetensors",
575
+ "model.visual.blocks.21.attn.proj.weight": "model-00001-of-00002.safetensors",
576
+ "model.visual.blocks.21.attn.qkv.bias": "model-00001-of-00002.safetensors",
577
+ "model.visual.blocks.21.attn.qkv.weight": "model-00001-of-00002.safetensors",
578
+ "model.visual.blocks.21.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
579
+ "model.visual.blocks.21.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
580
+ "model.visual.blocks.21.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
581
+ "model.visual.blocks.21.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
582
+ "model.visual.blocks.21.norm1.bias": "model-00001-of-00002.safetensors",
583
+ "model.visual.blocks.21.norm1.weight": "model-00001-of-00002.safetensors",
584
+ "model.visual.blocks.21.norm2.bias": "model-00001-of-00002.safetensors",
585
+ "model.visual.blocks.21.norm2.weight": "model-00001-of-00002.safetensors",
586
+ "model.visual.blocks.22.attn.proj.bias": "model-00001-of-00002.safetensors",
587
+ "model.visual.blocks.22.attn.proj.weight": "model-00001-of-00002.safetensors",
588
+ "model.visual.blocks.22.attn.qkv.bias": "model-00001-of-00002.safetensors",
589
+ "model.visual.blocks.22.attn.qkv.weight": "model-00001-of-00002.safetensors",
590
+ "model.visual.blocks.22.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
591
+ "model.visual.blocks.22.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
592
+ "model.visual.blocks.22.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
593
+ "model.visual.blocks.22.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
594
+ "model.visual.blocks.22.norm1.bias": "model-00001-of-00002.safetensors",
595
+ "model.visual.blocks.22.norm1.weight": "model-00001-of-00002.safetensors",
596
+ "model.visual.blocks.22.norm2.bias": "model-00001-of-00002.safetensors",
597
+ "model.visual.blocks.22.norm2.weight": "model-00001-of-00002.safetensors",
598
+ "model.visual.blocks.23.attn.proj.bias": "model-00001-of-00002.safetensors",
599
+ "model.visual.blocks.23.attn.proj.weight": "model-00001-of-00002.safetensors",
600
+ "model.visual.blocks.23.attn.qkv.bias": "model-00001-of-00002.safetensors",
601
+ "model.visual.blocks.23.attn.qkv.weight": "model-00001-of-00002.safetensors",
602
+ "model.visual.blocks.23.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
603
+ "model.visual.blocks.23.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
604
+ "model.visual.blocks.23.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
605
+ "model.visual.blocks.23.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
606
+ "model.visual.blocks.23.norm1.bias": "model-00001-of-00002.safetensors",
607
+ "model.visual.blocks.23.norm1.weight": "model-00001-of-00002.safetensors",
608
+ "model.visual.blocks.23.norm2.bias": "model-00001-of-00002.safetensors",
609
+ "model.visual.blocks.23.norm2.weight": "model-00001-of-00002.safetensors",
610
+ "model.visual.blocks.3.attn.proj.bias": "model-00001-of-00002.safetensors",
611
+ "model.visual.blocks.3.attn.proj.weight": "model-00001-of-00002.safetensors",
612
+ "model.visual.blocks.3.attn.qkv.bias": "model-00001-of-00002.safetensors",
613
+ "model.visual.blocks.3.attn.qkv.weight": "model-00001-of-00002.safetensors",
614
+ "model.visual.blocks.3.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
615
+ "model.visual.blocks.3.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
616
+ "model.visual.blocks.3.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
617
+ "model.visual.blocks.3.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
618
+ "model.visual.blocks.3.norm1.bias": "model-00001-of-00002.safetensors",
619
+ "model.visual.blocks.3.norm1.weight": "model-00001-of-00002.safetensors",
620
+ "model.visual.blocks.3.norm2.bias": "model-00001-of-00002.safetensors",
621
+ "model.visual.blocks.3.norm2.weight": "model-00001-of-00002.safetensors",
622
+ "model.visual.blocks.4.attn.proj.bias": "model-00001-of-00002.safetensors",
623
+ "model.visual.blocks.4.attn.proj.weight": "model-00001-of-00002.safetensors",
624
+ "model.visual.blocks.4.attn.qkv.bias": "model-00001-of-00002.safetensors",
625
+ "model.visual.blocks.4.attn.qkv.weight": "model-00001-of-00002.safetensors",
626
+ "model.visual.blocks.4.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
627
+ "model.visual.blocks.4.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
628
+ "model.visual.blocks.4.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
629
+ "model.visual.blocks.4.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
630
+ "model.visual.blocks.4.norm1.bias": "model-00001-of-00002.safetensors",
631
+ "model.visual.blocks.4.norm1.weight": "model-00001-of-00002.safetensors",
632
+ "model.visual.blocks.4.norm2.bias": "model-00001-of-00002.safetensors",
633
+ "model.visual.blocks.4.norm2.weight": "model-00001-of-00002.safetensors",
634
+ "model.visual.blocks.5.attn.proj.bias": "model-00001-of-00002.safetensors",
635
+ "model.visual.blocks.5.attn.proj.weight": "model-00001-of-00002.safetensors",
636
+ "model.visual.blocks.5.attn.qkv.bias": "model-00001-of-00002.safetensors",
637
+ "model.visual.blocks.5.attn.qkv.weight": "model-00001-of-00002.safetensors",
638
+ "model.visual.blocks.5.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
639
+ "model.visual.blocks.5.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
640
+ "model.visual.blocks.5.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
641
+ "model.visual.blocks.5.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
642
+ "model.visual.blocks.5.norm1.bias": "model-00001-of-00002.safetensors",
643
+ "model.visual.blocks.5.norm1.weight": "model-00001-of-00002.safetensors",
644
+ "model.visual.blocks.5.norm2.bias": "model-00001-of-00002.safetensors",
645
+ "model.visual.blocks.5.norm2.weight": "model-00001-of-00002.safetensors",
646
+ "model.visual.blocks.6.attn.proj.bias": "model-00001-of-00002.safetensors",
647
+ "model.visual.blocks.6.attn.proj.weight": "model-00001-of-00002.safetensors",
648
+ "model.visual.blocks.6.attn.qkv.bias": "model-00001-of-00002.safetensors",
649
+ "model.visual.blocks.6.attn.qkv.weight": "model-00001-of-00002.safetensors",
650
+ "model.visual.blocks.6.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
651
+ "model.visual.blocks.6.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
652
+ "model.visual.blocks.6.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
653
+ "model.visual.blocks.6.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
654
+ "model.visual.blocks.6.norm1.bias": "model-00001-of-00002.safetensors",
655
+ "model.visual.blocks.6.norm1.weight": "model-00001-of-00002.safetensors",
656
+ "model.visual.blocks.6.norm2.bias": "model-00001-of-00002.safetensors",
657
+ "model.visual.blocks.6.norm2.weight": "model-00001-of-00002.safetensors",
658
+ "model.visual.blocks.7.attn.proj.bias": "model-00001-of-00002.safetensors",
659
+ "model.visual.blocks.7.attn.proj.weight": "model-00001-of-00002.safetensors",
660
+ "model.visual.blocks.7.attn.qkv.bias": "model-00001-of-00002.safetensors",
661
+ "model.visual.blocks.7.attn.qkv.weight": "model-00001-of-00002.safetensors",
662
+ "model.visual.blocks.7.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
663
+ "model.visual.blocks.7.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
664
+ "model.visual.blocks.7.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
665
+ "model.visual.blocks.7.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
666
+ "model.visual.blocks.7.norm1.bias": "model-00001-of-00002.safetensors",
667
+ "model.visual.blocks.7.norm1.weight": "model-00001-of-00002.safetensors",
668
+ "model.visual.blocks.7.norm2.bias": "model-00001-of-00002.safetensors",
669
+ "model.visual.blocks.7.norm2.weight": "model-00001-of-00002.safetensors",
670
+ "model.visual.blocks.8.attn.proj.bias": "model-00001-of-00002.safetensors",
671
+ "model.visual.blocks.8.attn.proj.weight": "model-00001-of-00002.safetensors",
672
+ "model.visual.blocks.8.attn.qkv.bias": "model-00001-of-00002.safetensors",
673
+ "model.visual.blocks.8.attn.qkv.weight": "model-00001-of-00002.safetensors",
674
+ "model.visual.blocks.8.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
675
+ "model.visual.blocks.8.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
676
+ "model.visual.blocks.8.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
677
+ "model.visual.blocks.8.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
678
+ "model.visual.blocks.8.norm1.bias": "model-00001-of-00002.safetensors",
679
+ "model.visual.blocks.8.norm1.weight": "model-00001-of-00002.safetensors",
680
+ "model.visual.blocks.8.norm2.bias": "model-00001-of-00002.safetensors",
681
+ "model.visual.blocks.8.norm2.weight": "model-00001-of-00002.safetensors",
682
+ "model.visual.blocks.9.attn.proj.bias": "model-00001-of-00002.safetensors",
683
+ "model.visual.blocks.9.attn.proj.weight": "model-00001-of-00002.safetensors",
684
+ "model.visual.blocks.9.attn.qkv.bias": "model-00001-of-00002.safetensors",
685
+ "model.visual.blocks.9.attn.qkv.weight": "model-00001-of-00002.safetensors",
686
+ "model.visual.blocks.9.mlp.linear_fc1.bias": "model-00001-of-00002.safetensors",
687
+ "model.visual.blocks.9.mlp.linear_fc1.weight": "model-00001-of-00002.safetensors",
688
+ "model.visual.blocks.9.mlp.linear_fc2.bias": "model-00001-of-00002.safetensors",
689
+ "model.visual.blocks.9.mlp.linear_fc2.weight": "model-00001-of-00002.safetensors",
690
+ "model.visual.blocks.9.norm1.bias": "model-00001-of-00002.safetensors",
691
+ "model.visual.blocks.9.norm1.weight": "model-00001-of-00002.safetensors",
692
+ "model.visual.blocks.9.norm2.bias": "model-00001-of-00002.safetensors",
693
+ "model.visual.blocks.9.norm2.weight": "model-00001-of-00002.safetensors",
694
+ "model.visual.deepstack_merger_list.0.linear_fc1.bias": "model-00001-of-00002.safetensors",
695
+ "model.visual.deepstack_merger_list.0.linear_fc1.weight": "model-00001-of-00002.safetensors",
696
+ "model.visual.deepstack_merger_list.0.linear_fc2.bias": "model-00001-of-00002.safetensors",
697
+ "model.visual.deepstack_merger_list.0.linear_fc2.weight": "model-00001-of-00002.safetensors",
698
+ "model.visual.deepstack_merger_list.0.norm.bias": "model-00001-of-00002.safetensors",
699
+ "model.visual.deepstack_merger_list.0.norm.weight": "model-00001-of-00002.safetensors",
700
+ "model.visual.deepstack_merger_list.1.linear_fc1.bias": "model-00001-of-00002.safetensors",
701
+ "model.visual.deepstack_merger_list.1.linear_fc1.weight": "model-00001-of-00002.safetensors",
702
+ "model.visual.deepstack_merger_list.1.linear_fc2.bias": "model-00001-of-00002.safetensors",
703
+ "model.visual.deepstack_merger_list.1.linear_fc2.weight": "model-00001-of-00002.safetensors",
704
+ "model.visual.deepstack_merger_list.1.norm.bias": "model-00001-of-00002.safetensors",
705
+ "model.visual.deepstack_merger_list.1.norm.weight": "model-00001-of-00002.safetensors",
706
+ "model.visual.deepstack_merger_list.2.linear_fc1.bias": "model-00001-of-00002.safetensors",
707
+ "model.visual.deepstack_merger_list.2.linear_fc1.weight": "model-00001-of-00002.safetensors",
708
+ "model.visual.deepstack_merger_list.2.linear_fc2.bias": "model-00001-of-00002.safetensors",
709
+ "model.visual.deepstack_merger_list.2.linear_fc2.weight": "model-00001-of-00002.safetensors",
710
+ "model.visual.deepstack_merger_list.2.norm.bias": "model-00001-of-00002.safetensors",
711
+ "model.visual.deepstack_merger_list.2.norm.weight": "model-00001-of-00002.safetensors",
712
+ "model.visual.merger.linear_fc1.bias": "model-00001-of-00002.safetensors",
713
+ "model.visual.merger.linear_fc1.weight": "model-00001-of-00002.safetensors",
714
+ "model.visual.merger.linear_fc2.bias": "model-00001-of-00002.safetensors",
715
+ "model.visual.merger.linear_fc2.weight": "model-00001-of-00002.safetensors",
716
+ "model.visual.merger.norm.bias": "model-00001-of-00002.safetensors",
717
+ "model.visual.merger.norm.weight": "model-00001-of-00002.safetensors",
718
+ "model.visual.patch_embed.proj.bias": "model-00001-of-00002.safetensors",
719
+ "model.visual.patch_embed.proj.weight": "model-00001-of-00002.safetensors",
720
+ "model.visual.pos_embed.weight": "model-00001-of-00002.safetensors",
721
+ "preference_head.0.bias": "model-00001-of-00002.safetensors",
722
+ "preference_head.0.weight": "model-00001-of-00002.safetensors",
723
+ "preference_head.1.bias": "model-00001-of-00002.safetensors",
724
+ "preference_head.1.weight": "model-00001-of-00002.safetensors",
725
+ "preference_head.4.bias": "model-00001-of-00002.safetensors",
726
+ "preference_head.4.weight": "model-00001-of-00002.safetensors",
727
+ "progress_head.0.bias": "model-00001-of-00002.safetensors",
728
+ "progress_head.0.weight": "model-00001-of-00002.safetensors",
729
+ "progress_head.1.bias": "model-00001-of-00002.safetensors",
730
+ "progress_head.1.weight": "model-00001-of-00002.safetensors",
731
+ "progress_head.4.bias": "model-00001-of-00002.safetensors",
732
+ "progress_head.4.weight": "model-00001-of-00002.safetensors",
733
+ "similarity_head.0.bias": "model-00001-of-00002.safetensors",
734
+ "similarity_head.0.weight": "model-00001-of-00002.safetensors",
735
+ "similarity_head.1.bias": "model-00001-of-00002.safetensors",
736
+ "similarity_head.1.weight": "model-00001-of-00002.safetensors",
737
+ "similarity_head.4.bias": "model-00001-of-00002.safetensors",
738
+ "similarity_head.4.weight": "model-00001-of-00002.safetensors",
739
+ "success_head.0.bias": "model-00001-of-00002.safetensors",
740
+ "success_head.0.weight": "model-00001-of-00002.safetensors",
741
+ "success_head.1.bias": "model-00001-of-00002.safetensors",
742
+ "success_head.1.weight": "model-00001-of-00002.safetensors",
743
+ "success_head.4.bias": "model-00001-of-00002.safetensors",
744
+ "success_head.4.weight": "model-00001-of-00002.safetensors"
745
+ }
746
+ }
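
The weight map above pairs each parameter name with the shard that stores it; every vision-tower block and all four task heads (preference, progress, similarity, success) resolve to `model-00001-of-00002.safetensors`. As a rough illustration only (not a script shipped in this repository, and the local checkpoint directory is a placeholder assumption), the sketch below shows how such an index can be used to find and load a single head tensor with `safetensors`:

```python
# Minimal sketch: look up a tensor's shard in model.safetensors.index.json
# and load just that tensor. Assumes the checkpoint files have already been
# downloaded to `ckpt_dir` (placeholder path).
import json
import os

from safetensors import safe_open

ckpt_dir = "./rfm_qwen4b_checkpoint"  # placeholder, not part of this repo

with open(os.path.join(ckpt_dir, "model.safetensors.index.json")) as f:
    index = json.load(f)

weight_map = index["weight_map"]  # parameter name -> shard filename

# Which shard holds the success head's first linear layer?
name = "success_head.0.weight"
shard = weight_map[name]  # e.g. "model-00001-of-00002.safetensors"

# Open only that shard and read only that tensor.
with safe_open(os.path.join(ckpt_dir, shard), framework="pt") as f:
    tensor = f.get_tensor(name)

print(name, tuple(tensor.shape))
```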
trainer_state.json ADDED
The diff for this file is too large to render. See raw diff
 
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7ee6d07f9f33f072bc6f199a3b4fb69371497b873dfd8b658a717513dff217b1
+ size 5905