gribes02 committed on
Commit
e0bbdc9
·
verified ·
1 Parent(s): 4ad8985

Upload folder using huggingface_hub

Browse files
config.json ADDED
@@ -0,0 +1,64 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "action_dim": 32,
3
+ "action_head_cfg": {
4
+ "action_dim": 32,
5
+ "action_horizon": 16,
6
+ "add_pos_embed": true,
7
+ "backbone_embedding_dim": 2048,
8
+ "diffusion_model_cfg": {
9
+ "attention_head_dim": 48,
10
+ "cross_attention_dim": 2048,
11
+ "dropout": 0.2,
12
+ "final_dropout": true,
13
+ "interleave_self_attention": true,
14
+ "norm_type": "ada_norm",
15
+ "num_attention_heads": 32,
16
+ "num_layers": 16,
17
+ "output_dim": 1024,
18
+ "positional_embeddings": null
19
+ },
20
+ "hidden_size": 1024,
21
+ "input_embedding_dim": 1536,
22
+ "max_action_dim": 32,
23
+ "max_state_dim": 64,
24
+ "model_dtype": "float32",
25
+ "noise_beta_alpha": 1.5,
26
+ "noise_beta_beta": 1.0,
27
+ "noise_s": 0.999,
28
+ "num_inference_timesteps": 4,
29
+ "num_target_vision_tokens": 32,
30
+ "num_timestep_buckets": 1000,
31
+ "tune_diffusion_model": true,
32
+ "tune_projector": true,
33
+ "use_vlln": true,
34
+ "vl_self_attention_cfg": {
35
+ "attention_head_dim": 64,
36
+ "dropout": 0.2,
37
+ "final_dropout": true,
38
+ "num_attention_heads": 32,
39
+ "num_layers": 4,
40
+ "positional_embeddings": null
41
+ }
42
+ },
43
+ "action_horizon": 16,
44
+ "architectures": [
45
+ "GR00T_N1_5"
46
+ ],
47
+ "attn_implementation": null,
48
+ "backbone_cfg": {
49
+ "eagle_path": "NVEagle/eagle_er-qwen3_1_7B-Siglip2_400M_stage1_5_128gpu_er_v7_1mlp_nops",
50
+ "load_bf16": false,
51
+ "project_to_dim": null,
52
+ "reproject_vision": false,
53
+ "select_layer": 12,
54
+ "tune_llm": false,
55
+ "tune_visual": true,
56
+ "use_flash_attention": true
57
+ },
58
+ "compute_dtype": "bfloat16",
59
+ "hidden_size": 2048,
60
+ "model_dtype": "float32",
61
+ "model_type": "gr00t_n1_5",
62
+ "torch_dtype": "bfloat16",
63
+ "transformers_version": "4.51.3"
64
+ }
experiment_cfg/metadata.json ADDED
@@ -0,0 +1,157 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "new_embodiment": {
3
+ "statistics": {
4
+ "state": {
5
+ "state": {
6
+ "max": [
7
+ 0.20934700965881348,
8
+ -0.9043770432472229,
9
+ 2.3769593238830566,
10
+ -1.1567189693450928,
11
+ -1.3232640027999878,
12
+ 0.20827478170394897
13
+ ],
14
+ "min": [
15
+ -1.3643263578414917,
16
+ -1.883949875831604,
17
+ 0.8309424519538879,
18
+ -2.376842737197876,
19
+ -1.8740051984786987,
20
+ -1.3731602430343628
21
+ ],
22
+ "mean": [
23
+ -0.6751685738563538,
24
+ -1.4234561920166016,
25
+ 1.6375254392623901,
26
+ -1.7832401990890503,
27
+ -1.574040412902832,
28
+ -0.6879891753196716
29
+ ],
30
+ "std": [
31
+ 0.4380858838558197,
32
+ 0.19184131920337677,
33
+ 0.2520727217197418,
34
+ 0.17655931413173676,
35
+ 0.02907581813633442,
36
+ 0.43938571214675903
37
+ ],
38
+ "q01": [
39
+ -1.3641000175476075,
40
+ -1.7913962507247925,
41
+ 1.0616868257522583,
42
+ -2.0909526348114014,
43
+ -1.7122604179382324,
44
+ -1.372908787727356
45
+ ],
46
+ "q99": [
47
+ 0.08337759971618652,
48
+ -0.9879975026845932,
49
+ 2.1825382137298583,
50
+ -1.454513840675354,
51
+ -1.4758443689346312,
52
+ 0.08226962618529798
53
+ ]
54
+ }
55
+ },
56
+ "action": {
57
+ "action": {
58
+ "max": [
59
+ 4.040794849395752,
60
+ 5.006350994110107,
61
+ 3.108442544937134,
62
+ 2.727471351623535,
63
+ 3.8457295894622803,
64
+ 12.414449691772461,
65
+ 1.0
66
+ ],
67
+ "min": [
68
+ -3.390742540359497,
69
+ -7.570286273956299,
70
+ -2.8482439517974854,
71
+ -3.1005160808563232,
72
+ -6.043304443359375,
73
+ -9.58272647857666,
74
+ 0.0
75
+ ],
76
+ "mean": [
77
+ 0.0036002304404973984,
78
+ 0.006521307397633791,
79
+ 0.03644680604338646,
80
+ 0.013165912590920925,
81
+ -0.0031247271690517664,
82
+ 0.0027601178735494614,
83
+ 0.438744455575943
84
+ ],
85
+ "std": [
86
+ 0.9317495226860046,
87
+ 1.4023118019104004,
88
+ 1.0074294805526733,
89
+ 0.2698170244693756,
90
+ 0.288400262594223,
91
+ 0.4214145541191101,
92
+ 0.4962522089481354
93
+ ],
94
+ "q01": [
95
+ -2.684251685142517,
96
+ -3.576507420539856,
97
+ -2.627242934703827,
98
+ -0.7765481585264207,
99
+ -1.3350203585624696,
100
+ -1.533315087556839,
101
+ 0.0
102
+ ],
103
+ "q99": [
104
+ 2.5178834652900703,
105
+ 3.836293096542359,
106
+ 2.622321400642395,
107
+ 1.3134438729286195,
108
+ 0.8244599223136936,
109
+ 0.7647276377677951,
110
+ 1.0
111
+ ]
112
+ }
113
+ }
114
+ },
115
+ "modalities": {
116
+ "video": {
117
+ "webcam": {
118
+ "resolution": [
119
+ 256,
120
+ 256
121
+ ],
122
+ "channels": 3,
123
+ "fps": 30.0
124
+ },
125
+ "wrist": {
126
+ "resolution": [
127
+ 256,
128
+ 256
129
+ ],
130
+ "channels": 3,
131
+ "fps": 30.0
132
+ }
133
+ },
134
+ "state": {
135
+ "state": {
136
+ "absolute": true,
137
+ "rotation_type": null,
138
+ "shape": [
139
+ 6
140
+ ],
141
+ "continuous": true
142
+ }
143
+ },
144
+ "action": {
145
+ "action": {
146
+ "absolute": true,
147
+ "rotation_type": null,
148
+ "shape": [
149
+ 7
150
+ ],
151
+ "continuous": true
152
+ }
153
+ }
154
+ },
155
+ "embodiment_tag": "new_embodiment"
156
+ }
157
+ }
model-00001-of-00002.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7c57a5b2a733e0a51c29c59369ea710fe9e3d1757b42b9bcc78c087a485f2e2e
3
+ size 4999367032
model-00002-of-00002.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7942d02e1b8d9b7500bbb6f5894ab3c80033dda2286101148fb987f9c480256a
3
+ size 2586705312
model.safetensors.index.json ADDED
The diff for this file is too large to render. See raw diff
 
rng_state.pth ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a35eeb367db1db144b5eb58666e16763754b62067441825ba242e62ddc404964
3
+ size 14244
scheduler.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:fe8c620e9a7808e10f2bc64fa81fe5b1efb6dcc2acb3596ed32f58119a5368c1
3
+ size 1064
trainer_state.json ADDED
The diff for this file is too large to render. See raw diff
 
training_args.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e5334e770fb8fd14b15c9f64bd999c7f83160863ed45e2047858af8612bd5f05
3
+ size 5368