SinjaeKang committed on
Commit 44c75eb · verified · 1 Parent(s): 9efa160

Upload BASE_seed0_40k/config.json with huggingface_hub

Files changed (1)
  1. BASE_seed0_40k/config.json +65 -0
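The commit message indicates the file was pushed with huggingface_hub. Below is a minimal sketch of such an upload; the target `repo_id` is a hypothetical placeholder, since the actual repository name is not part of this diff.

```python
# Minimal upload sketch, assuming a local BASE_seed0_40k/config.json and a
# hypothetical repo id; authentication comes from `huggingface-cli login`.
from huggingface_hub import HfApi

api = HfApi()
api.upload_file(
    path_or_fileobj="BASE_seed0_40k/config.json",   # local file to push
    path_in_repo="BASE_seed0_40k/config.json",      # destination path in the repo
    repo_id="SinjaeKang/your-repo-name",            # hypothetical: replace with the real repo
    commit_message="Upload BASE_seed0_40k/config.json with huggingface_hub",
)
```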
BASE_seed0_40k/config.json ADDED
@@ -0,0 +1,65 @@
+{
+  "action_dim": 32,
+  "action_head_cfg": {
+    "action_dim": 32,
+    "action_head_type": "flowmatching",
+    "action_horizon": 16,
+    "add_pos_embed": true,
+    "backbone_embedding_dim": 2048,
+    "diffusion_model_cfg": {
+      "attention_head_dim": 48,
+      "cross_attention_dim": 2048,
+      "dropout": 0.2,
+      "final_dropout": true,
+      "interleave_self_attention": true,
+      "norm_type": "ada_norm",
+      "num_attention_heads": 32,
+      "num_layers": 16,
+      "output_dim": 1024,
+      "positional_embeddings": null
+    },
+    "hidden_size": 1024,
+    "input_embedding_dim": 1536,
+    "max_action_dim": 32,
+    "max_state_dim": 64,
+    "model_dtype": "float32",
+    "noise_beta_alpha": 1.5,
+    "noise_beta_beta": 1.0,
+    "noise_s": 0.999,
+    "num_inference_timesteps": 4,
+    "num_target_vision_tokens": 32,
+    "num_timestep_buckets": 1000,
+    "tune_diffusion_model": true,
+    "tune_projector": true,
+    "use_vlln": true,
+    "vl_self_attention_cfg": {
+      "attention_head_dim": 64,
+      "dropout": 0.2,
+      "final_dropout": true,
+      "num_attention_heads": 32,
+      "num_layers": 4,
+      "positional_embeddings": null
+    }
+  },
+  "action_horizon": 16,
+  "architectures": [
+    "GR00T_N1_5"
+  ],
+  "attn_implementation": null,
+  "backbone_cfg": {
+    "eagle_path": "NVEagle/eagle_er-qwen3_1_7B-Siglip2_400M_stage1_5_128gpu_er_v7_1mlp_nops",
+    "load_bf16": false,
+    "project_to_dim": null,
+    "reproject_vision": false,
+    "select_layer": 12,
+    "tune_llm": false,
+    "tune_visual": true,
+    "use_flash_attention": true
+  },
+  "compute_dtype": "bfloat16",
+  "hidden_size": 2048,
+  "model_dtype": "float32",
+  "model_type": "gr00t_n1_5",
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.51.3"
+}
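The added config can be inspected directly once a local copy exists (for example after downloading it with huggingface_hub). A minimal sketch, assuming the file is at the same relative path as in the repo:

```python
# Minimal sketch: read the newly added GR00T N1.5 config and print a few key fields.
# Assumes a local copy of BASE_seed0_40k/config.json.
import json

with open("BASE_seed0_40k/config.json") as f:
    cfg = json.load(f)

head = cfg["action_head_cfg"]
print(cfg["model_type"])                                  # gr00t_n1_5
print(cfg["action_dim"], cfg["action_horizon"])           # 32-dim actions, 16-step horizon
print(head["action_head_type"], head["num_inference_timesteps"])  # flowmatching, 4 inference steps
```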