{
  "model_type": "Gr00tN1d6",
  "model_dtype": "bfloat16",
  "model_name": "nvidia/Eagle-Block2A-2B-v2",
  "backbone_model_type": "eagle",
  "model_revision": null,
  "tune_top_llm_layers": 4,
  "backbone_embedding_dim": 2048,
  "tune_llm": false,
  "tune_visual": false,
  "select_layer": 16,
  "reproject_vision": false,
  "use_flash_attention": true,
  "load_bf16": true,
  "collator_overwrite_image_inputs": false,
  "eagle_collator": true,
  "backbone_trainable_params_fp32": true,
  "apply_sincos_state_encoding": true,
  "use_relative_action": true,
  "max_state_dim": 128,
  "max_action_dim": 128,
  "action_horizon": 50,
  "hidden_size": 1024,
  "input_embedding_dim": 1536,
  "add_pos_embed": true,
  "attn_dropout": 0.2,
  "use_vlln": true,
  "max_seq_len": 1024,
  "use_alternate_vl_dit": true,
  "attend_text_every_n_blocks": 2,
  "diffusion_model_cfg": {
    "attention_head_dim": 48,
    "dropout": 0.2,
    "final_dropout": true,
    "interleave_self_attention": true,
    "norm_type": "ada_norm",
    "num_attention_heads": 32,
    "num_layers": 32,
    "output_dim": 1024,
    "positional_embeddings": null
  },
  "num_inference_timesteps": 4,
  "noise_beta_alpha": 1.5,
  "noise_beta_beta": 1.0,
  "noise_s": 0.999,
  "num_timestep_buckets": 1000,
  "tune_projector": true,
  "tune_diffusion_model": true,
  "tune_vlln": true,
  "state_dropout_prob": 0.0,
  "state_additive_noise_scale": 0.0,
  "max_num_embodiments": 32
}