hhhJB committed on
Commit
03489cc
·
verified ·
1 Parent(s): f707214

Upload config.json

Browse files
Files changed (1) hide show
  1. config.json +55 -0
config.json ADDED
@@ -0,0 +1,55 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "accelerator": null,
3
+ "action_dim": 128,
4
+ "ema": {
5
+ "inv_gamma": 1.0,
6
+ "max_value": 0.9999,
7
+ "min_value": 0.0,
8
+ "power": 0.75,
9
+ "update_after_step": 0
10
+ },
11
+ "enc_type": "theia-base-vit",
12
+ "img_adaptor": "mlp2x_gelu",
13
+ "img_cond_len": 4374,
14
+ "img_pos_embed_config": [
15
+ [
16
+ "image",
17
+ [
18
+ 2,
19
+ 3,
20
+ -729
21
+ ]
22
+ ]
23
+ ],
24
+ "img_token_dim": 1152,
25
+ "lang_adaptor": "mlp2x_gelu",
26
+ "lang_pos_embed_config": [
27
+ [
28
+ "lang",
29
+ -1024
30
+ ]
31
+ ],
32
+ "lang_token_dim": 4096,
33
+ "learnable_tokens": 196,
34
+ "lora_config": null,
35
+ "max_lang_cond_len": 1024,
36
+ "noise_scheduler": {
37
+ "beta_schedule": "squaredcos_cap_v2",
38
+ "clip_sample": false,
39
+ "num_inference_timesteps": 5,
40
+ "num_train_timesteps": 1000,
41
+ "prediction_type": "sample",
42
+ "type": "ddpm"
43
+ },
44
+ "pred_horizon": 64,
45
+ "rdt": {
46
+ "cond_pos_embed_type": "multimodal",
47
+ "depth": 28,
48
+ "hidden_size": 2048,
49
+ "num_heads": 32
50
+ },
51
+ "resolution": 256,
52
+ "state_adaptor": "mlp3x_gelu",
53
+ "state_token_dim": 128,
54
+ "use_lora": false
55
+ }