{
  "_attn_implementation_autoset": true,
  "architectures": [
    "FIMSDE"
  ],
  "dropout_rate": 0.1,
  "dtype": "float32",
  "finetune": false,
  "finetune_detach_diffusion": false,
  "finetune_em_steps": 1,
  "finetune_num_points": -1,
  "finetune_on_sampling_mse": false,
  "finetune_on_sampling_nll": false,
  "finetune_samples_count": 1,
  "finetune_samples_steps": 1,
  "hidden_act": {
    "name": "torch.nn.GELU"
  },
  "hidden_layers": [
    256,
    256
  ],
  "learnable_loss_scales": {
    "attention": {
      "activation": "torch.nn.GELU",
      "dim_feedforward": 1024,
      "dropout": 0.1,
      "nhead": 8
    },
    "num_res_layers": 8,
    "paths_block_attention": false,
    "projection": {
      "dropout": 0.1,
      "hidden_act": {
        "name": "torch.nn.GELU"
      },
      "hidden_layers": [
        256,
        256
      ],
      "name": "fim.models.blocks.base.MLP"
    }
  },
  "max_dimension": 3,
  "model_embedding_size": 256,
  "model_type": "fimsde",
  "name": "FIMSDE",
  "num_locations_on_path": 32,
  "operator": {
    "attention": {
      "activation": "torch.nn.GELU",
      "dim_feedforward": 1024,
      "dropout": 0.1,
      "nhead": 8
    },
    "num_res_layers": 8,
    "paths_block_attention": false,
    "projection": {
      "dropout": 0.1,
      "hidden_act": {
        "name": "torch.nn.GELU"
      },
      "hidden_layers": [
        256,
        256
      ],
      "name": "fim.models.blocks.base.MLP"
    }
  },
  "phi_0t": {
    "name": "torch.nn.Linear"
  },
  "phi_0x": {
    "name": "torch.nn.Linear"
  },
  "phi_1x": {
    "name": "torch.nn.Linear"
  },
  "psi_1": {
    "layer": {
      "activation": "torch.nn.GELU",
      "attn_method": "linear",
      "dim_feedforward": 1024,
      "dropout": 0.1,
      "lin_feature_map": "softmax",
      "lin_normalize": false,
      "nhead": 8
    },
    "name": "CombinedPathTransformer",
    "num_layers": 2
  },
  "residual_ff_size": 1024,
  "states_norm": {
    "name": "fim.models.sde.Standardization"
  },
  "times_norm": {
    "name": "fim.models.sde.DeltaLogCentering"
  },
  "transformer_layer_activation": "gelu",
  "transformers_version": "4.57.1"
}
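
A minimal sketch, assuming the JSON above is saved locally as config.json, of loading it and reading a few of the FIMSDE hyperparameters with only the Python standard library (no fim or transformers dependency required):

import json

# Load the raw configuration dict; "config.json" is an assumed local path.
with open("config.json") as f:
    cfg = json.load(f)

# A few of the top-level hyperparameters defined above.
print(cfg["model_type"])              # fimsde
print(cfg["model_embedding_size"])    # 256
print(cfg["max_dimension"])           # 3
print(cfg["psi_1"]["num_layers"])     # 2

# Nested module specs (e.g. the operator's projection MLP) are plain dicts.
print(cfg["operator"]["projection"]["hidden_layers"])  # [256, 256]

For loading the model itself, the "transformers_version" and "architectures" fields suggest a Hugging Face Transformers-style config, but since "fimsde" is a custom model_type, the exact loading call depends on how the fim package registers it and is not shown here.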