{
"_attn_implementation_autoset": true,
"model_config": {
"attention_map": "softmax",
"attention_method": "linear",
"dim_embed": 256,
"dim_feedforward": 1024,
"dim_ffn_u_model": 1024,
"dim_hidden_u_model": 256,
"dim_max_trajectory": 3,
"dropout": 0.1,
"num_context_encoder_layers": 2,
"num_heads": 8,
"num_res_layer_u_model": 6,
"num_res_layers_functional_decoder": 8,
"use_bias_for_projection": true,
"use_bias_in_attention": true,
"use_query_residual_in_attention": true
},
"train_config": {
"corruption_model_type": "odeformer",
"loss_filter_nans": true,
"loss_type": "l1",
"max_sigma_trajectory_noise": 0.06,
"max_subsampling_ration": 0.5,
"train_type": "vector_field",
"train_with_normalized_head": true
},
"transformers_version": "4.46.0"
}