{
    "model_config": {
        "propagation_layer_config": {
            "prop_type": "matching",
            "node_state_dim": 10,
            "edge_hidden_sizes": [
                20
            ],
            "node_hidden_sizes": [
                10
            ],
            "edge_net_init_scale": 0.1,
            "node_update_type": "gru",
            "use_reverse_direction": true,
            "reverse_dir_param_different": false,
            "layer_norm": false
        },
        "propagation_steps": 5,
        "alignment_feature_dim": 16,
        "scoring": "aggregated",
        "interaction_alignment": "attention",
        "interaction_alignment_preprocessor_type": "identity",
        "interaction_when": "post",
        "encoder_config": {
            "node_hidden_sizes": [
                10
            ],
            "node_feature_dim": 1,
            "edge_hidden_sizes": null,
            "edge_feature_dim": 1
        },
        "attention_config": {
            "temperature": 0.1
        },
        "aggregator_config": {
            "node_hidden_sizes": [
                10
            ],
            "graph_transform_sizes": [
                10
            ],
            "input_size": [
                10
            ],
            "gated": true,
            "aggregation_type": "sum"
        }
    },
    "name": "gmn_baseline_scoring=agg___tp=attention_pp=identity_when=post"
}