{
"model_type": "transformer",
"architecture": "InteractionNet",
"hidden_size": 512,
"num_attention_heads": 8,
"num_hidden_layers": 6,
"activation": "relu",
"framework": "pytorch"
}