{
  "model_type": "transformer",
  "architecture": "InteractionNet",
  "hidden_size": 512,
  "num_attention_heads": 8,
  "num_hidden_layers": 6,
  "activation": "relu",
  "framework": "pytorch"
}