{
  "model_name": "setu4993/LaBSE",
  "num_labels": 2,
  "num_layers_to_pool": 6,
  "pooling_method": "attention",
  "concatenate_layers": true,
  "classifier_layers": [
    512,
    256,
    128
  ],
  "max_length": 128
}