{
"model_name": "setu4993/LaBSE",
"num_labels": 2,
"num_layers_to_pool": 6,
"pooling_method": "attention",
"concatenate_layers": true,
"classifier_layers": [
512,
256,
128
],
"max_length": 128
}