{
  "base_model": "Alibaba-NLP/gte-modernbert-base",
  "variant": "conservative",
  "hidden_size": 768,
  "num_heads": 12,
  "replaced_layers": {
    "1": { "was_global": false, "transferred": ["Q->R", "K->K", "V->V", "O->O"] },
    "2": { "was_global": false, "transferred": ["Q->R", "K->K", "V->V", "O->O"] },
    "4": { "was_global": false, "transferred": ["Q->R", "K->K", "V->V", "O->O"] },
    "5": { "was_global": false, "transferred": ["Q->R", "K->K", "V->V", "O->O"] },
    "7": { "was_global": false, "transferred": ["Q->R", "K->K", "V->V", "O->O"] },
    "8": { "was_global": false, "transferred": ["Q->R", "K->K", "V->V", "O->O"] },
    "10": { "was_global": false, "transferred": ["Q->R", "K->K", "V->V", "O->O"] },
    "11": { "was_global": false, "transferred": ["Q->R", "K->K", "V->V", "O->O"] },
    "13": { "was_global": false, "transferred": ["Q->R", "K->K", "V->V", "O->O"] },
    "14": { "was_global": false, "transferred": ["Q->R", "K->K", "V->V", "O->O"] },
    "16": { "was_global": false, "transferred": ["Q->R", "K->K", "V->V", "O->O"] },
    "17": { "was_global": false, "transferred": ["Q->R", "K->K", "V->V", "O->O"] },
    "19": { "was_global": false, "transferred": ["Q->R", "K->K", "V->V", "O->O"] },
    "20": { "was_global": false, "transferred": ["Q->R", "K->K", "V->V", "O->O"] }
  },
  "total_params": 173872910
}