{
  "alignments": "linear",
  "architectures": [
    "LangBridgeModel"
  ],
  "dim_enc": 1024,
  "dim_lm": 4096,
  "enc": "castorini/afriteva_v2_large",
  "enc_lora_path": "cmea/afriteva_v2_large-lora_adapter_16_4",
  "freeze_encoder": true,
  "freeze_language_model": true,
  "lm": "meta-math/MetaMath-Mistral-7B",
  "torch_dtype": "bfloat16",
  "transformers_version": "4.37.2"
}