{
"architectures": [
"LUAR"
],
"embedding_dim": 512,
"epsilon": 1e-09,
"gradient_checkpointing": false,
"model_name_or_path": "sentence-transformers/all-MiniLM-L6-v2",
"model_type": "luar",
"torch_dtype": "bfloat16",
"transformers_version": "4.52.3"
}
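
Because "model_type" is "luar", a custom architecture that is not bundled with the transformers library, loading a checkpoint that ships this config generally requires trust_remote_code=True so the modeling code is fetched from the repository itself. A minimal sketch in Python; the repo id below is a placeholder, not a real repository:

from transformers import AutoConfig, AutoModel

# Placeholder repo id -- substitute the actual Hub repository that hosts this config.
repo_id = "your-namespace/luar-checkpoint"

# model_type "luar" is not registered inside transformers, so the custom
# configuration and modeling code must come from the repo via trust_remote_code.
config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
print(config.embedding_dim)  # 512, per the config above

# Instantiates the LUAR model with the weights and remote code from the repo.
model = AutoModel.from_pretrained(repo_id, trust_remote_code=True)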