{
  "architectures": [
    "LUAR"
  ],
  "embedding_dim": 512,
  "epsilon": 1e-09,
  "gradient_checkpointing": false,
  "model_name_or_path": "sentence-transformers/all-MiniLM-L6-v2",
  "model_type": "luar",
  "torch_dtype": "bfloat16",
  "transformers_version": "4.52.3"
}