{
  "activation": "relu",
  "architectures": [
    "TransformerMLPSparseEncoder"
  ],
  "dense": true,
  "model_type": "MLP",
  "norm": "log1p",
  "scale": 1.0,
  "tf_base_model_name_or_dir": "distilbert-base-uncased",
  "torch_dtype": "float32",
  "transformers_version": "4.36.0"
}