turkish-embedding-v3 / config.json
fkuyumcu's picture
Update config.json
d26cb87 verified
raw
history blame contribute delete
690 Bytes
{
"architectures": [
"Gemma3ForCausalLM"
],
"model_type": "gemma3",
"framework": "sentence-transformers",
"__version__": {
"sentence_transformers": "2.2.2",
"transformers": "4.41.0"
},
"modules": [
{
"type": "sentence_transformers.models.Transformer",
"model_name_or_path": "google/gemma-3-4b-it",
"max_seq_length": 128,
"do_lower_case": false
},
{
"type": "sentence_transformers.models.Pooling",
"pooling_mode_cls_token": false,
"pooling_mode_mean_tokens": true,
"pooling_mode_max_tokens": false
}
],
"name": "Turkish-Gemma3-Embedding-Model",
"dimension": 2560,
"normalize_embeddings": true
}