{
  "architectures": [
    "GemmaForCausalLM"
  ],
  "model_type": "gemma3",
  "framework": "sentence-transformers",
  "__version__": {
    "sentence_transformers": "2.2.2",
    "transformers": "4.41.0"
  },
  "modules": [
    {
      "type": "sentence_transformers.models.Transformer",
      "model_name_or_path": "google/gemma-3-4b-it",
      "max_seq_length": 128,
      "do_lower_case": false
    },
    {
      "type": "sentence_transformers.models.Pooling",
      "pooling_mode_cls_token": false,
      "pooling_mode_mean_tokens": true,
      "pooling_mode_max_tokens": false
    }
  ],
  "name": "Turkish-Gemma3-Embedding-Model",
  "dimension": 2560,
  "normalize_embeddings": true
}