```json
{
  "model_name": "functiongemma-270m",
  "architecture": "gemma3",
  "hidden_size": 640,
  "num_hidden_layers": 18,
| "num_layers": 18, |
| "num_attention_heads": 4, |
| "num_key_value_heads": 1, |
| "head_dim": 256, |
| "intermediate_size": 2048, |
| "vocab_size": 262144, |
| "context_length": 2048, |
| "sliding_window": 512, |
| "sliding_window_pattern": 6, |
| "layer_types": [ |
| "sliding_attention", |
| "sliding_attention", |
| "sliding_attention", |
| "sliding_attention", |
| "sliding_attention", |
| "full_attention", |
| "sliding_attention", |
| "sliding_attention", |
| "sliding_attention", |
| "sliding_attention", |
| "sliding_attention", |
| "full_attention", |
| "sliding_attention", |
| "sliding_attention", |
| "sliding_attention", |
| "sliding_attention", |
| "sliding_attention", |
| "full_attention" |
| ], |
| "embed_scale": 25.298221281347036, |
| "rope_theta_global": 1000000.0, |
| "rope_theta_local": 10000.0, |
| "query_pre_attn_scalar": 256.0, |
| "rms_norm_eps": 1e-06, |
| "bos_token_id": 2, |
| "eos_token_id": [ |
| 1, |
| 50 |
| ], |
| "tie_word_embeddings": true, |
| "final_logit_softcapping": 0.0, |
| "parts": { |
| "model": "model.mlpackage" |
| }, |
| "quantization": "int8", |
| "compute_units": "CPU_AND_NE", |
| "tokenizer_repo": "google/functiongemma-270m-it", |
| "chat_format": "functiongemma", |
| "function_call_markers": { |
| "start": "<start_function_call>", |
| "end": "<end_function_call>" |
| } |
| } |
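For illustration, here is a minimal sketch of how a runtime might consume the deployment-specific fields above. It assumes the config is saved as `config.json` next to `model.mlpackage` (both file names are assumptions, not mandated by the config); the `extract_function_calls` helper and the example completion are hypothetical. The only library calls used are coremltools' `MLModel` loader and `ComputeUnit` enum, whose member names happen to match the `compute_units` string exactly.

```python
# Minimal sketch, assuming the config above lives in config.json alongside
# model.mlpackage. The helper below is illustrative, not part of any shipped API.
import json
import re

import coremltools as ct

with open("config.json") as f:
    cfg = json.load(f)

# "compute_units": "CPU_AND_NE" maps directly onto coremltools' ComputeUnit
# enum by member name, so no translation table is needed.
model = ct.models.MLModel(
    cfg["parts"]["model"],  # "model.mlpackage"
    compute_units=ct.ComputeUnit[cfg["compute_units"]],
)

def extract_function_calls(text: str, cfg: dict) -> list[str]:
    """Return the spans between the configured function-call markers."""
    start = re.escape(cfg["function_call_markers"]["start"])
    end = re.escape(cfg["function_call_markers"]["end"])
    return re.findall(f"{start}(.*?){end}", text, flags=re.DOTALL)

# Hypothetical decoded completion containing one function call.
completion = (
    '<start_function_call>'
    '{"name": "get_weather", "args": {"city": "Paris"}}'
    '<end_function_call>'
)
print(extract_function_calls(completion, cfg))
# -> ['{"name": "get_weather", "args": {"city": "Paris"}}']
```

Because the markers are read from the config rather than hard-coded, the same extraction logic keeps working if a future export ships different marker strings.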