```json
{
  "model_type": "rslm",
  "architectures": [
    "RSLMForCausalLM"
  ],
  "auto_map": {
    "AutoConfig": "configuration_rslm.RSLMConfig",
    "AutoModelForCausalLM": "modeling_rslm.RSLMForCausalLM"
  },
  "hidden_size": 2048,
  "num_layers": 24,
  "num_q_heads": 16,
  "num_kv_heads": 1,
  "head_dim": 128,
  "intermediate_size": 4352,
  "vocab_size": 65536,
  "max_position_embeddings": 262144,
  "original_max_position_embeddings": 8192,
  "rope_theta": 1000000.0,
  "rope_scaling": {
    "type": "yarn",
    "factor": 32.0,
    "original_max_position_embeddings": 8192
  },
  "window_size": 4096,
  "global_layers_0idx": [5, 11, 17, 23],
  "evict_local_kv": true,
  "local_cache_keep": 4096,
  "parallel_block": true,
  "tie_word_embeddings": true,
  "rms_norm_eps": 1e-06,
  "hidden_act": "swiglu",
  "bos_token_id": 1,
  "eos_token_id": 2,
  "pad_token_id": 0,
  "torch_dtype": "bfloat16",
  "transformers_version": "manual-no-transformers-during-push"
}
```
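
Because the `auto_map` entries point at `configuration_rslm.RSLMConfig` and `modeling_rslm.RSLMForCausalLM`, the standard `transformers` remote-code path should pick the custom classes up automatically. Below is a minimal loading sketch, assuming the checkpoint directory ships `configuration_rslm.py` and `modeling_rslm.py` next to this `config.json`; the checkpoint path is a placeholder, not a real repo id.

```python
import torch
from transformers import AutoConfig, AutoModelForCausalLM

checkpoint = "path/to/rslm-checkpoint"  # placeholder path, not a real repo id

# trust_remote_code=True lets transformers import RSLMConfig and
# RSLMForCausalLM from the repo's own modules, as declared in "auto_map".
config = AutoConfig.from_pretrained(checkpoint, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(
    checkpoint,
    trust_remote_code=True,
    torch_dtype=torch.bfloat16,  # matches "torch_dtype" in the config
)
```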
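The cache-related fields also pin down a rough KV-cache budget at the full 262144-token context. The sketch below is a back-of-the-envelope estimate using only numbers from this config; the eviction semantics (sliding-window layers keep at most `local_cache_keep` tokens while the layers in `global_layers_0idx` keep the whole context) are an assumption read off `evict_local_kv`, `local_cache_keep`, and `global_layers_0idx`, not a documented guarantee.

```python
# Assumed semantics: 4 global layers cache the full context; the other
# 20 layers evict beyond local_cache_keep tokens (evict_local_kv = true).
num_layers = 24
global_layers = {5, 11, 17, 23}   # "global_layers_0idx"
num_kv_heads = 1                  # multi-query attention: one shared KV head
head_dim = 128
bytes_per_scalar = 2              # bfloat16
context = 262144                  # "max_position_embeddings"
local_keep = 4096                 # "local_cache_keep"

# K and V each store num_kv_heads * head_dim scalars per token per layer.
per_token_per_layer = 2 * num_kv_heads * head_dim * bytes_per_scalar  # 512 B

global_bytes = len(global_layers) * context * per_token_per_layer
local_bytes = (num_layers - len(global_layers)) * local_keep * per_token_per_layer

print(f"global layers: {global_bytes / 2**20:.0f} MiB")                  # ~512 MiB
print(f"local layers:  {local_bytes / 2**20:.0f} MiB")                   # ~40 MiB
print(f"total:         {(global_bytes + local_bytes) / 2**20:.0f} MiB")  # ~552 MiB
```

Under these assumptions the single KV head (`num_kv_heads: 1`) plus window eviction keeps the full-context cache near half a gigabyte, versus roughly 16x that if all 24 layers cached the entire context.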