{ "architectures": ["KimiForCausalLM"], "auto_map": { "AutoConfig": "configuration_kimi.KimiConfig", "AutoModelForCausalLM": "modeling_kimi.KimiForCausalLM", "AutoTokenizer": "tokenization_kimi.TikTokenTokenizer" }, "bos_token": "[BOS]", "eos_token": "[EOS]", "hidden_size": 4096, "intermediate_size": 11008, "max_position_embeddings": 32768, "model_type": "kimi", "num_attention_heads": 32, "num_hidden_layers": 32, "num_key_value_heads": 8, "pad_token": "[PAD]", "rope_scaling": { "factor": 2.0, "type": "linear" }, "rope_theta": 10000.0, "rms_norm_eps": 1e-06, "sliding_window": 4096, "tie_word_embeddings": false, "tokenizer_class": "TikTokenTokenizer", "torch_dtype": "bfloat16", "transformers_version": "5.0.0", "use_cache": true, "vocab_size": 32000 }