{
  "architectures": ["I3ForCausalLM"],
  "model_type": "i3",
  "auto_map": {
    "AutoConfig": "i3_transformer.configuration_i3.I3Config",
    "AutoModelForCausalLM": "i3_transformer.modeling_i3.I3ForCausalLM",
    "AutoTokenizer": "i3_transformer.tokenization_i3.I3Tokenizer"
  },
  "vocab_size": 4466,
  "d_model": 512,
  "n_layers": 12,
  "n_heads": 16,
  "max_seq_len": 256,
  "rank": 128,
  "d_state": 64
}