{ "architectures": [ "CarmaniaModel" ], "attention_window_size": 128, "auto_map": { "AutoConfig": "configuration_carmania.CarmaniaConfig", "AutoModel": "modeling_carmania.CarmaniaModel", "AutoTokenizer": "tokenization_carmania.CarmaniaTokenizer" }, "hidden_size": 1024, "initializer_range": 0.02, "intermediate_size": 4608, "layer_norm_epsilon": 1e-06, "mlp_hidden_act": "silu", "model_type": "carmania", "num_attention_heads": 16, "num_key_value_heads": 4, "num_layers": 5, "rope_base": 10000, "seq_length": 10000, "torch_dtype": "float32", "transformers_version": "4.52.4", "use_positional_embedding": true, "vocab_size": 5 }