{ "model_type": "binaryllm", "architectures": [ "BinaryLLMForCausalLM" ], "auto_map": { "AutoConfig": "configuration_binaryllm.BinaryLLMConfig", "AutoModelForCausalLM": "modeling_binaryllm.BinaryLLMForCausalLM" }, "vocab_size": 8, "hidden_size": 384, "num_hidden_layers": 6, "num_attention_heads": 6, "intermediate_size": 1536, "max_position_embeddings": 64, "dropout": 0.1, "activation": "gelu", "torch_dtype": "float32" }