{
  "model_type": "slopiest",
  "vocab_size": 50257,
  "embeddings_size": 384,
  "head_size": 6,
  "layer_size": 6,
  "block_size": 256,
  "bos_token_id": 50256,
  "eos_token_id": 50256,
  "auto_map": {
    "AutoConfig": "model.SlopiestConfig",
    "AutoModelForCausalLM": "model.SlopiestForCausalLM"
  }
}