{
  "_name_or_path": "melhoushi/slimpj_h1152_d23_gbs76_tpp20.0_lp0.2_linear_linear_reverse",
  "architectures": [
    "NestedLlamaForCausalLM"
  ],
  "attention_bias": true,
  "attention_dropout": 0.0,
  "auto_map": {
    "AutoConfig": "melhoushi/slimpj_h1152_d23_gbs76_tpp20.0_lp0.2_linear_linear_reverse--configuration_celerity.CelerityConfig",
    "AutoModel": "melhoushi/slimpj_h1152_d23_gbs76_tpp20.0_lp0.2_linear_linear_reverse--modeling_celerity.CelerityModel",
    "AutoModelForCausalLM": "melhoushi/slimpj_h1152_d23_gbs76_tpp20.0_lp0.2_linear_linear_reverse--modeling_celerity.CelerityForCausalLM",
    "AutoModelForQuestionAnswering": "melhoushi/slimpj_h1152_d23_gbs76_tpp20.0_lp0.2_linear_linear_reverse--modeling_celerity.CelerityForQuestionAnswering",
    "AutoModelForSequenceClassification": "melhoushi/slimpj_h1152_d23_gbs76_tpp20.0_lp0.2_linear_linear_reverse--modeling_celerity.CelerityForSequenceClassification",
    "AutoModelForTokenClassification": "melhoushi/slimpj_h1152_d23_gbs76_tpp20.0_lp0.2_linear_linear_reverse--modeling_celerity.CelerityForTokenClassification"
  },
  "bos_token_id": 1,
  "embeddings_scale": 1.0,
  "eos_token_id": 2,
  "exit_attention": false,
  "exit_decoder_layer": true,
  "exit_layer_indices": [
    3,
    6,
    12,
    17
  ],
  "exit_mlp": false,
  "head_dim": 128,
  "hidden_act": "relu2",
  "hidden_size": 1152,
  "initializer_range": 0.02,
  "intermediate_size": 9216,
  "max_position_embeddings": 8192,
  "mhsa_residual_scale": 0.037584782608695654,
  "mlp_bias": true,
  "mlp_residual_scale": 0.021978260869565214,
  "model_type": "nested_llama",
  "norm_type": "layernorm",
  "num_attention_heads": 9,
  "num_hidden_layers": 23,
  "num_key_value_heads": 9,
  "output_exit_layers": [
    3,
    6,
    12,
    17
  ],
  "output_full_model": true,
  "output_logits_scale": 0.0008680555555555555,
  "position_embedding_type": "alibi",
  "pretraining_tp": 1,
  "rms_norm_eps": 1e-05,
  "rope_scaling": null,
  "rope_theta": 10000.0,
  "scale_qk_dot_by_d": true,
  "tie_exit_lm_head": true,
  "tie_word_embeddings": false,
  "torch_dtype": "bfloat16",
  "transformers_version": "4.48.0",
  "tuple_outputs": true,
  "use_cache": false,
  "use_mlp_gating": false,
  "vocab_size": 128256
}
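
Because `auto_map` points at custom Celerity modules rather than a stock `transformers` architecture, this checkpoint must be loaded with `trust_remote_code=True`. The `exit_layer_indices` / `output_exit_layers` fields indicate early-exit heads after layers 3, 6, 12, and 17, with `output_full_model` additionally returning logits from the full 23-layer model. Below is a minimal loading sketch under those assumptions; the repo id `Balcony-Model18` is taken from the page title and is an assumption, so substitute the actual `<namespace>/<repo>` Hub path.

```python
# Minimal loading sketch, not the authors' documented usage. It assumes the
# repository ships the configuration_celerity.py / modeling_celerity.py
# modules referenced in auto_map, hence trust_remote_code=True.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

repo_id = "Balcony-Model18"  # hypothetical; replace with the real Hub path

tokenizer = AutoTokenizer.from_pretrained(repo_id, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(
    repo_id,
    trust_remote_code=True,      # pulls the custom Celerity classes via auto_map
    torch_dtype=torch.bfloat16,  # matches "torch_dtype": "bfloat16" in the config
)
model.eval()

inputs = tokenizer("Hello, world", return_tensors="pt")
with torch.no_grad():
    outputs = model(**inputs)
# With "tuple_outputs": true, "output_exit_layers": [3, 6, 12, 17], and
# "output_full_model": true, the forward pass is expected to yield logits for
# each early-exit head plus the full model; the exact output structure depends
# on the custom modeling code in the repository.
```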