{ "_name_or_path": "/mnt/algorithm/user_dir/fangyue/.cache/huggingface/hub/models--OpenLLMAI--Llama-2-7b-sft-model-ocra-500k/snapshots/6fce8d06f40f2359ec447ab7f33333ffd2019b47", "architectures": [ "LlamaForCausalLM" ], "attention_bias": false, "attention_dropout": 0.0, "attention_probs_dropout_prob": 0, "bos_token_id": 1, "classifier_dropout_prob": 0.1, "embedding_size": 128, "eos_token_id": 2, "hidden_act": "silu", "hidden_dropout_prob": 0, "hidden_size": 4096, "initializer_range": 0.02, "inner_group_num": 1, "intermediate_size": 11008, "layer_norm_eps": 1e-12, "max_position_embeddings": 4096, "model_type": "albert", "num_attention_heads": 32, "num_hidden_groups": 1, "num_hidden_layers": 32, "num_key_value_heads": 32, "pad_token_id": 2, "position_embedding_type": "absolute", "pretraining_tp": 1, "rms_norm_eps": 1e-05, "rope_scaling": null, "rope_theta": 10000.0, "tie_word_embeddings": false, "torch_dtype": "bfloat16", "transformers_version": "4.40.0", "type_vocab_size": 2, "use_cache": true, "vocab_size": 32000 }