{
  "architectures": [
    "LlamaForCausalLM"
  ],
  "attention_bias": false,
  "attention_dropout": 0.0,
  "bos_token_id": 128000,
  "eos_token_id": [
    128001,
    128008,
    128009
  ],
  "head_dim": 128,
  "hidden_act": "silu",
  "hidden_size": 3072,
  "initializer_range": 0.02,
  "intermediate_size": 8192,
  "max_position_embeddings": 131072,
  "mlp_bias": false,
  "model_type": "llama",
  "num_attention_heads": 24,
  "num_hidden_layers": 28,
  "num_key_value_heads": 8,
  "pretraining_tp": 1,
  "rms_norm_eps": 1e-05,
  "rope_scaling": {
    "factor": 32.0,
    "high_freq_factor": 4.0,
    "low_freq_factor": 1.0,
    "original_max_position_embeddings": 8192,
    "rope_type": "llama3"
  },
  "rope_theta": 500000.0,
  "tie_word_embeddings": true,
  "torch_dtype": "bfloat16",
  "transformers_version": "4.45.0.dev0",
  "use_cache": true,
  "vocab_size": 128256,
  "alpha_pattern": {},
  "auto_mapping": null,
  "base_model_name_or_path": "HuggingFaceTB/SmolLM2-135M-Instruct",
  "bias": "none",
  "eva_config": null,
  "exclude_modules": null,
  "fan_in_fan_out": false,
  "inference_mode": true,
  "init_lora_weights": true,
  "layer_replication": null,
  "layers_pattern": null,
  "layers_to_transform": null,
  "loftq_config": {},
  "lora_alpha": 8,
  "lora_bias": false,
  "lora_dropout": 0.05,
  "megatron_config": null,
  "megatron_core": "megatron.core",
  "modules_to_save": null,
  "peft_type": "LORA",
  "r": 8,
  "rank_pattern": {},
  "target_modules": [
    "v_proj",
    "fc_out",
    "k_proj",
    "q_proj",
    "o_proj",
    "fc_in",
    "gate_proj",
    "down_proj",
    "up_proj"
  ],
  "task_type": "CAUSAL_LM",
  "use_dora": false,
  "use_rslora": true
}
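The LoRA fields above can be reproduced programmatically. The snippet below is a minimal sketch, not the original training script: it assumes the base checkpoint is the one listed in `base_model_name_or_path` and simply maps the adapter values in the JSON onto `peft`'s `LoraConfig`; nothing beyond those values is known about the actual run.

```python
# Sketch: rebuild the LoRA setup described by the adapter fields above.
# Assumptions: base model = base_model_name_or_path from the config;
# all hyperparameters copied verbatim from the JSON.
from transformers import AutoModelForCausalLM
from peft import LoraConfig, TaskType, get_peft_model

base = AutoModelForCausalLM.from_pretrained("HuggingFaceTB/SmolLM2-135M-Instruct")

lora_config = LoraConfig(
    task_type=TaskType.CAUSAL_LM,
    r=8,                # "r"
    lora_alpha=8,       # "lora_alpha"
    lora_dropout=0.05,  # "lora_dropout"
    bias="none",        # "bias"
    use_rslora=True,    # rank-stabilized LoRA: scaling = lora_alpha / sqrt(r)
    target_modules=[
        "q_proj", "k_proj", "v_proj", "o_proj",  # attention projections
        "gate_proj", "up_proj", "down_proj",     # MLP projections
        "fc_in", "fc_out",  # GPT-J-style names; Llama-architecture models have no such modules
    ],
)

model = get_peft_model(base, lora_config)
model.print_trainable_parameters()
```

Note that with `use_rslora` enabled the adapter scaling is `lora_alpha / sqrt(r)` = 8/√8 ≈ 2.83 rather than the plain-LoRA `lora_alpha / r` = 1.0, so the update matrices contribute more than the small alpha value might suggest.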
|
|