{
"adapters": {
"0": "msfm/llm-jp-3-1.8b_adapter-1",
"1": "msfm/llm-jp-3-1.8b_adapter-2"
},
"auto_mapping": null,
"base_model_name_or_path": "llm-jp/llm-jp-3-1.8b",
"enable_softmax": true,
"enable_softmax_topk": false,
"global_scaling_weight": 1.0,
"hidden_size": 2048,
"inference_mode": true,
"layerwise_scalings": false,
"peft_type": "XLORA",
"revision": null,
"scaling_pass_value": 0.0,
"softmax_temperature": 1.0,
"task_type": "CAUSAL_LM",
"top_k_lora": null,
"use_trainable_adapters": false,
"xlora_depth": 4,
"xlora_dropout_p": 0.2,
"xlora_size": 2048
}