{
"config": {
"architecture": "prefix_tuning",
"bottleneck_size": 512,
"cross_prefix": true,
"dropout": 0.0,
"encoder_prefix": true,
"flat": false,
"leave_out": [],
"non_linearity": "tanh",
"prefix_length": 30,
"shared_gating": true,
"use_gating": false
},
"config_id": "648bf22f5afeaaa6",
"hidden_size": 2048,
"model_class": "LlamaForCausalLM",
"model_name": "meta-llama/Llama-3.2-1B-Instruct",
"model_type": "llama",
"name": "llama23-1b-prefix-is",
"version": "adapters.1.0.0"
}