{
"architectures": [
"MultiHeadQAModel"
],
"dropout": 0.2,
"heads_config": {
"closing": 1,
"hold": 2,
"listening": 5,
"opening": 1,
"proactiveness": 3,
"resolution": 5
},
"model_type": "multihead-qa",
"torch_dtype": "float32",
"transformers_version": "4.52.4"
}