{
"WEIGHT_DIMENSIONS": {},
"architectures": [
"RedialRec"
],
"autorec_params": {
"f": "sigmoid",
"g": "sigmoid",
"layer_sizes": [
1000
]
},
"sa_params": {
"hrnn_params": {
"conv_bidirectional": false,
"conversation_encoder_hidden_size": 512,
"conversation_encoder_num_layers": 2,
"sentence_encoder_hidden_size": 512,
"sentence_encoder_model": "princeton-nlp/unsup-simcse-roberta-base",
"sentence_encoder_num_layers": 2,
"use_dropout": 0.4,
"use_movie_occurrences": "word"
},
"multiple_items_per_example": true,
"output_classes": {
"i_liked": 3,
"i_seen": 3,
"i_suggested": 1,
"r_liked": 3,
"r_seen": 3,
"r_suggested": 1
},
"return_liked_probability": true
},
"torch_dtype": "float32",
"transformers_version": "4.33.2"
}