{
  "WEIGHT_DIMENSIONS": {
    "decoder.embedding.dtype": "torch.float32",
    "decoder.embedding.shape": [
      15005,
      300
    ]
  },
  "architectures": [
    "RedialGen"
  ],
  "decoder_params": {
    "context_size": 256,
    "hidden_size": 256,
    "num_layers": 1,
    "peephole": false
  },
  "hrnn_params": {
    "conv_bidirectional": false,
    "conversation_encoder_hidden_size": 256,
    "conversation_encoder_num_layers": 1,
    "sentence_encoder_hidden_size": 256,
    "sentence_encoder_model": "princeton-nlp/unsup-simcse-roberta-base",
    "sentence_encoder_num_layers": 1,
    "use_dropout": false,
    "use_movie_occurrences": false
  },
  "n_movies": 6924,
  "torch_dtype": "float32",
  "transformers_version": "4.33.2",
  "vocab_size": 15005
}
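
A minimal sketch of reading this config, assuming it is saved locally as "config.json". Since RedialGen is a custom architecture rather than a built-in transformers model class, the standard-library json module is used here instead of transformers.AutoConfig; the file path and printed fields are illustrative assumptions.

import json

# Load the config shown above; "config.json" is an assumed local path.
with open("config.json") as f:
    config = json.load(f)

# The decoder embedding matrix has one 300-dim row per vocabulary token,
# so its first dimension should match vocab_size.
embed_rows, embed_dim = config["WEIGHT_DIMENSIONS"]["decoder.embedding.shape"]
assert embed_rows == config["vocab_size"]  # 15005

print(config["architectures"])                          # ['RedialGen']
print(config["hrnn_params"]["sentence_encoder_model"])  # SimCSE sentence encoder
print(f"{config['n_movies']} movies, {embed_dim}-dim embeddings")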