{
"ee_args": {
"desc_input_feature_size": 512,
"desc_model_is_bidirectional": true,
"desc_output_feature_size": 256,
"roomModelArgs": {
"out_channels": 256,
"scene_in_channels": 512,
"skip_last_linear": true
},
"scenesSequenceModelArgs": {
"out_channels": 256,
"scene_in_channels": 256,
"skip_last_linear": false
},
"useLSTM": false
},
"strategy_name": "hier_by_room_rnn_2_meanPoolProc",
"transformers_version": "4.34.0",
"uses_audio_video_features": null,
"uses_flattened_hierarchy": false
}