{
"ee_args": {
"desc_input_feature_size": 512,
"desc_model_is_bidirectional": true,
"desc_output_feature_size": 256,
"roomModelArgs": {
"out_channels": 256,
"scene_in_channels": 512
},
"scenesSequenceModelArgs": {
"hidden_size": 256,
"is_bidirectional": true,
"num_features": 256
},
"useLSTM": false
},
"strategy_name": "hier_by_room_rnn_meanPoolProc",
"transformers_version": "4.34.0",
"uses_audio_video_features": null,
"uses_flattened_hierarchy": false
}
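A minimal sketch of reading this configuration with Python's standard json module, assuming the file is saved locally as config.json (the filename and the printed fields are illustrative, not specified by the source):

import json

# Load the uploaded configuration file (filename assumed).
with open("config.json", "r", encoding="utf-8") as f:
    config = json.load(f)

# Top-level strategy and framework metadata.
print(config["strategy_name"])           # hier_by_room_rnn_meanPoolProc
print(config["transformers_version"])    # 4.34.0

# Nested encoder arguments.
ee_args = config["ee_args"]
room_args = ee_args["roomModelArgs"]
seq_args = ee_args["scenesSequenceModelArgs"]
print(room_args["out_channels"], seq_args["hidden_size"])  # 256 256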