yipjiaqi committed on
Commit
b6b950a
1 Parent(s): 8113b3b

Push model using huggingface_hub.

Browse files
Files changed (1) hide show
  1. config.json +1 -1
config.json CHANGED
@@ -1 +1 @@
1
- {"config_name": "spgm_base", "encoder_kernel_size": 16, "encoder_in_nchannels": 1, "encoder_out_nchannels": 256, "masknet_chunksize": 250, "masknet_numlayers": 4, "masknet_norm": "ln", "masknet_useextralinearlayer": false, "masknet_extraskipconnection": true, "masknet_numspks": 2, "intra_numlayers": 8, "intra_nhead": 8, "intra_dffn": 1024, "intra_dropout": 0, "intra_use_positional": true, "intra_norm_before": true, "spgm_block_pool": "att", "spgm_block_att_h": null, "spgm_block_att_dropout": 0}
 
1
+ {"model_type": "spgm", "config_name": "spgm_base", "encoder_kernel_size": 16, "encoder_in_nchannels": 1, "encoder_out_nchannels": 256, "masknet_chunksize": 250, "masknet_numlayers": 4, "masknet_norm": "ln", "masknet_useextralinearlayer": false, "masknet_extraskipconnection": true, "masknet_numspks": 2, "intra_numlayers": 8, "intra_nhead": 8, "intra_dffn": 1024, "intra_dropout": 0, "intra_use_positional": true, "intra_norm_before": true, "spgm_block_pool": "att", "spgm_block_att_h": null, "spgm_block_att_dropout": 0}