{
  "_commit_hash": null,
  "architectures": [
    "VitMatteForImageMatting"
  ],
  "backbone_config": {
    "hidden_size": 384,
    "image_size": 512,
    "model_type": "vitdet",
    "num_attention_heads": 6,
    "num_channels": 4,
    "out_features": [
      "stage12"
    ],
    "out_indices": [
      12
    ],
    "residual_block_indices": [
      2,
      5,
      8,
      11
    ],
    "use_relative_position_embeddings": true,
    "window_block_indices": [
      0,
      1,
      3,
      4,
      6,
      7,
      9,
      10
    ],
    "window_size": 14
  },
  "convstream_hidden_sizes": [
    48,
    96,
    192
  ],
  "fusion_hidden_sizes": [
    256,
    128,
    64,
    32
  ],
  "hidden_size": 384,
  "initializer_range": 0.02,
  "model_type": "vitmatte",
  "torch_dtype": "float32",
  "transformers_version": null
}
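
This `config.json` describes a ViTMatte image-matting model: a ViTDet backbone (hidden size 384, 4 input channels for the RGB image plus trimap, windowed attention on most blocks) feeding the convolutional detail-capture stream and fusion decoder defined by `convstream_hidden_sizes` and `fusion_hidden_sizes`. Below is a minimal sketch, not part of the file, of how such a config could be used with the Hugging Face `transformers` library; the local directory path `./vitmatte` is a placeholder assumption for wherever this `config.json` and the accompanying weights are stored.

```python
# Minimal sketch. Assumptions: a transformers version with ViTMatte support,
# and a local directory "./vitmatte" containing this config.json (and weights).
from transformers import VitMatteConfig, VitMatteForImageMatting

# Rebuild the architecture from config.json alone (weights randomly initialized).
config = VitMatteConfig.from_pretrained("./vitmatte")
model = VitMatteForImageMatting(config)

# Or load the config and pretrained weights together from the same directory.
model = VitMatteForImageMatting.from_pretrained("./vitmatte")
```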