ksmcg committed on
Commit
a7e1b7d
1 Parent(s): 76893db

Upload config

Browse files
Files changed (1) hide show
  1. config.json +3 -5
config.json CHANGED
@@ -1,5 +1,6 @@
1
  {
2
  "act_layer": null,
 
3
  "attn_drop_rate": 0.0,
4
  "backbone": "hybrid",
5
  "c_head_num": null,
@@ -11,7 +12,6 @@
11
  "decoder_params": {
12
  "embed_dim": 768
13
  },
14
- "depth": 22,
15
  "depths": [
16
  3,
17
  5
@@ -24,7 +24,6 @@
24
  ],
25
  "drop_path_rate": 0.0,
26
  "drop_rate": 0.0,
27
- "dropout_ratio": 0.1,
28
  "embed_dim": 480,
29
  "eta": 1.0,
30
  "feat_downsample": false,
@@ -56,10 +55,9 @@
56
  "mlp_ratio": 4.0,
57
  "model_type": "fan",
58
  "norm_layer": null,
59
- "num_attention_heads": 8,
60
  "num_classes": 1000,
61
- "num_heads": 10,
62
- "num_hidden_layers": 12,
63
  "out_index": 18,
64
  "patch_size": 16,
65
  "qkv_bias": true,
 
1
  {
2
  "act_layer": null,
3
+ "align_corners": false,
4
  "attn_drop_rate": 0.0,
5
  "backbone": "hybrid",
6
  "c_head_num": null,
 
12
  "decoder_params": {
13
  "embed_dim": 768
14
  },
 
15
  "depths": [
16
  3,
17
  5
 
24
  ],
25
  "drop_path_rate": 0.0,
26
  "drop_rate": 0.0,
 
27
  "embed_dim": 480,
28
  "eta": 1.0,
29
  "feat_downsample": false,
 
55
  "mlp_ratio": 4.0,
56
  "model_type": "fan",
57
  "norm_layer": null,
58
+ "num_attention_heads": 10,
59
  "num_classes": 1000,
60
+ "num_hidden_layers": 22,
 
61
  "out_index": 18,
62
  "patch_size": 16,
63
  "qkv_bias": true,