ksmcg committed on
Commit
e66ff02
1 Parent(s): 32d7776

Upload config

Browse files
Files changed (1) hide show
  1. config.json +2 -7
config.json CHANGED
@@ -1,9 +1,5 @@
1
  {
2
- "_name_or_path": "ksmcg/fan_large_16_p4_hybrid",
3
- "architectures": [
4
- "FANForImageClassification"
5
- ],
6
- "attn_drop_rate": 0.0,
7
  "backbone": "hybrid",
8
  "channel_dims": null,
9
  "channels": 256,
@@ -21,7 +17,6 @@
21
  1024
22
  ],
23
  "drop_path_rate": 0.0,
24
- "drop_rate": 0.0,
25
  "eta": 1.0,
26
  "feat_downsample": false,
27
  "feature_strides": [
@@ -32,6 +27,7 @@
32
  ],
33
  "head_init_scale": 0.001,
34
  "hidden_act": "gelu",
 
35
  "hidden_size": 480,
36
  "hybrid_patch_size": 2,
37
  "id2label": {
@@ -1069,7 +1065,6 @@
1069
  "semantic_loss_ignore_index": -100,
1070
  "sharpen_attn": false,
1071
  "tokens_norm": true,
1072
- "torch_dtype": "float32",
1073
  "transformers_version": "4.24.0.dev0",
1074
  "use_head": false,
1075
  "use_pos_embed": true
 
1
  {
2
+ "attention_probs_dropout_prob": 0.0,
 
 
 
 
3
  "backbone": "hybrid",
4
  "channel_dims": null,
5
  "channels": 256,
 
17
  1024
18
  ],
19
  "drop_path_rate": 0.0,
 
20
  "eta": 1.0,
21
  "feat_downsample": false,
22
  "feature_strides": [
 
27
  ],
28
  "head_init_scale": 0.001,
29
  "hidden_act": "gelu",
30
+ "hidden_dropout_prob": 0.0,
31
  "hidden_size": 480,
32
  "hybrid_patch_size": 2,
33
  "id2label": {
 
1065
  "semantic_loss_ignore_index": -100,
1066
  "sharpen_attn": false,
1067
  "tokens_norm": true,
 
1068
  "transformers_version": "4.24.0.dev0",
1069
  "use_head": false,
1070
  "use_pos_embed": true