EduardoPacheco committed
Commit 7e1a790
1 Parent(s): 1742405

Upload HieraForPreTraining

Files changed (1)
  1. config.json +21 -16
config.json CHANGED
@@ -1,15 +1,9 @@
 {
-  "_out_features": [
-    "stage4"
-  ],
-  "_out_indices": [
-    4
-  ],
   "architectures": [
     "HieraForPreTraining"
   ],
   "decoder_depth": 8,
-  "decoder_embed_dim": 512,
+  "decoder_hidden_size": 512,
   "decoder_num_heads": 16,
   "depths": [
     1,
@@ -22,12 +16,11 @@
   "embed_dim_multiplier": 2.0,
   "hidden_act": "gelu",
   "hidden_size": 768,
-  "initial_num_heads": 1,
-  "initializer_range": 0.02,
-  "input_size": [
+  "image_size": [
     224,
     224
   ],
+  "initializer_range": 0.02,
   "layer_norm_eps": 1e-06,
   "layer_norm_init": 1.0,
   "mask_ratio": 0.6,
@@ -45,16 +38,28 @@
   "model_type": "hiera",
   "norm_pix_loss": true,
   "num_channels": 3,
-  "num_head_multiplier": 2.0,
+  "num_heads": [
+    1,
+    2,
+    4,
+    8
+  ],
+  "num_layers": 4,
   "num_query_pool": 2,
-  "patch_kernel": [
-    7,
-    7
+  "out_features": [
+    "stage4"
+  ],
+  "out_indices": [
+    4
   ],
   "patch_padding": [
     3,
     3
   ],
+  "patch_size": [
+    7,
+    7
+  ],
   "patch_stride": [
     4,
     4
@@ -63,7 +68,6 @@
     2,
     2
   ],
-  "sep_pos_embed": false,
   "stage_names": [
     "stem",
     "stage1",
@@ -72,5 +76,6 @@
     "stage4"
   ],
   "torch_dtype": "float32",
-  "transformers_version": "4.40.0.dev0"
+  "transformers_version": "4.41.0.dev0",
+  "use_separate_position_embedding": false
 }
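
The change renames several HieraConfig keys (decoder_embed_dim -> decoder_hidden_size, input_size -> image_size, patch_kernel -> patch_size, sep_pos_embed -> use_separate_position_embedding), replaces the initial_num_heads / num_head_multiplier pair with an explicit num_heads list, adds num_layers, and promotes _out_features / _out_indices to out_features / out_indices. Below is a minimal sketch, assuming a local copy of this config.json and a transformers build that ships HieraConfig and HieraForPreTraining (the file records version 4.41.0.dev0):

# Minimal sketch, not part of this commit: read the updated config.json and
# build a randomly initialized HieraForPreTraining from it. Pretrained weights
# would still be loaded from the repository, e.g. via from_pretrained.
from transformers import HieraConfig, HieraForPreTraining

config = HieraConfig.from_json_file("config.json")
print(config.num_heads)            # [1, 2, 4, 8] -- replaces initial_num_heads / num_head_multiplier
print(config.decoder_hidden_size)  # 512, formerly decoder_embed_dim
print(config.image_size)           # [224, 224], formerly input_size

model = HieraForPreTraining(config)  # architecture only; no checkpoint weights are loaded here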