lombardata committed on
Commit
cabfa90
1 Parent(s): bec300d

Upload config.json

Files changed (1)
  1. config.json +104 -88
config.json CHANGED
@@ -1,89 +1,105 @@
  {
-   "_name_or_path": "facebook/dinov2-large",
-   "apply_layernorm": true,
-   "architectures": [
-     "Dinov2ForImageClassification"
-   ],
-   "attention_probs_dropout_prob": 0.0,
-   "drop_path_rate": 0.0,
-   "hidden_act": "gelu",
-   "hidden_dropout_prob": 0.0,
-   "hidden_size": 1024,
-   "id2label": {
-     "0": "Acropore_branched",
-     "1": "Acropore_digitised",
-     "2": "Acropore_tabular",
-     "3": "Algae",
-     "4": "Dead_coral",
-     "5": "Millepore",
-     "6": "No_acropore_encrusting",
-     "7": "No_acropore_massive",
-     "8": "No_acropore_sub_massive",
-     "9": "Rock",
-     "10": "Rubble",
-     "11": "Sand"
-   },
-   "image_size": 164,
-   "initializer_range": 0.02,
-   "label2id": {
-     "Acropore_branched": 0,
-     "Acropore_digitised": 1,
-     "Acropore_tabular": 2,
-     "Algae": 3,
-     "Dead_coral": 4,
-     "Millepore": 5,
-     "No_acropore_encrusting": 6,
-     "No_acropore_massive": 7,
-     "No_acropore_sub_massive": 8,
-     "Rock": 9,
-     "Rubble": 10,
-     "Sand": 11
-   },
-   "layer_norm_eps": 1e-06,
-   "layerscale_value": 1.0,
-   "mlp_ratio": 4,
-   "model_type": "dinov2",
-   "num_attention_heads": 16,
-   "num_channels": 3,
-   "num_hidden_layers": 24,
-   "out_features": [
-     "stage24"
-   ],
-   "out_indices": [
-     24
-   ],
-   "patch_size": 14,
-   "problem_type": "multi_label_classification",
-   "qkv_bias": true,
-   "reshape_hidden_states": true,
-   "stage_names": [
-     "stem",
-     "stage1",
-     "stage2",
-     "stage3",
-     "stage4",
-     "stage5",
-     "stage6",
-     "stage7",
-     "stage8",
-     "stage9",
-     "stage10",
-     "stage11",
-     "stage12",
-     "stage13",
-     "stage14",
-     "stage15",
-     "stage16",
-     "stage17",
-     "stage18",
-     "stage19",
-     "stage20",
-     "stage21",
-     "stage22",
-     "stage23",
-     "stage24"
-   ],
-   "torch_dtype": "float32",
-   "transformers_version": "4.41.0",
-   "use_swiglu_ffn": false
- }
+   "_name_or_path": "facebook/dinov2-large",
+   "apply_layernorm": true,
+   "architectures": [
+     "Dinov2ForImageClassification"
+   ],
+   "attention_probs_dropout_prob": 0.0,
+   "drop_path_rate": 0.0,
+   "hidden_act": "gelu",
+   "hidden_dropout_prob": 0.0,
+   "hidden_size": 1024,
+   "id2label": {
+     "0": "Acropore_branched",
+     "1": "Acropore_digitised",
+     "2": "Acropore_tabular",
+     "3": "Algae",
+     "4": "Dead_coral",
+     "5": "Millepore",
+     "6": "No_acropore_encrusting",
+     "7": "No_acropore_massive",
+     "8": "No_acropore_sub_massive",
+     "9": "Rock",
+     "10": "Rubble",
+     "11": "Sand"
+   },
+   "image_size": 164,
+   "initializer_range": 0.02,
+   "label2id": {
+     "Acropore_branched": 0,
+     "Acropore_digitised": 1,
+     "Acropore_tabular": 2,
+     "Algae": 3,
+     "Dead_coral": 4,
+     "Millepore": 5,
+     "No_acropore_encrusting": 6,
+     "No_acropore_massive": 7,
+     "No_acropore_sub_massive": 8,
+     "Rock": 9,
+     "Rubble": 10,
+     "Sand": 11
+   },
+   "layer_norm_eps": 1e-06,
+   "layerscale_value": 1.0,
+   "mlp_ratio": 4,
+   "model_type": "dinov2",
+   "num_attention_heads": 16,
+   "num_channels": 3,
+   "num_hidden_layers": 24,
+   "out_features": [
+     "stage24"
+   ],
+   "out_indices": [
+     24
+   ],
+   "patch_size": 14,
+   "problem_type": "multi_label_classification",
+   "qkv_bias": true,
+   "reshape_hidden_states": true,
+   "stage_names": [
+     "stem",
+     "stage1",
+     "stage2",
+     "stage3",
+     "stage4",
+     "stage5",
+     "stage6",
+     "stage7",
+     "stage8",
+     "stage9",
+     "stage10",
+     "stage11",
+     "stage12",
+     "stage13",
+     "stage14",
+     "stage15",
+     "stage16",
+     "stage17",
+     "stage18",
+     "stage19",
+     "stage20",
+     "stage21",
+     "stage22",
+     "stage23",
+     "stage24"
+   ],
+   "torch_dtype": "float32",
+   "transformers_version": "4.41.0",
+   "use_swiglu_ffn": false,
+   "initial_learning_rate": 0.001,
+   "train_batch_size": 16,
+   "eval_batch_size": 16,
+   "optimizer": {
+     "type": "Adam"
+   },
+   "lr_scheduler_type": {
+     "type": "ReduceLROnPlateau"
+   },
+   "patience_lr_scheduler": 5,
+   "factor_lr_scheduler": 0.1,
+   "weight_decay": 0.0001,
+   "early_stopping_patience": 10,
+   "freeze_encoder": true,
+   "data_augmentation": true,
+   "num_epochs": 150
+ }
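
The config targets `Dinov2ForImageClassification` with `problem_type` set to `multi_label_classification` over 12 benthic classes. As a minimal sketch (the repository id below is a placeholder, not taken from this commit), the checkpoint and config can be loaded with the Transformers Auto classes; keys Transformers does not recognise, such as the training fields added in this commit, are generally kept as plain attributes on the config object and ignored by the model code itself:

```python
from transformers import AutoConfig, AutoModelForImageClassification

# Placeholder repo id for illustration -- substitute the repository this commit belongs to.
repo_id = "lombardata/dinov2-large-coral-classifier"

config = AutoConfig.from_pretrained(repo_id)
print(config.problem_type)   # multi_label_classification
print(config.id2label)       # {0: 'Acropore_branched', ..., 11: 'Sand'}

# Resolves to Dinov2ForImageClassification via the "dinov2" model_type.
model = AutoModelForImageClassification.from_pretrained(repo_id)
```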
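
The keys appended after `use_swiglu_ffn` (`initial_learning_rate`, `optimizer`, `lr_scheduler_type`, and so on) document the fine-tuning recipe rather than the architecture. A minimal PyTorch sketch of the setup they describe, assuming a frozen backbone with a trainable classification head and the same placeholder repo id:

```python
import torch
from transformers import AutoModelForImageClassification

repo_id = "lombardata/dinov2-large-coral-classifier"  # placeholder, not taken from this commit
model = AutoModelForImageClassification.from_pretrained(repo_id)

# "freeze_encoder": true -- keep the DINOv2 backbone fixed, train only the classifier head.
for param in model.dinov2.parameters():
    param.requires_grad = False

# "optimizer": Adam with "initial_learning_rate": 0.001 and "weight_decay": 0.0001.
optimizer = torch.optim.Adam(
    (p for p in model.parameters() if p.requires_grad),
    lr=1e-3,
    weight_decay=1e-4,
)

# "lr_scheduler_type": ReduceLROnPlateau with "factor_lr_scheduler": 0.1 and
# "patience_lr_scheduler": 5, stepped on the validation loss after each epoch.
scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(
    optimizer, mode="min", factor=0.1, patience=5
)

# Multi-label targets: one sigmoid/binary-cross-entropy term per class.
criterion = torch.nn.BCEWithLogitsLoss()

# Remaining keys: "train_batch_size"/"eval_batch_size" of 16, up to "num_epochs" 150,
# "early_stopping_patience" 10, and "data_augmentation" true (transforms not specified here).
```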