Saving best model to hub
- README.md +166 -0
- config.json +48 -0
- model.safetensors +3 -0
- test-logits.npz +3 -0
- test-references.npz +3 -0
- training_args.bin +3 -0
- validation-logits.npz +3 -0
- validation-references.npz +3 -0
README.md
ADDED
@@ -0,0 +1,166 @@
---
license: apache-2.0
base_model: WinKawaks/vit-tiny-patch16-224
tags:
- generated_from_trainer
metrics:
- accuracy
model-index:
- name: dit-base_tobacco-tiny_tobacco3482_kd_NKD_t1.0_g1.5
  results: []
---

<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->

# dit-base_tobacco-tiny_tobacco3482_kd_NKD_t1.0_g1.5

This model is a fine-tuned version of [WinKawaks/vit-tiny-patch16-224](https://huggingface.co/WinKawaks/vit-tiny-patch16-224) on an unspecified dataset.
It achieves the following results on the evaluation set:
- Loss: 3.1418
- Accuracy: 0.84
- Brier Loss: 0.2718
- Nll: 0.9778
- F1 Micro: 0.8400
- F1 Macro: 0.8296
- Ece: 0.1553
- Aurc: 0.0479

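Brier Loss and Ece above are probabilistic calibration metrics computed from the evaluation-set softmax probabilities. The exact evaluation code is not included in this repository; the sketch below shows two common formulations (multi-class Brier score and equal-width-bin ECE) that these numbers are assumed to follow.

```python
import numpy as np

def brier_score(probs, labels):
    """Multi-class Brier score: mean squared distance between the predicted
    probability vector and the one-hot target (one common convention)."""
    onehot = np.eye(probs.shape[1])[labels]
    return float(np.mean(np.sum((probs - onehot) ** 2, axis=1)))

def expected_calibration_error(probs, labels, n_bins=10):
    """ECE with equal-width confidence bins (one common variant)."""
    confidences = probs.max(axis=1)
    predictions = probs.argmax(axis=1)
    accuracies = (predictions == labels).astype(float)
    bins = np.linspace(0.0, 1.0, n_bins + 1)
    ece = 0.0
    for lo, hi in zip(bins[:-1], bins[1:]):
        in_bin = (confidences > lo) & (confidences <= hi)
        if in_bin.any():
            ece += in_bin.mean() * abs(accuracies[in_bin].mean() - confidences[in_bin].mean())
    return float(ece)
```
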
## Model description

More information needed

## Intended uses & limitations

More information needed

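No usage example is given above. The snippet below is a minimal inference sketch: the checkpoint path is a placeholder for wherever this model is hosted, and since this commit does not include a preprocessor config, the image processor is loaded from the base model.

```python
import torch
from PIL import Image
from transformers import AutoImageProcessor, AutoModelForImageClassification

# Placeholder: replace with the hub id or local directory of this checkpoint.
checkpoint = "path/to/dit-base_tobacco-tiny_tobacco3482_kd_NKD_t1.0_g1.5"

# No preprocessor config ships in this commit, so the base model's processor is used.
processor = AutoImageProcessor.from_pretrained("WinKawaks/vit-tiny-patch16-224")
model = AutoModelForImageClassification.from_pretrained(checkpoint)

image = Image.open("document.png").convert("RGB")
inputs = processor(images=image, return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits
print(model.config.id2label[logits.argmax(-1).item()])  # e.g. "Email", "Memo", ...
```
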
## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 0.0001
- train_batch_size: 128
- eval_batch_size: 128
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- lr_scheduler_warmup_ratio: 0.1
- num_epochs: 100

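The list above maps roughly onto the following `TrainingArguments` (a sketch only; the knowledge-distillation objective implied by the model name, and the student/teacher setup, are not reproduced here):

```python
from transformers import TrainingArguments

training_args = TrainingArguments(
    output_dir="dit-base_tobacco-tiny_tobacco3482_kd_NKD_t1.0_g1.5",
    learning_rate=1e-4,
    per_device_train_batch_size=128,
    per_device_eval_batch_size=128,
    seed=42,
    num_train_epochs=100,
    lr_scheduler_type="linear",
    warmup_ratio=0.1,
    # Adam betas/epsilon match the optimizer line above (the library defaults).
    adam_beta1=0.9,
    adam_beta2=0.999,
    adam_epsilon=1e-8,
)
```
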
### Training results

| Training Loss | Epoch | Step | Validation Loss | Accuracy | Brier Loss | Nll | F1 Micro | F1 Macro | Ece | Aurc |
|:-------------:|:-----:|:----:|:---------------:|:--------:|:----------:|:------:|:--------:|:--------:|:------:|:------:|
| No log | 1.0 | 7 | 5.6749 | 0.2 | 0.9075 | 8.1551 | 0.2000 | 0.1380 | 0.2949 | 0.8075 |
| No log | 2.0 | 14 | 5.1602 | 0.15 | 0.8781 | 7.4212 | 0.15 | 0.1415 | 0.2402 | 0.7606 |
| No log | 3.0 | 21 | 4.4947 | 0.46 | 0.7066 | 2.8419 | 0.46 | 0.4202 | 0.3230 | 0.3244 |
| No log | 4.0 | 28 | 3.9789 | 0.555 | 0.5827 | 1.6986 | 0.555 | 0.5525 | 0.2796 | 0.2218 |
| No log | 5.0 | 35 | 3.6991 | 0.65 | 0.4828 | 1.7197 | 0.65 | 0.6491 | 0.2315 | 0.1491 |
| No log | 6.0 | 42 | 3.6495 | 0.68 | 0.4691 | 1.8258 | 0.68 | 0.6586 | 0.2555 | 0.1358 |
| No log | 7.0 | 49 | 3.3912 | 0.75 | 0.3899 | 1.8385 | 0.75 | 0.7276 | 0.2237 | 0.0920 |
| No log | 8.0 | 56 | 3.3055 | 0.71 | 0.3792 | 1.5754 | 0.7100 | 0.6922 | 0.2130 | 0.1023 |
| No log | 9.0 | 63 | 3.3535 | 0.72 | 0.3836 | 1.7076 | 0.72 | 0.7195 | 0.2015 | 0.0978 |
| No log | 10.0 | 70 | 3.1877 | 0.785 | 0.3190 | 1.5736 | 0.785 | 0.7582 | 0.1905 | 0.0693 |
| No log | 11.0 | 77 | 3.5578 | 0.72 | 0.3812 | 2.2613 | 0.72 | 0.7241 | 0.1684 | 0.0846 |
| No log | 12.0 | 84 | 3.3589 | 0.775 | 0.3389 | 1.3228 | 0.775 | 0.7540 | 0.1665 | 0.0764 |
| No log | 13.0 | 91 | 3.1097 | 0.805 | 0.2856 | 1.4183 | 0.805 | 0.7929 | 0.1603 | 0.0484 |
| No log | 14.0 | 98 | 3.2661 | 0.815 | 0.3146 | 1.9097 | 0.815 | 0.8066 | 0.1753 | 0.0636 |
| No log | 15.0 | 105 | 3.3637 | 0.755 | 0.3361 | 1.5166 | 0.755 | 0.7492 | 0.1804 | 0.0720 |
| No log | 16.0 | 112 | 3.1495 | 0.8 | 0.2994 | 1.4586 | 0.8000 | 0.7926 | 0.1714 | 0.0604 |
| No log | 17.0 | 119 | 3.1573 | 0.8 | 0.2941 | 1.6755 | 0.8000 | 0.7899 | 0.1545 | 0.0577 |
| No log | 18.0 | 126 | 3.4445 | 0.77 | 0.3416 | 1.4075 | 0.7700 | 0.7503 | 0.1620 | 0.0807 |
| No log | 19.0 | 133 | 3.1292 | 0.805 | 0.2816 | 1.3835 | 0.805 | 0.7815 | 0.1768 | 0.0526 |
| No log | 20.0 | 140 | 3.4253 | 0.75 | 0.3459 | 2.0430 | 0.75 | 0.7591 | 0.1697 | 0.0706 |
| No log | 21.0 | 147 | 3.1645 | 0.81 | 0.3000 | 1.7363 | 0.81 | 0.8113 | 0.1711 | 0.0614 |
| No log | 22.0 | 154 | 3.0823 | 0.815 | 0.2791 | 1.5997 | 0.815 | 0.8020 | 0.1417 | 0.0556 |
| No log | 23.0 | 161 | 2.9898 | 0.83 | 0.2521 | 1.4274 | 0.83 | 0.8189 | 0.1589 | 0.0434 |
| No log | 24.0 | 168 | 3.0915 | 0.83 | 0.2770 | 1.3516 | 0.83 | 0.8173 | 0.1495 | 0.0538 |
| No log | 25.0 | 175 | 3.0405 | 0.825 | 0.2621 | 1.5191 | 0.825 | 0.8048 | 0.1329 | 0.0494 |
| No log | 26.0 | 182 | 3.0621 | 0.815 | 0.2735 | 1.0698 | 0.815 | 0.7955 | 0.1617 | 0.0522 |
| No log | 27.0 | 189 | 3.0228 | 0.835 | 0.2650 | 1.4235 | 0.835 | 0.8315 | 0.1565 | 0.0502 |
| No log | 28.0 | 196 | 3.0677 | 0.82 | 0.2778 | 1.5299 | 0.82 | 0.8165 | 0.1660 | 0.0557 |
| No log | 29.0 | 203 | 3.0272 | 0.825 | 0.2699 | 1.4726 | 0.825 | 0.8204 | 0.1643 | 0.0491 |
| No log | 30.0 | 210 | 3.1090 | 0.815 | 0.2892 | 1.3258 | 0.815 | 0.8026 | 0.1585 | 0.0536 |
| No log | 31.0 | 217 | 3.1069 | 0.81 | 0.2866 | 1.5638 | 0.81 | 0.8050 | 0.1473 | 0.0557 |
| No log | 32.0 | 224 | 3.0374 | 0.815 | 0.2765 | 1.2895 | 0.815 | 0.8045 | 0.1476 | 0.0527 |
| No log | 33.0 | 231 | 3.0503 | 0.815 | 0.2750 | 1.3113 | 0.815 | 0.7975 | 0.1531 | 0.0517 |
| No log | 34.0 | 238 | 2.9852 | 0.82 | 0.2613 | 1.4575 | 0.82 | 0.8110 | 0.1600 | 0.0448 |
| No log | 35.0 | 245 | 3.0437 | 0.83 | 0.2724 | 1.3491 | 0.83 | 0.8205 | 0.1622 | 0.0571 |
| No log | 36.0 | 252 | 3.0098 | 0.82 | 0.2717 | 1.2671 | 0.82 | 0.8055 | 0.1567 | 0.0519 |
| No log | 37.0 | 259 | 3.0025 | 0.845 | 0.2599 | 1.2628 | 0.845 | 0.8255 | 0.1342 | 0.0481 |
| No log | 38.0 | 266 | 3.1854 | 0.805 | 0.3015 | 1.2550 | 0.805 | 0.7956 | 0.1560 | 0.0601 |
| No log | 39.0 | 273 | 3.0704 | 0.82 | 0.2793 | 1.2393 | 0.82 | 0.8057 | 0.1566 | 0.0557 |
| No log | 40.0 | 280 | 3.0739 | 0.825 | 0.2842 | 1.2701 | 0.825 | 0.8169 | 0.1371 | 0.0513 |
| No log | 41.0 | 287 | 3.0465 | 0.835 | 0.2747 | 1.2598 | 0.835 | 0.8302 | 0.1449 | 0.0538 |
| No log | 42.0 | 294 | 3.0691 | 0.825 | 0.2773 | 1.1796 | 0.825 | 0.8137 | 0.1372 | 0.0511 |
| No log | 43.0 | 301 | 3.0734 | 0.84 | 0.2732 | 1.1765 | 0.8400 | 0.8282 | 0.1564 | 0.0565 |
| No log | 44.0 | 308 | 3.0262 | 0.845 | 0.2622 | 1.2152 | 0.845 | 0.8306 | 0.1457 | 0.0541 |
| No log | 45.0 | 315 | 3.0610 | 0.835 | 0.2727 | 1.2249 | 0.835 | 0.8261 | 0.1606 | 0.0544 |
| No log | 46.0 | 322 | 3.0358 | 0.84 | 0.2767 | 1.1020 | 0.8400 | 0.8323 | 0.1416 | 0.0527 |
| No log | 47.0 | 329 | 2.9893 | 0.835 | 0.2650 | 1.1536 | 0.835 | 0.8252 | 0.1386 | 0.0493 |
| No log | 48.0 | 336 | 3.0498 | 0.84 | 0.2726 | 1.1253 | 0.8400 | 0.8320 | 0.1302 | 0.0535 |
| No log | 49.0 | 343 | 2.9816 | 0.845 | 0.2585 | 1.2068 | 0.845 | 0.8355 | 0.1455 | 0.0451 |
| No log | 50.0 | 350 | 3.0431 | 0.835 | 0.2686 | 1.0596 | 0.835 | 0.8238 | 0.1542 | 0.0540 |
| No log | 51.0 | 357 | 3.0200 | 0.835 | 0.2639 | 1.1806 | 0.835 | 0.8290 | 0.1434 | 0.0501 |
| No log | 52.0 | 364 | 3.0217 | 0.845 | 0.2664 | 1.0846 | 0.845 | 0.8324 | 0.1671 | 0.0503 |
| No log | 53.0 | 371 | 3.0255 | 0.84 | 0.2649 | 1.1803 | 0.8400 | 0.8318 | 0.1350 | 0.0488 |
| No log | 54.0 | 378 | 3.0069 | 0.835 | 0.2616 | 1.2057 | 0.835 | 0.8190 | 0.1284 | 0.0496 |
| No log | 55.0 | 385 | 3.0609 | 0.815 | 0.2746 | 1.0378 | 0.815 | 0.7970 | 0.1422 | 0.0490 |
| No log | 56.0 | 392 | 3.0111 | 0.84 | 0.2622 | 1.1806 | 0.8400 | 0.8341 | 0.1428 | 0.0513 |
| No log | 57.0 | 399 | 3.0050 | 0.84 | 0.2643 | 1.1898 | 0.8400 | 0.8299 | 0.1452 | 0.0494 |
| No log | 58.0 | 406 | 3.0426 | 0.84 | 0.2662 | 1.0337 | 0.8400 | 0.8307 | 0.1397 | 0.0514 |
| No log | 59.0 | 413 | 3.0427 | 0.835 | 0.2682 | 1.0309 | 0.835 | 0.8247 | 0.1453 | 0.0491 |
| No log | 60.0 | 420 | 3.0449 | 0.83 | 0.2744 | 1.0039 | 0.83 | 0.8141 | 0.1436 | 0.0484 |
| No log | 61.0 | 427 | 3.0524 | 0.83 | 0.2729 | 1.1480 | 0.83 | 0.8162 | 0.1454 | 0.0477 |
| No log | 62.0 | 434 | 3.0290 | 0.835 | 0.2610 | 1.1757 | 0.835 | 0.8264 | 0.1476 | 0.0506 |
| No log | 63.0 | 441 | 3.0574 | 0.83 | 0.2712 | 1.0242 | 0.83 | 0.8161 | 0.1464 | 0.0485 |
| No log | 64.0 | 448 | 3.0436 | 0.835 | 0.2684 | 1.1326 | 0.835 | 0.8267 | 0.1417 | 0.0470 |
| No log | 65.0 | 455 | 3.0170 | 0.84 | 0.2610 | 1.1095 | 0.8400 | 0.8289 | 0.1520 | 0.0492 |
| No log | 66.0 | 462 | 3.0176 | 0.835 | 0.2623 | 1.1140 | 0.835 | 0.8225 | 0.1262 | 0.0459 |
| No log | 67.0 | 469 | 3.0712 | 0.84 | 0.2735 | 1.0884 | 0.8400 | 0.8296 | 0.1421 | 0.0516 |
| No log | 68.0 | 476 | 3.0258 | 0.84 | 0.2670 | 1.1388 | 0.8400 | 0.8279 | 0.1478 | 0.0461 |
| No log | 69.0 | 483 | 3.0838 | 0.835 | 0.2707 | 1.0937 | 0.835 | 0.8232 | 0.1425 | 0.0477 |
| No log | 70.0 | 490 | 3.1076 | 0.82 | 0.2819 | 1.0030 | 0.82 | 0.7998 | 0.1507 | 0.0480 |
| No log | 71.0 | 497 | 3.0696 | 0.84 | 0.2725 | 1.0175 | 0.8400 | 0.8349 | 0.1567 | 0.0501 |
| 2.6485 | 72.0 | 504 | 3.0535 | 0.84 | 0.2676 | 1.0079 | 0.8400 | 0.8253 | 0.1351 | 0.0477 |
| 2.6485 | 73.0 | 511 | 3.0326 | 0.83 | 0.2667 | 0.9792 | 0.83 | 0.8093 | 0.1334 | 0.0464 |
| 2.6485 | 74.0 | 518 | 3.0271 | 0.835 | 0.2616 | 1.0865 | 0.835 | 0.8193 | 0.1223 | 0.0454 |
| 2.6485 | 75.0 | 525 | 3.0894 | 0.83 | 0.2732 | 0.9764 | 0.83 | 0.8123 | 0.1446 | 0.0489 |
| 2.6485 | 76.0 | 532 | 3.0905 | 0.835 | 0.2730 | 1.0736 | 0.835 | 0.8232 | 0.1578 | 0.0485 |
| 2.6485 | 77.0 | 539 | 3.0507 | 0.84 | 0.2646 | 1.0716 | 0.8400 | 0.8279 | 0.1424 | 0.0469 |
| 2.6485 | 78.0 | 546 | 3.0981 | 0.845 | 0.2712 | 0.9916 | 0.845 | 0.8324 | 0.1452 | 0.0508 |
| 2.6485 | 79.0 | 553 | 3.0820 | 0.84 | 0.2728 | 0.9791 | 0.8400 | 0.8296 | 0.1403 | 0.0473 |
| 2.6485 | 80.0 | 560 | 3.0978 | 0.84 | 0.2733 | 0.9864 | 0.8400 | 0.8296 | 0.1480 | 0.0485 |
| 2.6485 | 81.0 | 567 | 3.0936 | 0.84 | 0.2716 | 0.9955 | 0.8400 | 0.8296 | 0.1483 | 0.0474 |
| 2.6485 | 82.0 | 574 | 3.0937 | 0.845 | 0.2685 | 0.9875 | 0.845 | 0.8324 | 0.1459 | 0.0486 |
| 2.6485 | 83.0 | 581 | 3.0940 | 0.84 | 0.2719 | 0.9863 | 0.8400 | 0.8296 | 0.1481 | 0.0470 |
| 2.6485 | 84.0 | 588 | 3.0745 | 0.84 | 0.2656 | 1.0795 | 0.8400 | 0.8323 | 0.1460 | 0.0476 |
| 2.6485 | 85.0 | 595 | 3.1089 | 0.845 | 0.2681 | 1.0050 | 0.845 | 0.8324 | 0.1568 | 0.0492 |
| 2.6485 | 86.0 | 602 | 3.0880 | 0.84 | 0.2695 | 1.0607 | 0.8400 | 0.8296 | 0.1409 | 0.0474 |
| 2.6485 | 87.0 | 609 | 3.0848 | 0.84 | 0.2666 | 0.9996 | 0.8400 | 0.8296 | 0.1425 | 0.0470 |
| 2.6485 | 88.0 | 616 | 3.1144 | 0.84 | 0.2682 | 0.9937 | 0.8400 | 0.8296 | 0.1380 | 0.0482 |
| 2.6485 | 89.0 | 623 | 3.1316 | 0.84 | 0.2711 | 0.9884 | 0.8400 | 0.8296 | 0.1484 | 0.0490 |
| 2.6485 | 90.0 | 630 | 3.1312 | 0.84 | 0.2726 | 0.9732 | 0.8400 | 0.8296 | 0.1525 | 0.0488 |
| 2.6485 | 91.0 | 637 | 3.1312 | 0.84 | 0.2723 | 0.9794 | 0.8400 | 0.8296 | 0.1475 | 0.0481 |
| 2.6485 | 92.0 | 644 | 3.1426 | 0.84 | 0.2731 | 0.9728 | 0.8400 | 0.8296 | 0.1478 | 0.0491 |
| 2.6485 | 93.0 | 651 | 3.1351 | 0.84 | 0.2709 | 0.9741 | 0.8400 | 0.8296 | 0.1438 | 0.0483 |
| 2.6485 | 94.0 | 658 | 3.1390 | 0.84 | 0.2716 | 0.9764 | 0.8400 | 0.8296 | 0.1576 | 0.0483 |
| 2.6485 | 95.0 | 665 | 3.1366 | 0.84 | 0.2711 | 0.9795 | 0.8400 | 0.8296 | 0.1480 | 0.0484 |
| 2.6485 | 96.0 | 672 | 3.1337 | 0.84 | 0.2710 | 0.9828 | 0.8400 | 0.8296 | 0.1475 | 0.0478 |
| 2.6485 | 97.0 | 679 | 3.1431 | 0.84 | 0.2723 | 0.9767 | 0.8400 | 0.8296 | 0.1587 | 0.0480 |
| 2.6485 | 98.0 | 686 | 3.1388 | 0.84 | 0.2713 | 0.9808 | 0.8400 | 0.8296 | 0.1476 | 0.0480 |
| 2.6485 | 99.0 | 693 | 3.1420 | 0.84 | 0.2718 | 0.9778 | 0.8400 | 0.8296 | 0.1560 | 0.0480 |
| 2.6485 | 100.0 | 700 | 3.1418 | 0.84 | 0.2718 | 0.9778 | 0.8400 | 0.8296 | 0.1553 | 0.0479 |

### Framework versions

- Transformers 4.36.0.dev0
- Pytorch 2.2.0.dev20231112+cu118
- Datasets 2.14.5
- Tokenizers 0.14.1
config.json
ADDED
@@ -0,0 +1,48 @@
{
  "_name_or_path": "WinKawaks/vit-tiny-patch16-224",
  "architectures": [
    "ViTForImageClassification"
  ],
  "attention_probs_dropout_prob": 0.0,
  "encoder_stride": 16,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.0,
  "hidden_size": 192,
  "id2label": {
    "0": "ADVE",
    "1": "Email",
    "2": "Form",
    "3": "Letter",
    "4": "Memo",
    "5": "News",
    "6": "Note",
    "7": "Report",
    "8": "Resume",
    "9": "Scientific"
  },
  "image_size": 224,
  "initializer_range": 0.02,
  "intermediate_size": 768,
  "label2id": {
    "ADVE": 0,
    "Email": 1,
    "Form": 2,
    "Letter": 3,
    "Memo": 4,
    "News": 5,
    "Note": 6,
    "Report": 7,
    "Resume": 8,
    "Scientific": 9
  },
  "layer_norm_eps": 1e-12,
  "model_type": "vit",
  "num_attention_heads": 3,
  "num_channels": 3,
  "num_hidden_layers": 12,
  "patch_size": 16,
  "problem_type": "single_label_classification",
  "qkv_bias": true,
  "torch_dtype": "float32",
  "transformers_version": "4.36.0.dev0"
}
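This configuration describes a ViT-tiny backbone (hidden size 192, 12 layers, 3 attention heads) with a 10-way document-type classification head. A sketch of loading it with `transformers` (the local path is hypothetical):

```python
from transformers import ViTConfig, ViTForImageClassification

# Hypothetical local path to the directory containing this config.json.
config = ViTConfig.from_pretrained("./dit-base_tobacco-tiny_tobacco3482_kd_NKD_t1.0_g1.5")
print(config.hidden_size, config.num_hidden_layers, config.num_attention_heads)  # 192 12 3
print(len(config.id2label))  # 10 document classes, ADVE .. Scientific

# Building from the config alone yields randomly initialised weights; use
# from_pretrained on the checkpoint directory to load the fine-tuned ones.
model = ViTForImageClassification(config)
```
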
model.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:68ba7b450e6c506a7b8ac2bddd4104003f1848c6178b707345bac9eae4742182
size 22128104
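This entry is a Git LFS pointer to the ~22 MB weight file rather than the weights themselves. Once the real file has been fetched, the stored tensors can be inspected with the `safetensors` package (a sketch):

```python
from safetensors import safe_open

# Assumes the actual model.safetensors has been downloaded, not just the LFS pointer.
with safe_open("model.safetensors", framework="pt") as f:
    for name in f.keys():
        print(name, tuple(f.get_tensor(name).shape))
```
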
test-logits.npz
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:26275ba6dd80e72fd4938108af2a230fb7aa920ad467d8175265cebb36503fb5
size 92398
test-references.npz
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:a2afcfdc977d6e963da44f7d0b6169569f722c36f36eb2c2798b49630510363b
size 2128
training_args.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:cdeaaeedd5ee6fb5b84369546e832ab057a76227efccc4e3fcff5e13590d3a2f
size 4920
validation-logits.npz
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:c8f50985101854f2738f651071b9fe5a2306183d7a52aa841a5de740c1953710
size 7659
validation-references.npz
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:0354b78de1e153edfd908a412b596b1a05abea3df9a94323763cbb1ee2631790
size 423
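The `*-logits.npz` and `*-references.npz` files above store the raw evaluation-set logits and reference labels. A sketch for sanity-checking the reported accuracy from them (the key names inside the archives are not documented, so the first stored array of each file is used, and the references are assumed to be integer class indices):

```python
import numpy as np

def first_array(path):
    # The archive key names are not documented; take the first stored array.
    with np.load(path) as archive:
        return archive[archive.files[0]]

logits = first_array("test-logits.npz")
references = first_array("test-references.npz")

accuracy = float((logits.argmax(axis=-1) == references).mean())
print(f"test accuracy: {accuracy:.4f}")  # compare with the Accuracy reported in the card
```
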