heado committed on
Commit 93f3bcb
1 Parent(s): f430d2b
README.md ADDED
@@ -0,0 +1,62 @@
+ ---
+ license: apache-2.0
+ base_model: google/vit-base-patch16-224-in21k
+ tags:
+ - HHD
+ - 3_class
+ - ViT
+ - generated_from_trainer
+ model-index:
+ - name: ViT_beans
+   results: []
+ ---
+
+ <!-- This model card has been generated automatically according to the information the Trainer had access to. You
+ should probably proofread and complete it, then remove this comment. -->
+
+ # ViT_beans
+
+ This model is a fine-tuned version of [google/vit-base-patch16-224-in21k](https://huggingface.co/google/vit-base-patch16-224-in21k) on the beans dataset.
+ It achieves the following results on the evaluation set:
+ - Loss: 0.0515
+
+ ## Model description
+
+ More information needed
+
+ ## Intended uses & limitations
+
+ More information needed
+
+ ## Training and evaluation data
+
+ More information needed
+
+ ## Training procedure
+
+ ### Training hyperparameters
+
+ The following hyperparameters were used during training:
+ - learning_rate: 0.0002
+ - train_batch_size: 16
+ - eval_batch_size: 8
+ - seed: 42
+ - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
+ - lr_scheduler_type: linear
+ - num_epochs: 4
+ - mixed_precision_training: Native AMP
+
+ ### Training results
+
+ | Training Loss | Epoch  | Step | Validation Loss |
+ |:-------------:|:------:|:----:|:---------------:|
+ | 0.0722        | 1.5385 | 100  | 0.1296          |
+ | 0.0629        | 3.0769 | 200  | 0.0515          |
+
+
+ ### Framework versions
+
+ - Transformers 4.42.4
+ - Pytorch 2.4.0+cu121
+ - Datasets 2.21.0
+ - Tokenizers 0.19.1
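
The auto-generated card stops short of a usage example. A minimal inference sketch is below; the repo id `heado/ViT_beans` is assumed from the commit author and model name, and `leaf.jpg` is a placeholder input path.

```python
from transformers import pipeline

# Assumed repo id (commit author + model name); adjust if the model is hosted elsewhere.
classifier = pipeline("image-classification", model="heado/ViT_beans")

# Scores a leaf photo against the three classes: angular_leaf_spot, bean_rust, healthy.
predictions = classifier("leaf.jpg")
print(predictions)
```

The `pipeline` wrapper picks up both the fine-tuned weights and the preprocessor config committed below, so no manual resizing or normalization is needed.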
config.json ADDED
@@ -0,0 +1,34 @@
+ {
+   "_name_or_path": "google/vit-base-patch16-224-in21k",
+   "architectures": [
+     "ViTForImageClassification"
+   ],
+   "attention_probs_dropout_prob": 0.0,
+   "encoder_stride": 16,
+   "hidden_act": "gelu",
+   "hidden_dropout_prob": 0.0,
+   "hidden_size": 768,
+   "id2label": {
+     "0": "angular_leaf_spot",
+     "1": "bean_rust",
+     "2": "healthy"
+   },
+   "image_size": 224,
+   "initializer_range": 0.02,
+   "intermediate_size": 3072,
+   "label2id": {
+     "angular_leaf_spot": "0",
+     "bean_rust": "1",
+     "healthy": "2"
+   },
+   "layer_norm_eps": 1e-12,
+   "model_type": "vit",
+   "num_attention_heads": 12,
+   "num_channels": 3,
+   "num_hidden_layers": 12,
+   "patch_size": 16,
+   "problem_type": "single_label_classification",
+   "qkv_bias": true,
+   "torch_dtype": "float32",
+   "transformers_version": "4.42.4"
+ }
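
The `id2label`/`label2id` entries above are what make the three-class head readable. For a lower-level route than the pipeline sketch earlier, a hedged example of mapping raw logits back to class names (same assumed repo id, placeholder image path):

```python
import torch
from PIL import Image
from transformers import ViTForImageClassification, ViTImageProcessor

repo = "heado/ViT_beans"  # assumed repo id
model = ViTForImageClassification.from_pretrained(repo)
processor = ViTImageProcessor.from_pretrained(repo)

image = Image.open("leaf.jpg").convert("RGB")
inputs = processor(images=image, return_tensors="pt")

with torch.no_grad():
    logits = model(**inputs).logits  # shape: (1, 3)

predicted_id = logits.argmax(-1).item()
print(model.config.id2label[predicted_id])  # "angular_leaf_spot", "bean_rust", or "healthy"
```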
model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:33c98fe4f5721871e37977cb56cce201f5178926cf7e596ad70ade08567f9a38
+ size 343227052
preprocessor_config.json ADDED
@@ -0,0 +1,22 @@
+ {
+   "do_normalize": true,
+   "do_rescale": true,
+   "do_resize": true,
+   "image_mean": [
+     0.5,
+     0.5,
+     0.5
+   ],
+   "image_processor_type": "ViTFeatureExtractor",
+   "image_std": [
+     0.5,
+     0.5,
+     0.5
+   ],
+   "resample": 2,
+   "rescale_factor": 0.00392156862745098,
+   "size": {
+     "height": 224,
+     "width": 224
+   }
+ }
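
`ViTImageProcessor.from_pretrained` applies this config automatically, but for clarity, a rough torchvision equivalent of what it encodes: resize to 224×224 with bilinear resampling (`resample: 2`), rescale pixels by 1/255, then normalize each channel with mean and std 0.5, mapping values to [-1, 1]. This is a sketch, not the processor itself; `leaf.jpg` is a placeholder.

```python
from PIL import Image
from torchvision import transforms

# Roughly equivalent to the preprocessor config above.
manual_transform = transforms.Compose([
    transforms.Resize((224, 224), interpolation=transforms.InterpolationMode.BILINEAR),
    transforms.ToTensor(),  # rescale_factor = 1/255, pixels now in [0, 1]
    transforms.Normalize(mean=[0.5, 0.5, 0.5], std=[0.5, 0.5, 0.5]),  # -> [-1, 1]
])

pixel_values = manual_transform(Image.open("leaf.jpg").convert("RGB")).unsqueeze(0)
print(pixel_values.shape)  # torch.Size([1, 3, 224, 224])
```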
runs/Sep02_03-26-07_3d244b9d4949/events.out.tfevents.1725247739.3d244b9d4949.6417.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:da7cc2d3d72375be76c37f19d9b415cc252d6069a7cd4669377663d608d89ac1
+ size 4882
runs/Sep02_03-26-07_3d244b9d4949/events.out.tfevents.1725247789.3d244b9d4949.6417.1 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b6741ba62f123eb9804e7c213449963572177aa88064d0499bb501894c7c8b4b
+ size 19264
runs/Sep02_03-36-42_3d244b9d4949/events.out.tfevents.1725248212.3d244b9d4949.6417.2 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:cdb120e37425920bb1abf61a6e7bf6fe0f5de585b95e0c07ae7873770c7b9ec2
+ size 4882
runs/Sep02_03-36-42_3d244b9d4949/events.out.tfevents.1725248289.3d244b9d4949.6417.3 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:06d30f86e18d03018e9e5028c7af5b9060dc8d53b1092ac09ddeb8a1b4bb7029
+ size 4882
runs/Sep02_03-36-42_3d244b9d4949/events.out.tfevents.1725248490.3d244b9d4949.6417.4 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:20290de0dfded3a60523ff224bc04cf360827559b11790be05ffb6ae988ce571
+ size 4882
runs/Sep02_03-45-12_3d244b9d4949/events.out.tfevents.1725248722.3d244b9d4949.6417.10 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ef8b4142178f29922ff15af5f52fcd5cd99d4deb9b21eedd6097f3ff92c75e4d
+ size 11256
training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f86f8444eab9c95536c114323caefb5df5c77ab4a1d2e5b600052ec7ed6968b0
+ size 5112
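
`training_args.bin` is the pickled `TrainingArguments` object for this run. One way to inspect it, together with a sketch of the settings implied by the card's hyperparameter list (the `output_dir` is a guess, and anything not listed is assumed to be a Trainer default, e.g. Adam betas=(0.9, 0.999) and epsilon=1e-08):

```python
import torch
from transformers import TrainingArguments

# Inspect the serialized arguments from this commit
# (newer PyTorch versions need weights_only=False to unpickle arbitrary objects).
saved_args = torch.load("training_args.bin", weights_only=False)
print(saved_args)

# Sketch of the settings implied by the model card; output_dir is assumed.
training_args = TrainingArguments(
    output_dir="ViT_beans",
    learning_rate=2e-4,
    per_device_train_batch_size=16,
    per_device_eval_batch_size=8,
    seed=42,
    lr_scheduler_type="linear",
    num_train_epochs=4,
    fp16=True,  # "Native AMP" mixed-precision training
)
```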