bhimrazy committed
Commit 2c1d9fe
1 Parent(s): b322c8a

updates config and removes old logs

Files changed (34)
  1. conf/config.yaml +3 -3
  2. logs/version_0/events.out.tfevents.1711601470.ip-10-192-12-81.130118.0 +0 -3
  3. logs/version_0/hparams.yaml +0 -18
  4. logs/version_1/events.out.tfevents.1711602966.ip-10-192-12-81.217430.0 +0 -3
  5. logs/version_1/hparams.yaml +0 -18
  6. logs/version_10/events.out.tfevents.1711608589.ip-10-192-12-81.551679.0 +0 -3
  7. logs/version_10/hparams.yaml +0 -3
  8. logs/version_11/events.out.tfevents.1711608649.ip-10-192-12-81.554365.0 +0 -3
  9. logs/version_11/hparams.yaml +0 -3
  10. logs/version_12/events.out.tfevents.1711612377.ip-10-192-12-81.707133.0 +0 -3
  11. logs/version_12/hparams.yaml +0 -3
  12. logs/version_13/events.out.tfevents.1711612773.ip-10-192-12-81.737721.0 +0 -3
  13. logs/version_13/hparams.yaml +0 -3
  14. logs/version_14/events.out.tfevents.1711614124.ip-10-192-12-81.860231.0 +0 -3
  15. logs/version_14/hparams.yaml +0 -3
  16. logs/version_15/events.out.tfevents.1711614171.ip-10-192-12-81.862518.0 +0 -3
  17. logs/version_15/hparams.yaml +0 -3
  18. logs/version_2/events.out.tfevents.1711604049.ip-10-192-12-81.290635.0 +0 -3
  19. logs/version_2/hparams.yaml +0 -3
  20. logs/version_3/events.out.tfevents.1711604094.ip-10-192-12-81.292608.0 +0 -3
  21. logs/version_3/hparams.yaml +0 -3
  22. logs/version_4/events.out.tfevents.1711605114.ip-10-192-12-81.360257.0 +0 -3
  23. logs/version_4/hparams.yaml +0 -3
  24. logs/version_5/events.out.tfevents.1711605139.ip-10-192-12-81.361991.0 +0 -3
  25. logs/version_5/hparams.yaml +0 -3
  26. logs/version_6/events.out.tfevents.1711605184.ip-10-192-12-81.365268.0 +0 -3
  27. logs/version_6/hparams.yaml +0 -3
  28. logs/version_7/events.out.tfevents.1711605904.ip-10-192-12-81.414480.0 +0 -3
  29. logs/version_7/hparams.yaml +0 -3
  30. logs/version_8/events.out.tfevents.1711607210.ip-10-192-12-81.474728.0 +0 -3
  31. logs/version_8/hparams.yaml +0 -3
  32. logs/version_9/events.out.tfevents.1711607507.ip-10-192-12-81.481863.0 +0 -3
  33. logs/version_9/hparams.yaml +0 -3
  34. train.py +2 -1
conf/config.yaml CHANGED
@@ -5,11 +5,11 @@ val_csv_path: data/diabetic-retinopathy-dataset/val.csv
 # experiment:
 seed: 42
 batch_size: 128
-num_workers: 2
+num_workers: 32
 use_class_weighting: false
 use_weighted_sampler: false # class weighting and weighted sampler are mutually exclusive
-model_name: "resnet50"
-max_epochs: 50
+model_name: "densenet169"
+max_epochs: 20
 image_size: 224
 learning_rate: 3e-4
 use_scheduler: true
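For reference, the sketch below shows how a Hydra entry point like the repo's train(cfg: DictConfig) would pick up the edited keys; the decorator arguments and the printed fields are illustrative assumptions, not code from this commit.

# Minimal sketch, assuming conf/config.yaml is resolved through @hydra.main.
import hydra
from omegaconf import DictConfig


@hydra.main(config_path="conf", config_name="config", version_base=None)
def train(cfg: DictConfig) -> None:
    # The updated values land here: 32 dataloader workers, a densenet169
    # backbone, and a 20-epoch budget instead of 50.
    print(cfg.num_workers, cfg.model_name, cfg.max_epochs)


if __name__ == "__main__":
    train()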
logs/version_0/events.out.tfevents.1711601470.ip-10-192-12-81.130118.0 DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:6cd404cb2c8ef4a8b6031b8cc87f0866bdef4660a0502b155908a99fca84c6cf
-size 58766
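Note that each deleted events.out.tfevents entry here and below is a three-line Git LFS pointer (spec version, object SHA-256, byte size) rather than the TensorBoard log itself; removing the pointer drops the file from the repo tree, while the binary object stays in LFS storage unless it is pruned separately.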
logs/version_0/hparams.yaml DELETED
@@ -1,18 +0,0 @@
-class_weights: !!python/object/apply:torch._utils._rebuild_tensor_v2
-- !!python/object/apply:torch.storage._load_from_bytes
-  - !!binary |
-    gAKKCmz8nEb5IGqoUBkugAJN6QMugAJ9cQAoWBAAAABwcm90b2NvbF92ZXJzaW9ucQFN6QNYDQAA
-    AGxpdHRsZV9lbmRpYW5xAohYCgAAAHR5cGVfc2l6ZXNxA31xBChYBQAAAHNob3J0cQVLAlgDAAAA
-    aW50cQZLBFgEAAAAbG9uZ3EHSwR1dS6AAihYBwAAAHN0b3JhZ2VxAGN0b3JjaApGbG9hdFN0b3Jh
-    Z2UKcQFYCQAAADE0NzA4NTcyOHECWAMAAABjcHVxA0sFTnRxBFEugAJdcQBYCQAAADE0NzA4NTcy
-    OHEBYS4FAAAAAAAAAERciz6DDzhAg+OpP+jQAEGj2x5B
-- 0
-- !!python/tuple
-  - 5
-- !!python/tuple
-  - 1
-- false
-- !!python/object/apply:collections.OrderedDict
-  - []
-learning_rate: 0.0003
-num_classes: 5
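The version_0 and version_1 hparams.yaml files being deleted embed class_weights as a pickled torch tensor, which is what the !!python/object/apply:torch._utils._rebuild_tensor_v2 block above is. Assuming the hyperparameters were captured with Lightning's save_hyperparameters(), one way to keep the file human-readable is to store plain Python values instead of a Tensor, as in the sketch below; the DRModel name and its signature are hypothetical, not taken from this repo.

# Hedged sketch: store class weights as a plain list so hparams.yaml stays
# readable YAML. DRModel and its arguments are assumptions for illustration.
import torch
import lightning as L


class DRModel(L.LightningModule):
    def __init__(self, class_weights=None, learning_rate=3e-4, num_classes=5):
        super().__init__()
        if isinstance(class_weights, torch.Tensor):
            class_weights = class_weights.tolist()  # avoid pickling a Tensor into YAML
        self.save_hyperparameters(
            {
                "class_weights": class_weights,
                "learning_rate": learning_rate,
                "num_classes": num_classes,
            }
        )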
logs/version_1/events.out.tfevents.1711602966.ip-10-192-12-81.217430.0 DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:9282b1484cb133a2ca348c08e968192b61129545da3de0a98e9842362530fd02
-size 58766

logs/version_1/hparams.yaml DELETED
@@ -1,18 +0,0 @@
-class_weights: !!python/object/apply:torch._utils._rebuild_tensor_v2
-- !!python/object/apply:torch.storage._load_from_bytes
-  - !!binary |
-    gAKKCmz8nEb5IGqoUBkugAJN6QMugAJ9cQAoWBAAAABwcm90b2NvbF92ZXJzaW9ucQFN6QNYDQAA
-    AGxpdHRsZV9lbmRpYW5xAohYCgAAAHR5cGVfc2l6ZXNxA31xBChYBQAAAHNob3J0cQVLAlgDAAAA
-    aW50cQZLBFgEAAAAbG9uZ3EHSwR1dS6AAihYBwAAAHN0b3JhZ2VxAGN0b3JjaApGbG9hdFN0b3Jh
-    Z2UKcQFYCQAAADE1OTE5MDc4NHECWAMAAABjcHVxA0sFTnRxBFEugAJdcQBYCQAAADE1OTE5MDc4
-    NHEBYS4FAAAAAAAAAERciz6DDzhAg+OpP+jQAEGj2x5B
-- 0
-- !!python/tuple
-  - 5
-- !!python/tuple
-  - 1
-- false
-- !!python/object/apply:collections.OrderedDict
-  - []
-learning_rate: 0.001
-num_classes: 5
logs/version_10/events.out.tfevents.1711608589.ip-10-192-12-81.551679.0 DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:63066cc7dc064f7d16c28bef5d4a921e74d7bb9d1cb9612d2a7d6ed645ba10ec
-size 503

logs/version_10/hparams.yaml DELETED
@@ -1,3 +0,0 @@
-class_weights: null
-learning_rate: 0.0003
-num_classes: 5
logs/version_11/events.out.tfevents.1711608649.ip-10-192-12-81.554365.0 DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:83d7f17af1cc5053eb3f8aee7a8fd7068e1a535763dd1502bf01b0e9c03792c4
-size 85700

logs/version_11/hparams.yaml DELETED
@@ -1,3 +0,0 @@
-class_weights: null
-learning_rate: 0.0003
-num_classes: 5
logs/version_12/events.out.tfevents.1711612377.ip-10-192-12-81.707133.0 DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:2545b280db3e9b22d13c544f0a6340ccbe8aecb71720dc8b111eb05e4ecbf26f
-size 104769

logs/version_12/hparams.yaml DELETED
@@ -1,3 +0,0 @@
-class_weights: null
-learning_rate: 0.0003
-num_classes: 5
logs/version_13/events.out.tfevents.1711612773.ip-10-192-12-81.737721.0 DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:91e68ae4970ba86f132b81f8297fe22eaf8c1cb9e5d4aa5fe2aecac3db98b240
-size 333307

logs/version_13/hparams.yaml DELETED
@@ -1,3 +0,0 @@
-class_weights: null
-learning_rate: 0.0003
-num_classes: 5
logs/version_14/events.out.tfevents.1711614124.ip-10-192-12-81.860231.0 DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:e7984ee7cda4da73a53a94b997414285523d10a540da1198b534f654d914ef9b
-size 503

logs/version_14/hparams.yaml DELETED
@@ -1,3 +0,0 @@
-class_weights: null
-learning_rate: 0.0003
-num_classes: 5
logs/version_15/events.out.tfevents.1711614171.ip-10-192-12-81.862518.0 DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:54d63f045df566ba8eb592871cfd9dfb4ae11b10fbf1de95bcfb8548e47c8739
-size 45065

logs/version_15/hparams.yaml DELETED
@@ -1,3 +0,0 @@
-class_weights: null
-learning_rate: 0.0003
-num_classes: 5
logs/version_2/events.out.tfevents.1711604049.ip-10-192-12-81.290635.0 DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:5237f21ce994701adc64081a6d5d7c84a5012901c6be6c84feae54d3376610f3
-size 503

logs/version_2/hparams.yaml DELETED
@@ -1,3 +0,0 @@
-class_weights: null
-learning_rate: 0.001
-num_classes: 5
logs/version_3/events.out.tfevents.1711604094.ip-10-192-12-81.292608.0 DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:2d8f4a6a61b20cdca5efffb99d7b736c8de986b934177ce5028d87ad180e589c
-size 48476

logs/version_3/hparams.yaml DELETED
@@ -1,3 +0,0 @@
-class_weights: null
-learning_rate: 0.001
-num_classes: 5
logs/version_4/events.out.tfevents.1711605114.ip-10-192-12-81.360257.0 DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:74949ea565c0abb5d5820d0c39d3d60db134c35c5a3c99a5b3c26ec9ab8afbf3
-size 503

logs/version_4/hparams.yaml DELETED
@@ -1,3 +0,0 @@
-class_weights: null
-learning_rate: 0.001
-num_classes: 5
logs/version_5/events.out.tfevents.1711605139.ip-10-192-12-81.361991.0 DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:79614e16c5db8eadff6ec45e3a66eb24481274275f53897bf63ffdec788dd2e3
-size 503

logs/version_5/hparams.yaml DELETED
@@ -1,3 +0,0 @@
-class_weights: null
-learning_rate: 0.001
-num_classes: 5
logs/version_6/events.out.tfevents.1711605184.ip-10-192-12-81.365268.0 DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:5a6812efb6904038e4256734c42694ebd1da07569bcca052f28cf57869954e93
-size 85700

logs/version_6/hparams.yaml DELETED
@@ -1,3 +0,0 @@
-class_weights: null
-learning_rate: 0.001
-num_classes: 5
logs/version_7/events.out.tfevents.1711605904.ip-10-192-12-81.414480.0 DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:b1798d9a198441d90ae3ea3b1aa411c8e558e9ed03ebcb11f770f43ec7111866
-size 85700

logs/version_7/hparams.yaml DELETED
@@ -1,3 +0,0 @@
-class_weights: null
-learning_rate: 0.0003
-num_classes: 5
logs/version_8/events.out.tfevents.1711607210.ip-10-192-12-81.474728.0 DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:40e2fef7a0a941a74909ac0d74610257cae447b8ada387b60286f58b545f0d58
-size 9933

logs/version_8/hparams.yaml DELETED
@@ -1,3 +0,0 @@
-class_weights: null
-learning_rate: 0.0003
-num_classes: 5
logs/version_9/events.out.tfevents.1711607507.ip-10-192-12-81.481863.0 DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:23a0745c95a5f3cc0ba4ab59b0adc600367067440a430db9ff45add5c2b12483
-size 114231

logs/version_9/hparams.yaml DELETED
@@ -1,3 +0,0 @@
-class_weights: null
-learning_rate: 0.0003
-num_classes: 5
train.py CHANGED
@@ -2,6 +2,7 @@ from os.path import join
 
 import hydra
 import lightning as L
+import torch
 from lightning.pytorch.callbacks import (
     EarlyStopping,
     LearningRateMonitor,
@@ -21,7 +22,7 @@ def train(cfg: DictConfig) -> None:
 
     # Seed everything for reproducibility
     L.seed_everything(cfg.seed, workers=True)
-    # torch.set_float32_matmul_precision("high")
+    torch.set_float32_matmul_precision("high")
 
     # Initialize DataModule
     dm = DRDataModule(
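The newly enabled torch.set_float32_matmul_precision("high") trades a little float32 matmul precision for speed by allowing TF32 (or comparable reduced-precision) kernels on GPUs that support them; Lightning typically prints a suggestion to set this when it detects Tensor Core hardware. A standalone illustration, independent of the repo's code:

# Standalone illustration (not repo code): with "high", float32 matmuls may
# run on TF32 tensor cores where available, at slightly reduced precision.
import torch

torch.set_float32_matmul_precision("high")
print(torch.get_float32_matmul_precision())  # "high"

device = "cuda" if torch.cuda.is_available() else "cpu"
a = torch.randn(1024, 1024, device=device)
b = torch.randn(1024, 1024, device=device)
c = a @ b  # uses the faster TF32 path on Ampere-or-newer GPUs; plain FP32 elsewhere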