import lightning as L
import torch
from lightning.pytorch.callbacks import ModelCheckpoint, LearningRateMonitor
from lightning.pytorch.loggers import TensorBoardLogger

from src.dataset import DRDataModule
from src.model import DRModel

# Seed everything for reproducibility (workers=True also seeds dataloader workers)
SEED = 42
L.seed_everything(SEED, workers=True)
torch.set_float32_matmul_precision("high")
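# "high" allows float32 matmuls to use TF32 tensor cores on GPUs that
# support them, trading a little precision for a significant speedup.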


# Init DataModule
dm = DRDataModule(batch_size=96, num_workers=8)
dm.setup()
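# setup() is called eagerly so that dm.num_classes and dm.class_weights
# (assumed to be computed inside DRDataModule.setup) exist before the
# model is built; Trainer.fit will call setup("fit") again later, so the
# method should be safe to run twice.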

# Init model from datamodule's attributes
model = DRModel(
    num_classes=dm.num_classes, learning_rate=3e-5, class_weights=dm.class_weights
)

# Init logger
logger = TensorBoardLogger("lightning_logs", name="dr_model")

# Init checkpoint callback: keep the 3 best checkpoints by validation loss
checkpoint_callback = ModelCheckpoint(
    monitor="val_loss",
    mode="min",
    save_top_k=3,
    dirpath="checkpoints",
)
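# The monitored key must actually be logged, i.e. DRModel is expected to
# call self.log("val_loss", ...) during validation; checkpoints are
# written to ./checkpoints with Lightning's default filenames.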

# Init LearningRateMonitor
lr_monitor = LearningRateMonitor(logging_interval="step")
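# Logs the current learning rate to TensorBoard at every optimizer step,
# which is mainly useful when configure_optimizers also returns an LR
# scheduler.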

# Init trainer
trainer = L.Trainer(
    max_epochs=20,
    accelerator="auto",
    devices="auto",
    logger=logger,
    callbacks=[checkpoint_callback, lr_monitor],
    enable_checkpointing=True,
)
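# accelerator="auto" / devices="auto" pick whatever hardware is available
# (CUDA, MPS, or CPU). enable_checkpointing=True is already Lightning's
# default and is redundant given the explicit ModelCheckpoint above.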

# Pass the datamodule to trainer.fit so its dataloaders override the model's *_dataloader hooks :)
trainer.fit(model, dm)
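
# After training, the best checkpoint can be restored for evaluation.
# A minimal sketch (assuming DRModel calls self.save_hyperparameters() in
# __init__ so load_from_checkpoint can rebuild it without extra arguments):
#
#   best_model = DRModel.load_from_checkpoint(checkpoint_callback.best_model_path)
#   trainer.validate(best_model, datamodule=dm)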