# NOTE: the lines below were web-page scrape residue (hosting-page status,
# file size, commit hashes, and a line-number gutter) accidentally captured
# with the source; they are commented out so the module parses as Python.
# Spaces: Sleeping / Sleeping — File size: 3,930 Bytes
# 5d460ce c16c201 5d460ce c16c201 5d460ce 6bd27ab 5d460ce f3555c8 5d460ce
# 96037a8 8cc439d b26fc68 (commit-hash gutter, truncated)
import pytest
from pathlib import Path
from typing import List
import numpy as np
from PIL import Image
import torch
import torchvision.transforms as tvtfms
from fastai.vision.data import PILImage
import fastai.vision.augment as fastai_aug
from deployment.transforms import resized_crop_pad, gpu_crop
# Root directory of the CUB-200 birds dataset (presumably relative to the
# repository root — confirm against how the tests are launched).
DATA_PATH = "data/200-bird-species-with-11788-images"
def get_birds_images(path: Path) -> List[Path]:
    """Return the absolute path of every image listed in ``images.txt``.

    Each non-blank line of ``<path>/images.txt`` has the CUB-200-2011 form
    ``<image_id> <relative_path>``; the relative path is joined onto
    ``<path>/images``.

    Args:
        path: Dataset root directory containing ``images.txt`` and ``images/``.

    Returns:
        A list of resolved ``Path`` objects, one per non-blank line.
        (The original annotation said ``List[str]``, but the values have
        always been ``Path`` objects.)
    """
    # Hoist the resolve() out of the loop — it is loop-invariant.
    root = path.resolve() / "images"
    with open(path / "images.txt", "r") as file:
        # Iterate the file lazily; skip blank lines, which would otherwise
        # raise IndexError on split()[1]. Field 0 is the numeric image id.
        return [root / line.split()[1] for line in file if line.strip()]
class TestTransforms:
    """Verify that the custom deployment transforms reproduce fastai's
    validation-time (``split_idx=1``) augmentations exactly, so the
    exported model can be served without a fastai dependency.
    """

    # Index of the dataset image every test operates on.
    im_idx = 510

    @pytest.fixture
    def img_paths(self) -> List[Path]:
        """Paths of all CUB-200-2011 images."""
        path = Path(DATA_PATH) / "CUB_200_2011"
        return get_birds_images(path.resolve())

    @pytest.fixture
    def im_fastai(self, img_paths: List[Path]) -> PILImage:
        """The sample image opened through fastai."""
        fname = img_paths[self.im_idx]
        return PILImage.create(fname)

    @pytest.fixture
    def im_pil(self, img_paths: List[Path]) -> Image.Image:
        """The sample image opened through plain Pillow."""
        fname = img_paths[self.im_idx]
        with Image.open(fname) as im:
            # Force Pillow's lazy decode so the file handle can be closed
            # here instead of leaking past the fixture.
            im.load()
            return im

    def test_image_fastai_equals_pillow(
        self, im_fastai: PILImage, im_pil: Image.Image
    ):
        """Both loaders must yield pixel-identical arrays."""
        assert (np.array(im_fastai) == np.array(im_pil)).all()

    # NOTE: RandomResizedCrop is not exactly equal to CropPad on the
    # validation split, so that comparison is intentionally not tested.

    def test_random_resized_crop_equals_custom_resized_crop_pad(
        self, im_fastai: PILImage, im_pil: Image.Image
    ):
        """resized_crop_pad must match fastai's RandomResizedCrop at
        validation time."""
        crop_rrc = fastai_aug.RandomResizedCrop((460, 460))
        assert (
            np.array(crop_rrc(im_fastai, split_idx=1))
            == np.array(resized_crop_pad(im_pil, (460, 460)))
        ).all()

    def test_flip_equals_custom_gpu_crop(
        self, im_fastai: PILImage, im_pil: Image.Image
    ):
        """gpu_crop must match fastai's Flip on the validation split
        (where no random flip is expected to be applied)."""
        # apply flip augmentation on validation
        tt_fastai = fastai_aug.ToTensor()
        i2f_fastai = fastai_aug.IntToFloatTensor()
        flip = fastai_aug.Flip(size=(224, 224))
        result_im_fastai = flip(
            i2f_fastai(tt_fastai(im_fastai).unsqueeze(0)), split_idx=1
        )
        # apply custom gpu crop
        tt_torch = tvtfms.ToTensor()
        result_im_tv = gpu_crop(tt_torch(im_pil).unsqueeze(0), size=(224, 224))
        # allclose first for a tolerant signal, then exact equality.
        assert torch.allclose(result_im_fastai, result_im_tv)
        assert (result_im_fastai == result_im_tv).all()

    def test_fastai_transforms_equals_custom(
        self, im_fastai: PILImage, im_pil: Image.Image
    ):
        """The full validation pipeline (crop -> tensor -> flip ->
        brightness -> normalize) must match the custom torchvision one."""
        # fastai transforms
        crop_rrc = fastai_aug.RandomResizedCrop((460, 460))
        tt_fastai = fastai_aug.ToTensor()
        i2f_fastai = fastai_aug.IntToFloatTensor()
        flip = fastai_aug.Flip(size=(224, 224))
        brightness = fastai_aug.Brightness()
        norm_fastai = fastai_aug.Normalize.from_stats(
            *fastai_aug.imagenet_stats, cuda=False
        )
        # custom transforms (ImageNet mean/std hard-coded to match
        # fastai_aug.imagenet_stats)
        tt_torch = tvtfms.ToTensor()
        norm_torch = tvtfms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
        # apply all fastai augmentations on validation (transformations)
        batch_im_fastai = tt_fastai(crop_rrc(im_fastai, split_idx=1)).unsqueeze(0)
        result_im_fastai = norm_fastai(
            brightness(flip(i2f_fastai(batch_im_fastai), split_idx=1), split_idx=1)
        )
        # apply all custom transformations
        batch_im_tv = tt_torch(resized_crop_pad(im_pil, (460, 460))).unsqueeze(0)
        result_im_tv = norm_torch(gpu_crop(batch_im_tv, size=(224, 224)))
        assert torch.allclose(result_im_fastai, result_im_tv)
        assert (result_im_fastai == result_im_tv).all()