Spaces:
Runtime error
Runtime error
Commit
·
309bbd2
1
Parent(s):
ed358b1
Create Y
Browse files
Y
ADDED
@@ -0,0 +1,29 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import numpy as np
|
2 |
+
import tensorflow as tf
|
3 |
+
from PIL import Image
|
4 |
+
|
5 |
+
|
6 |
+
def convert_to_tf_tensor(image: Image.Image):
    """Convert a PIL image to a batched TensorFlow tensor.

    Args:
        image: The PIL image to convert.

    Returns:
        A ``tf.Tensor`` holding the image data with a leading batch
        dimension of size 1 prepended.
    """
    # NOTE: the annotation is `Image.Image` (the class), not `Image`
    # (`from PIL import Image` binds the *module*, which is not a valid type).
    np_image = np.array(image)
    tf_image = tf.convert_to_tensor(np_image)
    # `expand_dims()` is used to add a batch dimension since
    # the TF augmentation layers operate on batched inputs.
    return tf.expand_dims(tf_image, 0)
|
12 |
+
|
13 |
+
|
14 |
+
def preprocess_train(example_batch):
    """Apply train_transforms across a batch."""
    augmented = []
    for img in example_batch["image"]:
        # Force RGB, add a batch dim, then run the training augmentation.
        batched = convert_to_tf_tensor(img.convert("RGB"))
        augmented.append(train_data_augmentation(batched))
    # Drop the batch dim again; `tf.transpose` with no perm reverses the
    # remaining axis order for each image.
    example_batch["pixel_values"] = [tf.transpose(tf.squeeze(a)) for a in augmented]
    return example_batch
|
21 |
+
|
22 |
+
|
23 |
+
def preprocess_val(example_batch):
    """Apply val_transforms across a batch."""
    pixel_values = []
    for img in example_batch["image"]:
        # Force RGB, add a batch dim, then run the validation augmentation.
        out = val_data_augmentation(convert_to_tf_tensor(img.convert("RGB")))
        # Drop the batch dim again; `tf.transpose` with no perm reverses the
        # remaining axis order.
        pixel_values.append(tf.transpose(tf.squeeze(out)))
    example_batch["pixel_values"] = pixel_values
    return example_batch
|