-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy pathdata_loader.py
More file actions
95 lines (73 loc) · 3.11 KB
/
data_loader.py
File metadata and controls
95 lines (73 loc) · 3.11 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
import pickle
import numpy as np
import tensorflow as tf
import pdb
class DataLoader():
    """Load the CIFAR-10 dataset and build `tf.data` input pipelines.

    Expects ``config`` to provide:
      - config["input"]["data_path"]: directory holding the CIFAR-10 batch
        files. NOTE(review): batch names are appended by plain string
        concatenation, so this path must end with a path separator — confirm
        against the config files.
      - config["input"]["size"] / ["mean"] / ["std"]: resize and
        normalization parameters.
      - config["trainer"]["batch_size"] / ["buffer_size"].
    """
    def __init__(self, config):
        # Keep the parsed config; every pipeline parameter is read from it.
        self.config = config

    def unpickle(self, file):
        """Load one CIFAR batch file: a pickled dict with bytes keys.

        NOTE: pickle is only safe on trusted, locally downloaded dataset
        files — never point this at untrusted input.
        """
        with open(file, 'rb') as fo:
            # Renamed local from `dict` so the builtin is not shadowed.
            batch = pickle.load(fo, encoding='bytes')
        return batch

    def load_cifar10(self, data_path):
        """Read the five training batches and the test batch.

        Returns:
            (train_data, train_labels, test_data, test_labels) where the
            image arrays have shape (N, 32, 32, 3) (channels-last) and the
            label arrays are 1-D.
        """
        # adapted from https://luckydanny.blogspot.com/2016/07/load-cifar-10-dataset-in-python3.html
        # Collect all batches first and stack once — repeatedly vstack-ing
        # inside the loop copies the growing array every iteration.
        train_batches = []
        train_labels = []
        for i in range(1, 6):
            data_dict = self.unpickle(data_path + "data_batch_" + str(i))
            train_batches.append(data_dict[b'data'])
            train_labels += data_dict[b'labels']
        train_data = np.vstack(train_batches)

        test_data_dict = self.unpickle(data_path + "test_batch")
        test_data = test_data_dict[b'data']
        test_labels = np.array(test_data_dict[b'labels'])

        # Each row is a flat (3072,) vector laid out as 3x32x32 (CHW).
        # Reshape with -1 so any row count works (not just the full 50k/10k),
        # then move channels last (NHWC) for TensorFlow.
        train_data = train_data.reshape((-1, 3, 32, 32)).transpose(0, 2, 3, 1)
        train_labels = np.array(train_labels)
        test_data = test_data.reshape((-1, 3, 32, 32)).transpose(0, 2, 3, 1)
        return train_data, train_labels, test_data, test_labels

    def prepare_data(self):
        """Prepare shuffled/batched train and test loaders via the tf.data API."""
        X_train, y_train, X_test, y_test = self.load_cifar10(self.config["input"]["data_path"])
        self.len_data = X_train.shape[0]
        batch_size = self.config["trainer"]["batch_size"]
        buffer_size = self.config["trainer"]["buffer_size"]

        train_dataset = tf.data.Dataset.from_tensor_slices((X_train, y_train))
        # training=True (the default) keeps random-flip augmentation on.
        train_dataset = train_dataset.map(self.preprocess)
        train_loader = train_dataset.shuffle(buffer_size).batch(batch_size)

        test_dataset = tf.data.Dataset.from_tensor_slices((X_test, y_test))
        # Bug fix: the test split must NOT be augmented — random flips at
        # evaluation time distort the metrics. It is also left unshuffled so
        # evaluation order is deterministic.
        test_dataset = test_dataset.map(
            lambda image, label: self.preprocess(image, label, training=False))
        test_loader = test_dataset.batch(batch_size)
        return train_loader, test_loader

    def preprocess(self, image, label, training=True):
        """Cast, resize and normalize one image; one-hot encode its label.

        Args:
            image: image tensor (HWC, uint8 from load_cifar10).
            label: scalar integer class id.
            training: apply random-flip augmentation only when True.
                Defaults to True, preserving the previous behavior for
                existing callers.
        """
        # cast image to float32 type
        image = tf.cast(image, tf.float32)
        # resize to the configured input size
        image = tf.image.resize(image, (self.config["input"]["size"][0],
                                        self.config["input"]["size"][1]))
        # normalize according to training data stats
        image = (image - self.config["input"]["mean"]) / self.config["input"]["std"]
        # data augmentation — training split only
        if training:
            image = self.augment(image)
        # CIFAR-10 has exactly 10 classes
        label = tf.one_hot(label, 10)
        return image, label

    def augment(self, image):
        """Apply image augmentation (currently random flips only)."""
        return self._random_flip(image)

    def _random_flip(self, image):
        """Randomly flip the image left/right and up/down."""
        image = tf.image.random_flip_left_right(image)
        image = tf.image.random_flip_up_down(image)
        return image

    def __len__(self):
        """Number of training examples (set by prepare_data)."""
        return self.len_data