# create_data.py — 116 lines (85 loc) · 3.26 KB
import os
import sys
import numpy as np
import tensorflow as tf
from scipy import signal
from numpy.fft import fft as FFT;
from numpy.fft import ifft as IFFT;
from numpy.fft import fftfreq as FREQS;
# Fractions of the shuffled dataset assigned to each split:
# 85% train, 7.5% validation, 7.5% test.
TRAIN_FRAC = 0.85
VAL_FRAC = 0.925

files = []
labels = []
imageDir = "../../data/"

# Collect every matching data file and its delay label.  The label is
# parsed from the first line of the file, which is assumed to have the
# form "key=value" — TODO confirm against the data format.
for filename in os.listdir(imageDir):
    if not filename.startswith("nfibers109_617"):
        continue
    try:
        delay = 0
        with open(imageDir + filename) as f:
            for line in f:
                delay = float(line.split("=")[1])
                break
        labels.append(delay)
        files.append(filename)
    except (OSError, ValueError, IndexError):
        # Skip files that cannot be read or whose first line is not a
        # parsable "key=value" pair.  (Narrowed from a bare `except:`,
        # which also swallowed KeyboardInterrupt/SystemExit.)
        continue

# Shuffle (file, label) pairs together so each file keeps its label,
# then cut the shuffled list into train / val / test splits.
pairs = list(zip(files, labels))
np.random.shuffle(pairs)
files, labels = zip(*pairs)

n = len(files)
train_end = int(TRAIN_FRAC * n)
val_end = int(VAL_FRAC * n)
train_files = files[:train_end]
train_labels = labels[:train_end]
val_files = files[train_end:val_end]
val_labels = labels[train_end:val_end]
test_files = files[val_end:]
test_labels = labels[val_end:]
def load_image(filename, image_dir=None):
    """Load one whitespace-delimited ASCII image and return FFT features.

    Parameters
    ----------
    filename : str
        Name of the file inside ``image_dir``.
    image_dir : str, optional
        Directory prefix to read from.  Defaults to the module-level
        ``imageDir`` (kept for backward compatibility with existing
        callers that pass only ``filename``).

    Returns
    -------
    numpy.ndarray, shape (rows, 128, 2)
        Channel 0: FFT magnitude of the first 128 frequency bins per row.
        Channel 1: first differences of the unwrapped FFT phase, first
        128 bins per row.
    """
    if image_dir is None:
        image_dir = imageDir
    image = np.loadtxt(image_dir + filename)
    features = np.zeros((image.shape[0], 128, 2))
    spectrum = FFT(image, axis=1)
    features[:, :, 0] = np.abs(spectrum)[:, :128]
    # NOTE(review): np.diff shortens axis 1 by one sample, so the input
    # image must have at least 129 columns for this slice to fill all
    # 128 bins (128 columns suffice for the magnitude channel only).
    features[:, :, 1] = np.diff(np.unwrap(np.angle(spectrum), axis=1), axis=1)[:, :128]
    return features
def _float_feature(value):
    """Wrap a scalar in a TF ``Feature`` proto holding a one-element float list."""
    float_list = tf.train.FloatList(value=[value])
    return tf.train.Feature(float_list=float_list)
def _bytes_feature(value):
    """Wrap a bytes object in a TF ``Feature`` proto holding a one-element bytes list."""
    bytes_list = tf.train.BytesList(value=[value])
    return tf.train.Feature(bytes_list=bytes_list)
def _write_tfrecords(out_path, file_list, label_list, split_name):
    """Serialize one split's (image, label) pairs into a TFRecord file.

    Feature keys are ``"<split_name>/image"`` and ``"<split_name>/label"``,
    preserving the key scheme of the original per-split loops so existing
    record readers keep working.

    Parameters
    ----------
    out_path : str
        Destination TFRecord filename.
    file_list, label_list : sequence
        Parallel sequences of image filenames and float delay labels.
    split_name : str
        Split identifier ('train', 'val', or 'test'); also used to
        build the progress message ('Train data: ...', etc.).
    """
    writer = tf.python_io.TFRecordWriter(out_path)
    try:
        for i in range(len(file_list)):
            if not i % 1000:
                # Periodic progress report, flushed so it appears promptly
                # when stdout is redirected to a log file.
                print('{} data: {}/{}'.format(split_name.capitalize(), i, len(file_list)))
                sys.stdout.flush()
            img = load_image(file_list[i])
            label = label_list[i]
            feature = {
                split_name + '/label': _float_feature(label),
                split_name + '/image': _bytes_feature(tf.compat.as_bytes(img.tostring())),
            }
            example = tf.train.Example(features=tf.train.Features(feature=feature))
            writer.write(example.SerializeToString())
    finally:
        # Close in a finally block so the record file is flushed even when
        # loading an image fails part-way through (the original leaked the
        # writer on error).
        writer.close()
    sys.stdout.flush()


_write_tfrecords('train.tfrecords', train_files, train_labels, 'train')
_write_tfrecords('val.tfrecords', val_files, val_labels, 'val')
_write_tfrecords('test.tfrecords', test_files, test_labels, 'test')