-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathlayer.py
More file actions
43 lines (32 loc) · 1.63 KB
/
layer.py
File metadata and controls
43 lines (32 loc) · 1.63 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
import tensorflow as tf
slim = tf.contrib.slim
def conv2d(net, output_filters, kernel, stride, mode='REFLECT', relu=False):
    '''
    Pad-then-convolve: reflection-pad (by default) the spatial dims so the
    convolution itself needs no zero padding, then apply a slim conv layer.

    net: 4-D tensor in NHWC layout (batch, height, width, channels).
    output_filters: number of output channels.
    kernel: square kernel size; also determines the pad width (kernel // 2).
    stride: convolution stride.
    mode: tf.pad mode ('REFLECT', 'SYMMETRIC', or 'CONSTANT').
    relu: apply a ReLU activation after the convolution when True.
    '''
    # BUG FIX: `kernel / 2` is float division under Python 3 and tf.pad
    # requires integer paddings; use floor division.
    pad = kernel // 2
    net = tf.pad(net, [[0, 0], [pad, pad], [pad, pad], [0, 0]], mode=mode)
    # BUG FIX: slim.conv2d defaults activation_fn to tf.nn.relu, so the
    # original relu=False branch still applied a ReLU. Pass None explicitly
    # so relu=False really yields a linear conv (as the residual blocks
    # below require before the skip-connection add).
    return slim.conv2d(net, output_filters, kernel, stride=stride,
                       activation_fn=tf.nn.relu if relu else None)
def resize_conv2d(net, output_filters, kernel, stride, training):
    '''
    Upsample by nearest-neighbor resize, then convolve.

    This replaces a transposed convolution to avoid checkerboard artifacts;
    see http://distill.pub/2016/deconv-checkerboard/

    Gradient propagation through tf.image.resize_images needs a statically
    known shape, which is only available at training time (fixed image
    size), hence the explicit `training` flag selecting static vs. dynamic
    shape lookup.
    '''
    if training:
        # Static (Python int) spatial dims, known at graph-build time.
        in_height = net.get_shape()[1].value
        in_width = net.get_shape()[2].value
    else:
        # Dynamic dims resolved at run time for arbitrary input sizes.
        dyn_shape = tf.shape(net)
        in_height = dyn_shape[1]
        in_width = dyn_shape[2]
    scale = stride * 2
    net = tf.image.resize_images(net,
                                 [in_height * scale, in_width * scale],
                                 tf.image.ResizeMethod.NEAREST_NEIGHBOR)
    return conv2d(net, output_filters, kernel, stride)
def instance_norm(x):
    '''
    Instance normalization: normalize each (batch, channel) slice of x to
    zero mean and unit variance over its spatial dimensions (axes 1 and 2,
    NHWC layout). No learned scale/shift parameters are applied.
    '''
    # Small constant to avoid division by zero for constant inputs.
    epsilon = 1e-9
    mean, var = tf.nn.moments(x, [1, 2], keep_dims=True)
    # BUG FIX: tf.sub and tf.div were removed in TensorFlow 1.0 (renamed to
    # tf.subtract / tf.divide), and this file already targets TF >= 1.0
    # (tf.contrib.slim). Operator overloads work on every TF version.
    return (x - mean) / tf.sqrt(var + epsilon)
def residual_block(net, filters=128, kernel=3, stride=1, scope=None):
    '''
    Residual block: two stacked convolutions (the first ReLU-activated,
    the second linear) added back onto the input via an identity skip
    connection. Variables live under `scope` (default name 'residual').
    '''
    with tf.variable_scope(scope, 'residual'):
        conv_a = conv2d(net, filters, kernel, stride, relu=True)
        conv_b = conv2d(conv_a, filters, kernel, stride)
        return net + conv_b