9 changes: 9 additions & 0 deletions .gitignore
@@ -0,0 +1,9 @@
+Data
+results
+cpp_wrappers/cpp_subsampling/build
+*.so
+__pycache__
+.vscode
+tensorboard
+*.pyc
+*.ply
9 changes: 5 additions & 4 deletions models/network_blocks.py
@@ -1038,19 +1038,21 @@ def assemble_CNN_blocks(inputs, config, dropout_prob):

     # Loop over consecutive blocks
     block_in_layer = 0
+    tf_scope_layer = 0
     for block_i, block in enumerate(config.architecture):
 
         # Detect change to next layer
         if np.any([tmp in block for tmp in ['pool', 'strided', 'upsample', 'global']]):
 
             # Save this layer features
             F += [features]
+            block_in_layer = 0
+            tf_scope_layer += 1
 
             # Detect upsampling block to stop
             if 'upsample' in block:
                 break
 
-        with tf.variable_scope('layer_{:d}/{:s}_{:d}'.format(layer, block, block_in_layer)):
+        with tf.variable_scope('layer_{:d}/{:s}_{:d}'.format(tf_scope_layer, block, block_in_layer)):
 
             # Get the function for this layer
             block_ops = get_block_ops(block)
@@ -1074,8 +1076,7 @@ def assemble_CNN_blocks(inputs, config, dropout_prob):
             layer += 1
             r *= 2
             fdim *= 2
-            block_in_layer = 0
 
 
         # Save feature vector after global pooling
         if 'global' in block:
             # Save this layer features
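Note on the change above: variable scopes are now named with a dedicated tf_scope_layer counter instead of the layer variable, and the block_in_layer reset is moved into the layer-change detection. Every detected layer boundary ('pool', 'strided', 'upsample', 'global') advances the scope index and resets the in-layer block index, while layer itself still only advances for pooling/strided blocks (it also drives the radius and feature-dimension updates). The snippet below is a minimal, standalone sketch of the resulting naming scheme (TF1-style tf.variable_scope); the architecture list and the empty block body are illustrative stand-ins, not the repository's code.

# Minimal sketch of the scope-naming scheme after the change (assumed TF1 API).
# The architecture list is an illustrative stand-in, not the repo's config.
import tensorflow as tf

architecture = ['simple', 'resnetb', 'resnetb_strided', 'resnetb', 'global_average']

tf_scope_layer = 0
block_in_layer = 0
for block in architecture:

    # A new layer starts at pooling / strided / upsampling / global blocks
    if any(tmp in block for tmp in ['pool', 'strided', 'upsample', 'global']):
        block_in_layer = 0
        tf_scope_layer += 1
        if 'upsample' in block:
            break

    # Scope names become 'layer_0/simple_0', 'layer_0/resnetb_1',
    # 'layer_1/resnetb_strided_0', 'layer_1/resnetb_1', 'layer_2/global_average_0'
    with tf.variable_scope('layer_{:d}/{:s}_{:d}'.format(tf_scope_layer, block, block_in_layer)):
        pass  # the real code builds the block ops here

    block_in_layer += 1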
4 changes: 3 additions & 1 deletion utils/trainer.py
@@ -101,7 +101,9 @@ def __init__(self, model, restore_snap=None):
             restorer = tf.train.Saver(restore_vars)
             restorer.restore(self.sess, restore_snap)
             print("Model restored.")
 
 
+        file_writer = tf.summary.FileWriter(join(model.saving_path,"tensorboard"), self.sess.graph)
+        file_writer.close()
     def add_train_ops(self, model):
         """
         Add training ops on top of the model
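Note on the trainer change: the two added lines dump the session graph into a "tensorboard" subfolder of the model's saving path so the network can be inspected in TensorBoard (which also explains the new tensorboard entry in .gitignore). Below is a minimal standalone sketch of the same TF1 pattern; the log directory is illustrative, not the PR's model.saving_path.

# Minimal sketch (assumed TF1 API): write the graph once, then browse it in TensorBoard.
import tensorflow as tf
from os.path import join

logdir = join('results', 'tensorboard')   # illustrative path, not from the PR

with tf.Session() as sess:
    file_writer = tf.summary.FileWriter(logdir, sess.graph)
    file_writer.close()

# Afterwards, view the graph with:
#   tensorboard --logdir results/tensorboard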