From 16653a48ae4238419f0e7465a5c4b0bf2e62f3f6 Mon Sep 17 00:00:00 2001
From: Nicolas
Date: Thu, 30 Jan 2020 09:17:29 +0000
Subject: [PATCH] Add tensorboard graph logging

---
 .gitignore               | 9 +++++++++
 models/network_blocks.py | 9 +++++----
 utils/trainer.py         | 4 +++-
 3 files changed, 17 insertions(+), 5 deletions(-)
 create mode 100644 .gitignore

diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..93dc49d
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,9 @@
+Data
+results
+cpp_wrappers/cpp_subsampling/build
+*.so
+__pycache__
+.vscode
+tensorboard
+*.pyc
+*.ply
\ No newline at end of file
diff --git a/models/network_blocks.py b/models/network_blocks.py
index a92168d..5af9ab7 100755
--- a/models/network_blocks.py
+++ b/models/network_blocks.py
@@ -1038,19 +1038,21 @@ def assemble_CNN_blocks(inputs, config, dropout_prob):

     # Loop over consecutive blocks
     block_in_layer = 0
+    tf_scope_layer = 0
     for block_i, block in enumerate(config.architecture):

         # Detect change to next layer
         if np.any([tmp in block for tmp in ['pool', 'strided', 'upsample', 'global']]):
-
             # Save this layer features
             F += [features]
+            block_in_layer = 0
+            tf_scope_layer += 1

         # Detect upsampling block to stop
         if 'upsample' in block:
             break

-        with tf.variable_scope('layer_{:d}/{:s}_{:d}'.format(layer, block, block_in_layer)):
+        with tf.variable_scope('layer_{:d}/{:s}_{:d}'.format(tf_scope_layer, block, block_in_layer)):

             # Get the function for this layer
             block_ops = get_block_ops(block)
@@ -1074,8 +1076,7 @@ def assemble_CNN_blocks(inputs, config, dropout_prob):
             layer += 1
             r *= 2
             fdim *= 2
-            block_in_layer = 0
-
+
         # Save feature vector after global pooling
         if 'global' in block:
             # Save this layer features
diff --git a/utils/trainer.py b/utils/trainer.py
index 7bd959f..0eae99c 100755
--- a/utils/trainer.py
+++ b/utils/trainer.py
@@ -101,7 +101,9 @@ def __init__(self, model, restore_snap=None):
             restorer = tf.train.Saver(restore_vars)
             restorer.restore(self.sess, restore_snap)
             print("Model restored.")
-
+
+        file_writer = tf.summary.FileWriter(join(model.saving_path,"tensorboard"), self.sess.graph)
+        file_writer.close()
     def add_train_ops(self, model):
         """
         Add training ops on top of the model
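
Note: the two lines added in utils/trainer.py write the TF1 graph with tf.summary.FileWriter right after the session is built, so it can be browsed in TensorBoard's Graphs tab (pointing tensorboard --logdir at the run's tensorboard folder). A minimal standalone sketch of the same pattern follows; the toy graph and the "tensorboard_logs" directory name are illustrative assumptions, not code from this repository.

    import tensorflow as tf  # TF1-style API, matching the repository

    # Build a small illustrative graph (a placeholder and one dense layer).
    graph = tf.Graph()
    with graph.as_default():
        points = tf.placeholder(tf.float32, shape=[None, 3], name='points')
        features = tf.layers.dense(points, 16, name='toy_layer')

    with tf.Session(graph=graph) as sess:
        sess.run(tf.global_variables_initializer())
        # Dump the graph definition once; inspect it with: tensorboard --logdir tensorboard_logs
        writer = tf.summary.FileWriter('tensorboard_logs', sess.graph)
        writer.close()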