forked from sischei/DeepEquilibriumNets
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathutils.py
More file actions
54 lines (39 loc) · 1.5 KB
/
utils.py
File metadata and controls
54 lines (39 loc) · 1.5 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
"""Helper functions for the jupyter notebooks."""
from datetime import datetime
import math
import numpy as np
import tensorflow as tf
def initialize_nn_weight(dim):
    """Initialize a neural-network weight or bias variable.

    Args:
        dim <list>: dimensions of weight matrix.
            - if len(dim) == 1: initializes bias,
            - if len(dim) == 2: initializes weight.

    Returns:
        Trainable tensor variable with entries drawn from a normal
        distribution, scaled by 1 / (10 * sqrt(dim[0])) to keep the
        initial activations small.
    """
    # Scaling factor: 10 * sqrt(fan-in); dividing by it shrinks the
    # standard normal draws towards zero.
    t_stnd = tf.sqrt(tf.cast(dim[0], tf.float32)) * 10
    # NOTE: tf.random_normal was removed in TensorFlow 2.x;
    # tf.random.normal is the equivalent API and is also available in
    # TF >= 1.13, so this stays backward compatible.
    return tf.Variable(tf.random.normal(tf.cast(dim, tf.int32)) / t_stnd, trainable=True)
def random_mini_batches(X, minibatch_size=64, seed=0):
    """Shuffle X and split it into mini-batches of exactly `minibatch_size` rows.

    Any leftover rows that do not fill a complete mini-batch are
    intentionally discarded, so every returned batch has the same size.

    Args:
        X <array>: 2-D input data to be mini-batched (rows are samples).
        minibatch_size <int>: number of rows per mini-batch.
        seed <int>: seed for the numpy RNG, making the shuffle reproducible.

    Returns:
        List of mini-batch arrays drawn from a random permutation of X.
    """
    np.random.seed(seed)
    n_samples = X.shape[0]

    # Shuffle the rows of X with a random permutation of the row indices.
    order = list(np.random.permutation(n_samples))
    shuffled = X[order, :]

    # Only complete mini-batches are kept; the trailing remainder is dropped.
    n_full_batches = n_samples // minibatch_size
    return [
        shuffled[batch * minibatch_size:(batch + 1) * minibatch_size, :]
        for batch in range(n_full_batches)
    ]