util.py
import numpy as np
import tensorflow as tf


# Softmax over a given axis with masking: positions where mask == 0 receive
# zero probability, and the remaining entries are normalized to sum to 1.
# Note: tf.op_scope and keep_dims are the legacy (pre-1.0) TensorFlow API
# that this file targets.
def softmax(target, axis, mask, epsilon=1e-12, name=None):
    with tf.op_scope([target], name, 'softmax'):
        # Subtract the per-axis max before exponentiating, for numerical stability.
        max_axis = tf.reduce_max(target, axis, keep_dims=True)
        target_exp = tf.exp(target - max_axis) * mask
        normalize = tf.reduce_sum(target_exp, axis, keep_dims=True)
        # epsilon guards against division by zero when an entire row is masked out.
        softmax = target_exp / (normalize + epsilon)
        return softmax
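
# Usage sketch (added for illustration; not part of the original file):
# attention-style weighting over a padded batch, written against the same
# legacy TensorFlow API as above. Each row sums to ~1 over its unmasked
# positions, and padded positions (mask == 0) get zero weight.
#
#   scores = tf.constant([[1.0, 2.0, 3.0],
#                         [4.0, 5.0, 0.0]])
#   mask = tf.constant([[1.0, 1.0, 1.0],
#                       [1.0, 1.0, 0.0]])  # last slot of row 2 is padding
#   attn = softmax(scores, axis=1, mask=mask)
#   with tf.Session() as sess:
#       print(sess.run(attn))  # row 2 places no weight on the padded slot
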
def orthogonal_initializer(scale=1.1):
    '''Orthogonal weight initializer, adapted from Lasagne and Keras.
    Reference: Saxe et al., http://arxiv.org/abs/1312.6120
    '''
    print('Warning -- You have opted to use the orthogonal_initializer function')

    def _initializer(shape, dtype=tf.float32, partition_info=None):
        # Flatten all trailing dimensions so the SVD runs on a 2-D matrix.
        flat_shape = (shape[0], np.prod(shape[1:]))
        a = np.random.normal(0.0, 1.0, flat_shape)
        u, _, v = np.linalg.svd(a, full_matrices=False)
        # Pick whichever SVD factor has the target shape.
        q = u if u.shape == flat_shape else v
        # Cast to float32 here so tf.constant receives the intended dtype.
        q = q.reshape(shape).astype(np.float32)
        print('you have initialized one orthogonal matrix.')
        return tf.constant(scale * q, dtype=dtype)

    return _initializer
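
# Usage sketch (added for illustration; not part of the original file):
# plugging the initializer into the legacy tf.get_variable API, e.g. for a
# square recurrent weight matrix, the setting where Saxe et al. motivate
# orthogonal initialization:
#
#   W_rec = tf.get_variable('W_rec', shape=[256, 256],
#                           initializer=orthogonal_initializer(scale=1.0))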