-
Notifications
You must be signed in to change notification settings - Fork 4
/
base_ops.py
executable file
·47 lines (36 loc) · 1.4 KB
/
base_ops.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
import keras.backend as K
import numpy as np
def switch(condition, t, e):
    '''Backend-agnostic element-wise select.

    Returns a tensor taking elements from `t` where `condition` is true
    and from `e` otherwise, dispatching to the active Keras backend.

    # Arguments
        condition: boolean tensor.
        t: tensor of values used where `condition` holds.
        e: tensor of values used where `condition` fails.

    # Raises
        NotImplementedError: if the active backend is neither TensorFlow
            nor Theano. (The original fell through and implicitly
            returned None, deferring the failure to the caller.)
    '''
    backend = K.backend()
    if backend == 'tensorflow':
        import tensorflow as tf
        return tf.where(condition, t, e)
    elif backend == 'theano':
        import theano.tensor as tt
        return tt.switch(condition, t, e)
    raise NotImplementedError(
        "switch() supports only the 'tensorflow' and 'theano' backends, "
        "got %r" % backend)
def round_through(x):
    '''Round `x` element-wise to the closest integer while propagating
    the gradient unchanged (straight-through estimator).

    The forward value is `K.round(x)`; because the rounded difference is
    wrapped in `K.stop_gradient`, the backward pass sees the identity.
    Trick from Sergey Ioffe (http://stackoverflow.com/a/36480182).
    '''
    return x + K.stop_gradient(K.round(x) - x)
def clip_through(x, min, max):
    '''Clip `x` element-wise to the range [min, max] with full gradient
    propagation (straight-through estimator): the forward pass yields
    the clipped value while the backward pass treats the op as the
    identity. Trick from Sergey Ioffe
    (http://stackoverflow.com/a/36480182).

    NOTE(review): the parameter names shadow the `min`/`max` builtins;
    they are kept as-is for backward compatibility with keyword callers.
    '''
    return x + K.stop_gradient(K.clip(x, min, max) - x)
def _hard_sigmoid(x):
    '''Hard sigmoid: clip(0.5 * x + 0.5, 0, 1).

    Uses slope 0.5, which differs from the more conventional form
    (see the definition of K.hard_sigmoid).

    # Reference:
    - [BinaryNet: Training Deep Neural Networks with Weights and
      Activations Constrained to +1 or -1, Courbariaux et al. 2016]
      (http://arxiv.org/abs/1602.02830)
    '''
    shifted = 0.5 * x + 0.5
    return K.clip(shifted, 0, 1)
def log2_through(W):
    '''Element-wise log base 2 of |W| with full gradient propagation.

    The forward value is log2(|W|); the backward pass treats the op as
    the identity (straight-through estimator), consistent with
    `round_through` and `clip_through` in this module.

    Bug fixed: `np` was referenced without `numpy` ever being imported
    in this file, so calling this function raised NameError.
    '''
    constant = np.log(2)  # ln(2): change-of-base from natural log to log2
    logged = K.log(K.abs(W)) / constant
    return W + K.stop_gradient(logged - W)
def pow_through(W, base):
    '''Element-wise `base ** W` with full gradient propagation: the
    forward value is the power, and the backward pass treats the op as
    the identity (straight-through estimator).

    `base` may be a scalar; `ones + base - 1` broadcasts it to the
    shape of `W` before `K.pow` is applied.
    '''
    # Broadcast the (possibly scalar) base to W's shape.
    broadcast_base = K.ones_like(W) + base - 1
    powered = K.pow(broadcast_base, W)
    return W + K.stop_gradient(powered - W)