Hi, I used the ZhuSuan library to build a Bayesian LSTM cell, following the code from your paper "ZhuSuan: A Library for Bayesian Deep Learning". But I got an error:
AttributeError: module 'tensorflow' has no attribute 'log'
Could someone help me solve this problem?
import tensorflow as tf
import zhusuan as zs


class BayesianLSTMCell(object):
    def __init__(self, num_units, forget_bias=1.0):
        self._forget_bias = forget_bias
        # One weight matrix for all four gates, with the bias folded in as an
        # extra input row; a single sample of w is shared across all time steps.
        w_mean = tf.zeros([2 * num_units + 1, 4 * num_units])
        self._w = zs.Normal('w', w_mean, std=1., group_ndims=2)

    def __call__(self, state, inputs):
        c, h = state
        batch_size = tf.shape(inputs)[0]
        # Append a column of ones so the bias is absorbed into w.
        linear_in = tf.concat([inputs, h, tf.ones([batch_size, 1])], axis=1)
        linear_out = tf.matmul(linear_in, self._w)
        # i = input_gate, j = new_input, f = forget_gate, o = output_gate
        i, j, f, o = tf.split(value=linear_out, num_or_size_splits=4, axis=1)
        new_c = (c * tf.sigmoid(f + self._forget_bias) +
                 tf.sigmoid(i) * tf.tanh(j))
        new_h = tf.tanh(new_c) * tf.sigmoid(o)
        return new_c, new_h


def bayesian_rnn(cell, inputs, seq_len):
    batch_size = tf.shape(inputs)[0]
    initializer = (tf.zeros([batch_size, 128]), tf.zeros([batch_size, 128]))
    # tf.scan threads the (c, h) state through the sequence and stacks the
    # per-step states along a leading time axis.
    c_list, h_list = tf.scan(cell, inputs, initializer=initializer)
    # Pick the hidden state at position seq_len - 1 for each sequence.
    relevant_outputs = tf.gather_nd(
        h_list, tf.stack([seq_len - 1, tf.range(batch_size)], axis=1))
    logits = tf.squeeze(tf.layers.dense(relevant_outputs, 1), -1)
    return logits


seq_len = 5
with zs.BayesianNet() as model:
    cell = BayesianLSTMCell(128, forget_bias=0.)
    logits = bayesian_rnn(cell, b, seq_len)
    _ = zs.Bernoulli(Y, logits, dtype=tf.float32)
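For what it's worth, that AttributeError usually means the script is running on TensorFlow 2.x: tf.log was removed from the top-level namespace there (the op now lives at tf.math.log), while the ZhuSuan code from the paper is written against the TensorFlow 1.x graph-mode API and zhusuan itself calls tf.log internally. Below is a minimal sketch, assuming TensorFlow 2.x is installed, of checking the version and of the renamed op; since the library's own code also uses tf.log, pinning TensorFlow to a 1.x release that ZhuSuan supports is probably a more robust fix than renaming calls by hand.

import tensorflow as tf

print(tf.__version__)    # a 2.x version means the top-level tf.log alias is gone

x = tf.constant([1.0, 2.718281828])
y = tf.math.log(x)       # TF 2.x (and late 1.x) name for the op formerly at tf.log
print(y)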
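A side note on the bayesian_rnn helper above: tf.scan unpacks its elems argument along axis 0, so inputs is consumed one time slice at a time, the (c, h) tuple passed as initializer is threaded through as the accumulator, and the per-step results are stacked along a new leading time axis. Here is a minimal, ZhuSuan-free sketch of the same pattern, with hypothetical shapes and a deliberately simplified step function:

import tensorflow as tf

time_steps, batch, features, units = 5, 3, 8, 16
inputs = tf.random.normal([time_steps, batch, features])   # unpacked along axis 0
w = tf.random.normal([features + units, units])

def step(state, x_t):
    # state is the (c, h) tuple from the previous step; x_t is one time slice.
    c, h = state
    new_h = tf.tanh(tf.matmul(tf.concat([x_t, h], axis=1), w))
    new_c = c + new_h            # simplified update, just to carry both states
    return new_c, new_h

init = (tf.zeros([batch, units]), tf.zeros([batch, units]))
c_seq, h_seq = tf.scan(step, inputs, initializer=init)
print(h_seq.shape)               # (5, 3, 16): one hidden state per time step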