
Understanding Bi-LSTM and a TensorFlow Implementation


A Bidirectional LSTM is composed of two LSTMs stacked on top of each other: one reads the sequence forward, the other reads it backward. The output at each time step is determined by the hidden states of both LSTMs, concatenated together. A legacy-API TensorFlow implementation is shown below, followed by a sketch of the same structure in the modern Keras API.

# The legacy TensorFlow 0.x API is assumed here (e.g. from tensorflow.python.ops import rnn, rnn_cell),
# and keep_prob is assumed to be a dropout keep-probability defined elsewhere.
def bilstm(self, x):
    # Convert the input layout:
    # x.shape [batch_size, time_steps, input_size] -> [time_steps, batch_size, input_size]
    x = tf.transpose(x, [1, 0, 2])
    fw_x = tf.reshape(x, [-1, self.n_input_text])  # [step*batch, feature]
    fw_x = tf.split(0, self.n_step_text, fw_x)     # list of time_steps tensors, each [batch, feature]
    with tf.variable_scope('bilstm_lt'):
        # Define the cells: one single-layer LSTM per direction
        lstm_fw_cell = rnn_cell.BasicLSTMCell(self.n_hidden_text, forget_bias=1.0, state_is_tuple=True)  # forward LSTM cell
        lstm_bw_cell = rnn_cell.BasicLSTMCell(self.n_hidden_text, forget_bias=1.0, state_is_tuple=True)  # backward LSTM cell
        # Dropout on the cell outputs
        lstm_fw_cell = rnn_cell.DropoutWrapper(cell=lstm_fw_cell, input_keep_prob=1.0, output_keep_prob=keep_prob)
        lstm_bw_cell = rnn_cell.DropoutWrapper(cell=lstm_bw_cell, input_keep_prob=1.0, output_keep_prob=keep_prob)
        # Build the bidirectional RNN
        with tf.variable_scope('fw_lt'):
            (output_fw, state_fw) = rnn.rnn(lstm_fw_cell, fw_x, dtype=tf.float32)
            t = tf.convert_to_tensor(output_fw)
            print(t.get_shape().as_list())
        with tf.variable_scope('bw_lt'):
            bw_x = tf.reverse(x, [True, False, False])        # reverse the time dimension
            bw_x = tf.reshape(bw_x, [-1, self.n_input_text])  # [step*batch, feature]
            bw_x = tf.split(0, self.n_step_text, bw_x)
            (output_bw, state_bw) = rnn.rnn(lstm_bw_cell, bw_x, dtype=tf.float32)
            # output_bw has shape [time_steps, batch_size, hidden_size];
            # reverse it again so it is aligned with output_fw in time
            output_bw = tf.reverse(output_bw, [True, False, False])
        # Concatenate output_fw and output_bw along dimension 2 (the feature dimension)
        output = tf.concat(2, [output_fw, output_bw])
    return output  # per-step outputs, shape [time_steps, batch_size, 2 * n_hidden_text]
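
For comparison, the same bidirectional structure can be written much more compactly with the current Keras API in TensorFlow 2. The sketch below is only illustrative: the batch/time/feature/hidden sizes are made-up values, and the Keras LSTM dropout argument drops layer inputs rather than cell outputs, so it is not an exact replacement for the DropoutWrapper above.

import tensorflow as tf

# Illustrative sizes only (assumptions, not taken from the original code)
batch_size, time_steps, input_size, hidden_size = 4, 20, 128, 256

# Bidirectional wraps one forward and one backward LSTM and, by default,
# concatenates their per-step hidden states on the last axis.
bilstm_layer = tf.keras.layers.Bidirectional(
    tf.keras.layers.LSTM(hidden_size, return_sequences=True, dropout=0.5)
)

x = tf.random.normal([batch_size, time_steps, input_size])  # [batch, time, feature]
y = bilstm_layer(x, training=True)   # dropout is only active when training=True
print(y.shape)  # (4, 20, 512): forward and backward hidden states concatenated

Note that the output layout also differs: Keras keeps the batch dimension first ([batch, time, 2*hidden]), whereas the legacy code above returns [time, batch, 2*hidden].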

 
