[When Writing Model Code]

Author: VanJordan | Published 2018-12-23 16:00
  • Put all the layer classes in a separate file, layer.py.
  • In the model file graph.py, define the layers as shown below; this makes the layer definitions read much like PyTorch's.
self.layers['embedding'] = layers_lib.Embedding(
    fg.vocab_size, fg.embedding_dims, fg.normalize_embeddings,
    self.vocab_freqs, fg.keep_prob_emb)

self.layers['lstm'] = layers_lib.LSTM(
    fg.rnn_cell_size, fg.rnn_num_layers, fg.keep_prob_lstm_out)

self.layers['lm_loss'] = layers_lib.SoftmaxLoss(
    fg.vocab_size,
    fg.num_candidate_samples,
    self.vocab_freqs,
    name='LM_loss')
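
For comparison, this is roughly what the same pattern looks like in PyTorch, where layers are likewise declared once in the constructor and called later (a minimal sketch; the class name and hyperparameters here are made up for illustration, and nn.Embedding/nn.LSTM take different arguments than the layers_lib classes):

import torch.nn as nn

class Graph(nn.Module):
    def __init__(self, vocab_size, embedding_dims, cell_size, num_layers):
        super().__init__()
        # Declare layers up front; they are invoked later in forward().
        self.embedding = nn.Embedding(vocab_size, embedding_dims)
        self.lstm = nn.LSTM(embedding_dims, cell_size, num_layers,
                            batch_first=True)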

When calling the layer:

lstm_out, next_state = self.layers['lstm'](embedded, inputs.state, inputs.length)

The LSTM layer itself is defined in layer.py as a callable class:

import tensorflow as tf

class LSTM(object):
  """LSTM layer using dynamic_rnn.

  Exposes variables in `trainable_weights` property.
  """

  def __init__(self, cell_size, num_layers=1, keep_prob=1., name='LSTM'):
    self.cell_size = cell_size
    self.num_layers = num_layers
    self.keep_prob = keep_prob
    self.reuse = None
    self.trainable_weights = None
    self.name = name

  def __call__(self, x, initial_state, seq_length):
    with tf.variable_scope(self.name, reuse=self.reuse) as vs:
      cell = tf.contrib.rnn.MultiRNNCell([
          tf.contrib.rnn.BasicLSTMCell(
              self.cell_size,
              forget_bias=0.0,
              reuse=tf.get_variable_scope().reuse)
          for _ in range(self.num_layers)
      ])

      # shape(x) = (batch_size, num_timesteps, embedding_dim)

      lstm_out, next_state = tf.nn.dynamic_rnn(
          cell, x, initial_state=initial_state, sequence_length=seq_length)

      # shape(lstm_out) = (batch_size, timesteps, cell_size)

      if self.keep_prob < 1.:
        lstm_out = tf.nn.dropout(lstm_out, self.keep_prob)

      if self.reuse is None:
        self.trainable_weights = vs.global_variables()

    self.reuse = True

    return lstm_out, next_state
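
The reuse bookkeeping is the interesting part: the first call creates the variables under the 'LSTM' scope and records them in trainable_weights; every later call opens the scope with reuse=True, so the same weights are shared across calls. A minimal usage sketch (the batch size, dimensions, and placeholder inputs below are made up for illustration):

lstm = LSTM(cell_size=128, num_layers=2, keep_prob=0.9)

batch_size, embedding_dim = 32, 64
x = tf.placeholder(tf.float32, [batch_size, None, embedding_dim])
lengths = tf.placeholder(tf.int32, [batch_size])

# dynamic_rnn needs either a dtype or an initial state; for a MultiRNNCell
# of BasicLSTMCells the state is a tuple of LSTMStateTuples.
init_state = tuple(
    tf.contrib.rnn.LSTMStateTuple(
        tf.zeros([batch_size, 128]), tf.zeros([batch_size, 128]))
    for _ in range(2))

out1, state1 = lstm(x, init_state, lengths)  # first call: creates variables
out2, state2 = lstm(x, init_state, lengths)  # second call: reuses them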
