
L2 Regularization

Author: sumpig | Published 2019-02-10 16:09

#Definition

L2 regularization adds a penalty proportional to the sum of squared weights to the loss, which discourages large weights and thus overfitting. In TensorFlow 1.x the penalty can be written directly into the loss expression:

import tensorflow as tf

x = tf.placeholder(tf.float32, shape=(None, 2))
y_ = tf.placeholder(tf.float32, shape=(None, 1))
w = tf.Variable(tf.random_normal([2, 1], stddev=1, seed=1))
y = tf.matmul(x, w)

lambda_ = 0.5  # regularization rate
# MSE term plus the L2 penalty on w (parentheses allow the line continuation).
loss = (tf.reduce_mean(tf.square(y_ - y)) +
        tf.contrib.layers.l2_regularizer(lambda_)(w))
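
For reference, tf.contrib.layers.l2_regularizer(lambda_)(w) is built on tf.nn.l2_loss, so it evaluates to lambda_ * sum(w**2) / 2 (note the factor of 1/2). A minimal check, with made-up values:

w_check = tf.constant([[1.0], [-2.0]])
penalty = tf.contrib.layers.l2_regularizer(0.5)(w_check)
with tf.Session() as sess:
    # tf.nn.l2_loss: (1^2 + (-2)^2) / 2 = 2.5; scaled by 0.5 -> 1.25
    print(sess.run(penalty))  # 1.25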


#L2 regularization for a 5-layer neural network

When the network has many layers, writing every weight's penalty into the loss expression by hand becomes unwieldy. Instead, each layer's L2 penalty is added to a named collection as the weights are created, and the collection is summed into the total loss at the end:

def get_weight(shape, lambd):
    # Create a weight variable and register its L2 penalty in the
    # 'losses' collection so it can be summed into the total loss later.
    var = tf.Variable(tf.random_normal(shape), dtype=tf.float32)
    tf.add_to_collection(
        'losses', tf.contrib.layers.l2_regularizer(lambd)(var))
    return var
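
The collection mechanism is generic: tf.add_to_collection stores tensors under a string key in the current graph, and tf.get_collection returns them as a list. A self-contained illustration (the 'demo' key is my own, not part of the original code):

tf.add_to_collection('demo', tf.constant(1.0))
tf.add_to_collection('demo', tf.constant(2.0))
total = tf.add_n(tf.get_collection('demo'))
with tf.Session() as sess:
    print(sess.run(total))  # 3.0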


x = tf.placeholder(tf.float32, shape=(None, 2))
y_ = tf.placeholder(tf.float32, shape=(None, 1))

# Nodes per layer: 2 inputs, three hidden layers of 10, 1 output.
layer_dimension = [2, 10, 10, 10, 1]
n_layers = len(layer_dimension)

cur_layer = x
in_dimension = layer_dimension[0]

# Build the network layer by layer; get_weight registers each weight
# matrix's L2 penalty in the 'losses' collection as a side effect.
for i in range(1, n_layers):
    out_dimension = layer_dimension[i]
    weight = get_weight([in_dimension, out_dimension], 0.001)
    bias = tf.Variable(tf.constant(0.1, shape=[out_dimension]))
    cur_layer = tf.nn.relu(tf.matmul(cur_layer, weight) + bias)
    in_dimension = out_dimension

# MSE term, added to the same collection as the L2 penalties.
mse_loss = tf.reduce_mean(tf.square(y_ - cur_layer))
tf.add_to_collection('losses', mse_loss)

# Total loss: MSE plus every registered L2 penalty.
loss = tf.add_n(tf.get_collection('losses'))
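
The combined loss can then be handed to any TF 1.x optimizer. A minimal training sketch; the optimizer choice, learning rate, and toy data below are my own assumptions, not part of the original post:

import numpy as np

train_step = tf.train.GradientDescentOptimizer(0.01).minimize(loss)

X = np.random.rand(128, 2)       # toy inputs (assumed)
Y = X[:, [0]] + X[:, [1]]        # toy targets (assumed), shape (128, 1)

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    for step in range(1000):
        sess.run(train_step, feed_dict={x: X, y_: Y})
    print(sess.run(loss, feed_dict={x: X, y_: Y}))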
