Linear Regression

By lucientlau | Published 2017-09-23 12:14

Code

#coding=utf-8
import tensorflow as tf
import matplotlib.pyplot as plt

# Build training data: y = 10 + 10*x + Gaussian noise (stddev 5)
x = tf.lin_space(1.0, 10.0, 100)
rand_x = x  # despite the name, this is a fixed grid; only the noise below is random
y = tf.Variable(10 + 10 * rand_x + 5 * tf.random_normal(tf.shape(x)))

init = tf.global_variables_initializer()
with tf.Session() as sess:
    sess.run(init)
    x_r = sess.run(rand_x)  # pull the data out as NumPy arrays for reuse below
    y_r = sess.run(y)

plt.scatter(x_r, y_r, c='r')  # the noisy training points

# Build the model: predict_y = W*x + b
train_x = tf.placeholder(dtype=tf.float32)
train_y = tf.placeholder(dtype=tf.float32)

W = tf.Variable(tf.random_normal([1]))
b = tf.Variable(tf.random_normal([1]))

predict_y = tf.multiply(W, train_x) + b

# Two-class trick: the relative error is the score of class 0 and
# (train_y + predict_y)/(2*train_y) the score of class 1; the one-hot label
# [0, 1] puts all mass on class 1, so minimizing the cross entropy drives the
# relative error toward zero. Labels and logits must share the shape
# (batch, num_classes) = (1, 2); the original (2, 1) layout gives a one-class
# softmax whose loss is constant.
logits = tf.stack([tf.abs(predict_y - train_y) / (2 * train_y),
                   (train_y + predict_y) / (2 * train_y)], axis=1)
loss = tf.losses.softmax_cross_entropy([[0.0, 1.0]], logits)

#loss = tf.losses.absolute_difference(train_y, predict_y)  # standard L1 alternative

train_model = tf.train.GradientDescentOptimizer(0.8).minimize(loss)

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    for times in range(1005):
        # feed one sample at a time, cycling through the 100 points
        sess.run(train_model, feed_dict={train_x: x_r[times % 100], train_y: y_r[times % 100]})
        trained_w = sess.run(W)
        trained_b = sess.run(b)
        print(trained_w, trained_b)
        if times > 1000:  # draw the fitted line for the last few iterations
            plt.plot(x_r, trained_w * x_r + trained_b)

plt.show()
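
The unusual part here is the loss: the relative error |predict_y - train_y| / (2*train_y) plays the role of a "wrong" class score, the one-hot target [0, 1] puts all mass on the other class, and minimizing the cross entropy therefore pushes the relative error toward zero. The commented-out tf.losses.absolute_difference line is the conventional choice; a minimal sketch of the same model with that standard L1 loss, reusing the variable names above (the 0.01 learning rate is an illustrative assumption, not a tuned value):

# Hypothetical variant: swap the custom cross-entropy loss above for a
# plain mean absolute error between target and prediction.
loss = tf.losses.absolute_difference(train_y, predict_y)
# L1 gradients have constant magnitude, so a smaller step size is assumed here.
train_model = tf.train.GradientDescentOptimizer(0.01).minimize(loss)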

Key function: tf.losses.softmax_cross_entropy

tf.losses.softmax_cross_entropy(p, q), where p is the one-hot label distribution (onehot_labels) and q holds the raw scores (logits).

TensorFlow computes this in two steps:

  • Apply softmax to the logits q: softmax(q)_k = e^(q_k) / Σ_i e^(q_i)
  • Compute the cross entropy: -Σ_i p_i · ln(softmax(q)_i)  [tf.log takes the natural logarithm, base e]
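
A quick sanity check of these two steps by hand (a minimal sketch in plain Python, independent of TensorFlow; the numbers match the test program below):

import math

# logits q = [0, 1] and one-hot label p = [0, 1], as in the test below
q = [0.0, 1.0]
p = [0.0, 1.0]

# step 1: softmax
exp_q = [math.exp(v) for v in q]
soft = [v / sum(exp_q) for v in exp_q]  # [0.2689..., 0.7310...]

# step 2: cross entropy
ce = -sum(pi * math.log(si) for pi, si in zip(p, soft))
print(soft, ce)  # cross entropy = -ln(0.7310...) ≈ 0.31326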

TensorFlow softmax_cross_entropy Test

#coding=utf-8
import tensorflow as tf
import matplotlib.pyplot as plt

# Build training data: y = 10 + 30*x + Gaussian noise (stddev 5)
x = tf.lin_space(1.0, 10.0, 100)
rand_x = x
y = tf.Variable(10 + 30 * rand_x + 5 * tf.random_normal(tf.shape(x)))

init = tf.global_variables_initializer()
with tf.Session() as sess:
    sess.run(init)
    x_r = sess.run(rand_x)
    y_r = sess.run(y)

plt.scatter(x_r, y_r, c='r')

# Build the model: predict_y = W*x + b
train_x = tf.placeholder(dtype=tf.float32)
train_y = tf.placeholder(dtype=tf.float32)

W = tf.Variable(tf.random_normal([1]))
b = tf.Variable(tf.random_normal([1]))

predict_y = tf.multiply(W, train_x) + b

# Same two-class cross-entropy trick as above, with the (1, 2) shape fix
logits = tf.stack([tf.abs(predict_y - train_y) / (2 * train_y),
                   (train_y + predict_y) / (2 * train_y)], axis=1)
loss = tf.losses.softmax_cross_entropy([[0.0, 1.0]], logits)

#loss = tf.losses.absolute_difference(train_y,predict_y)


train_model = tf.train.GradientDescentOptimizer(10).minimize(loss)

# Manual two-step computation for logits [0, 1] and one-hot label [0, 1]
logits_tmp = tf.constant([0, 1.0])
soft_logits = tf.nn.softmax(logits_tmp)
label_tmp = tf.constant([0.0, 1.0])
cross_entropy = -label_tmp * tf.log(soft_logits)  # minus sign: cross entropy is -Σ p·ln(q)
cross_entropy_sum = tf.reduce_sum(cross_entropy)

# tf.log is the natural logarithm, so log(exp(1)) == 1
tf_exp_value = tf.exp(1.0)
tf_log_value = tf.log(tf_exp_value)
with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    for times in range(1005):
        sess.run(train_model, feed_dict={train_x: x_r[times % 100], train_y : y_r[times % 100]})
        trained_w = sess.run(W)
        trained_b = sess.run(b)
        print(trained_w,trained_b)
        if times > 1000:
            plt.plot(x_r,trained_w*x_r+trained_b)

    print('softmax cross entropy', sess.run(tf.nn.softmax_cross_entropy_with_logits(labels=[0.0, 1.0], logits=[0, 1.0])))

    print("soft logit ",sess.run(soft_logits))
    print("cross entropy", sess.run(cross_entropy))
    print("cross entropy sum", sess.run(cross_entropy_sum))
    print("tf log value ",sess.run(tf_log_value))
    print("tf exp value", sess.run(tf_exp_value))
#plt.show()
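
With the minus sign corrected, the deterministic checks at the end should print roughly: softmax cross entropy 0.31326, soft logit [0.26894, 0.73106], cross entropy [0, 0.31326], cross entropy sum 0.31326, tf log value 1.0, and tf exp value 2.7182817. The printed W and b values vary from run to run, since both the data noise and the initialization are random.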
