machine learning - gradient descent

Author: wokeman | Published 2017-10-22 17:00

---data.csv-----

52.211796692214001,79.64197304980874
39.299566694317065,59.171489321869508
48.10504169176825,75.331242297063056
52.550014442733818,71.300879886850353
45.419730144973755,55.165677145959123
54.351634881228918,82.478846757497919
44.164049496773352,62.008923245725825
58.16847071685779,75.392870425994957
56.727208057096611,81.43619215887864
48.955888566093719,60.723602440673965
44.687196231480904,82.892503731453715
60.297326851333466,97.379896862166078
45.618643772955828,48.847153317355072
38.816817537445637,56.877213186268506
66.189816606752601,83.878564664602763
65.41605174513407,118.59121730252249
47.48120860786787,57.251819462268969
41.57564261748702,51.391744079832307
51.84518690563943,75.380651665312357
59.370822011089523,74.765564032151374
57.31000343834809,95.455052922574737

-------------------------------------
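Each row of data.csv is one sample, given as a comma-separated (x, y) pair. As a quick sanity check (not part of the original post), genfromtxt reads the file into an N x 2 array, one row per point:

from numpy import genfromtxt

points = genfromtxt("data.csv", delimiter=",")
print(points.shape)   # (21, 2) for the 21 rows listed above
print(points[0])      # first sample as [x, y]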

from numpy import array, genfromtxt

# y = mx + b
# m is slope, b is y-intercept

# Mean squared error of the line y = m*x + b over all points.
def compute_error_for_line_given_points(b, m, points):
    totalError = 0
    for i in range(0, len(points)):
        x = points[i, 0]
        y = points[i, 1]
        totalError += (y - (m * x + b)) ** 2
    return totalError / float(len(points))
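
# Sketch (not in the original post): the same mean squared error written with
# numpy vectorized operations instead of the explicit loop, useful as a
# cross-check of compute_error_for_line_given_points.
def compute_error_vectorized(b, m, points):
    x = points[:, 0]
    y = points[:, 1]
    return ((y - (m * x + b)) ** 2).mean()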

# One gradient descent update: accumulate the partial derivatives of the mean
# squared error with respect to b and m, then step against the gradient.
def step_gradient(b_current, m_current, points, learningRate):
    b_gradient = 0
    m_gradient = 0
    nlen = len(points)
    print("---step_gradient: b_current={0} m_current={1}----".format(b_current, m_current))
    N = float(len(points))
    for i in range(0, nlen):
        x = points[i, 0]
        y = points[i, 1]
        # dE/db = -(2/N) * sum(y - (m*x + b))
        b_gradient += -(2/N) * (y - ((m_current * x) + b_current))
        # dE/dm = -(2/N) * sum(x * (y - (m*x + b)))
        m_gradient += -(2/N) * x * (y - ((m_current * x) + b_current))
        print("x={0},y={1},b_gradient={2},m_gradient={3}".format(x, y, b_gradient, m_gradient))
    new_b = b_current - (learningRate * b_gradient)
    new_m = m_current - (learningRate * m_gradient)
    print("-------------new_b={0} = {1} - ({2} * {3})".format(new_b, b_current, learningRate, b_gradient))
    print("-------------new_m={0} = {1} - ({2} * {3})".format(new_m, m_current, learningRate, m_gradient))
    return [new_b, new_m]
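
# Sketch (not in the original post): the same update written with numpy
# vectorized operations. It evaluates both partial derivatives over all points
# at once and should return the same new_b / new_m as step_gradient above.
def step_gradient_vectorized(b_current, m_current, points, learningRate):
    x = points[:, 0]
    y = points[:, 1]
    N = float(len(points))
    residual = y - (m_current * x + b_current)
    b_gradient = -(2 / N) * residual.sum()
    m_gradient = -(2 / N) * (x * residual).sum()
    return [b_current - (learningRate * b_gradient),
            m_current - (learningRate * m_gradient)]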

def gradient_descent_runner(points, starting_b, starting_m, learning_rate, num_iterations):
    b = starting_b
    m = starting_m
    for i in range(num_iterations):
        b, m = step_gradient(b, m, array(points), learning_rate)
    return [b, m]
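
# Sketch (not in the original post): the same loop, but recording the error
# after every step so the convergence of gradient descent can be inspected,
# e.g. to check whether learning_rate or num_iterations should be adjusted.
def gradient_descent_with_history(points, starting_b, starting_m, learning_rate, num_iterations):
    b, m = starting_b, starting_m
    errors = []
    for i in range(num_iterations):
        b, m = step_gradient(b, m, array(points), learning_rate)
        errors.append(compute_error_for_line_given_points(b, m, points))
    return b, m, errors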

def run():
    points = genfromtxt("data.csv", delimiter=",")
    learning_rate = 0.0005
    initial_b = 0  # initial y-intercept guess
    initial_m = 0  # initial slope guess
    num_iterations = 200
    print("len(points)={0}".format(len(points)))
    print("Starting gradient descent at b = {0}, m = {1}, error = {2}".format(initial_b, initial_m, compute_error_for_line_given_points(initial_b, initial_m, points)))
    print("Running...")
    [b, m] = gradient_descent_runner(points, initial_b, initial_m, learning_rate, num_iterations)
    print("After {0} iterations b = {1}, m = {2}, error = {3}".format(num_iterations, b, m, compute_error_for_line_given_points(b, m, points)))

if __name__ == '__main__':
    run()
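
Because the model is an ordinary least-squares line, the fit can also be computed in closed form and compared against the gradient descent result. The sketch below (not from the original post) uses numpy's polyfit; with the small learning rate and only 200 iterations above, the intercept in particular converges slowly, so the two results approach each other rather than match exactly:

from numpy import genfromtxt, polyfit

points = genfromtxt("data.csv", delimiter=",")
m_ls, b_ls = polyfit(points[:, 0], points[:, 1], 1)  # degree-1 fit returns [slope, intercept]
print("closed-form least squares: b = {0}, m = {1}".format(b_ls, m_ls))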
