After watching Professor Ng's videos I set out to do the programming exercises, but was completely lost at first. I found solutions online and am writing them down here so I can study them properly.
1. Linear regression with one variable
from matplotlib import pyplot as plt
import numpy as np
import pandas as pd
path1 = './ex1data1.txt'
data2=pd.read_csv(path1,header=None,names=['Population', 'Profit'])
plt.figure(figsize=(20,8),dpi=80)
x=data2.loc[:,'Population']
y=data2.loc[:,'Profit']
plt.scatter(x,y)
plt.show()
# Cost function (used later by gradient descent)
def computeCost(X, y, theta):
    inner = np.power(((X * theta.T) - y), 2)
    return np.sum(inner) / (2 * len(X))
# This part computes J(θ); X is a matrix
# Insert a column of all ones, named 'Ones', as the intercept term
data2.insert(0, 'Ones', 1)
cols = data2.shape[1]
X = data2.iloc[:,0:cols-1]  # X: all rows, every column except the last
y = data2.iloc[:,cols-1:cols]  # y: all rows, only the last column
print(X.head())
# Convert X and y to matrices
X = np.asmatrix(X.values)
y = np.asmatrix(y.values)
theta = np.asmatrix(np.array([0,0]))
print(X.shape, theta.shape, y.shape)
print(computeCost(X, y, theta))
Computing the cost function
1. When $\theta_0$ and $\theta_1$ are 0
Hypothesis: $h_\theta(x) = \theta^T x = \theta_0 x_0 + \theta_1 x_1$
Cost function: $J(\theta) = \frac{1}{2m}\sum_{i=1}^{m}\left(h_\theta(x^{(i)}) - y^{(i)}\right)^2$
def computeCost(X, y, theta):
    inner = np.power(((X * theta.T) - y), 2)
    return np.sum(inner) / (2 * len(X))
This function computes the value of the cost function $J(\theta)$. Without any tuning, i.e. with $\theta_0 = \theta_1 = 0$, the value of $J(\theta)$ comes out to 32.072733877455676.
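As a quick sanity check (my own tiny example, not part of the exercise), running computeCost in the same session on a hand-computable dataset gives the expected value:
# Made-up data: two examples with x = 1 and x = 2, targets y = 2 and y = 4
X_tiny = np.asmatrix([[1.0, 1.0],
                      [1.0, 2.0]])   # first column is the all-ones intercept column
y_tiny = np.asmatrix([[2.0],
                      [4.0]])
theta0 = np.asmatrix([0.0, 0.0])
# By hand: J = (1 / (2*2)) * ((0-2)^2 + (0-4)^2) = 20 / 4 = 5.0
print(computeCost(X_tiny, y_tiny, theta0))   # prints 5.0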
2. Using gradient descent to compute θ
from matplotlib import pyplot as plt
import numpy as np
import pandas as pd
path1 = './ex1data1.txt'
data=pd.read_csv(path1,header=None,names=['Population', 'Profit'])
data.insert(0, 'Ones', 1)
print(data.head())
cols = data.shape[1]
X = data.iloc[:,0:cols-1]  # X: all rows, every column except the last
y = data.iloc[:,cols-1:cols]  # y: all rows, only the last column
X = np.asmatrix(X.values)
y = np.asmatrix(y.values)
theta = np.asmatrix(np.array([0,0]))
#--------------------------------------------------------------------------------
# Compute the value of the cost function J(θ)
def computeCost(X, y, theta):
    inner = np.power(((X * theta.T) - y), 2)
    return np.sum(inner) / (2 * len(X))
# Cost when all parameters are 0
print(computeCost(X,y,theta))
#--------------------------------------------------------------------------------
# Batch gradient descent
# Update rule: θ_j := θ_j - α * ∂J(θ)/∂θ_j
# Adjust each θ_j
def gradientDescent(X, y, theta, alpha, iters):
    # temp holds the simultaneously updated parameters, same shape as theta
    temp = np.asmatrix(np.zeros(theta.shape))
    parameters = int(theta.ravel().shape[1])
    cost = np.zeros(iters)
    for i in range(iters):
        error = (X * theta.T) - y
        for j in range(parameters):
            # np.multiply is element-wise; matrix multiplication would use np.dot() or *
            term = np.multiply(error, X[:, j])
            temp[0, j] = theta[0, j] - ((alpha / len(X)) * np.sum(term))
        theta = temp
        cost[i] = computeCost(X, y, theta)
    return theta, cost
# Learning rate
alpha = 0.01
# Number of iterations
iters = 1000
#--------------------------------------------------------------------------------
g, cost = gradientDescent(X, y, theta, alpha, iters)
# θ values after 1000 iterations
print('g = {}\n'.format(g))
print(computeCost(X, y, g))
#--------------------------------------------------------------------------------
#numpy.linspace(start, stop, num=50, endpoint=True, retstep=False, dtype=None)
# Generates num evenly spaced values from start to stop; num defaults to 50
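# For example (illustrative, not from the original post):
#   np.linspace(0, 1, 5) -> array([0.  , 0.25, 0.5 , 0.75, 1.  ])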
x = np.linspace(data.Population.min(), data.Population.max(), 100)
f = g[0, 0] + (g[0, 1] * x)
plt.figure(figsize=(20,8),dpi=80)
plt.plot(x,f,color='red',label='Prediction')
plt.scatter(data.Population, data.Profit, label='Training Data')
plt.xlabel('Population')
plt.ylabel('Profit')
plt.title('Predicted Profit vs. Population Size')
plt.show()
#---------------------------------------------------------------------------------
plt.figure(figsize=(20,8),dpi=80)
plt.plot(np.arange(iters), cost, 'r')
plt.xlabel('Iterations')
plt.ylabel('Cost')
plt.title('Error vs. Training Epoch')
plt.show()
Resulting plots:
[Figure: Predicted Profit vs. Population Size, the fitted line over the training scatter]
[Figure: Error vs. Training Epoch, the cost decreasing over the iterations]
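A side note (my own sketch, not from the course solution; the gradientDescentVectorized name is my own): the inner loop over j in gradientDescent can be replaced by a fully vectorized update that computes the whole gradient at once and should give the same θ:
def gradientDescentVectorized(X, y, theta, alpha, iters):
    # X, y, theta are np.matrix objects shaped as in the code above
    cost = np.zeros(iters)
    m = len(X)
    for i in range(iters):
        error = (X * theta.T) - y                     # (m, 1) residuals
        theta = theta - (alpha / m) * (error.T * X)   # update all θ_j simultaneously
        cost[i] = computeCost(X, y, theta)
    return theta, cost
# Should reproduce g and cost from the loop version above:
# g_vec, cost_vec = gradientDescentVectorized(X, y, np.asmatrix([0.0, 0.0]), alpha, iters)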
3. Gradient descent for multivariate linear regression
from matplotlib import pyplot as plt
import numpy as np
import pandas as pd
from mpl_toolkits.mplot3d import Axes3D
path = './ex1data2.txt'
data2 = pd.read_csv(path, header=None, names=['Size', 'Bedrooms', 'Price'])
print(data2.head())
# Feature normalization: subtract the mean and divide by the standard deviation
data2 = (data2 - data2.mean()) / data2.std()
print(data2.head())
def computeCost(X, y, theta):
    inner = np.power(((X * theta.T) - y), 2)
    return np.sum(inner) / (2 * len(X))

def gradientDescent(X, y, theta, alpha, iters):
    # temp has the same shape as theta and holds the simultaneously updated parameters
    temp = np.asmatrix(np.zeros(theta.shape))
    parameters = int(theta.ravel().shape[1])
    cost = np.zeros(iters)
    for i in range(iters):
        error = (X * theta.T) - y
        for j in range(parameters):
            # np.multiply is element-wise; matrix multiplication would use np.dot() or *
            term = np.multiply(error, X[:, j])
            temp[0, j] = theta[0, j] - ((alpha / len(X)) * np.sum(term))
        theta = temp
        cost[i] = computeCost(X, y, theta)
    return theta, cost
# Learning rate
alpha = 0.01
# Number of iterations
iters = 1000
# add ones column
data2.insert(0, 'Ones', 1)
# set X (training data) and y (target variable)
cols = data2.shape[1]
X2 = data2.iloc[:,0:cols-1]
y2 = data2.iloc[:,cols-1:cols]
# convert to matrices and initialize theta
X2 = np.asmatrix(X2.values)
y2 = np.asmatrix(y2.values)
theta2 = np.asmatrix(np.array([0,0,0]))
# perform linear regression on the data set
g2, cost2 = gradientDescent(X2, y2, theta2, alpha, iters)
# get the cost (error) of the model
print(computeCost(X2, y2, g2))
fig, ax = plt.subplots(figsize=(12,8))
ax.plot(np.arange(iters), cost2, 'r')
ax.set_xlabel('Iterations')
ax.set_ylabel('Cost')
ax.set_title('Error vs. Training Epoch')
plt.show()
[Figure: Error vs. Training Epoch for the multivariate model]
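One thing worth making concrete here (my own hypothetical experiment, not in the original post): after feature normalization, gradient descent converges comfortably, and comparing a few learning rates shows how much the choice of alpha matters. This reuses X2, y2, iters and gradientDescent from above:
# Hypothetical experiment: final cost after `iters` iterations for several learning rates
for lr in (0.001, 0.01, 0.1):
    theta_lr, cost_lr = gradientDescent(X2, y2, np.asmatrix(np.zeros((1, X2.shape[1]))), lr, iters)
    print('alpha = {:<5}  final cost = {:.6f}'.format(lr, cost_lr[-1]))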
4. The normal equation
from matplotlib import pyplot as plt
import numpy as np
import pandas as pd
from sklearn import linear_model
path1 = './ex1data1.txt'
data=pd.read_csv(path1,header=None,names=['Population', 'Profit'])
data.insert(0, 'Ones', 1)
cols = data.shape[1]
X = data.iloc[:,0:cols-1]  # X: all rows, every column except the last
y = data.iloc[:,cols-1:cols]  # y: all rows, only the last column
X = np.asmatrix(X.values)
y = np.asmatrix(y.values)
theta = np.asmatrix(np.array([0,0]))
model = linear_model.LinearRegression()
model.fit(X, y)
# Extract the feature column as a 1-D array
x = np.array(X[:, 1].A1)
# Predict, then flatten to a 1-D array
f = model.predict(X).flatten()
fig, ax = plt.subplots(figsize=(12,8))
ax.plot(x, f, 'r', label='Prediction')
ax.scatter(data.Population, data.Profit, label='Training Data')
ax.legend(loc=2)
ax.set_xlabel('Population')
ax.set_ylabel('Profit')
ax.set_title('Predicted Profit vs. Population Size')
plt.show()
# Normal equation
def normalEqn(X, y):
    # θ = (XᵀX)⁻¹ Xᵀ y; X.T @ X is equivalent to X.T.dot(X)
    theta = np.linalg.inv(X.T @ X) @ X.T @ y
    return theta
final_theta2 = normalEqn(X, y)  # these values differ noticeably from the batch gradient descent theta
print(final_theta2)
[Figure: Predicted Profit vs. Population Size, sklearn's prediction line over the training data]
[Output: the θ values returned by normalEqn]
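On the difference noted above: the normal equation is the exact closed-form least-squares solution, while batch gradient descent with alpha = 0.01 has probably not fully converged after only 1000 iterations, which would explain the gap. A quick way to check this (my own sketch, assuming X, y, model and normalEqn from this section plus the gradientDescent function from section 2 are available in the same session):
theta_exact = normalEqn(X, y)                     # closed-form least-squares θ, shape (2, 1)
print('normal equation:', theta_exact.T)
# sklearn solves the same least-squares problem, so its intercept and slope should agree
print('sklearn        :', model.intercept_, model.coef_)
# With more iterations, batch gradient descent should approach the same θ
for n in (1000, 5000, 20000):
    g_n, _ = gradientDescent(X, y, np.asmatrix([0.0, 0.0]), 0.01, n)
    print('gradient descent after {:>5d} iterations:'.format(n), g_n)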