The main idea of SVM can be summarized in two points: find the decision boundary that maximizes the margin to the nearest samples of each class, and, when the data are not linearly separable, map the samples into a higher-dimensional space with a kernel function so that they become separable.
Hard Margin SVM

The inequality constraint that each red and blue point must satisfy:
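With the conventional labels y_i ∈ {−1, +1} (the assignment of red/blue to −1/+1 is illustrative), the two classes must lie on either side of the margin:

$$w^T x_i + b \ge +1 \quad \text{for } y_i = +1, \qquad w^T x_i + b \le -1 \quad \text{for } y_i = -1$$

which combine into the single constraint $y_i(w^T x_i + b) \ge 1$.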

The problem then reduces to maximizing the margin $2/\|w\|$, i.e., it finally becomes a constrained optimization problem:
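In standard form, the hard-margin problem is:

$$\min_{w,b} \frac{1}{2}\|w\|^2 \quad \text{s.t.} \quad y_i(w^T x_i + b) \ge 1, \; i = 1, \dots, m$$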

Soft Margin SVM
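The hard-margin constraints have no solution when the data are not perfectly separable, so the soft margin introduces a slack variable ζ_i per sample plus a penalty hyperparameter C; this is the same C passed to LinearSVC and SVC below, and the larger it is, the fewer violations are tolerated:

$$\min_{w,b,\zeta} \frac{1}{2}\|w\|^2 + C\sum_{i=1}^{m}\zeta_i \quad \text{s.t.} \quad y_i(w^T x_i + b) \ge 1 - \zeta_i, \; \zeta_i \ge 0$$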

Using SVM in scikit-learn
from sklearn import datasets
import numpy as np
import matplotlib.pyplot as plt
# Prepare the data
iris = datasets.load_iris()
X = iris['data']
y = iris['target']
# Keep only the first two classes (binary problem) and the first two features (for 2D plotting)
X = X[y<2,:2]
y = y[y<2]
# Standardize the data (SVM is distance-based, so the features should be standardized first)
from sklearn.preprocessing import StandardScaler
stdScaler = StandardScaler()
stdScaler.fit(X)
X_standard = stdScaler.transform(X)
# Instantiate the SVC object and train the model
from sklearn.svm import LinearSVC
svc = LinearSVC(C=1e9)   # a very large C approximates a hard margin
svc.fit(X_standard,y)
def plot_svc_decision_boundary(model,axis):
    x0,x1 = np.meshgrid(
        np.linspace(axis[0],axis[1],int((axis[1]-axis[0])*100)),
        np.linspace(axis[2],axis[3],int((axis[3]-axis[2])*100))
    )
    X_new = np.c_[x0.ravel(),x1.ravel()]
    y_predict = model.predict(X_new)
    zz = y_predict.reshape(x0.shape)
    from matplotlib.colors import ListedColormap
    custom_cmap = ListedColormap(['#EF9A9A','#FFF59D','#90CAF9'])
    plt.contourf(x0,x1,zz,cmap=custom_cmap)
    # Besides the decision boundary, also draw the two margin lines through the support vectors
    w = model.coef_[0]
    b = model.intercept_[0]
    # Decision boundary: w0*x0 + w1*x1 + b = 0
    # => x1 = -w0/w1 * x0 - b/w1
    plot_x = np.linspace(axis[0],axis[1],200)
    # Margin lines: w0*x0 + w1*x1 + b = +1 and -1
    up_y = -w[0]/w[1] * plot_x - b/w[1] + 1/w[1]
    down_y = -w[0]/w[1] * plot_x - b/w[1] - 1/w[1]
    # Keep only the segments that fall inside the plotting area
    up_index = (up_y>=axis[2])&(up_y<=axis[3])
    down_index = (down_y>=axis[2])&(down_y<=axis[3])
    plt.plot(plot_x[up_index],up_y[up_index],color='black')
    plt.plot(plot_x[down_index],down_y[down_index],color='black')
plot_svc_decision_boundary(svc,axis=[-3,3,-3,3])
plt.scatter(X_standard[y==0,0],X_standard[y==0,1])
plt.scatter(X_standard[y==1,0],X_standard[y==1,1])
plt.show()
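For contrast, a quick soft-margin sketch (C=0.01 is an illustrative value, not from the original): a small C tolerates more margin violations, so some points fall inside or across the margin lines.

svc2 = LinearSVC(C=0.01)
svc2.fit(X_standard,y)
plot_svc_decision_boundary(svc2,axis=[-3,3,-3,3])
plt.scatter(X_standard[y==0,0],X_standard[y==0,1])
plt.scatter(X_standard[y==1,0],X_standard[y==1,1])
plt.show()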


Applying polynomial features to SVM
# Generate a toy dataset with make_moons, noise level 0.15
X,y = datasets.make_moons(noise=0.15)
plt.scatter(X[y==0,0],X[y==0,1])
plt.scatter(X[y==1,0],X[y==1,1])
plt.show()
from sklearn.preprocessing import PolynomialFeatures
from sklearn.pipeline import Pipeline
def PolynomialSVC(degree,C=1.0):
    return Pipeline([
        ("poly",PolynomialFeatures(degree=degree)),
        ("std_standard",StandardScaler()),
        ("svc",LinearSVC(C=C))
    ])
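A usage sketch for the pipeline above (degree=3 is an illustrative choice); the decision boundary can then be drawn with the same plot_decision_boundary helper shown in the RBF section below.

poly_svc = PolynomialSVC(degree=3)
poly_svc.fit(X,y)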

SVM with a polynomial kernel
from sklearn.svm import SVC
def PolynomialKernelSVC(degree,C=1.0):
    return Pipeline([
        ("std_scaler",StandardScaler()),
        ("kernelSVC",SVC(kernel="poly",degree=degree,C=C))
    ])
poly_kernel_svc = PolynomialKernelSVC(degree=5)
poly_kernel_svc.fit(X,y)
plot_decision_boundary(poly_kernel_svc,axis=[-1.5,2.5,-1.0,1.5])   # plot_decision_boundary is defined in the RBF section below
plt.scatter(X[y==0,0],X[y==0,1])
plt.scatter(X[y==1,0],X[y==1,1])
plt.show()

Principle
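In the dual formulation of the soft-margin problem, the training samples appear only through inner products:

$$\max_\alpha \sum_{i=1}^{m}\alpha_i - \frac{1}{2}\sum_{i=1}^{m}\sum_{j=1}^{m}\alpha_i\alpha_j y_i y_j\, x_i^T x_j \quad \text{s.t.} \quad 0 \le \alpha_i \le C, \; \sum_{i=1}^{m}\alpha_i y_i = 0$$

The kernel trick replaces every inner product $x_i^T x_j$ with a kernel value $K(x_i, x_j)$, which implicitly computes the inner product in a higher-dimensional feature space without ever materializing that space. scikit-learn's polynomial kernel is $K(x, y) = (\gamma\, x^T y + r)^d$, with degree d, gamma $\gamma$, and coef0 r.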




RBF Kernel (Gaussian kernel)
gamma is the hyperparameter of the Gaussian kernel.
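scikit-learn defines the RBF (Gaussian) kernel as

$$K(x, y) = e^{-\gamma\|x - y\|^2}$$

A larger gamma makes the Gaussian narrower, so each training point influences only a small neighborhood and the model becomes more complex (more prone to overfitting); a smaller gamma does the opposite.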
def RBFKernelSVC(gamma=1.0):
    return Pipeline([
        ("std_scaler",StandardScaler()),
        ("svc",SVC(kernel="rbf",gamma=gamma))
    ])
svc = RBFKernelSVC()
svc.fit(X,y)
def plot_decision_boundary(model,axis):
    x0,x1 = np.meshgrid(
        np.linspace(axis[0],axis[1],int((axis[1]-axis[0])*100)),
        np.linspace(axis[2],axis[3],int((axis[3]-axis[2])*100))
    )
    X_new = np.c_[x0.ravel(),x1.ravel()]
    y_predict = model.predict(X_new)
    zz = y_predict.reshape(x0.shape)
    from matplotlib.colors import ListedColormap
    custom_cmap = ListedColormap(['#EF9A9A','#FFF59D','#90CAF9'])
    plt.contourf(x0,x1,zz,cmap=custom_cmap)
plot_decision_boundary(svc,axis=[-1.5,2.5,-1.0,1.5])
plt.scatter(X[y==0,0],X[y==0,1])
plt.scatter(X[y==1,0],X[y==1,1])
plt.show()
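To see the effect of gamma (the value 100 here is illustrative, not from the original), refit with a much larger value; the decision regions shrink to tight "islands" around individual training points, a telltale sign of overfitting.

svc_gamma100 = RBFKernelSVC(gamma=100)
svc_gamma100.fit(X,y)
plot_decision_boundary(svc_gamma100,axis=[-1.5,2.5,-1.0,1.5])
plt.scatter(X[y==0,0],X[y==0,1])
plt.scatter(X[y==1,0],X[y==1,1])
plt.show()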



Using the SVM idea to solve regression problems
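SVR reverses the classification idea: instead of keeping points out of the margin, it tries to fit as many points as possible inside a tube of half-width epsilon around the regression function. Points inside the tube incur no loss, which is the epsilon-insensitive loss:

$$L_\epsilon\big(y, f(x)\big) = \max\big(0, |y - f(x)| - \epsilon\big)$$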

# Note: load_boston is deprecated and was removed in scikit-learn 1.2; this snippet requires an older version
boston = datasets.load_boston()
X = boston['data']
y = boston['target']
from sklearn.model_selection import train_test_split
X_train,X_test,y_train,y_test = train_test_split(X,y)
from sklearn.svm import LinearSVR
# epsilon is the hyperparameter: the half-width of the insensitive tube
def StandardLinearSVR(epsilon=0.1):
    return Pipeline([
        ("std_scaler",StandardScaler()),
        ("svc",LinearSVR(epsilon=epsilon))
    ])
lin_svr = StandardLinearSVR()
lin_svr.fit(X_train,y_train)
lin_svr.score(X_test,y_test)   # R^2 on the test set
>>> 0.6735924094720267