SVM

  1. SVM tries to find an optimal decision boundary.
  2. It maximizes the margin (2d, i.e., twice the distance d from the boundary to the nearest samples); see the derivation below.
 

Hyperparameters:

  1. The SVM model has two very important parameters, C and gamma. C is the penalty coefficient, i.e., the tolerance for errors. The larger C is, the less the model tolerates misclassification, which makes overfitting easy; the smaller C is, the easier it is to underfit. If C is too large or too small, generalization suffers.
  2. gamma is a parameter that comes with the RBF kernel once it is chosen as the kernel function. It implicitly determines how the data are distributed after being mapped into the new feature space: the larger gamma is, the fewer support vectors there are; the smaller gamma is, the more support vectors. The number of support vectors affects training and prediction speed. A minimal usage sketch follows this list.
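
A minimal sketch, assuming an RBF kernel, of where these two hyperparameters go in scikit-learn's SVC (the values are purely illustrative, not recommendations):

from sklearn.svm import SVC

# Larger C: less tolerance for misclassified points (tends toward overfitting).
# Larger gamma: narrower Gaussian influence, fewer support vectors (also tends toward overfitting).
clf = SVC(kernel='rbf', C=1.0, gamma=0.1)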
 

Distance derivation
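
The derivation figure from the original post is not preserved; as a recap of the standard result, the distance from a point x to the hyperplane w^T x + b = 0 is

$$ d = \frac{|w^\top x + b|}{\|w\|} $$

The support vectors satisfy |w^T x + b| = 1, so the margin is 2d = 2/||w||, and maximizing the margin is equivalent to minimizing ||w||.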

Hard Margin SVM
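
The formulation image is missing from this copy; the standard hard-margin optimization problem it showed is

$$ \min_{w,b}\ \frac{1}{2}\|w\|^2 \quad \text{s.t.}\quad y^{(i)}\left(w^\top x^{(i)} + b\right) \ge 1,\quad i = 1,\dots,m $$

i.e., every sample must lie on the correct side of the margin, which only has a solution when the data are linearly separable.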

Soft Margin SVM
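
Again the image is missing; the standard soft-margin problem relaxes the constraints with slack variables weighted by C:

$$ \min_{w,b,\zeta}\ \frac{1}{2}\|w\|^2 + C\sum_{i=1}^{m}\zeta_i \quad \text{s.t.}\quad y^{(i)}\left(w^\top x^{(i)} + b\right) \ge 1 - \zeta_i,\ \ \zeta_i \ge 0 $$

A large C makes slack expensive and approaches the hard margin; a small C tolerates more violations.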

SVM in scikit-learn

As with kNN, the data must be standardized first.

import numpy as np
import matplotlib.pyplot as plt
from sklearn import datasets

 
def plot_decision_boundary(model, axis):
    """Plot the model's decision boundary over the window axis=[xmin, xmax, ymin, ymax]."""
    x0, x1 = np.meshgrid(
        np.linspace(axis[0], axis[1], int((axis[1]-axis[0])*100)).reshape(-1, 1),
        np.linspace(axis[2], axis[3], int((axis[3]-axis[2])*100)).reshape(-1, 1)
    )
    X_new = np.c_[x0.ravel(), x1.ravel()]
    y_predict = model.predict(X_new)
    zz = y_predict.reshape(x0.shape)
    from matplotlib.colors import ListedColormap
    custom_cmap = ListedColormap(['#EF9A9A', '#FFF59D', '#90CAF9'])
    plt.contourf(x0, x1, zz, cmap=custom_cmap)
 

def plot_svc_decision_boundary(model, axis):
    """Like plot_decision_boundary, but also draws the two margin lines of a linear SVC."""
    x0, x1 = np.meshgrid(
        np.linspace(axis[0], axis[1], int((axis[1]-axis[0])*100)).reshape(-1, 1),
        np.linspace(axis[2], axis[3], int((axis[3]-axis[2])*100)).reshape(-1, 1),
    )
    X_new = np.c_[x0.ravel(), x1.ravel()]

    y_predict = model.predict(X_new)
    zz = y_predict.reshape(x0.shape)

    from matplotlib.colors import ListedColormap
    custom_cmap = ListedColormap(['#EF9A9A', '#FFF59D', '#90CAF9'])
    plt.contourf(x0, x1, zz, cmap=custom_cmap)

    w = model.coef_[0]
    b = model.intercept_[0]
    # w0*x0 + w1*x1 + b = 0  -> decision boundary
    # w0*x0 + w1*x1 + b = 1  -> upper margin line
    # w0*x0 + w1*x1 + b = -1 -> lower margin line
    # => x1 = -w0/w1 * x0 - b/w1 (+- 1/w1)
    plot_x = np.linspace(axis[0], axis[1], 200)
    up_y = -w[0]/w[1] * plot_x - b/w[1] + 1/w[1]
    down_y = -w[0]/w[1] * plot_x - b/w[1] - 1/w[1]
    # Keep only the segments that fall inside the plotting window.
    up_index = (up_y >= axis[2]) & (up_y <= axis[3])
    down_index = (down_y >= axis[2]) & (down_y <= axis[3])
    plt.plot(plot_x[up_index], up_y[up_index], color='black')
    plt.plot(plot_x[down_index], down_y[down_index], color='black')
 

iris = datasets.load_iris()
X = iris.data
Y = iris.target

# Keep only the first two classes and the first two features so the data can be plotted in 2-D.
X = X[Y<2, :2]
Y = Y[Y<2]

plt.scatter(X[Y==0,0], X[Y==0,1], color='r')
plt.scatter(X[Y==1,0], X[Y==1,1], color='b')
plt.show()

from sklearn.preprocessing import StandardScaler
from sklearn.svm import LinearSVC

stds = StandardScaler()
stds.fit(X)
X_std = stds.transform(X)

svc = LinearSVC(C=1e9)  # C is the regularization coefficient; C=1e9 is effectively a hard-margin SVM
svc.fit(X_std, Y)

# Plot
plot_decision_boundary(svc, axis=[-3, 3, -3, 3])
plt.scatter(X_std[Y==0,0], X_std[Y==0,1], color='r')
plt.scatter(X_std[Y==1,0], X_std[Y==1,1], color='b')
plt.show()

stds = StandardScaler()
stds.fit(X)
X_std = stds.transform(X)

svc = LinearSVC(C=0.01)  # a small C gives a soft-margin SVM that tolerates misclassified points
svc.fit(X_std, Y)

# Plot
plot_decision_boundary(svc, axis=[-3, 3, -3, 3])
plt.scatter(X_std[Y==0,0], X_std[Y==0,1], color='r')
plt.scatter(X_std[Y==1,0], X_std[Y==1,1], color='b')
plt.show()

stds = StandardScaler()
stds.fit(X)
X_std = stds.transform(X)

svc = LinearSVC(C=1e9)  # effectively hard margin; this helper also draws the two margin lines
svc.fit(X_std, Y)

# Plot
plot_svc_decision_boundary(svc, axis=[-3, 3, -3, 3])
plt.scatter(X_std[Y==0,0], X_std[Y==0,1], color='r')
plt.scatter(X_std[Y==1,0], X_std[Y==1,1], color='b')
plt.show()

stds = StandardScaler()
stds.fit(X)
X_std = stds.transform(X)

svc = LinearSVC(C=1)  # moderate C: soft-margin SVM
svc.fit(X_std, Y)

# Plot
plot_svc_decision_boundary(svc, axis=[-3, 3, -3, 3])
plt.scatter(X_std[Y==0,0], X_std[Y==0,1], color='r')
plt.scatter(X_std[Y==1,0], X_std[Y==1,1], color='b')
plt.show()

Using polynomial features with SVM

from sklearn import datasets

# Original data
X, y = datasets.make_moons()
plt.scatter(X[y==0,0], X[y==0,1])
plt.scatter(X[y==1,0], X[y==1,1])
plt.show()

# Noisy data
X, y = datasets.make_moons(noise=0.15)
plt.scatter(X[y==0,0], X[y==0,1])
plt.scatter(X[y==1,0], X[y==1,1])
plt.show()
X = X[:, :2]  # make_moons already returns exactly two features
from sklearn.preprocessing import PolynomialFeatures, StandardScaler
from sklearn.svm import LinearSVC
from sklearn.pipeline import Pipeline

def PolynomialSVC(degree, C=1.0):
    """Pipeline: expand polynomial features, standardize, then fit a linear SVC."""
    return Pipeline([
        ("poly", PolynomialFeatures(degree=degree)),
        ("std_scaler", StandardScaler()),
        ("linearSVC", LinearSVC(C=C))
    ])

poly_svc = PolynomialSVC(degree=3)
poly_svc.fit(X, y)

plot_decision_boundary(poly_svc, axis=[-1.5, 2.5, -1.0, 1.5])
plt.scatter(X[y==0,0], X[y==0,1])
plt.scatter(X[y==1,0], X[y==1,1])
plt.show()

SVM with a polynomial kernel

from sklearn.svm import SVC

svc = SVC(kernel="poly", degree=3, coef0=1)  # coef0 is the constant term c in the polynomial kernel shown below
svc.fit(X, y)
plot_decision_boundary(svc, axis=[-1.5, 2.5, -1.0, 1.5])
plt.scatter(X[y==0,0], X[y==0,1])
plt.scatter(X[y==1,0], X[y==1,1])
plt.show()
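
The kernel figure from the original post is missing; for reference, the polynomial kernel scikit-learn uses is

$$ K(x, y) = \left(\gamma\, \langle x, y\rangle + coef0\right)^{degree} $$

so coef0 plays the role of the constant c, trading off the influence of higher-degree versus lower-degree terms.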

What is a kernel function
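
The explanatory figures are missing here. In brief: in the SVM dual problem the training data appear only through inner products x_i · x_j, so a kernel function

$$ K(x, z) = \phi(x) \cdot \phi(z) $$

computes the inner product in a higher-dimensional feature space phi without ever materializing phi(x) explicitly; this is the kernel trick.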

Gaussian (RBF) kernel

 

An m*n dataset is mapped into an m*m one: each of the m samples serves as a landmark, giving every sample m new features; see the formula below.
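
Concretely, the Gaussian (RBF) kernel measures similarity between a sample x and a landmark l:

$$ K(x, l) = e^{-\gamma \|x - l\|^2} $$

The 1-D demo below uses the two points -1 and 1 as landmarks to map the data into 2-D, where it becomes linearly separable.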

 

x = np.arange(-4, 5, 1)
y = ((x >= -2) & (x <= 2)).astype(np.int32)  # 1-D data that is not linearly separable
plt.scatter(x[y==0], [0 for i in range(len(x[y==0]))])
plt.scatter(x[y==1], [0 for i in range(len(x[y==1]))])
plt.show()

 

def gaosihe(x, l):
    """Gaussian kernel value of sample x against landmark l ('gaosihe' is pinyin for Gaussian kernel)."""
    gamma = 1.0
    return np.exp(-gamma * (x - l)**2)

# Map every 1-D sample to 2-D using the landmarks -1 and 1.
x2 = np.array([[gaosihe(x[i], -1), gaosihe(x[i], 1)] for i in range(len(x))], dtype=np.float32)
plt.scatter(x2[y==0,0], x2[y==0,1])
plt.scatter(x2[y==1,0], x2[y==1,1])
plt.show()

 

 
# Noisy data
X, y = datasets.make_moons(noise=0.15)
plt.scatter(X[y==0,0], X[y==0,1])
plt.scatter(X[y==1,0], X[y==1,1])
plt.show()
X = X[:, :2]

 

from sklearn.preprocessing import StandardScaler
from sklearn.svm import SVC

def RbfKernelSvc(X, y, gamma=1.0):
    """Standardize, fit an RBF-kernel SVC, and plot its decision boundary."""
    std = StandardScaler()
    std.fit(X)
    X = std.transform(X)
    svc_clf = SVC(kernel='rbf', gamma=gamma)  # the larger gamma is, the more the model overfits
    svc_clf.fit(X, y)
    plot_decision_boundary(svc_clf, axis=[-2, 2, -2.5, 2.5])
    plt.scatter(X[y==0,0], X[y==0,1])
    plt.scatter(X[y==1,0], X[y==1,1])
    plt.show()
 

 
for i in [0.1, 0.5, 1, 5, 10]:
    print(i)
    RbfKernelSvc(X, y, gamma=i)

(Output: the printed gamma values 0.1, 0.5, 1, 5, 10, each followed by its decision-boundary plot; the boundary grows more complex, i.e., more overfit, as gamma increases.)

Using the SVM idea for regression (SVR)
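
SVR flips the classification idea: instead of keeping points out of the margin, it tries to fit as many points as possible inside a tube of width 2*epsilon around the regression function. Points inside the tube contribute no loss (the epsilon-insensitive loss):

$$ L_\epsilon(y, \hat{y}) = \max\left(0,\ |y - \hat{y}| - \epsilon\right) $$

The epsilon parameter of SVR below controls the tube width.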

from sklearn import datasets
from sklearn.model_selection import train_test_split

boston = datasets.load_boston()
X = boston.data
y = boston.target
 

 
from sklearn.svm import SVR
from sklearn.preprocessing import StandardScaler

def StdSvr(X, y, kernel, epsilon=0.1, gamma=1.0):
    """Split, standardize, fit an SVR, and print its R^2 score on the test set."""
    trainX, testX, trainY, testY = train_test_split(X, y)
    std = StandardScaler()
    std.fit(trainX)
    trainX = std.transform(trainX)
    testX = std.transform(testX)
    svm_reg = SVR(kernel=kernel, epsilon=epsilon, gamma=gamma)
    svm_reg.fit(trainX, trainY)
    print(svm_reg.score(testX, testY))
 
StdSvr(X, y, kernel="linear")

>>> 0.7820063084145614

  
