自己用python写一个线性支持向量机 linearSVM
转载自:https://blog.csdn.net/iteapoy/article/details/117814830
前言:要修改linearSVM的代码,想在网上找一个能用的代码,结果要么调用sklearn库,要么都复制粘贴同一款代码,写得太复杂了,而且有bug,在bing国际版上搜到了一个没有用SMO和拉格朗日算子求解的linearSVM代码,复制过来Mark一下。
完整代码:
import numpy as np
import matplotlib.pyplot as plt
import seaborn as sns
from sklearn import preprocessing
from sklearn.preprocessing import StandardScaler
class LinearSVMUsingSoftMargin:
    """Linear soft-margin SVM trained by full-batch subgradient descent on
    the primal hinge-loss objective:

        J(beta, b) = 1/2 * ||beta||^2 + C * sum_i max(0, 1 - y_i * (x_i . beta + b))

    No SMO / Lagrangian dual is involved. Labels must be in {-1, +1}.
    """

    def __init__(self, C=1.0):
        """
        Args:
            C: soft-margin penalty; larger C punishes margin violations harder.
        """
        self._support_vectors = None
        self.C = C
        self.beta = None        # weight vector, shape (d,)
        self.b = None           # scalar bias
        self.X = None           # training data, kept only for plotting
        self.y = None
        # n is the number of data points
        self.n = 0
        # d is the number of dimensions
        self.d = 0
        # Per-epoch objective values recorded by fit().
        self.loss_history = []

    def __decision_function(self, X):
        """Raw signed decision values: X . beta + b."""
        return X.dot(self.beta) + self.b

    def __cost(self, margin):
        """Primal objective: L2 regularizer plus C-weighted hinge loss."""
        return (1 / 2) * self.beta.dot(self.beta) + self.C * np.sum(np.maximum(0, 1 - margin))

    def __margin(self, X, y):
        """Functional margin y * f(x); values below 1 violate the soft margin."""
        return y * self.__decision_function(X)

    def fit(self, X, y, lr=1e-3, epochs=500):
        """Train with full-batch subgradient descent.

        Args:
            X: array of shape (n, d).
            y: labels in {-1, +1}, shape (n,).
            lr: learning rate.
            epochs: number of gradient steps.

        Returns:
            The list of per-epoch loss values (also kept in self.loss_history).
        """
        # Initialize beta and b.
        self.n, self.d = X.shape
        self.beta = np.random.randn(self.d)
        self.b = 0
        # Required only for plotting.
        self.X = X
        self.y = y
        # BUG FIX: the original accumulated the losses into a local list that
        # was silently discarded; expose the training curve instead.
        self.loss_history = []
        for _ in range(epochs):
            margin = self.__margin(X, y)
            self.loss_history.append(self.__cost(margin))
            # Only points on the wrong side of the margin (margin < 1)
            # contribute a hinge subgradient.
            misclassified_pts_idx = np.where(margin < 1)[0]
            d_beta = self.beta - self.C * y[misclassified_pts_idx].dot(X[misclassified_pts_idx])
            self.beta = self.beta - lr * d_beta
            d_b = -self.C * np.sum(y[misclassified_pts_idx])
            self.b = self.b - lr * d_b
        # Support vectors: points on or inside the margin after training.
        self._support_vectors = np.where(self.__margin(X, y) <= 1)[0]
        return self.loss_history

    def predict(self, X):
        """Predict labels in {-1, +1} (0 only when the decision value is exactly 0)."""
        return np.sign(self.__decision_function(X))

    def score(self, X, y):
        """Mean accuracy of predict(X) against y."""
        P = self.predict(X)
        return np.mean(y == P)

    def plot_decision_boundary(self):
        """Plot the 2-D training data, the decision boundary, the margins and
        the support vectors. Assumes d == 2 and that fit() was called."""
        plt.scatter(self.X[:, 0], self.X[:, 1], c=self.y, s=50, cmap=plt.cm.Paired, alpha=.7)
        ax = plt.gca()
        xlim = ax.get_xlim()
        ylim = ax.get_ylim()
        # Create a 30x30 grid spanning the current axes to evaluate the model.
        xx = np.linspace(xlim[0], xlim[1], 30)
        yy = np.linspace(ylim[0], ylim[1], 30)
        YY, XX = np.meshgrid(yy, xx)
        xy = np.vstack([XX.ravel(), YY.ravel()]).T
        Z = self.__decision_function(xy).reshape(XX.shape)
        # Solid line: decision boundary (f = 0); dashed lines: margins (f = +/-1).
        ax.contour(XX, YY, Z, colors=['r', 'b', 'r'], levels=[-1, 0, 1], alpha=0.5,
                   linestyles=['--', '-', '--'], linewidths=[2.0, 2.0, 2.0])
        # Circle the support vectors.
        ax.scatter(self.X[:, 0][self._support_vectors], self.X[:, 1][self._support_vectors], s=100,
                   linewidth=1, facecolors='none', edgecolors='k')
        plt.show()
def load_data(cols):
    """Load the last 100 rows of seaborn's iris dataset and split it into
    features and a label-encoded target.

    Args:
        cols: feature column names to keep; an empty sequence keeps them all.

    Returns:
        (X, y): feature matrix as a numpy array and integer-encoded species.
    """
    iris = sns.load_dataset("iris").tail(100)
    encoder = preprocessing.LabelEncoder()
    labels = encoder.fit_transform(iris["species"])
    features = iris.drop(["species"], axis=1)
    # Optionally restrict to a subset of the feature columns.
    if len(cols) > 0:
        features = features[cols]
    return features.values, labels
if __name__ == '__main__':
    # The SVM expects targets in {-1, +1}; LabelEncoder produces {0, 1},
    # so remap the zeros.
    cols = ["petal_length", "petal_width"]
    X, y = load_data(cols)
    y[y == 0] = -1
    # Standardize the features to zero mean and unit variance.
    X = StandardScaler().fit_transform(X)
    # Train and evaluate the custom soft-margin implementation.
    model = LinearSVMUsingSoftMargin(C=15.0)
    model.fit(X, y)
    print("train score:", model.score(X, y))
    model.plot_decision_boundary()
【推荐】国内首个AI IDE,深度理解中文开发场景,立即下载体验Trae
【推荐】编程新体验,更懂你的AI,立即体验豆包MarsCode编程助手
【推荐】抖音旗下AI助手豆包,你的智能百科全书,全免费不限次数
【推荐】轻量又高性能的 SSH 工具 IShell:AI 加持,快人一步
· 开发者必知的日志记录最佳实践
· SQL Server 2025 AI相关能力初探
· Linux系列:如何用 C#调用 C方法造成内存泄露
· AI与.NET技术实操系列(二):开始使用ML.NET
· 记一次.NET内存居高不下排查解决与启示
· 开源Multi-agent AI智能体框架aevatar.ai,欢迎大家贡献代码
· Manus重磅发布:全球首款通用AI代理技术深度解析与实战指南
· 被坑几百块钱后,我竟然真的恢复了删除的微信聊天记录!
· 没有Manus邀请码?试试免邀请码的MGX或者开源的OpenManus吧
· 园子的第一款AI主题卫衣上架——"HELLO! HOW CAN I ASSIST YOU TODAY