Univariate Linear Regression in Python
In [54]:
# Initialization
import random
import numpy as np
import matplotlib.pyplot as plt
# This is a bit of magic to make matplotlib figures appear inline in the notebook
# rather than in a new window.
%matplotlib inline
plt.rcParams['figure.figsize'] = (10.0, 8.0) # set default size of plots
plt.rcParams['image.interpolation'] = 'nearest'
plt.rcParams['image.cmap'] = 'gray'
# Some more magic so that the notebook will reload external python modules;
# see http://stackoverflow.com/questions/1907993/autoreload-of-modules-in-ipython
%load_ext autoreload
%autoreload 2
In [55]:
print('Plotting Data ...')

def load_exdata(filename):
    data = []
    with open(filename, 'r') as f:
        for line in f.readlines():
            line = line.split(',')
            # each line is one example, e.g. 5.5277,9.1302
            current = [float(item) for item in line]
            data.append(current)
    return data

data = load_exdata('ex1data1.txt')
data = np.array(data)
print(data.shape)

x = data[:, 0]; y = data[:, 1]
m = data.shape[0]  # number of training examples

plt.plot(x, y, 'rx')
plt.ylabel('Profit in $10,000s')
plt.xlabel('Population of City in 10,000s')
plt.title("Training data")
Out[55]: <scatter plot of the training data>
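As an aside, NumPy can parse this comma-separated file directly, so the hand-rolled loader above can be replaced by one call. A minimal sketch, assuming the same two-float-column layout of ex1data1.txt:

In [ ]:
# Equivalent one-liner: np.loadtxt parses the two comma-separated columns
data = np.loadtxt('ex1data1.txt', delimiter=',')  # shape (m, 2)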
In [56]:
x = x.reshape(-1, 1)
# Append a column of ones for the intercept term
X = np.hstack([x, np.ones((x.shape[0], 1))])
theta = np.zeros((2, 1))
y = y.reshape(-1, 1)

# Compute the cost
def computeCost(X, y, theta):
    m = y.shape[0]
    # X is (m, 2) and theta is (2, 1), so X.dot(theta) is (m, 1)
    J = np.sum((X.dot(theta) - y) ** 2) / (2 * m)
    return J

# Gradient descent
def gradientDescent(X, y, theta, alpha, num_iters):
    m = y.shape[0]
    # Record the cost at every iteration
    J_history = np.zeros((num_iters, 1))
    for iter in range(num_iters):
        # Differentiating J gives the update: theta -= alpha/m * X^T (X*theta - y)
        theta = theta - (alpha / m) * X.T.dot(X.dot(theta) - y)
        J_history[iter] = computeCost(X, y, theta)
    return J_history, theta

iterations = 1500  # number of iterations
alpha = 0.01       # learning rate

j = computeCost(X, y, theta)
print('Initial cost: %f' % j)
J_history, theta = gradientDescent(X, y, theta, alpha, iterations)
print('Theta found by gradient descent: %f %f' % (theta[0][0], theta[1][0]))

plt.plot(J_history)
plt.ylabel('loss')
plt.xlabel('iteration count')
Out[56]: <loss decreasing over iterations>
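For a problem this small, the closed-form normal equation theta = (X^T X)^{-1} X^T y gives the exact least-squares solution, so it makes a handy sanity check on the gradient-descent result. A minimal sketch (theta_exact is a name introduced here, not part of the original notebook):

In [ ]:
# Closed-form least squares for comparison with gradient descent
theta_exact = np.linalg.solve(X.T.dot(X), X.T.dot(y))
print('Normal equation: %f %f' % (theta_exact[0][0], theta_exact[1][0]))
# The two estimates should agree to a couple of decimal places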
In [57]:
# Plot the training data together with the fitted line
plt.plot(data[:, 0], data[:, 1], 'rx')
plt.plot(X[:, 0], X.dot(theta), '-')
plt.ylabel('Profit in $10,000s')
plt.xlabel('Population of City in 10,000s')
plt.title("Training data")
Out[57]: <training data with the fitted regression line>
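With theta learned, a prediction is just a dot product against [population, 1], in the same column order used to build X above. A minimal sketch; the populations 3.5 and 7 are chosen here purely as examples:

In [ ]:
# Predict profit for cities of 35,000 and 70,000 people
# (feature order matches X: [population, intercept])
predict1 = np.array([3.5, 1]).dot(theta)
predict2 = np.array([7.0, 1]).dot(theta)
print('For population = 35,000, predicted profit = $%.2f' % (predict1[0] * 10000))
print('For population = 70,000, predicted profit = $%.2f' % (predict2[0] * 10000))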
In [75]:
from mpl_toolkits.mplot3d import Axes3D
from matplotlib import cm
from matplotlib.ticker import LinearLocator, FormatStrFormatter

theta0_vals = np.linspace(-10, 10, 100)
theta1_vals = np.linspace(-10, 10, 100)
J_vals = np.zeros((theta0_vals.shape[0], theta1_vals.shape[0]))

# Fill in J_vals; theta must be shaped (2, 1) so computeCost
# broadcasts correctly against y
for i in range(theta0_vals.shape[0]):
    for j in range(theta1_vals.shape[0]):
        t = np.array([[theta0_vals[i]], [theta1_vals[j]]])
        J_vals[i, j] = computeCost(X, y, t)

fig = plt.figure()
ax = fig.add_subplot(projection='3d')
theta0_vals, theta1_vals = np.meshgrid(theta0_vals, theta1_vals)
# meshgrid varies theta0 along columns, so transpose J_vals to match
J_vals = J_vals.T

# Plot the surface.
surf = ax.plot_surface(theta0_vals, theta1_vals, J_vals, cmap=cm.coolwarm,
                       linewidth=0, antialiased=False)

# Customize the z-axis.
ax.zaxis.set_major_locator(LinearLocator(10))
ax.zaxis.set_major_formatter(FormatStrFormatter('%d'))

# Add a color bar which maps values to colors.
fig.colorbar(surf, shrink=0.5, aspect=5)
plt.show()
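The bowl shape of the cost surface is often easier to read as a contour plot with the gradient-descent solution marked on it. A minimal sketch; the logarithmically spaced levels are an assumption, chosen only to keep the contours legible:

In [ ]:
# Contour view of the same cost surface, with the learned theta marked
plt.figure()
plt.contour(theta0_vals, theta1_vals, J_vals, levels=np.logspace(-2, 3, 20))
plt.plot(theta[0][0], theta[1][0], 'rx', markersize=10)
plt.xlabel('theta_0')
plt.ylabel('theta_1')
plt.show()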
Author: JadePeng
Source: https://www.cnblogs.com/xiaoqi/p/6406476.html
License: This article is licensed under the Creative Commons Attribution-NonCommercial-ShareAlike 4.0 International license (reposting is welcome, but unless the author agrees otherwise, this notice must be retained and a link to the original placed prominently on the page).