深度学习线性回归 — fitting y = 0.1x + 0.2 with a single-neuron Keras model

# Generate synthetic training data for the linear relation y = 0.1*x + 0.2,
# perturbed by Gaussian noise, and visualize it before training.
# Fix: np and plt were used without being imported anywhere in the file.
import numpy as np
import matplotlib.pyplot as plt

# 100 x values drawn uniformly from [0, 1).
x_data = np.random.rand(100)
# Small Gaussian noise (std 0.01) so points scatter slightly around the line.
noise = np.random.normal(0, 0.01, x_data.shape)
# Ground-truth parameters the model should recover: slope 0.1, intercept 0.2.
y_data = x_data * 0.1 + 0.2 + noise

# Show the raw samples.
plt.scatter(x_data, y_data)
plt.show()

# Build the simplest possible network: one Dense layer with a single neuron,
# i.e. y = w*x + b — exactly the linear-regression hypothesis.
# Fix: Sequential and Dense were used without imports anywhere in the file.
# NOTE(review): assuming the classic standalone-Keras layout; if the project
# uses TensorFlow 2, change to `from tensorflow.keras...` — confirm.
from keras.models import Sequential
from keras.layers import Dense

model = Sequential()
model.add(Dense(units=1, input_dim=1))
# sgd: stochastic gradient descent; mse: mean squared error loss.
model.compile(optimizer='sgd', loss='mse')

# 3001 update steps; each train_on_batch call performs one gradient update
# over the entire 100-sample dataset (full-batch gradient descent).
for step in range(3001):
    cost = model.train_on_batch(x_data, y_data)
    # Report the current loss every 500 steps.
    if step % 500 == 0:
        print('cost:', cost)

# Inspect the fitted parameters; they should be close to w=0.1, b=0.2.
layer_weights = model.layers[0].get_weights()
W, b = layer_weights
print('w:', W, 'b:', b)

# Overlay the fitted line (model predictions) on the original scatter plot.
y_pred = model.predict(x_data)
plt.scatter(x_data, y_data)
plt.plot(x_data, y_pred, 'r', lw=2)
plt.show()

 

posted @ 2022-06-28 22:58  China Soft  阅读(31)  评论(0编辑  收藏  举报