PyTorch Deep Learning Practice, Lecture 5
import torch
x_data = torch.tensor([[1.0], [2.0], [3.0]])
y_data = torch.tensor([[0.], [0.], [1.]])
class LogisticRegressionModel(torch.nn.Module):
    def __init__(self):
        super(LogisticRegressionModel, self).__init__()
        self.linear = torch.nn.Linear(1, 1)

    def forward(self, x):
        y_pred = torch.sigmoid(self.linear(x))
        return y_pred

model = LogisticRegressionModel()

# By default the loss is averaged over the elements; with the old size_average=False it
# would be summed instead. size_average is deprecated, so reduction='mean'/'sum' is used
# here (see the sketch after this code block).
criterion = torch.nn.BCELoss(reduction='mean')
optimizer = torch.optim.SGD(model.parameters(), lr=0.01)
for epoch in range(100):
    y_pred = model(x_data)
    loss = criterion(y_pred, y_data)
    print(epoch, loss.item())

    loss.backward()
    optimizer.step()
    optimizer.zero_grad()
print('w= ', model.linear.weight.item())
print('b= ', model.linear.bias.item())
x_test = torch.tensor([[4.0]])
y_test = model(x_test)
print('y_pred= ', y_test.item())
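A minimal sketch of the averaging behaviour described in the comment above, assuming the current reduction argument of torch.nn.BCELoss (the replacement for the deprecated size_average). The probs and labels tensors here are made-up illustration values, not part of the lecture:

import torch
probs = torch.tensor([[0.9], [0.2], [0.7]])   # hypothetical predicted probabilities
labels = torch.tensor([[1.0], [0.0], [1.0]])  # hypothetical binary targets
mean_loss = torch.nn.BCELoss(reduction='mean')(probs, labels)  # average over elements (the default)
sum_loss = torch.nn.BCELoss(reduction='sum')(probs, labels)    # accumulate over elements
print(mean_loss.item(), sum_loss.item())  # the sum equals the mean times the number of elements (3 here)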
Step-by-step computation of the BCE loss, to help understand it.
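For reference, the quantity computed by hand below is the mean binary cross-entropy over N samples,

BCE = -\frac{1}{N}\sum_{n=1}^{N}\left[\, y_n \log \hat{y}_n + (1 - y_n)\log(1 - \hat{y}_n) \right], \qquad \hat{y}_n = \sigma(z_n),

where z_n are the raw scores in pred, \sigma is the sigmoid, and N = 3. The loop below simply evaluates the two branches of this formula depending on whether the target y_n is 0 or 1.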
import math
import torch
pred = torch.tensor([[-0.2], [0.2], [0.8]])
target = torch.tensor([[0.0], [0.0], [1.0]])
sigmoid = torch.nn.Sigmoid()
pred_s = sigmoid(pred)
print(pred_s)
result = 0
i = 0
for label in target:
    if label.item() == 0:
        # target is 0: the sample contributes log(1 - p)
        result += math.log(1 - pred_s[i].item())
    else:
        # target is 1: the sample contributes log(p)
        result += math.log(pred_s[i].item())
    i += 1
result /= 3  # average over the 3 samples
print('bce: ', -result)
loss = torch.nn.BCELoss()
print('BCEloss:', loss(pred_s, target).item())
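As a related sketch (an addition, not part of the original lecture): torch.nn.BCEWithLogitsLoss applies the sigmoid internally and then computes BCE, so it can be fed the raw scores in pred directly and is numerically more stable than a separate Sigmoid followed by BCELoss:

import torch
pred = torch.tensor([[-0.2], [0.2], [0.8]])   # the same raw scores (logits) as above
target = torch.tensor([[0.0], [0.0], [1.0]])
loss_logits = torch.nn.BCEWithLogitsLoss()    # sigmoid is applied inside the loss
print('BCEWithLogitsLoss:', loss_logits(pred, target).item())  # should match the BCELoss value above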