PyTorch Official Tutorial in Chinese (3): Neural Networks

1. Code

import torch
import torch.nn as nn
import torch.nn.functional as F
import numpy as np


class Net(nn.Module):

    def __init__(self):
        super(Net, self).__init__()
        # 1 input image channel, 6 output channels, 5x5 square convolution
        # kernel
        self.conv1 = nn.Conv2d(in_channels=1, out_channels=6, kernel_size=5)
        self.conv2 = nn.Conv2d(in_channels=6, out_channels=16, kernel_size=5)
        # an affine operation: y = Wx + b
        self.fc1 = nn.Linear(in_features=16 * 5 * 5, out_features=120)  # 5*5 from image dimension
        self.fc2 = nn.Linear(in_features=120, out_features=84)
        self.fc3 = nn.Linear(in_features=84, out_features=10)

    def forward(self, x):
        # Max pooling over a (2, 2) window
        x = F.max_pool2d(F.relu(self.conv1(x)), (2, 2))
        # If the size is a square, you can specify it with a single number
        x = F.max_pool2d(F.relu(self.conv2(x)), 2)
        x = torch.flatten(x, 1)  # flatten all dimensions except the batch dimension
        x = F.relu(self.fc1(x))
        x = F.relu(self.fc2(x))
        x = self.fc3(x)
        return x


net = Net()
print(f"net:{net}")


# You only need to define the forward function; autograd then automatically
# defines the backward function (which computes the gradients) for you. You can
# use any tensor operation in the forward function.
# The learnable parameters of the model are returned by net.parameters().
params = net.parameters()
print(f"params1:{params}")
params = list(net.parameters())
# print(f"params2:{params}")
print(len(params))
print(params[0].size())  # conv1's .weight
np_array = params[2].detach().numpy()  # conv2's .weight, converted to numpy
print(f"np_array:{np_array}")
# np.save('np_array_output', np_array)    # numpy appends the .npy suffix automatically
#
# # read it back
# b = np.load(file="np_array_output.npy")
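
# To see which tensor each list index above refers to, you can iterate
# named_parameters() (a small sketch, not part of the original tutorial):
# for name, p in net.named_parameters():
#     print(name, tuple(p.shape))
# -> conv1.weight (6, 1, 5, 5), conv1.bias (6,), conv2.weight (16, 6, 5, 5), ...
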
# Let's try a random 32x32 input. Note: the expected input size of this network
# (LeNet) is 32x32. To use this network on the MNIST dataset, resize the images
# in the dataset to 32x32.
input = torch.randn(1, 1, 32, 32)
out = net(input)
print(f"out:{out}")

# Zero the gradient buffers of all parameters, then backprop with random gradients:
net.zero_grad()
out.backward(torch.randn(1, 10))
print(f"out:{out}")


# The nn package contains several different loss functions. A simple one is
# nn.MSELoss, which computes the mean squared error between the output and the
# target.
output = net(input)
# print(f"output:{output}")
target = torch.randn(10)  # a dummy target, for example
print(f"target:{target}")
target = target.view(1, -1)  # make it the same shape as output
print(f"target:{target}")
criterion = nn.MSELoss()

loss = criterion(output, target)
print(f"loss:{loss}")

print(loss.grad_fn)  # MSELoss
print(loss.grad_fn.next_functions[0][0])  # Linear
print(loss.grad_fn.next_functions[0][0].next_functions[0][0])  # AccumulateGrad (fc3's bias accumulator in this run)
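
# A small helper (not part of the official tutorial) that walks the whole
# grad_fn graph instead of indexing one branch at a time:
def print_graph(fn, depth=0):
    if fn is None:
        return
    print("  " * depth + type(fn).__name__)
    for next_fn, _ in fn.next_functions:
        print_graph(next_fn, depth + 1)

# print_graph(loss.grad_fn)   # uncomment to dump the full backward graph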


"""
反向传播
"""
# To backpropagate the error, all we have to do is call loss.backward(). We need
# to clear the existing gradients first, though, or the new gradients will be
# accumulated into the existing ones.
# We now call loss.backward() and look at conv1's bias gradients before and after
# the backward pass.

net.zero_grad()     # zeroes the gradient buffers of all parameters

print('conv1.bias.grad before backward')
print(net.conv1.bias.grad)

loss.backward()

print('conv1.bias.grad after backward')
print(net.conv1.bias.grad)

"""更新权重"""

learning_rate = 0.01
for f in net.parameters():
    f.data.sub_(f.grad.data * learning_rate)
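
# The loop above is plain SGD (weight = weight - learning_rate * gradient),
# written with the legacy .data idiom. A modern equivalent (a sketch; left
# commented out so the weights are not updated twice):
# with torch.no_grad():
#     for f in net.parameters():
#         f -= f.grad * learning_rate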

# However, when using neural networks you want to use various different update
# rules, such as SGD, Nesterov-SGD, Adam, RMSProp, etc. To enable this, PyTorch
# provides a small package, torch.optim, that implements all of these methods.
# Using it is very simple:

import torch.optim as optim

# create your optimizer
optimizer = optim.SGD(net.parameters(), lr=0.01)
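# Swapping in a different update rule is a one-line change, e.g. (a sketch):
# optimizer = optim.Adam(net.parameters(), lr=1e-3)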


# in your training loop:
while True:
    optimizer.zero_grad()   # zero the gradient buffers on every iteration,
                            # otherwise gradients accumulate across iterations
    output = net(input)
    loss = criterion(output, target)
    loss.backward()
    optimizer.step()    # Does the update
    print(f"output:{output}")
    print(f"target:{target}")
    print(f"loss:{loss}")
    if loss < 0.1:
        break
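
# In real training you would loop over batches from a DataLoader rather than a
# single fixed (input, target) pair; the per-step order stays the same
# (dataloader here is hypothetical):
# for data, labels in dataloader:
#     optimizer.zero_grad()
#     loss = criterion(net(data), labels)
#     loss.backward()
#     optimizer.step()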


2. Example Run Output

F:\GoogLeNet-PyTorch-main\envs\Scripts\python.exe F:/my_pytorch/pytorch_official/3_神经网络.py
net:Net(
(conv1): Conv2d(1, 6, kernel_size=(5, 5), stride=(1, 1))
(conv2): Conv2d(6, 16, kernel_size=(5, 5), stride=(1, 1))
(fc1): Linear(in_features=400, out_features=120, bias=True)
(fc2): Linear(in_features=120, out_features=84, bias=True)
(fc3): Linear(in_features=84, out_features=10, bias=True)
)
params1:<generator object Module.parameters at 0x00000147E04E0120>
10
torch.Size([6, 1, 5, 5])
np_array:[[[[ 0.05833842 0.04485376 0.00520677 0.00677985 -0.07117731]
[-0.02676657 -0.02391099 -0.01395745 0.02284765 -0.0362525 ]
[-0.03594419 -0.00767535 -0.06659572 0.04796614 0.04664573]
[-0.03993097 -0.06797364 0.06735596 -0.00387863 0.06514972]
[ 0.03062629 -0.02491645 0.06595601 0.07559285 0.01755394]]

[[ 0.04095723 0.06774843 -0.05295545 -0.06646252 -0.05047309]
[-0.02611246 -0.07767513 0.06478816 -0.01631457 -0.05545238]
[-0.00708146 0.00629347 -0.00639763 -0.01611894 0.02365824]
[ 0.00470426 0.0262207 0.06925049 0.00251283 -0.05683168]
[-0.01021741 0.04823256 -0.06060085 0.01769973 -0.01658046]]

[[ 0.01585484 -0.00037831 0.01118787 -0.07673615 0.05527897]
[ 0.045431 -0.07097732 0.02998884 -0.04764434 0.00895855]
[-0.05223147 0.06077565 0.05085532 0.03022306 0.06449284]
[-0.04696747 -0.01772007 0.07271983 0.06550823 -0.05896288]
[-0.03379111 0.05127035 0.04712681 -0.00726073 -0.0621911 ]]

[[-0.03408932 0.02688809 -0.00764196 0.0480521 0.03772601]
[-0.05905587 0.06751327 0.06251156 -0.02213693 -0.02900776]
[ 0.05412185 -0.05162696 -0.03757326 0.00851532 -0.02591946]
[ 0.05634347 0.0071617 -0.0620188 0.07906039 0.05503839]
[ 0.0269302 0.038647 -0.01041483 -0.08010941 -0.00959574]]

[[ 0.01669768 -0.06706173 -0.03129892 -0.05798498 0.03610078]
[-0.01941751 -0.07328337 0.04740813 0.01469383 0.04847935]
[ 0.0722286 0.04690221 0.00131711 0.03214327 0.04583247]
[-0.02631403 0.07279135 0.05890578 0.06259954 -0.0148765 ]
[-0.07229041 -0.08139439 -0.07688598 -0.05773351 -0.05937801]]

[[-0.02154902 0.02131348 -0.05270907 0.01727174 0.01220027]
[ 0.07261547 -0.03791391 0.03443334 -0.01331498 0.04792809]
[ 0.03722789 -0.04128541 -0.03221434 -0.0093726 -0.01819651]
[-0.05102923 -0.03218967 -0.04345709 0.02760191 -0.05820008]
[-0.06754547 -0.03886261 -0.00621131 0.05015291 -0.05865147]]]

[[[-0.03074189 -0.07460986 -0.0802711 0.03176182 0.05875745]
[-0.06182509 -0.06738842 0.08111344 -0.06196103 -0.01849189]
[-0.01523799 -0.00772058 0.05031644 0.00209747 -0.03261258]
[-0.02087804 0.03855517 0.03377717 -0.0313202 -0.00724869]
[ 0.00522665 -0.00669205 -0.07206075 0.00076862 -0.0407453 ]]

[[-0.05580919 0.01964431 -0.04035901 -0.05358206 0.02775411]
[-0.06753397 -0.02956909 0.0533936 -0.06081207 -0.06346475]
[ 0.03493544 -0.0771985 0.05778378 -0.05585715 -0.02869531]
[-0.04010252 -0.00124037 0.03622927 -0.00024347 0.02376787]
[ 0.07890831 -0.04377113 -0.05774375 -0.04622785 0.08035801]]

[[-0.04841341 -0.04508851 0.01072896 0.02275266 0.0144262 ]
[-0.00420167 -0.01475674 0.06976983 0.03806551 -0.02597187]
[ 0.04965965 -0.01824944 0.02238213 -0.06253725 0.07696559]
[-0.059893 0.07614732 -0.04232056 -0.02802171 0.05441526]
[-0.08042884 0.06504701 -0.07798757 -0.00081861 -0.04262388]]

[[-0.05175752 0.0078494 0.07766941 0.05227689 -0.03009788]
[ 0.0003017 -0.03932573 0.01221286 -0.03045877 0.0037788 ]
[ 0.06033771 -0.03399793 0.02244548 -0.05072716 -0.05753922]
[-0.07897455 -0.03155319 0.02241135 -0.00945319 0.06275722]
[-0.06112634 -0.01908111 -0.06914476 -0.04425737 0.0339698 ]]

[[-0.0531757 -0.05513006 0.06721948 0.00721541 -0.07008936]
[ 0.05430038 0.07116559 0.00983638 -0.07947959 -0.01553421]
[-0.04617149 -0.00193577 0.06378368 0.06161514 0.02346542]
[ 0.06395271 0.00238354 -0.01265932 -0.00621088 -0.05415385]
[-0.05035484 0.04071591 0.0278881 0.0084021 0.00102012]]

[[-0.01540246 0.07410905 -0.01792446 0.02711874 0.03749741]
[ 0.06921411 0.0013111 -0.05237426 -0.05869495 -0.03518217]
[-0.0217754 0.00502159 -0.01404719 0.01342138 0.07571624]
[-0.04417125 -0.08017369 -0.00831387 0.00183293 0.0322882 ]
[-0.04022689 0.07742371 0.05578233 -0.06583556 -0.03075196]]]

[[[-0.00317992 -0.02034127 -0.06302359 0.01535182 -0.04060633]
[-0.07094187 -0.03030773 -0.04792562 -0.04841472 -0.00743858]
[-0.01479103 0.05510686 0.00684797 -0.00178859 -0.04464938]
[ 0.07959837 0.02316806 0.06003055 -0.02301068 0.02593046]
[-0.07063228 -0.07965354 -0.04028683 0.05704233 -0.00941087]]

[[-0.06676741 0.04467231 -0.05851445 -0.0180012 -0.06827388]
[ 0.00523023 0.04093356 -0.05286075 -0.03203585 -0.05347697]
[ 0.05467148 -0.07192026 0.01709792 0.02601182 0.03783248]
[ 0.04983565 -0.02380291 -0.02147909 -0.02854838 0.07092455]
[-0.06143802 -0.0698987 -0.00733104 -0.06272924 -0.04150885]]

[[-0.03064936 -0.01569932 0.01404302 0.07953554 -0.03030492]
[ 0.07459897 0.03675494 0.07933313 0.05165929 0.07041189]
[ 0.04488015 0.02981193 0.01462314 -0.00877947 -0.00168562]
[-0.04227184 -0.03814499 0.03547401 0.07109272 -0.02311806]
[ 0.06961077 -0.01487871 -0.03243158 0.04845127 0.071219 ]]

[[-0.03738621 0.05235545 0.03229122 -0.07465115 -0.05356836]
[-0.04913895 0.06060667 0.07667653 0.00172412 0.04799414]
[ 0.07097912 -0.05675437 -0.03708441 0.0385374 -0.06731688]
[ 0.0173446 -0.03989824 -0.06311176 0.04122189 0.05170029]
[-0.03431544 -0.05018158 0.07851332 0.05232517 -0.01361614]]

[[ 0.00353462 0.07480332 -0.06395368 -0.07758629 -0.05212572]
[-0.00667068 -0.03340125 0.02739219 -0.05303094 0.06041788]
[-0.03128929 0.01523136 -0.00489897 0.01753449 0.00705811]
[-0.00769962 0.05543272 -0.05749521 -0.04674577 -0.04866592]
[-0.05041301 -0.04594979 -0.0548308 0.05832472 0.01692708]]

[[ 0.07650769 -0.04964529 -0.02953799 -0.02547638 -0.05628888]
[-0.04610408 0.01417322 -0.05215215 0.06269309 -0.0015702 ]
[-0.0395941 -0.01128414 -0.07124647 0.02995747 0.03687772]
[ 0.06477076 0.0317946 -0.00153338 -0.07317141 0.0371979 ]
[ 0.03419238 0.07326029 0.04236545 -0.02126159 0.04811279]]]

...

[[[-0.0655131 0.07852668 -0.04240344 0.06906433 0.01607223]
[-0.05888374 0.00321472 0.00100838 -0.03317413 -0.00739475]
[-0.07778426 -0.00530422 -0.01598227 0.02826665 -0.03232602]
[-0.00186857 -0.07793272 -0.00341837 0.01049529 0.02816354]
[-0.07880098 0.02202167 0.0781143 0.03103034 0.01340881]]

[[ 0.05619016 0.01503343 0.04912129 -0.04526102 -0.03035641]
[-0.05252811 -0.06987026 0.02746947 0.07332651 -0.06545067]
[-0.00230126 -0.0740549 0.05594727 -0.01035311 0.05579515]
[-0.00405309 -0.02152161 0.03787332 -0.0109678 -0.03708292]
[ 0.0084689 0.03162884 0.03414609 0.04805352 0.05364895]]

[[ 0.01359501 0.02463956 0.01012418 0.03655083 0.0222628 ]
[-0.0507124 -0.07685303 -0.06896654 0.0746416 0.04367884]
[-0.02994882 0.07521465 0.08086723 0.03649962 0.05446836]
[ 0.06396289 -0.02586434 -0.00633202 0.01417354 -0.04355336]
[-0.06968849 -0.06008635 -0.0732436 0.05013072 0.05158842]]

[[ 0.00067848 0.02180934 0.03406563 0.02308601 -0.07502333]
[ 0.081597 -0.02402767 0.03334924 -0.04241988 0.0589997 ]
[ 0.01701637 -0.0265584 -0.07648052 -0.07301046 0.07839625]
[-0.07357181 0.07796946 0.02067035 0.05344361 -0.04334532]
[-0.03796834 0.03371929 0.08091754 0.07244951 0.07333718]]

[[ 0.03312905 0.04631685 0.07602006 0.07184899 -0.01132526]
[-0.07943547 -0.08058068 -0.03435636 -0.02629746 0.04027757]
[-0.07854019 -0.05695007 -0.02697153 0.04107029 -0.04464967]
[-0.01679472 0.02757148 0.01401249 -0.0648068 -0.02566296]
[-0.04888098 -0.06637972 -0.03729526 -0.07297038 -0.03517523]]

[[-0.02215242 -0.03986989 0.07879508 -0.05586541 0.02168757]
[ 0.06770049 0.04623573 -0.06375828 0.00636046 -0.06663159]
[ 0.06506264 -0.07058198 0.05019318 -0.0537327 0.02853857]
[-0.06931935 -0.00736772 0.05019541 0.07977463 -0.03636467]
[ 0.0127478 0.01676545 -0.07071766 -0.0244102 -0.06148995]]]

[[[ 0.04443924 0.06311543 -0.02854291 0.00017465 0.00845684]
[-0.01704324 0.07912111 0.02264932 0.00025532 -0.07249924]
[-0.01563711 0.0395488 -0.07304779 -0.01100056 0.02845428]
[ 0.00756434 0.06446849 0.04963434 -0.01109138 -0.01975176]
[-0.0133607 0.0507502 0.00808119 -0.05245356 0.07005191]]

[[ 0.06872346 0.02969486 0.0364247 0.02940438 -0.026554 ]
[-0.06731498 -0.03215693 0.04330937 0.02455214 0.03149453]
[ 0.00687312 0.04476212 -0.01402772 0.05495906 -0.07920688]
[-0.0336393 -0.01640602 0.02272104 0.03125171 0.04909472]
[-0.00802594 -0.02794876 -0.0114406 -0.00631234 0.07899223]]

[[-0.01684535 0.04192922 -0.05109882 -0.01641154 0.05281316]
[ 0.07100677 0.00344271 -0.00722076 -0.05655727 0.03308719]
[ 0.0655826 0.06034099 -0.04414678 0.03655959 0.08043131]
[-0.06130793 -0.0016823 -0.04963722 0.03042666 -0.03098541]
[ 0.05717367 0.03272972 0.0044715 0.02426715 -0.06241238]]

[[ 0.06030948 -0.01985756 0.07128723 0.01773231 -0.00891943]
[-0.05853341 0.02010246 -0.04701216 0.07921188 -0.0194376 ]
[ 0.06345344 -0.05349424 -0.01040523 -0.057973 0.00486866]
[ 0.01998056 -0.04676125 0.04054301 0.00843567 -0.06067165]
[ 0.02260017 0.00207823 0.0716978 -0.02497081 -0.0742869 ]]

[[-0.02969251 -0.03369959 -0.06612442 0.00207444 -0.03553671]
[-0.06438911 0.0738586 0.04504998 -0.03923283 0.02452799]
[ 0.01083156 -0.0598964 -0.01518132 0.07731852 0.00213366]
[ 0.02163759 0.07298803 -0.01625882 0.02393535 -0.05952712]
[ 0.03905367 -0.00196832 -0.05269146 -0.06938343 0.0196095 ]]

[[-0.05562783 -0.05576751 0.06572646 -0.06484099 -0.05308777]
[-0.023774 0.04456843 0.06507748 0.04767425 -0.01212334]
[ 0.05771488 -0.02943803 0.06950116 0.03998956 -0.07472735]
[ 0.03423354 -0.08053417 -0.06002307 -0.01023322 -0.07146225]
[ 0.03160604 -0.00993215 -0.00121893 -0.07966493 0.00723492]]]

[[[-0.00077646 0.01014007 -0.00270193 -0.07385581 -0.00418536]
[ 0.02191152 -0.02171976 -0.01028143 -0.06348906 0.07043551]
[-0.05462816 0.05635265 -0.07138167 -0.0323237 0.03681554]
[ 0.0361236 -0.04377059 -0.0433182 0.06900978 -0.08106091]
[-0.0790149 -0.02011753 -0.06605088 0.03279711 -0.00584047]]

[[-0.07118154 -0.0124172 0.06724495 -0.06903324 0.01624159]
[-0.03139743 0.0479362 -0.06939305 -0.04780738 -0.03550393]
[-0.04425024 -0.0740224 -0.01173043 -0.00016312 -0.02849669]
[-0.05269217 -0.06032303 0.06319709 -0.02134845 -0.0630547 ]
[-0.05693561 0.07129931 -0.05576558 0.04645866 -0.04025176]]

[[-0.06450045 -0.03569479 0.08150902 0.0083897 -0.07000449]
[ 0.0813199 0.05583113 -0.04669885 -0.00388476 0.05428758]
[-0.01612283 0.01902004 -0.01371384 -0.04953157 -0.03532239]
[ 0.06965601 0.03603385 0.04275487 0.0239929 -0.06217102]
[ 0.03160857 -0.02488581 -0.01815676 -0.04912455 0.0262606 ]]

[[-0.06117489 0.03352536 0.01697867 0.02230772 0.07040846]
[-0.0011677 0.04850884 -0.04176005 0.06941895 -0.05185703]
[-0.04195692 -0.04649201 0.06250165 0.03948636 0.01212509]
[ 0.00681149 -0.07153529 0.03909827 -0.06630165 -0.05680084]
[ 0.04034705 0.01977783 -0.05019608 0.07497042 -0.06153087]]

[[-0.02465079 0.03713928 0.01436234 0.02065239 0.06850974]
[-0.01576759 -0.02022743 -0.04373847 0.02402123 -0.01784477]
[ 0.06616554 -0.0377811 -0.0765747 -0.05791226 0.03653907]
[ 0.04382497 0.00011224 0.0334944 0.03509398 -0.03091235]
[ 0.04201264 -0.05402004 -0.01831974 0.01556543 -0.06389555]]

[[ 0.07574269 -0.07560097 0.00931066 -0.06254017 -0.02528742]
[-0.06418625 -0.00590674 0.04705851 -0.05938897 0.02706973]
[ 0.02238147 -0.06462154 -0.04428705 0.02170857 0.05969504]
[-0.04632733 -0.037647 0.01975768 -0.05393649 -0.04019649]
[-0.07993826 -0.01877686 0.07604019 0.07256329 0.01503196]]]]
out:tensor([[ 0.1326, -0.0513, -0.0150, 0.0507, 0.0931, 0.0535, 0.0775, -0.1664,
-0.0503, 0.1417]], grad_fn=<AddmmBackward0>)
out:tensor([[ 0.1326, -0.0513, -0.0150, 0.0507, 0.0931, 0.0535, 0.0775, -0.1664,
-0.0503, 0.1417]], grad_fn=<AddmmBackward0>)
target:tensor([-0.8479, -0.0733, 0.5774, -0.8280, -0.3926, 0.3186, -0.2840, 0.6035,
0.5925, -0.7426])
target:tensor([[-0.8479, -0.0733, 0.5774, -0.8280, -0.3926, 0.3186, -0.2840, 0.6035,
0.5925, -0.7426]])
loss:0.43096691370010376
<MseLossBackward0 object at 0x00000147DF02E6B0>
<AddmmBackward0 object at 0x00000147DF02E710>
<AccumulateGrad object at 0x00000147DF02E710>
conv1.bias.grad before backward
tensor([0., 0., 0., 0., 0., 0.])
conv1.bias.grad after backward
tensor([ 0.0111, 0.0082, 0.0087, 0.0154, 0.0031, -0.0037])
output:tensor([[ 0.1197, -0.0454, -0.0117, 0.0335, 0.0827, 0.0563, 0.0759, -0.1597,
-0.0446, 0.1278]], grad_fn=<AddmmBackward0>)
loss:0.43096691370010376
target:tensor([[-0.8479, -0.0733, 0.5774, -0.8280, -0.3926, 0.3186, -0.2840, 0.6035,
0.5925, -0.7426]])
loss:0.41965851187705994
output:tensor([[ 0.1074, -0.0394, -0.0095, 0.0167, 0.0728, 0.0591, 0.0746, -0.1541,
-0.0384, 0.1144]], grad_fn=<AddmmBackward0>)
loss:0.41965851187705994
target:tensor([[-0.8479, -0.0733, 0.5774, -0.8280, -0.3926, 0.3186, -0.2840, 0.6035,
0.5925, -0.7426]])
loss:0.4090551733970642
output:tensor([[ 0.0836, -0.0285, -0.0055, -0.0151, 0.0536, 0.0642, 0.0723, -0.1435,
-0.0265, 0.0884]], grad_fn=<AddmmBackward0>)
loss:0.4090551733970642
target:tensor([[-0.8479, -0.0733, 0.5774, -0.8280, -0.3926, 0.3186, -0.2840, 0.6035,
0.5925, -0.7426]])
loss:0.38927096128463745
output:tensor([[ 0.0510, -0.0148, 0.0005, -0.0568, 0.0266, 0.0712, 0.0670, -0.1299,
-0.0107, 0.0541]], grad_fn=<AddmmBackward0>)
loss:0.38927096128463745
target:tensor([[-0.8479, -0.0733, 0.5774, -0.8280, -0.3926, 0.3186, -0.2840, 0.6035,
0.5925, -0.7426]])
loss:0.3635726571083069
output:tensor([[ 0.0145, -0.0039, 0.0103, -0.0953, -0.0045, 0.0795, 0.0600, -0.1159,
0.0054, 0.0211]], grad_fn=<AddmmBackward0>)
loss:0.3635726571083069
target:tensor([[-0.8479, -0.0733, 0.5774, -0.8280, -0.3926, 0.3186, -0.2840, 0.6035,
0.5925, -0.7426]])
loss:0.3378603160381317
output:tensor([[-0.0138, 0.0062, 0.0287, -0.1267, -0.0298, 0.0950, 0.0548, -0.1017,
0.0213, -0.0100]], grad_fn=<AddmmBackward0>)
loss:0.3378603160381317
target:tensor([[-0.8479, -0.0733, 0.5774, -0.8280, -0.3926, 0.3186, -0.2840, 0.6035,
0.5925, -0.7426]])
loss:0.3151739239692688
output:tensor([[-0.0400, 0.0119, 0.0457, -0.1585, -0.0525, 0.1175, 0.0498, -0.0862,
0.0421, -0.0452]], grad_fn=<AddmmBackward0>)
loss:0.3151739239692688
target:tensor([[-0.8479, -0.0733, 0.5774, -0.8280, -0.3926, 0.3186, -0.2840, 0.6035,
0.5925, -0.7426]])
loss:0.29235631227493286
output:tensor([[-0.0733, 0.0153, 0.0626, -0.1960, -0.0780, 0.1454, 0.0433, -0.0680,
0.0690, -0.0816]], grad_fn=<AddmmBackward0>)
loss:0.29235631227493286
target:tensor([[-0.8479, -0.0733, 0.5774, -0.8280, -0.3926, 0.3186, -0.2840, 0.6035,
0.5925, -0.7426]])
loss:0.2670333683490753
output:tensor([[-0.1153, 0.0148, 0.0786, -0.2362, -0.1110, 0.1713, 0.0358, -0.0441,
0.1047, -0.1233]], grad_fn=<AddmmBackward0>)
loss:0.2670333683490753
target:tensor([[-0.8479, -0.0733, 0.5774, -0.8280, -0.3926, 0.3186, -0.2840, 0.6035,
0.5925, -0.7426]])
loss:0.23876813054084778
output:tensor([[-0.1687, 0.0143, 0.1004, -0.2846, -0.1495, 0.2001, 0.0254, -0.0146,
0.1497, -0.1750]], grad_fn=<AddmmBackward0>)
loss:0.23876813054084778
target:tensor([[-0.8479, -0.0733, 0.5774, -0.8280, -0.3926, 0.3186, -0.2840, 0.6035,
0.5925, -0.7426]])
loss:0.20608952641487122
output:tensor([[-0.2360, 0.0119, 0.1276, -0.3431, -0.1925, 0.2307, 0.0103, 0.0260,
0.2071, -0.2403]], grad_fn=<AddmmBackward0>)
loss:0.20608952641487122
target:tensor([[-0.8479, -0.0733, 0.5774, -0.8280, -0.3926, 0.3186, -0.2840, 0.6035,
0.5925, -0.7426]])
loss:0.16879495978355408
output:tensor([[-0.3219, 0.0064, 0.1636, -0.4161, -0.2445, 0.2688, -0.0076, 0.0819,
0.2797, -0.3259]], grad_fn=<AddmmBackward0>)
loss:0.16879495978355408
target:tensor([[-0.8479, -0.0733, 0.5774, -0.8280, -0.3926, 0.3186, -0.2840, 0.6035,
0.5925, -0.7426]])
loss:0.12683963775634766
output:tensor([[-0.4347, -0.0010, 0.2130, -0.5109, -0.3097, 0.3159, -0.0322, 0.1554,
0.3744, -0.4384]], grad_fn=<AddmmBackward0>)
loss:0.12683963775634766
target:tensor([[-0.8479, -0.0733, 0.5774, -0.8280, -0.3926, 0.3186, -0.2840, 0.6035,
0.5925, -0.7426]])
loss:0.08205084502696991

Process finished with exit code 0

posted @ 2022-11-05 18:05  JaxonYe