"""Optimize a tensor toward a fixed target with a PyTorch optimizer (Adam).

Minimizes the Euclidean (L2) distance between a learnable tensor ``x``
(shape (1, 5), initialized to ones) and a fixed target ``x1`` for 50
gradient steps, printing the loss and the current value of ``x`` each step.
"""
import torch
from torch import optim

# Learnable parameter; requires_grad=True so Adam can update it.
x = torch.ones(1, 5, requires_grad=True)
# Pass parameters as a list (the documented iterable form), not a set.
optimizer = optim.Adam([x], lr=0.004)

# Fixed target — loop-invariant, so build it once outside the loop.
x1 = torch.tensor([1.0, 2.0, 3.0, 4.0, 5.0])

for i in range(50):
    optimizer.zero_grad()
    # Euclidean distance ||x1 - x||_2 (x1 broadcasts over x's leading dim).
    # NOTE(review): gradient of sqrt is undefined at exact convergence
    # (distance 0); unreachable here with lr=0.004 over 50 steps.
    l = torch.sqrt(torch.sum((x1 - x) ** 2))
    l.backward()
    optimizer.step()
    # Original's flat indentation is ambiguous; printing per-iteration
    # matches the usual training-loop idiom this snippet follows.
    print(l, x)