torch:LayerNorm
import torch.nn as nn
import torch

# Usage examples from the nn.LayerNorm docs, kept commented out:
# input = torch.randn(20, 5, 10, 10)
# # With Learnable Parameters
# m = nn.LayerNorm(input.size()[1:])
# # Without Learnable Parameters
# m = nn.LayerNorm(input.size()[1:], elementwise_affine=False)
# # Normalize over last two dimensions
# m = nn.LayerNorm([10, 10])
# # Normalize over last dimension of size 10
# m = nn.LayerNorm(10)
# # Activating the module
# output = m(input)

# input = torch.randn(10)
input = torch.tensor([ 0.2618,  0.2526,  0.3785,  0.5963, -0.0758,
                      -0.9603, -0.5442,  0.2270, -1.6566,  2.0631])
print(input.size())

# Normalize over the full shape of the 1-D input (all 10 elements)
m = nn.LayerNorm(input.size())
# m = nn.LayerNorm(input.size(), elementwise_affine=False)
output = m(input)

print(input)
print(output)
E:\新脚本主文件夹\训练测试项目\venv3\Scripts\python.exe E:/新脚本主文件夹/训练测试项目/test_torch/LayerNorm.py
torch.Size([10])
tensor([ 0.2618,  0.2526,  0.3785,  0.5963, -0.0758, -0.9603, -0.5442,  0.2270,
        -1.6566,  2.0631])
tensor([ 0.2203,  0.2105,  0.3441,  0.5753, -0.1380, -1.0767, -0.6351,  0.1834,
        -1.8157,  2.1320], grad_fn=<NativeLayerNormBackward>)

Process finished with exit code 0
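The printed output can be reproduced by hand: LayerNorm subtracts the mean and divides by the standard deviation computed over the normalized shape, which here is the whole 10-element vector, and then applies the learnable affine parameters. Since those are initialized to gamma = 1 and beta = 0, they leave the result unchanged right after construction. A minimal sketch of that check, assuming the default eps of 1e-5:

import torch

x = torch.tensor([ 0.2618,  0.2526,  0.3785,  0.5963, -0.0758,
                  -0.9603, -0.5442,  0.2270, -1.6566,  2.0631])

mean = x.mean()
var = x.var(unbiased=False)   # biased variance (divide by N), as LayerNorm uses
eps = 1e-5                    # default eps of nn.LayerNorm

manual = (x - mean) / torch.sqrt(var + eps)
print(manual)                 # matches the module's output above (up to printing precision)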