Softmax classification on CIFAR-10
After converting CIFAR-10 to a single channel and plugging it into the softmax classifier from the earlier sections, the accuracy comes out around 40%. I could cry...
In [1]:
%matplotlib inline
from mxnet.gluon import data as gdata
from mxnet import autograd, nd
import gluonbook as gb
import sys
In [2]:
cifar_train = gdata.vision.CIFAR10(train=True)
cifar_test = gdata.vision.CIFAR10(train=False)
In [3]:
(len(cifar_train), len(cifar_test))
Out[3]:
(50000, 10000)
In [4]:
feature, label = cifar_train[0]
In [5]:
feature.shape, feature.dtype
Out[5]:
((32, 32, 3), numpy.uint8)
In [6]:
label, type(label), label.dtype
Out[6]:
(6, numpy.int32, dtype('int32'))
In [7]:
batch_size = 256
transformer = gdata.vision.transforms.ToTensor()
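ToTensor converts each image from the (height, width, channel) uint8 layout in [0, 255] to the (channel, height, width) float32 layout in [0.0, 1.0]. A quick sanity check on the first training image (a minimal sketch; the expected values are what gluon's ToTensor should produce):

transformer(feature).shape, transformer(feature).dtype
# expected: ((3, 32, 32), numpy.float32)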
In [8]:
if sys.platform.startswith('win'):
    num_workers = 0  # 0 means no extra worker processes for data loading.
else:
    num_workers = 4
train_iter = gdata.DataLoader(cifar_train.transform_first(transformer),
                              batch_size, shuffle=True,
                              num_workers=num_workers)
test_iter = gdata.DataLoader(cifar_test.transform_first(transformer),
                             batch_size, shuffle=False,
                             num_workers=num_workers)
In [9]:
len(train_iter)
Out[9]:
196
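196 is just ceil(50000 / 256): 195 full batches of 256 examples cover 49,920 images, and one final partial batch holds the remaining 80.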
In [10]:
for X, y in train_iter:
    print(X)
    break
[[[[0.3137255  0.3019608  0.34509805 ... 0.2901961  0.3019608  0.34901962]
   [0.36078432 0.35686275 0.32941177 ... 0.23137255 0.2509804  0.3764706 ]
   ...
   [0.5137255  0.46666667 0.45882353 ... 0.5137255  0.49803922 0.5137255 ]]]
 ...
 [[[0.8745098  0.8784314  0.8784314  ... 0.8235294  0.8        0.7490196 ]
   ...
   [0.21960784 0.2        0.1882353  ... 0.27058825 0.23921569 0.2509804 ]]]]
<NDArray 256x3x32x32 @cpu(0)>
In [11]:
def wrapped_iter(data_iter):
    for X, y in data_iter:
        X = X[:, :1, :, :]  # keep only the first (red) channel
        yield X, y

for X, y in wrapped_iter(train_iter):
    print(X)
    print(y)
    break
for X, y in wrapped_iter(test_iter):
    print(X)
    print(y)
    break
[[[[0.40784314 0.3882353  0.40392157 ... 0.2509804  0.23921569 0.22745098]
   ...
   [0.73333335 0.7254902  0.7372549  ... 0.6392157  0.6431373  0.6313726 ]]]
 ...
 [[[0.04313726 0.07843138 0.14117648 ... 0.31764707 0.3254902  0.25882354]
   ...
   [0.28235295 0.24705882 0.21960784 ... 0.3254902  0.29411766 0.26666668]]]]
<NDArray 256x1x32x32 @cpu(0)>
[2 9 4 7 3 1 3 5 9 6 ... 9 3 1 6 6 6]
<NDArray 256 @cpu(0)>
[[[[0.61960787 0.62352943 0.64705884 ... 0.5372549  0.49411765 0.45490196]
   ...
   [0.21176471 0.21960784 0.1764706  ... 0.09411765 0.13333334 0.08235294]]]
 ...
 [[[0.8        0.8039216  0.8156863  ... 0.8352941  0.84705883 0.84705883]
   ...
   [0.4862745  0.49019608 0.49803922 ... 0.4509804  0.49803922 0.5058824 ]]]]
<NDArray 256x1x32x32 @cpu(0)>
[3 8 8 0 6 6 1 6 3 1 ... 5 3 9 9 4 0]
<NDArray 256 @cpu(0)>
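Note that wrapped_iter keeps only the first (red) channel rather than computing a true grayscale image. A weighted luminance conversion would be more faithful; a minimal sketch (the helper name is mine, the coefficients are the standard ITU-R BT.601 weights):

def grayscale_iter(data_iter):
    # Y = 0.299 R + 0.587 G + 0.114 B, keeping the channel axis.
    for X, y in data_iter:
        yield 0.299 * X[:, 0:1] + 0.587 * X[:, 1:2] + 0.114 * X[:, 2:3], y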
In [12]:
from mxnet import gluon, init
from mxnet.gluon import loss as gloss, nn
In [13]:
net = nn.Sequential()
net.add(nn.Dense(10))
net.initialize(init.Normal(sigma=0.01))
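Dense(10) flattens whatever batch it receives, so on single-channel input the model is plain softmax regression over 1 × 32 × 32 = 1024 raw pixels (a 1024 × 10 weight matrix plus 10 biases); on the full RGB batches it sees 3 × 32 × 32 = 3072 inputs instead. Gluon defers allocating the weights until the first forward pass, which is why no input size appears here.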
In [14]:
loss = gloss.SoftmaxCrossEntropyLoss()
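SoftmaxCrossEntropyLoss fuses the softmax with the cross-entropy into one numerically stable step: for logits o and label y it returns -log(softmax(o)[y]). A hand-computed equivalent (a minimal sketch with made-up logits):

o = nd.array([[10.0, 1.0, -1.0]])
y = nd.array([0])
loss(o, y)                       # fused, numerically stable
-nd.pick(nd.log_softmax(o), y)   # same value, computed by hand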
In [25]:
trainer = gluon.Trainer(net.collect_params(), 'sgd', {'learning_rate': 0.0001})
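A learning rate of 0.0001 makes each update tiny, which matches the essentially flat training curve below (the non-sequential cell numbers suggest the net had already been trained in earlier runs, so this run starts near its plateau). For reference, the plain 'sgd' rule that trainer.step(batch_size) applies is roughly the following (a sketch, ignoring weight decay and momentum):

def sgd_step(params, lr, batch_size):
    # Gradients are rescaled by 1/batch_size, then subtracted in place.
    for param in params.values():
        param.data()[:] = param.data() - lr * param.grad() / batch_size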
In [26]:
num_epochs = 100
gb.train_ch3(net, train_iter, test_iter, loss, num_epochs, batch_size, None,
             None, trainer)
epoch 1, loss 1.6195, train acc 0.457, test acc 0.410
epoch 2, loss 1.6196, train acc 0.457, test acc 0.411
epoch 3, loss 1.6181, train acc 0.457, test acc 0.411
epoch 4, loss 1.6183, train acc 0.457, test acc 0.411
epoch 5, loss 1.6191, train acc 0.457, test acc 0.410
... (epochs 6-95 elided: loss drifts from about 1.620 down to 1.615; train acc stays at 0.457-0.458, test acc at 0.409-0.411) ...
epoch 96, loss 1.6152, train acc 0.458, test acc 0.410
epoch 97, loss 1.6152, train acc 0.457, test acc 0.410
epoch 98, loss 1.6151, train acc 0.457, test acc 0.410
epoch 99, loss 1.6150, train acc 0.457, test acc 0.409
epoch 100, loss 1.6158, train acc 0.457, test acc 0.410
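Two things stand out. First, gb.train_ch3 was called with the original train_iter and test_iter, so this run actually trained on all three RGB channels (3072 inputs), not on the single-channel batches from wrapped_iter; a generator like wrapped_iter(train_iter) is exhausted after one pass, so it cannot be reused across epochs. A re-iterable wrapper fixes that (a sketch; the class name is mine, and a freshly initialized net would be needed, since the one above is already bound to 3072-dimensional inputs):

class SingleChannelLoader:
    # Unlike a generator, an object with __iter__ yields a fresh pass
    # each time it is iterated, which a multi-epoch loop requires.
    def __init__(self, data_iter):
        self.data_iter = data_iter
    def __iter__(self):
        for X, y in self.data_iter:
            yield X[:, :1, :, :], y

Second, either way a linear softmax classifier on raw CIFAR-10 pixels plateaus around 41% test accuracy, which matches the note at the top: the classes are simply not linearly separable in pixel space.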
In [17]:
gb.train_ch3??