test4

import torch
import torch.nn as nn
import torch.nn.functional as F

class ResNet5(nn.Module):  # ResNet5(BasicBlock, [1, 1, 1, 1], 51)
    def __init__(self, block, num_blocks, num_classes=10, bn=False):
        super(ResNet5, self).__init__()
        self.bn = bn
        self.in_planes = 8
        self.conv1 = nn.Conv2d(1, 8, kernel_size=3,
                               stride=1, padding=1, bias=False)
        if bn:
            self.bn1 = nn.BatchNorm2d(8)
        # note: the block argument is only used for block.expansion below;
        # the layers hard-code BasicBlock / BasicBlockGroup
        self.layer1 = self._make_layer(BasicBlock, 16, num_blocks[0], stride=2)
        self.layer2 = self._make_layer(BasicBlock, 32, num_blocks[1], stride=2)
        self.layer3 = self._make_layer(BasicBlockGroup, 32, num_blocks[2], stride=1)
        self.layer4 = self._make_layer(BasicBlockGroup, 64, num_blocks[3], stride=2)
        # self.linear1 = nn.Linear(512 * block.expansion, 64)
        self.linear1 = nn.Linear(64 * block.expansion, 64)
        self.linear2 = nn.Linear(64, num_classes)
        self.dropout = torch.nn.Dropout(0.5)
        # conv0 reduces the 3-channel input to 1 channel and halves the spatial size
        self.conv0 = nn.Conv2d(3, 1, kernel_size=2,
                               stride=2, padding=0, bias=False)

    def _make_layer(self, block, planes, num_blocks, stride):
        # only the first block in a layer downsamples; the rest use stride 1
        strides = [stride] + [1] * (num_blocks - 1)
        layers = []
        for stride in strides:
            layers.append(block(self.in_planes, planes, stride))
            self.in_planes = planes * block.expansion
        return nn.Sequential(*layers)

    def forward(self, x):
        # conv0 first: 3 channels -> 1 channel, spatial size halved
        if self.bn:
            out = F.relu(self.bn1(self.conv1(self.conv0(x))))
        else:
            out = F.relu(self.conv1(self.conv0(x)))

        # variant without conv0 (expects 1-channel input):
        # if self.bn:
        #     out = F.relu(self.bn1(self.conv1(x)))
        # else:
        #     out = F.relu(self.conv1(x))
        out = self.layer1(out)
        out = self.layer2(out)
        out = self.layer3(out)
        out = self.layer4(out)
        out = F.avg_pool2d(out, 4)
        out = out.view(out.size(0), -1)
        out = self.linear1(out)
        out = F.relu(out)
        # out = self.dropout(out)
        out = self.linear2(out)
        return out
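
BasicBlock and BasicBlockGroup are not defined in this post. For reference, below is a minimal sketch of a standard ResNet BasicBlock (torchvision-style, with expansion = 1), which matches the (in_planes, planes, stride) constructor and expansion attribute that _make_layer relies on. BasicBlockGroup is presumably a grouped-convolution variant with the same interface; it is not sketched here.

# Minimal sketch of a standard ResNet BasicBlock; an assumption,
# since the original definition is not included in this post.
class BasicBlock(nn.Module):
    expansion = 1

    def __init__(self, in_planes, planes, stride=1):
        super(BasicBlock, self).__init__()
        self.conv1 = nn.Conv2d(in_planes, planes, kernel_size=3,
                               stride=stride, padding=1, bias=False)
        self.bn1 = nn.BatchNorm2d(planes)
        self.conv2 = nn.Conv2d(planes, planes, kernel_size=3,
                               stride=1, padding=1, bias=False)
        self.bn2 = nn.BatchNorm2d(planes)
        # projection shortcut when the shape changes, identity otherwise
        self.shortcut = nn.Sequential()
        if stride != 1 or in_planes != self.expansion * planes:
            self.shortcut = nn.Sequential(
                nn.Conv2d(in_planes, self.expansion * planes, kernel_size=1,
                          stride=stride, bias=False),
                nn.BatchNorm2d(self.expansion * planes)
            )

    def forward(self, x):
        out = F.relu(self.bn1(self.conv1(x)))
        out = self.bn2(self.conv2(out))
        out += self.shortcut(x)
        return F.relu(out)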
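
A quick shape check: with BasicBlock's expansion = 1, the 64-feature input to linear1 only works out if the network sees roughly 64x64 RGB images (conv0 halves 64 to 32, layers 1-4 bring it to 4x4 at 64 channels, and avg_pool2d(out, 4) then yields 1x1). A hypothetical smoke test under that assumption, aliasing BasicBlockGroup to BasicBlock only because its real definition is not in the post:

# Hypothetical smoke test -- assumes the BasicBlock sketch above
# and 64x64 RGB input.
BasicBlockGroup = BasicBlock  # stand-in alias for this test only

model = ResNet5(BasicBlock, [1, 1, 1, 1], num_classes=10)
x = torch.randn(2, 3, 64, 64)   # batch of 2 RGB 64x64 images
logits = model(x)
print(logits.shape)             # torch.Size([2, 10])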