[Bug fix] redundant layers in ResNet

In https://github.com/yunjey/pytorch-tutorial/blob/master/tutorials/02-intermediate/deep_residual_network/main.py#L115, a length-4 `layers` list is passed to `ResNet`, but the constructor at https://github.com/yunjey/pytorch-tutorial/blob/master/tutorials/02-intermediate/deep_residual_network/main.py#L84 only ever reads `layers[0]` and `layers[1]`, because `layer2` mistakenly reuses `layers[0]`. The trailing entries of `[2, 2, 2, 2]` are therefore redundant. This change makes `layer2` and `layer3` read `layers[1]` and `layers[2]`, and shortens the list to `[2, 2, 2]`.
keineahnung2345 authored on 2018-11-06 17:54:07 +08:00, committed by GitHub
parent 6f28678302
commit 606d0aa188


@@ -82,8 +82,8 @@ class ResNet(nn.Module):
         self.bn = nn.BatchNorm2d(16)
         self.relu = nn.ReLU(inplace=True)
         self.layer1 = self.make_layer(block, 16, layers[0])
-        self.layer2 = self.make_layer(block, 32, layers[0], 2)
-        self.layer3 = self.make_layer(block, 64, layers[1], 2)
+        self.layer2 = self.make_layer(block, 32, layers[1], 2)
+        self.layer3 = self.make_layer(block, 64, layers[2], 2)
         self.avg_pool = nn.AvgPool2d(8)
         self.fc = nn.Linear(64, num_classes)
@@ -112,7 +112,7 @@ class ResNet(nn.Module):
         out = self.fc(out)
         return out
-model = ResNet(ResidualBlock, [2, 2, 2, 2]).to(device)
+model = ResNet(ResidualBlock, [2, 2, 2]).to(device)
 # Loss and optimizer
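
For readers following along without the full file, below is a minimal runnable sketch of the network after this fix, mirroring the tutorial's `conv3x3`/`ResidualBlock`/`make_layer` structure. The shape check at the end is added here only to confirm that a length-3 `layers` list wires all three stages; it is not part of the tutorial.

import torch
import torch.nn as nn

def conv3x3(in_channels, out_channels, stride=1):
    # 3x3 convolution with padding, as in the tutorial
    return nn.Conv2d(in_channels, out_channels, kernel_size=3,
                     stride=stride, padding=1, bias=False)

class ResidualBlock(nn.Module):
    def __init__(self, in_channels, out_channels, stride=1, downsample=None):
        super(ResidualBlock, self).__init__()
        self.conv1 = conv3x3(in_channels, out_channels, stride)
        self.bn1 = nn.BatchNorm2d(out_channels)
        self.relu = nn.ReLU(inplace=True)
        self.conv2 = conv3x3(out_channels, out_channels)
        self.bn2 = nn.BatchNorm2d(out_channels)
        self.downsample = downsample

    def forward(self, x):
        residual = x
        out = self.relu(self.bn1(self.conv1(x)))
        out = self.bn2(self.conv2(out))
        if self.downsample:
            residual = self.downsample(x)
        return self.relu(out + residual)

class ResNet(nn.Module):
    def __init__(self, block, layers, num_classes=10):
        super(ResNet, self).__init__()
        self.in_channels = 16
        self.conv = conv3x3(3, 16)
        self.bn = nn.BatchNorm2d(16)
        self.relu = nn.ReLU(inplace=True)
        # After the fix, each stage consumes its own entry of `layers`
        self.layer1 = self.make_layer(block, 16, layers[0])
        self.layer2 = self.make_layer(block, 32, layers[1], 2)
        self.layer3 = self.make_layer(block, 64, layers[2], 2)
        self.avg_pool = nn.AvgPool2d(8)
        self.fc = nn.Linear(64, num_classes)

    def make_layer(self, block, out_channels, blocks, stride=1):
        # Downsample the residual path when spatial size or channel count changes
        downsample = None
        if (stride != 1) or (self.in_channels != out_channels):
            downsample = nn.Sequential(
                conv3x3(self.in_channels, out_channels, stride=stride),
                nn.BatchNorm2d(out_channels))
        stage = [block(self.in_channels, out_channels, stride, downsample)]
        self.in_channels = out_channels
        for _ in range(1, blocks):
            stage.append(block(out_channels, out_channels))
        return nn.Sequential(*stage)

    def forward(self, x):
        out = self.relu(self.bn(self.conv(x)))
        out = self.layer3(self.layer2(self.layer1(out)))
        out = self.avg_pool(out)
        out = out.view(out.size(0), -1)
        return self.fc(out)

# Shape check (added for illustration): a length-3 list now matches the three stages
model = ResNet(ResidualBlock, [2, 2, 2])
print(model(torch.randn(1, 3, 32, 32)).shape)  # torch.Size([1, 10])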