[Bug fix] redundant layers in ResNet
In https://github.com/yunjey/pytorch-tutorial/blob/master/tutorials/02-intermediate/deep_residual_network/main.py#L115, a length-4 `layers` list is passed to `ResNet`, but in https://github.com/yunjey/pytorch-tutorial/blob/master/tutorials/02-intermediate/deep_residual_network/main.py#L84 the constructor only reads `layers[0]` and `layers[1]`, so the trailing entries of `[2, 2, 2, 2]` are never used. This commit re-indexes the three stages as `layers[0]`, `layers[1]`, and `layers[2]`, and shortens the argument to `[2, 2, 2]`.
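For context, here is a minimal runnable sketch of the patched network, condensed from main.py (hyperparameters, data loading, and the training loop omitted). It shows that each of the three stages consumes exactly one entry of `layers`, which is why `[2, 2, 2]` is the right length:

```python
import torch
import torch.nn as nn

def conv3x3(in_channels, out_channels, stride=1):
    # 3x3 convolution, as defined in main.py.
    return nn.Conv2d(in_channels, out_channels, kernel_size=3,
                     stride=stride, padding=1, bias=False)

class ResidualBlock(nn.Module):
    def __init__(self, in_channels, out_channels, stride=1, downsample=None):
        super().__init__()
        self.conv1 = conv3x3(in_channels, out_channels, stride)
        self.bn1 = nn.BatchNorm2d(out_channels)
        self.relu = nn.ReLU(inplace=True)
        self.conv2 = conv3x3(out_channels, out_channels)
        self.bn2 = nn.BatchNorm2d(out_channels)
        self.downsample = downsample

    def forward(self, x):
        residual = self.downsample(x) if self.downsample else x
        out = self.relu(self.bn1(self.conv1(x)))
        out = self.bn2(self.conv2(out))
        return self.relu(out + residual)

class ResNet(nn.Module):
    def __init__(self, block, layers, num_classes=10):
        super().__init__()
        self.in_channels = 16
        self.conv = conv3x3(3, 16)
        self.bn = nn.BatchNorm2d(16)
        self.relu = nn.ReLU(inplace=True)
        # One entry per stage -- the point of this commit:
        self.layer1 = self.make_layer(block, 16, layers[0])
        self.layer2 = self.make_layer(block, 32, layers[1], 2)  # was layers[0]
        self.layer3 = self.make_layer(block, 64, layers[2], 2)  # was layers[1]
        self.avg_pool = nn.AvgPool2d(8)
        self.fc = nn.Linear(64, num_classes)

    def make_layer(self, block, out_channels, blocks, stride=1):
        # Stack `blocks` residual blocks; the first one may downsample.
        downsample = None
        if stride != 1 or self.in_channels != out_channels:
            downsample = nn.Sequential(
                conv3x3(self.in_channels, out_channels, stride=stride),
                nn.BatchNorm2d(out_channels))
        stage = [block(self.in_channels, out_channels, stride, downsample)]
        self.in_channels = out_channels
        stage += [block(out_channels, out_channels) for _ in range(1, blocks)]
        return nn.Sequential(*stage)

    def forward(self, x):
        out = self.relu(self.bn(self.conv(x)))
        out = self.layer3(self.layer2(self.layer1(out)))
        out = self.avg_pool(out).view(out.size(0), -1)
        return self.fc(out)

# A CIFAR-10-sized dummy batch passes through: three stages, three entries.
model = ResNet(ResidualBlock, [2, 2, 2])
print(model(torch.randn(4, 3, 32, 32)).shape)  # torch.Size([4, 10])
```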
```diff
@@ -82,8 +82,8 @@ class ResNet(nn.Module):
         self.bn = nn.BatchNorm2d(16)
         self.relu = nn.ReLU(inplace=True)
         self.layer1 = self.make_layer(block, 16, layers[0])
-        self.layer2 = self.make_layer(block, 32, layers[0], 2)
-        self.layer3 = self.make_layer(block, 64, layers[1], 2)
+        self.layer2 = self.make_layer(block, 32, layers[1], 2)
+        self.layer3 = self.make_layer(block, 64, layers[2], 2)
         self.avg_pool = nn.AvgPool2d(8)
         self.fc = nn.Linear(64, num_classes)
 
@@ -112,7 +112,7 @@ class ResNet(nn.Module):
         out = self.fc(out)
         return out
 
-model = ResNet(ResidualBlock, [2, 2, 2, 2]).to(device)
+model = ResNet(ResidualBlock, [2, 2, 2]).to(device)
 
 
 # Loss and optimizer
@@ -166,4 +166,4 @@ with torch.no_grad():
     print('Accuracy of the model on the test images: {} %'.format(100 * correct / total))
 
 # Save the model checkpoint
-torch.save(model.state_dict(), 'resnet.ckpt')
+torch.save(model.state_dict(), 'resnet.ckpt')
```
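Since every entry of the old argument was 2, the re-indexing produces exactly the network the old code built, which is why the bug was silent rather than fatal. A quick check against the sketch above (a hypothetical verification, not part of the commit):

```python
# Under the fixed indexing only layers[0..2] are read, so a leftover
# fourth entry is ignored and the two models are identical in shape.
old = sum(p.numel() for p in ResNet(ResidualBlock, [2, 2, 2, 2]).parameters())
new = sum(p.numel() for p in ResNet(ResidualBlock, [2, 2, 2]).parameters())
assert old == new
```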