LeNet-5 是一个 7 层的神经网络,包含 3 个卷积层、2 个池化层和 2 个全连接层。所有卷积层的卷积核都为 5x5,步长 stride=1,池化方法为 2x2 平均池化(AvgPool2d),激活函数为 Sigmoid,网络结构如下:
import torch
from torch import nn
# Define a network model: MyLeNet5
class MyLeNet5(nn.Module):
    """LeNet-5 for 1-channel 28x28 inputs (e.g. MNIST), producing 10 logits.

    Layer stack (shapes shown for a batch of 1):
        c1:  Conv2d 1->6,  5x5, padding=2   -> [1, 6, 28, 28]
        s2:  AvgPool2d 2x2, stride 2        -> [1, 6, 14, 14]
        c3:  Conv2d 6->16, 5x5              -> [1, 16, 10, 10]
        s4:  AvgPool2d 2x2, stride 2        -> [1, 16, 5, 5]
        c5:  Conv2d 16->120, 5x5            -> [1, 120, 1, 1]
        f6:  Linear 120 -> 84
        out: Linear 84 -> 10
    Sigmoid is the activation after c1 and c3, as in the original paper.
    """

    def __init__(self):
        super(MyLeNet5, self).__init__()
        # padding=2 keeps the 28x28 spatial size through c1.
        self.c1 = nn.Conv2d(in_channels=1, out_channels=6, kernel_size=5, padding=2)  # -> [N, 6, 28, 28]
        self.Sigmoid = nn.Sigmoid()
        self.s2 = nn.AvgPool2d(kernel_size=2, stride=2)  # -> [N, 6, 14, 14]
        self.c3 = nn.Conv2d(in_channels=6, out_channels=16, kernel_size=5)  # -> [N, 16, 10, 10]
        self.s4 = nn.AvgPool2d(kernel_size=2, stride=2)  # -> [N, 16, 5, 5]
        self.c5 = nn.Conv2d(in_channels=16, out_channels=120, kernel_size=5)  # -> [N, 120, 1, 1]
        self.flatten = nn.Flatten()  # [N, 120, 1, 1] -> [N, 120]
        self.f6 = nn.Linear(120, 84)
        self.output = nn.Linear(84, 10)

    def forward(self, x):
        """Run the forward pass.

        Args:
            x: input tensor of shape [N, 1, 28, 28].

        Returns:
            Logits tensor of shape [N, 10] (no softmax applied).
        """
        x = self.Sigmoid(self.c1(x))
        x = self.s2(x)
        x = self.Sigmoid(self.c3(x))
        x = self.s4(x)
        x = self.c5(x)
        x = self.flatten(x)
        x = self.f6(x)
        x = self.output(x)
        return x
if __name__=="__main__":
x = torch.rand([1, 1, 28, 28])
model = MyLeNet5()
print(model)
y = model(x)