Add BN layers to LeNet-5 for handwritten digit recognition, and compare the training-set accuracy of several models (fully connected, LeNet-5, LeNet-5+BN), plotting accuracy against epoch. TensorFlow implementation.
The following TensorFlow code adds BN layers to LeNet-5 for handwritten digit recognition and compares the training-set accuracy of the three models:
```python
import tensorflow as tf
from tensorflow.keras import layers, models, datasets
import matplotlib.pyplot as plt
# Load the MNIST handwritten digit dataset
(train_images, train_labels), (test_images, test_labels) = datasets.mnist.load_data()

# Preprocessing: scale pixel values to [0, 1]
train_images = train_images / 255.0
test_images = test_images / 255.0
# Define the LeNet-5 model
def LeNet5():
    model = models.Sequential()
    model.add(layers.Conv2D(filters=6, kernel_size=(5, 5), activation='relu', input_shape=(28, 28, 1)))
    model.add(layers.MaxPooling2D(pool_size=(2, 2)))
    model.add(layers.Conv2D(filters=16, kernel_size=(5, 5), activation='relu'))
    model.add(layers.MaxPooling2D(pool_size=(2, 2)))
    model.add(layers.Flatten())
    model.add(layers.Dense(units=120, activation='relu'))
    model.add(layers.Dense(units=84, activation='relu'))
    model.add(layers.Dense(units=10, activation='softmax'))
    return model
# Define the LeNet-5+BN model (BatchNormalization inserted before each activation)
def LeNet5_BN():
    model = models.Sequential()
    model.add(layers.Conv2D(filters=6, kernel_size=(5, 5), input_shape=(28, 28, 1)))
    model.add(layers.BatchNormalization())
    model.add(layers.Activation('relu'))
    model.add(layers.MaxPooling2D(pool_size=(2, 2)))
    model.add(layers.Conv2D(filters=16, kernel_size=(5, 5)))
    model.add(layers.BatchNormalization())
    model.add(layers.Activation('relu'))
    model.add(layers.MaxPooling2D(pool_size=(2, 2)))
    model.add(layers.Flatten())
    model.add(layers.Dense(units=120))
    model.add(layers.BatchNormalization())
    model.add(layers.Activation('relu'))
    model.add(layers.Dense(units=84))
    model.add(layers.BatchNormalization())
    model.add(layers.Activation('relu'))
    model.add(layers.Dense(units=10, activation='softmax'))
    return model
# Define the fully connected (MLP) baseline model
def FullyConnected():
    model = models.Sequential()
    model.add(layers.Flatten(input_shape=(28, 28)))
    model.add(layers.Dense(units=256, activation='relu'))
    model.add(layers.Dense(units=128, activation='relu'))
    model.add(layers.Dense(units=10, activation='softmax'))
    return model
# Compile the three models
LeNet5_model = LeNet5()
LeNet5_model.compile(optimizer='adam', loss='sparse_categorical_crossentropy', metrics=['accuracy'])
LeNet5_BN_model = LeNet5_BN()
LeNet5_BN_model.compile(optimizer='adam', loss='sparse_categorical_crossentropy', metrics=['accuracy'])
FullyConnected_model = FullyConnected()
FullyConnected_model.compile(optimizer='adam', loss='sparse_categorical_crossentropy', metrics=['accuracy'])
# Train the models (the convolutional models need an explicit channel dimension)
train_images_cnn = train_images.reshape(-1, 28, 28, 1)
test_images_cnn = test_images.reshape(-1, 28, 28, 1)
LeNet5_history = LeNet5_model.fit(train_images_cnn, train_labels, epochs=10,
                                  validation_data=(test_images_cnn, test_labels))
LeNet5_BN_history = LeNet5_BN_model.fit(train_images_cnn, train_labels, epochs=10,
                                        validation_data=(test_images_cnn, test_labels))
FullyConnected_history = FullyConnected_model.fit(train_images, train_labels, epochs=10,
                                                  validation_data=(test_images, test_labels))
# Plot training-set accuracy against epoch
plt.plot(LeNet5_history.history['accuracy'], label='LeNet-5')
plt.plot(LeNet5_BN_history.history['accuracy'], label='LeNet-5+BN')
plt.plot(FullyConnected_history.history['accuracy'], label='Fully Connected')
plt.xlabel('Epoch')
plt.ylabel('Accuracy')
plt.legend()
plt.show()
```
This code trains the LeNet-5, LeNet-5+BN, and fully connected models in turn and plots how their training-set accuracy changes with epoch. Comparing the three curves shows whether adding BN layers improves training.
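If you also want the numbers rather than reading them off the plot, a minimal follow-up sketch (reusing the `History` objects created above and only the standard Keras `History.history` dictionary) could print the final-epoch training and validation accuracy of each model:
```python
# Print the last-epoch accuracies recorded by Keras.
# Assumes the three *_history objects from the code above are in scope.
histories = {
    'Fully Connected': FullyConnected_history,
    'LeNet-5': LeNet5_history,
    'LeNet-5+BN': LeNet5_BN_history,
}
for name, history in histories.items():
    train_acc = history.history['accuracy'][-1]
    val_acc = history.history['val_accuracy'][-1]
    print(f'{name}: train acc = {train_acc:.4f}, val acc = {val_acc:.4f}')
```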