编写三层前向神经网络反向传播算法程序,对以下数据进行分类。 第一类10个样本(三维空间): [1.58, 2.32, -5.8],[0.67, 1.58, -4.78],[1.04, 1.01, -3.63],[-1.49, 2.18, -3.39],[-0.41, -4.73, 1.21],[1.39, 3.16, 2.87],[1.20, 1.40, -1.89],[-0.92, 1.44, -3.22],[0.45, 1.33, -4.38],[-0.76, 0.84, -1.96] ;第二类10个样本(三维空间): [0.21, 0.03, -2.21],[0.37, 0.28, -1.81],[0.18, 1.22, 0.16],[-0.24, 0.93, -1.01],[-1.18, 0.39, -0.39],[0.74, 0.96, -1.16],[-0.38, 1.94, -0.48],[0.02, 0.72, -0.17],[0.44, 1.31, -0.14],[0.46, 1.49, 0.68] ;第三类10个样本(三维空间): [-1.54, 1.17, 0.64],[5.41, 3.45, -1.33],[1.55, 0.99, 2.69],[1.68, 1.79, -0.87],[3.51, -0.22, -1.39],[1.86, 3.19, 1.51],[1.40, -0.44, -0.92],[0.44, 0.83, 1.97],[0.25, 0.68, -0.99],[0.66, -0.45, 0.08] 。隐含层结点的激励函数采用tanh函数,输出层的激励函数采用sigmoid函数。目标函数采用平方误差准则函数。
时间: 2023-07-02 20:24:14 浏览: 212
这里提供一个Python版本的代码实现,使用NumPy库来进行矩阵运算。
```python
import numpy as np
# 定义tanh和sigmoid激励函数
def tanh(x):
    """Hidden-layer activation: elementwise hyperbolic tangent."""
    return np.tanh(x)
def sigmoid(x):
    """Output-layer activation: elementwise logistic function 1/(1+e^-x)."""
    return 1.0 / (1.0 + np.exp(-x))
# 定义神经网络模型
class NeuralNetwork:
    """Three-layer feed-forward network: tanh hidden layer, sigmoid output.

    Trained by plain back-propagation on the squared-error criterion.
    """

    def __init__(self, input_dim, hidden_dim, output_dim):
        # Gaussian init scaled by 1/sqrt(fan_in) keeps pre-activations in the
        # responsive (non-saturated) range of tanh/sigmoid at the start.
        self.W1 = np.random.randn(input_dim, hidden_dim) / np.sqrt(input_dim)
        self.b1 = np.zeros((1, hidden_dim))
        self.W2 = np.random.randn(hidden_dim, output_dim) / np.sqrt(hidden_dim)
        self.b2 = np.zeros((1, output_dim))

    def forward(self, X):
        """Forward pass; caches intermediates (z1, a1, z2, y) for backward()."""
        self.z1 = X @ self.W1 + self.b1
        self.a1 = tanh(self.z1)
        self.z2 = self.a1 @ self.W2 + self.b2
        self.y = sigmoid(self.z2)
        return self.y

    def backward(self, X, y, learning_rate):
        """One full-batch gradient-descent step on the squared error.

        Relies on the activations cached by the most recent forward() call.
        """
        # Output-layer error term: (y_hat - y) * sigmoid'(z2).
        err_out = (self.y - y) * self.y * (1 - self.y)
        # Hidden-layer error via the chain rule; tanh'(z1) = 1 - a1**2.
        # Computed BEFORE any weight update so it uses the pre-step W2.
        err_hidden = (err_out @ self.W2.T) * (1 - self.a1 ** 2)

        grad_W2 = self.a1.T @ err_out
        grad_b2 = err_out.sum(axis=0, keepdims=True)
        grad_W1 = X.T @ err_hidden
        grad_b1 = err_hidden.sum(axis=0)

        self.W1 -= learning_rate * grad_W1
        self.b1 -= learning_rate * grad_b1
        self.W2 -= learning_rate * grad_W2
        self.b2 -= learning_rate * grad_b2
# ---- Training data: 30 samples, 10 per class, 3-D features ----
X = np.array([[1.58, 2.32, -5.8], [0.67, 1.58, -4.78], [1.04, 1.01, -3.63], [-1.49, 2.18, -3.39],
              [-0.41, -4.73, 1.21], [1.39, 3.16, 2.87], [1.20, 1.40, -1.89], [-0.92, 1.44, -3.22],
              [0.45, 1.33, -4.38], [-0.76, 0.84, -1.96], [0.21, 0.03, -2.21], [0.37, 0.28, -1.81],
              [0.18, 1.22, 0.16], [-0.24, 0.93, -1.01], [-1.18, 0.39, -0.39], [0.74, 0.96, -1.16],
              [-0.38, 1.94, -0.48], [0.02, 0.72, -0.17], [0.44, 1.31, -0.14], [0.46, 1.49, 0.68],
              [-1.54, 1.17, 0.64], [5.41, 3.45, -1.33], [1.55, 0.99, 2.69], [1.68, 1.79, -0.87],
              [3.51, -0.22, -1.39], [1.86, 3.19, 1.51], [1.40, -0.44, -0.92], [0.44, 0.83, 1.97],
              [0.25, 0.68, -0.99], [0.66, -0.45, 0.08]])
# One-hot targets: rows 0-9 -> class 1, rows 10-19 -> class 2, rows 20-29 -> class 3.
# BUGFIX: the original literal had only 29 rows for 30 samples (shape mismatch
# crashes backward) and mislabelled the classes 5/5/19 instead of 10/10/10.
y = np.repeat(np.eye(3), 10, axis=0)
# ---- Model hyper-parameters ----
input_dim = 3
hidden_dim = 4
output_dim = 3
learning_rate = 0.1
num_iterations = 10000
# ---- Build and train the network (full-batch gradient descent) ----
nn = NeuralNetwork(input_dim, hidden_dim, output_dim)
for i in range(num_iterations):
    y_pred = nn.forward(X)
    nn.backward(X, y, learning_rate)
    if i % 1000 == 0:
        # Mean squared error of the prediction made just before this update.
        loss = np.mean((y_pred - y) ** 2)
        print("Iteration %d, loss = %.4f" % (i, loss))
# ---- Predict class scores (one sigmoid per class) for unseen samples ----
X_new = np.array([[1.5, 2.5, -5.5], [-1.5, 2.5, -3.5], [0.5, 1.5, -3.5]])
y_pred_new = nn.forward(X_new)
print("New samples: ")
print(X_new)
print("Predictions: ")
print(y_pred_new)
```
这段代码定义了一个三层前向神经网络,包括一个输入层、一个隐含层和一个输出层。其中,隐含层的激励函数使用tanh函数,输出层的激励函数使用sigmoid函数。目标函数采用平方误差准则函数。训练数据包括30个样本,每个样本有三个特征,输出层有3个神经元,分别对应三个类别。最后,我们使用训练好的模型来预测三个新样本的类别。
阅读全文