Python implementation of the MLP algorithm
An MLP (multilayer perceptron) is a commonly used neural network model for classification and regression problems. Below is a simple Python code example that implements the MLP algorithm:
```python
import numpy as np
class MLP:
    def __init__(self, input_size, hidden_size, output_size):
        self.input_size = input_size
        self.hidden_size = hidden_size
        self.output_size = output_size
        # Initialize weights and biases
        self.W1 = np.random.randn(self.input_size, self.hidden_size)
        self.b1 = np.zeros((1, self.hidden_size))
        self.W2 = np.random.randn(self.hidden_size, self.output_size)
        self.b2 = np.zeros((1, self.output_size))

    def forward(self, X):
        # Forward pass: linear -> tanh -> linear -> sigmoid
        self.z1 = np.dot(X, self.W1) + self.b1
        self.a1 = np.tanh(self.z1)
        self.z2 = np.dot(self.a1, self.W2) + self.b2
        self.a2 = self.sigmoid(self.z2)
        return self.a2

    def backward(self, X, y, learning_rate):
        # Backward pass (one gradient-descent step)
        m = X.shape[0]
        # Output-layer error: gradient of cross-entropy w.r.t. z2 for a sigmoid output
        delta2 = self.a2 - y
        # Hidden-layer error; (1 - a1^2) is the derivative of tanh
        delta1 = np.dot(delta2, self.W2.T) * (1 - np.power(self.a1, 2))
        # Gradients averaged over the batch
        dW2 = np.dot(self.a1.T, delta2) / m
        db2 = np.sum(delta2, axis=0, keepdims=True) / m
        dW1 = np.dot(X.T, delta1) / m
        db1 = np.sum(delta1, axis=0, keepdims=True) / m
        # Update weights and biases
        self.W2 -= learning_rate * dW2
        self.b2 -= learning_rate * db2
        self.W1 -= learning_rate * dW1
        self.b1 -= learning_rate * db1

    def train(self, X, y, epochs, learning_rate):
        for epoch in range(epochs):
            # Forward pass
            output = self.forward(X)
            # Backward pass
            self.backward(X, y, learning_rate)
            # Compute the loss
            loss = self.loss_function(output, y)
            if epoch % 100 == 0:
                print(f"Epoch {epoch}, Loss: {loss}")

    def predict(self, X):
        # Predict: round the sigmoid output to 0 or 1
        output = self.forward(X)
        predictions = np.round(output)
        return predictions

    def loss_function(self, y_pred, y_true):
        # Binary cross-entropy loss
        loss = -np.mean(y_true * np.log(y_pred) + (1 - y_true) * np.log(1 - y_pred))
        return loss

    def sigmoid(self, x):
        # Sigmoid activation function
        return 1 / (1 + np.exp(-x))
```
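The backward pass relies on two analytic gradients: `(1 - a1^2)` is the derivative of tanh, and `a2 - y` is the gradient of the cross-entropy loss with respect to the pre-sigmoid output. If you want to convince yourself the math is right, you can compare the analytic gradient against a finite-difference estimate. The following is a minimal sketch assuming the `MLP` class above; the `numerical_grad_W2` helper and its epsilon are illustrative, not part of the original code:
```python
import numpy as np

def numerical_grad_W2(mlp, X, y, eps=1e-5):
    # Finite-difference gradient of the loss w.r.t. W2, for comparison
    # with the analytic gradient dW2 = a1.T @ (a2 - y) / m
    grad = np.zeros_like(mlp.W2)
    for i in range(mlp.W2.shape[0]):
        for j in range(mlp.W2.shape[1]):
            old = mlp.W2[i, j]
            mlp.W2[i, j] = old + eps
            loss_plus = mlp.loss_function(mlp.forward(X), y)
            mlp.W2[i, j] = old - eps
            loss_minus = mlp.loss_function(mlp.forward(X), y)
            mlp.W2[i, j] = old  # restore the original weight
            grad[i, j] = (loss_plus - loss_minus) / (2 * eps)
    return grad

# Check on the XOR data
mlp = MLP(input_size=2, hidden_size=4, output_size=1)
X = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
y = np.array([[0], [1], [1], [0]])
mlp.forward(X)
analytic = np.dot(mlp.a1.T, mlp.a2 - y) / X.shape[0]
numeric = numerical_grad_W2(mlp, X, y)
print(np.max(np.abs(analytic - numeric)))  # should be close to 0
```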
Usage example:
```python
# Create an MLP instance
mlp = MLP(input_size=2, hidden_size=4, output_size=1)
# Training data (XOR truth table)
X_train = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
y_train = np.array([[0], [1], [1], [0]])
# Train the model
mlp.train(X_train, y_train, epochs=1000, learning_rate=0.1)
# Predict
X_test = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
predictions = mlp.predict(X_test)
print(predictions)
```
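One practical note: because the weights are initialized randomly, 1000 epochs at learning rate 0.1 may not always be enough for the network to learn XOR. A sketch of a more reproducible run, assuming the same class (the seed and epoch count here are illustrative choices, not from the original code):
```python
np.random.seed(42)  # illustrative seed, for reproducibility
mlp = MLP(input_size=2, hidden_size=4, output_size=1)
mlp.train(X_train, y_train, epochs=10000, learning_rate=0.1)
print(mlp.predict(X_train))  # ideally approximates [[0], [1], [1], [0]]
```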
This code implements a simple MLP and applies it to a logic-gate problem (XOR). You can modify and extend it to suit your needs, or reach for a library implementation, as sketched below. Hope it helps!
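For real-world use, a library implementation is usually preferable to hand-rolled backpropagation. A rough equivalent using scikit-learn's `MLPClassifier` (the parameter choices below are illustrative, not prescribed by the original article):
```python
from sklearn.neural_network import MLPClassifier
import numpy as np

X = np.array([[0, 0], [0, 1], [1, 0], [1, 1]])
y = np.array([0, 1, 1, 0])

# Small tanh network, roughly mirroring the hand-written MLP above
clf = MLPClassifier(hidden_layer_sizes=(4,), activation='tanh',
                    solver='lbfgs', max_iter=2000, random_state=0)
clf.fit(X, y)
print(clf.predict(X))
```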