A Detailed Example of BP Neural Network Classification in Python
Below is a Python code example for classification based on a BP (backpropagation) neural network.
First, import the required libraries:
```python
import numpy as np
from sklearn.datasets import make_classification
from sklearn.model_selection import train_test_split
```
Next, generate a classification dataset for training and testing:
```python
X, y = make_classification(n_samples=1000, n_features=10, n_classes=2)
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)
```
Next, define a BP neural network class:
```python
class BPNeuralNetwork:
    def __init__(self, input_size, hidden_size, output_size, learning_rate=0.01):
        self.input_size = input_size
        self.hidden_size = hidden_size
        self.output_size = output_size
        self.learning_rate = learning_rate
        # Randomly initialize the weights; start the biases at zero
        self.weights1 = np.random.randn(self.input_size, self.hidden_size)
        self.bias1 = np.zeros((1, self.hidden_size))
        self.weights2 = np.random.randn(self.hidden_size, self.output_size)
        self.bias2 = np.zeros((1, self.output_size))

    def sigmoid(self, x):
        return 1 / (1 + np.exp(-x))

    def sigmoid_derivative(self, a):
        # Derivative of the sigmoid, expressed in terms of its output a = sigmoid(x)
        return a * (1 - a)

    def forward(self, X):
        # Input layer -> hidden layer
        self.z1 = np.dot(X, self.weights1) + self.bias1
        self.a1 = self.sigmoid(self.z1)
        # Hidden layer -> output layer
        self.z2 = np.dot(self.a1, self.weights2) + self.bias2
        self.a2 = self.sigmoid(self.z2)
        return self.a2

    def backward(self, X, y, output):
        # Output-layer error and delta
        self.output_error = y - output
        self.output_delta = self.output_error * self.sigmoid_derivative(output)
        # Propagate the error back to the hidden layer
        self.z1_error = np.dot(self.output_delta, self.weights2.T)
        self.z1_delta = self.z1_error * self.sigmoid_derivative(self.a1)
        # Gradient-descent updates, scaled by the learning rate
        self.weights1 += self.learning_rate * np.dot(X.T, self.z1_delta)
        self.bias1 += self.learning_rate * np.sum(self.z1_delta, axis=0, keepdims=True)
        self.weights2 += self.learning_rate * np.dot(self.a1.T, self.output_delta)
        self.bias2 += self.learning_rate * np.sum(self.output_delta, axis=0, keepdims=True)

    def train(self, X, y, num_iterations):
        # Reshape y into a column vector so it broadcasts correctly against the output
        y = y.reshape(-1, 1)
        for i in range(num_iterations):
            output = self.forward(X)
            self.backward(X, y, output)

    def predict(self, X):
        # Threshold the sigmoid output at 0.5 to obtain class labels 0/1
        return np.round(self.forward(X))
```
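For reference (the original article does not spell this out), the `backward` method is a sketch of gradient descent on the squared-error loss with sigmoid activations. In the notation of the code, with $\eta$ the learning rate and $\odot$ the element-wise product, the deltas and weight updates are

$$\delta_2 = (y - a_2)\odot a_2(1 - a_2), \qquad \delta_1 = (\delta_2 W_2^{\top})\odot a_1(1 - a_1),$$
$$W_2 \leftarrow W_2 + \eta\, a_1^{\top}\delta_2, \qquad W_1 \leftarrow W_1 + \eta\, X^{\top}\delta_1,$$

with the biases updated by the column sums of the corresponding deltas. Note that `sigmoid_derivative` is applied to the activations $a_1$ and $a_2$ rather than the pre-activations, which is why it is written as $a(1-a)$.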
Finally, create a BPNeuralNetwork object, train it, and make predictions:
```python
bpnn = BPNeuralNetwork(input_size=10, hidden_size=5, output_size=1)
bpnn.train(X_train, y_train, num_iterations=1000)
y_pred = bpnn.predict(X_test)
```
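To check how well the network generalizes (this step is not in the original snippet), the predictions can be compared with the test labels, for example using scikit-learn's accuracy_score:
```python
from sklearn.metrics import accuracy_score

# y_pred has shape (n_samples, 1); flatten it before comparing with y_test
accuracy = accuracy_score(y_test, y_pred.ravel())
print(f"Test accuracy: {accuracy:.3f}")
```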
The complete code example is as follows:
```python
import numpy as np
from sklearn.datasets import make_classification
from sklearn.model_selection import train_test_split


class BPNeuralNetwork:
    def __init__(self, input_size, hidden_size, output_size, learning_rate=0.01):
        self.input_size = input_size
        self.hidden_size = hidden_size
        self.output_size = output_size
        self.learning_rate = learning_rate
        # Randomly initialize the weights; start the biases at zero
        self.weights1 = np.random.randn(self.input_size, self.hidden_size)
        self.bias1 = np.zeros((1, self.hidden_size))
        self.weights2 = np.random.randn(self.hidden_size, self.output_size)
        self.bias2 = np.zeros((1, self.output_size))

    def sigmoid(self, x):
        return 1 / (1 + np.exp(-x))

    def sigmoid_derivative(self, a):
        # Derivative of the sigmoid, expressed in terms of its output a = sigmoid(x)
        return a * (1 - a)

    def forward(self, X):
        self.z1 = np.dot(X, self.weights1) + self.bias1
        self.a1 = self.sigmoid(self.z1)
        self.z2 = np.dot(self.a1, self.weights2) + self.bias2
        self.a2 = self.sigmoid(self.z2)
        return self.a2

    def backward(self, X, y, output):
        # Output-layer error and delta
        self.output_error = y - output
        self.output_delta = self.output_error * self.sigmoid_derivative(output)
        # Propagate the error back to the hidden layer
        self.z1_error = np.dot(self.output_delta, self.weights2.T)
        self.z1_delta = self.z1_error * self.sigmoid_derivative(self.a1)
        # Gradient-descent updates, scaled by the learning rate
        self.weights1 += self.learning_rate * np.dot(X.T, self.z1_delta)
        self.bias1 += self.learning_rate * np.sum(self.z1_delta, axis=0, keepdims=True)
        self.weights2 += self.learning_rate * np.dot(self.a1.T, self.output_delta)
        self.bias2 += self.learning_rate * np.sum(self.output_delta, axis=0, keepdims=True)

    def train(self, X, y, num_iterations):
        # Reshape y into a column vector so it broadcasts correctly against the output
        y = y.reshape(-1, 1)
        for i in range(num_iterations):
            output = self.forward(X)
            self.backward(X, y, output)

    def predict(self, X):
        # Threshold the sigmoid output at 0.5 to obtain class labels 0/1
        return np.round(self.forward(X))


# Generate a binary classification dataset and split it into train/test sets
X, y = make_classification(n_samples=1000, n_features=10, n_classes=2)
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)

# Create, train, and evaluate the network
bpnn = BPNeuralNetwork(input_size=10, hidden_size=5, output_size=1)
bpnn.train(X_train, y_train, num_iterations=1000)
y_pred = bpnn.predict(X_test)
```