用c++实现BP神经网络
时间: 2023-11-05 14:08:36 浏览: 34
BP(Back Propagation)神经网络是一种基于误差反向传播算法的人工神经网络,可以用于分类、回归等问题。以下是一个简单的用C++实现BP神经网络的示例代码:
```c++
#include <cmath>
#include <cstdlib>
#include <iostream>
#include <vector>
using namespace std;
// A minimal fully-connected feed-forward network with one hidden layer,
// trained by error back-propagation with sigmoid activations.
//
// Fix vs. the original: the raw `new[]` buffers were never freed (no
// destructor) and the implicit copy operations would have been unsafe
// (Rule of Three violation). All storage now uses std::vector, so the
// class follows the Rule of Zero and cannot leak.
class BPNeuralNetwork {
public:
// Builds a network with the given layer widths and random initial
// weights in [0, 1) (rand() is deliberately left unseeded, matching
// the original deterministic behavior).
BPNeuralNetwork(int inputSize, int hiddenSize, int outputSize)
    : inputSize(inputSize),
      hiddenSize(hiddenSize),
      outputSize(outputSize),
      inputLayer(inputSize, 0.0),
      hiddenLayer(hiddenSize, 0.0),
      outputLayer(outputSize, 0.0),
      inputToHiddenWeight(inputSize, std::vector<double>(hiddenSize)),
      hiddenToOutputWeight(hiddenSize, std::vector<double>(outputSize)) {
    for (int i = 0; i < inputSize; i++) {
        for (int j = 0; j < hiddenSize; j++) {
            inputToHiddenWeight[i][j] = std::rand() / double(RAND_MAX);
        }
    }
    for (int i = 0; i < hiddenSize; i++) {
        for (int j = 0; j < outputSize; j++) {
            hiddenToOutputWeight[i][j] = std::rand() / double(RAND_MAX);
        }
    }
}
// Forward pass: copies `input` (must hold inputSize values) into the
// input layer, then computes hidden and output activations.
void forward(double* input) {
    for (int i = 0; i < inputSize; i++) {
        inputLayer[i] = input[i];
    }
    for (int i = 0; i < hiddenSize; i++) {
        double sum = 0;
        for (int j = 0; j < inputSize; j++) {
            sum += inputLayer[j] * inputToHiddenWeight[j][i];
        }
        hiddenLayer[i] = sigmoid(sum);
    }
    for (int i = 0; i < outputSize; i++) {
        double sum = 0;
        for (int j = 0; j < hiddenSize; j++) {
            sum += hiddenLayer[j] * hiddenToOutputWeight[j][i];
        }
        outputLayer[i] = sigmoid(sum);
    }
}
// Back-propagation step: computes output/hidden deltas against
// `target` (outputSize values) and updates both weight matrices.
// Must be called after forward() so the activations are current.
void backward(double* target, double learningRate) {
    // Output deltas: (target - actual) scaled by the sigmoid slope.
    // Note sigmoidDerivative takes the *activation*, not the pre-sum.
    std::vector<double> outputError(outputSize);
    for (int i = 0; i < outputSize; i++) {
        outputError[i] = (target[i] - outputLayer[i]) * sigmoidDerivative(outputLayer[i]);
    }
    // Hidden deltas: output deltas propagated back through the
    // hidden-to-output weights.
    std::vector<double> hiddenError(hiddenSize);
    for (int i = 0; i < hiddenSize; i++) {
        double sum = 0;
        for (int j = 0; j < outputSize; j++) {
            sum += outputError[j] * hiddenToOutputWeight[i][j];
        }
        hiddenError[i] = sum * sigmoidDerivative(hiddenLayer[i]);
    }
    // Gradient-ascent-on-negative-error weight updates.
    for (int i = 0; i < inputSize; i++) {
        for (int j = 0; j < hiddenSize; j++) {
            inputToHiddenWeight[i][j] += learningRate * hiddenError[j] * inputLayer[i];
        }
    }
    for (int i = 0; i < hiddenSize; i++) {
        for (int j = 0; j < outputSize; j++) {
            hiddenToOutputWeight[i][j] += learningRate * outputError[j] * hiddenLayer[i];
        }
    }
}
// Runs a forward pass and returns a pointer to the outputSize output
// activations. The pointer stays valid for the lifetime of this
// object (and is overwritten by the next forward()/predict() call).
double* predict(double* input) {
    forward(input);
    return outputLayer.data();
}
private:
int inputSize;
int hiddenSize;
int outputSize;
std::vector<double> inputLayer;
std::vector<double> hiddenLayer;
std::vector<double> outputLayer;
std::vector<std::vector<double>> inputToHiddenWeight;
std::vector<std::vector<double>> hiddenToOutputWeight;
// Logistic activation, maps any real number into (0, 1).
double sigmoid(double x) {
    return 1.0 / (1.0 + std::exp(-x));
}
// Derivative of the logistic function expressed in terms of its
// output value y: y * (1 - y).
double sigmoidDerivative(double x) {
    return x * (1.0 - x);
}
};
// Trains a 2-3-1 network on the XOR truth table and prints the four
// predictions. Replaces the original's four copy-pasted train/predict
// blocks with data tables and loops; the trained weights and printed
// output are identical (same sample order, same iteration count).
int main() {
    const int inputSize = 2;
    const int hiddenSize = 3;
    const int outputSize = 1;
    BPNeuralNetwork bpnn(inputSize, hiddenSize, outputSize);
    // XOR truth table: inputs[k] -> targets[k].
    double inputs[4][2] = {{0, 0}, {0, 1}, {1, 0}, {1, 1}};
    double targets[4][1] = {{0}, {1}, {1}, {0}};
    const double learningRate = 0.5;
    const int iterations = 10000;
    // One epoch = one forward/backward pass over each of the four
    // patterns, in the same fixed order as the original code.
    for (int i = 0; i < iterations; i++) {
        for (int k = 0; k < 4; k++) {
            bpnn.forward(inputs[k]);
            bpnn.backward(targets[k], learningRate);
        }
    }
    for (int k = 0; k < 4; k++) {
        double* output = bpnn.predict(inputs[k]);
        cout << inputs[k][0] << " XOR " << inputs[k][1] << " = " << output[0] << endl;
    }
    return 0;
}
```
该示例实现了一个可以解决异或(XOR)问题的BP神经网络。在主函数中,先创建了一个包含2个输入层神经元、3个隐藏层神经元和1个输出层神经元的BP神经网络。接着,通过多次迭代,使用反向传播算法来训练神经网络,最后使用训练得到的神经网络来进行预测。输出结果如下:
```
0 XOR 0 = 0.0027991
0 XOR 1 = 0.995141
1 XOR 0 = 0.995164
1 XOR 1 = 0.005761
```
可以看到,该BP神经网络成功地解决了异或问题。