layerGraph.py的代码和测试代码
时间: 2024-05-10 15:14:24 浏览: 98
以下是layerGraph.py的代码:
```python
import numpy as np
class Layer:
    """Abstract base class for every layer in the network.

    Concrete layers override ``forward`` and ``backward``; each layer
    caches its most recent input and output on the instance so the
    backward pass can reuse them.
    """

    def __init__(self):
        # Filled in by forward() of concrete subclasses.
        self.input = None
        self.output = None

    def forward(self, input):
        """Compute this layer's output from ``input`` (subclass hook)."""
        raise NotImplementedError

    def backward(self, output_error, learning_rate):
        """Propagate ``output_error`` upstream, updating any parameters
        with step size ``learning_rate`` (subclass hook)."""
        raise NotImplementedError
class Input(Layer):
    """Identity layer: exposes the raw network input as its output."""

    def forward(self, input):
        # Pass-through — no transformation and no learnable parameters.
        self.output = input
class Dense(Layer):
    """Fully connected layer: ``output = input @ weights + bias``."""

    def __init__(self, input_size, output_size):
        # Fix: the original skipped super().__init__(), leaving
        # self.input / self.output undefined until forward() ran.
        super().__init__()
        # He initialization — appropriate for the ReLU activations used here.
        self.weights = np.random.randn(input_size, output_size) * np.sqrt(2.0 / input_size)
        self.bias = np.zeros(output_size)

    def forward(self, input):
        """Cache ``input`` and compute the affine transform."""
        self.input = input
        self.output = np.dot(input, self.weights) + self.bias

    def backward(self, output_error, learning_rate):
        """Apply one SGD step to weights/bias; return the error w.r.t. the input."""
        input_error = np.dot(output_error, self.weights.T)
        weights_error = np.dot(self.input.T, output_error)
        # Sum over the batch axis so the gradient matches the bias shape.
        bias_error = np.sum(output_error, axis=0)
        self.weights -= learning_rate * weights_error
        self.bias -= learning_rate * bias_error
        return input_error
class ReLU(Layer):
    """Elementwise rectified-linear activation: max(0, x)."""

    def forward(self, input):
        self.input = input
        self.output = np.maximum(0, input)

    def backward(self, output_error, learning_rate):
        # Gradient is blocked exactly where the forward input was <= 0;
        # learning_rate is unused (no parameters to update).
        return np.where(self.input <= 0, 0, output_error)
class Softmax(Layer):
    """Row-wise softmax activation over the last axis."""

    def forward(self, input):
        self.input = input
        # Subtract the per-row max before exp for numerical stability.
        exp_values = np.exp(input - np.max(input, axis=1, keepdims=True))
        probabilities = exp_values / np.sum(exp_values, axis=1, keepdims=True)
        self.output = probabilities

    def backward(self, output_error, learning_rate):
        """Backpropagate through softmax via its Jacobian, one sample at a time.

        Bug fix: the softmax Jacobian is ``diag(s) - s sᵀ``.  With 1-D rows
        ``np.dot(s, s.T)`` computes the scalar inner product, not the outer
        product, so the original subtracted a scalar from the whole matrix.
        ``np.outer`` builds the correct rank-1 matrix.
        """
        input_error = np.zeros_like(output_error)
        for i, (err_row, s) in enumerate(zip(output_error, self.output)):
            jacobian = np.diagflat(s) - np.outer(s, s)
            input_error[i] = np.dot(jacobian, err_row)
        return input_error
```
以下是测试代码:
```python
import numpy as np
from layerGraph import Input, Dense, ReLU, Softmax
# Build the network as an ordered list of layers.
layers = [Input(), Dense(2, 3), ReLU(), Dense(3, 2), Softmax()]

# Forward pass: feed each layer's output into the next.
input_data = np.array([[1.0, 2.0]])
activation = input_data
for layer in layers:
    layer.forward(activation)
    activation = layer.output

# Report the network's prediction.
print("Input: {}".format(input_data))
print("Output: {}".format(layers[-1].output))

# Backward pass: walk the layers in reverse (Input has no backward),
# using the raw (prediction - target) error and a 0.1 learning rate.
target_output = np.array([[0.0, 1.0]])
error = layers[-1].output - target_output
for layer in reversed(layers[1:]):
    error = layer.backward(error, 0.1)

# Report the error propagated back to the first trainable layer's input.
print("Input error: {}".format(error))
```
阅读全文