用python写个前馈神经网络
时间: 2023-03-13 21:16:33 浏览: 114
# A simple feedforward neural network example built with NumPy.
import numpy as np

# Training data: XOR truth table, with a constant 1 in the third column
# acting as an extra bias-like input.
X = np.array([[0, 0, 1], [0, 1, 1], [1, 0, 1], [1, 1, 1]])
y = np.array([[0], [1], [1], [0]])

# Hyperparameters and layer sizes.
num_epochs = 60000
input_layer_neurons = 3
hidden_layer_neurons = 4
output_neurons = 1

# Randomly initialise weights and biases with uniform values in [0, 1).
wh = np.random.uniform(size=(input_layer_neurons, hidden_layer_neurons))
bh = np.random.uniform(size=(1, hidden_layer_neurons))
wout = np.random.uniform(size=(hidden_layer_neurons, output_neurons))
bout = np.random.uniform(size=(1, output_neurons))
def sigmoid(x):
    """Logistic sigmoid activation: maps any real input into (0, 1).

    Works element-wise on NumPy arrays as well as on scalars.
    """
    return 1 / (1 + np.exp(-x))
# Train with full-batch gradient descent via backpropagation.
learning_rate = 0.1
for _ in range(num_epochs):
    # Forward pass: hidden layer pre-activation + sigmoid.
    hidden_activations = sigmoid(np.dot(X, wh) + bh)
    # Forward pass: output layer pre-activation + sigmoid.
    output = sigmoid(np.dot(hidden_activations, wout) + bout)

    # Backward pass.
    error = y - output
    # Output-layer delta: error scaled by the sigmoid derivative s*(1-s).
    delta_output = error * output * (1 - output)
    # Hidden-layer delta: backpropagate through wout, scale by derivative.
    delta_hidden = (
        hidden_activations * (1 - hidden_activations) * np.dot(delta_output, wout.T)
    )

    # Parameter updates. Because error = y - output, "+=" here performs
    # gradient DESCENT on the squared error.
    wout += np.dot(hidden_activations.T, delta_output) * learning_rate
    wh += np.dot(X.T, delta_hidden) * learning_rate
    bout += np.sum(delta_output, axis=0, keepdims=True) * learning_rate
    bh += np.sum(delta_hidden, axis=0, keepdims=True) * learning_rate

# Final predictions after training (one row per training sample).
print('输出: \n', output)
阅读全文