当前位置:   article > 正文

BP神经网络 Python 实现(附完整代码)

bp神经网络python代码
  1. import numpy as np
  2. import matplotlib.pyplot as plt
  3. def relu(x):
  4. return np.maximum(0, x)
  5. def relu_derivative(x):
  6. return np.where(x > 0, 1, 0)
  7. def init_data(x_dim, pam, x_test_dim):
  8. X = np.random.uniform(0.0, 1.0, x_dim)
  9. X_test = np.random.uniform(0.0, 1.0, x_test_dim)
  10. # print(X)
  11. Y = X.dot(np.array(pam))
  12. Y=np.expand_dims(Y, axis=0).T
  13. # X=np.expand_dims(X,axis=0)
  14. # print(X,Y)
  15. Y_test = X_test.dot(np.array(pam))
  16. Y_test = np.expand_dims(Y_test, axis=0).T
  17. return X, Y, X_test, Y_test
  18. def initialize(input_size, hidden_size, output_size):
  19. W1 = np.random.randn(input_size, hidden_size)
  20. b1 = np.zeros(hidden_size)
  21. W2 = np.random.randn(hidden_size, output_size)
  22. b2 = np.zeros(output_size)
  23. return W1, b1, W2, b2
  24. def forward(X, W1, b1, W2, b2):
  25. Z1 = np.dot(X, W1) + b1
  26. A1 = relu(Z1)
  27. Z2 = np.dot(A1, W2) + b2
  28. A2 = relu(Z2)
  29. return Z1, A1, Z2, A2
  30. def loss(Y, Z2):
  31. return np.mean((Y - Z2) ** 2)
  32. def backpropagation(X, Y, Z1, A1, Z2, A2, W1, b1, W2, b2, learning_rate):
  33. # w3 = 2 * (A2 - Y) / 10
  34. # dW2 = np.dot(A1.T, w3)
  35. # # print(dW2.shape)
  36. # db2 = np.sum(w3, axis=0)
  37. # # print(db2.shape)
  38. # dW1 = np.dot(X.T, (w3 * W2.T * relu_derivative(Z1)))
  39. # # print(dW1.shape)
  40. # db1 = np.sum(w3 * W2.T * relu_derivative(Z1), axis=0)
  41. # # print(db1.shape)
  42. w3 = 2 * (A2 - Y)
  43. dW2 = np.dot(A1.T, w3 * relu_derivative(Z2))
  44. # print(dW2.shape)
  45. db2 = np.sum(w3, axis=0)
  46. # print(db2.shape)
  47. w4 = 2 * (A2 - Y) * relu_derivative(Z2)
  48. dW1 = np.dot(X.T, (w4 * W2.T * relu_derivative(Z1)))
  49. # print(dW1.shape)
  50. db1 = np.sum(w3 * W2.T * relu_derivative(Z1), axis=0)
  51. # print(db1.shape)
  52. W1 -= learning_rate * dW1
  53. b1 -= learning_rate * db1
  54. W2 -= learning_rate * dW2
  55. b2 -= learning_rate * db2
  56. return W1, b1, W2, b2
  57. def train(input_size, output_size, X, Y, hidden_size, learning_rate, epochs):
  58. W1, b1, W2, b2 = initialize(input_size, hidden_size, output_size)
  59. b1 = np.expand_dims(b1, axis=0)
  60. b2 = np.expand_dims(b2, axis=0)
  61. # print(W1.shape, W2.shape, b1.shape, b2.shape)
  62. loss_list = []
  63. for epoch in range(epochs):
  64. Z1, A1, Z2, A2 = forward(X, W1, b1, W2, b2)
  65. loss1 = loss(Y, A2)
  66. W1, b1, W2, b2 = backpropagation(X, Y, Z1, A1, Z2, A2, W1, b1, W2, b2, learning_rate)
  67. print("已训练{}轮".format(epoch))
  68. loss_list.append(loss1)
  69. return W1, b1, W2, b2, loss_list
  70. def predict(X_text ,Y_test, W1, b1, W2, b2):
  71. Z1, A1, Z2, A2 = forward(X_text, W1, b1, W2, b2)
  72. loss1 = loss(Y_test, A2)
  73. print("预测值的损失为:", loss1)
  74. return Z2
  75. x_dim = (30, 3)
  76. x_test_dim = (5, 3)
  77. pam = [1.3, 0.5, 1.5]
  78. X, Y, X_test, Y_test = init_data(x_dim, pam, x_test_dim)
  79. # 训练神经网络
  80. hidden_size = 25
  81. learning_rate = 0.01
  82. epochs = 100
  83. input_size = x_dim[1]
  84. output_size = 1
  85. W1, b1, W2, b2, loss_list = train(input_size, output_size, X, Y, hidden_size, learning_rate, epochs)
  86. # 绘制loss曲线
  87. plt.figure()
  88. x = np.arange(0, epochs)
  89. plt.title("loss")
  90. plt.plot(x, loss_list)
  91. plt.show()
  92. # 预测
  93. predictions = predict(X_test, Y_test, W1, b1, W2, b2)
  94. print("预测值与真实值的差值:\n", predictions - Y_test)

# 运行结果

预测值的损失为: 0.13031508881235027
预测值与真实值的差值:
 [[0.10231436]
 [0.29744002]
 [0.30471985]
 [0.17542227]
 [0.65498816]]
 

# 损失曲线

 

声明:本文内容由网友自发贡献,不代表【wpsshop博客】立场,版权归原作者所有,本站不承担相应法律责任。如您发现有侵权的内容,请联系我们。转载请注明出处:https://www.wpsshop.cn/w/我家小花儿/article/detail/327981?site
推荐阅读
相关标签
  

闽ICP备14008679号