当前位置:   article > 正文

BGD/SGD/MBGD算法Python代码实现

mbgd代码
  1. #批量梯度下降法
  2. x = [[1,1,4],[1,2,5],[1,5,1],[1,4,2]]
  3. y = [19,26,19,20]
  4. loss = 10
  5. Iter = 0
  6. theta = [1,1,1]
  7. step = 0.01
  8. precision = 0.0001
  9. MAX_Iters =100000
  10. err0=[0,0,0,0]
  11. err1=[0,0,0,0]
  12. err2=[0,0,0,0]
  13. errSum =[0,0,0]
  14. while(Iter < MAX_Iters and loss > precision):
  15. loss = 0
  16. errSum =[0,0,0]
  17. for index in range(len(x)):
  18. prediction = theta[0]*x[index][0]+theta[1]*x[index][1]+theta[2]*x[index][2]
  19. err0[index] = (prediction - y[index])*x[index][0]
  20. errSum[0] += err0[index]
  21. err1[index] = (prediction - y[index])*x[index][1]
  22. errSum[1] += err1[index]
  23. err2[index] = (prediction - y[index])*x[index][2]
  24. errSum[2] += err2[index]
  25. for index in range(3):
  26. theta[index] = theta[index] -step*errSum[index]/4
  27. for index in range(4):
  28. prediction = theta[0]*x[index][0]+theta[1]*x[index][1]+theta[2]*x[index][2]
  29. error = (1/(2*4))*(prediction - y[index])**2
  30. loss += error
  31. Iter = Iter+1
  32. print("Iter",Iter)
  33. print(error)
  34. print ("theta", theta)
  35. #随机梯度下降法
  36. import random
  37. x = [[1,1,4],[1,2,5],[1,5,1],[1,4,2]]
  38. y = [19,26,19,20]
  39. #初始值
  40. theta =[1,1,1]
  41. loss = 10
  42. precision = 0.0001
  43. step = 0.01
  44. MAX_Inter = 100000
  45. Iter = 0
  46. err = [0,0,0]
  47. error = 0
  48. while(Iter < MAX_Inter and loss > precision):
  49. loss = 0
  50. num = random.randint(0, 3)
  51. prediction = theta[0]*x[num][0]+theta[1]*x[num][1]+theta[2]*x[num][2]
  52. err[0] = (prediction - y[num])*x[num][0]
  53. err[1] = (prediction - y[num])*x[num][1]
  54. err[2] = (prediction - y[num])*x[num][2]
  55. for index in range(3):
  56. theta[index] = theta[index] -step*err[index]
  57. for index in range(4):
  58. prediction = theta[0]*x[index][0]+theta[1]*x[index][1]+theta[2]*x[index][2]
  59. error = (1/2)*(prediction - y[index])**2
  60. loss += error
  61. Iter = Iter+1
  62. print("Iter",Iter)
  63. print(error)
  64. print ("theta", theta)
  65. #小批量梯度下降法
  66. x = [[1,1,4],[1,2,5],[1,5,1],[1,4,2]]
  67. y = [19,26,19,20]
  68. loss = 10
  69. Iter = 0
  70. theta = [1,1,1]
  71. step = 0.1
  72. precision = 0.0001
  73. MAX_Iters =100000
  74. err0=[0,0,0,0]
  75. err1=[0,0,0,0]
  76. err2=[0,0,0,0]
  77. errSum =[0,0,0]
  78. while(Iter < MAX_Iters and loss > precision):
  79. loss = 0
  80. errSum =[0,0,0]
  81. for index in range(2):
  82. prediction = theta[0]*x[index][0]+theta[1]*x[index][1]+theta[2]*x[index][2]
  83. err0[index] = (prediction - y[index])*x[index][0]
  84. errSum[0] += err0[index]
  85. err1[index] = (prediction - y[index])*x[index][1]
  86. errSum[1] += err1[index]
  87. err2[index] = (prediction - y[index])*x[index][2]
  88. errSum[2] += err2[index]
  89. for index in range(3):
  90. theta[index] = theta[index] -step*errSum[index]/4
  91. for index in range(4):
  92. prediction = theta[0]*x[index][0]+theta[1]*x[index][1]+theta[2]*x[index][2]
  93. error = (1/(2*4))*(prediction - y[index])**2
  94. loss += error
  95. Iter = Iter+1
  96. print("Iter",Iter)
  97. print(error)
  98. print ("theta", theta)

声明:本文内容由网友自发贡献,不代表【wpsshop博客】立场,版权归原作者所有,本站不承担相应法律责任。如您发现有侵权的内容,请联系我们。转载请注明出处:https://www.wpsshop.cn/w/你好赵伟/article/detail/170841?site
推荐阅读
相关标签
  

闽ICP备14008679号