当前位置:   article > 正文

52、逻辑回归（Logistic Regression）

准确率统计使用列表推导式：`[1 if Y[i] == Y1[i] else 0 for i in range(len(Y))]`
  1. import random as rd
  2. import matplotlib.pyplot as plt
  3. import functools
  4. import math
  5. import numpy as np
  6. def sigmoid(x):
  7. if x >= 500:
  8. return 0
  9. if x <= -500:
  10. return 1
  11. return 1/(1+math.exp(-x))
  12. def sample_generate(n, low, sup, w, e):
  13. X = [[(sup-low) * rd.random() + low if j < len(w)-1 else 1 for j in range(len(w))] for i in range(n)]
  14. Y = [round(sigmoid(sum([w[j]*X[i][j] for j in range(len(w))])+e*(rd.random()-0.5))) for i in range(n)]
  15. return X,Y
  16. def LR_2_plot(X, Y):
  17. X = [X[i][0] for i in range(len(X))]
  18. c = ["r" if Y[i] == 1 else "b" for i in range(len(Y))]
  19. #点、线展示
  20. plt.scatter(X, Y, edgecolors=c)
  21. plt.show()
  22. def f(w, X, Y):
  23. return sum([math.pow((sigmoid(sum([X[i][j]*w[j] for j in range(len(w))])) - Y[i]),2) for i in range(len(X))])
  24. def mod(dw):
  25. return math.sqrt(sum([dw[i]*dw[i] for i in range(len(dw))]))
# r is the bracket-expansion factor; step_min is the minimum step length.
def Gradient(X, Y, r = 0.8, step_min = 0.00001):
    """Fit weights w by gradient descent with an exact line search.

    Each iteration computes a search direction dw, brackets a step size
    by repeated expansion, then refines it with a ternary search on the
    loss f. Stops when the chosen step moves w by less than step_min.

    NOTE(review): dw includes an extra factor (sigmoid(..) - 1) beyond
    the squared-error residual, which flips the sign relative to the
    true gradient — so `w + left*dw` is (presumably) a descent step.
    Confirm against the intended derivative before changing signs.
    """
    n = len(X)       # number of samples
    d = len(X[0])    # number of features (incl. bias column)
    w = [1 for i in range(d)]  # initial guess: all ones
    while True:
        print(w)  # debug trace of the current iterate
        left = 0
        right = 1
        # Search direction: for each coordinate i, sum over samples j of
        # x_ji * (s_j - y_j) * (s_j - 1), where s_j = sigmoid(x_j · w).
        dw = [sum([X[j][i]*(sigmoid(sum([X[j][k]*w[k] for k in range(d)])) - Y[j])*(sigmoid(sum([X[j][k]*w[k] for k in range(d)])) - 1) for j in range(n)]) for i in range(d)]
        # First bracket the optimum: keep expanding `right` while stepping
        # that far along dw still decreases the loss.
        while f([w[i]+right*dw[i] for i in range(d)], X, Y) < f(w, X, Y):
            right = right*(2-r)
        # Then refine the step length with a ternary search on [left, right].
        mid1 = left*2/3 + right/3
        mid2 = left/3 + right*2/3
        while abs(left - right)*mod(dw) > step_min:
            if f([w[i]+mid1*dw[i] for i in range(d)], X, Y) < f([w[i]+mid2*dw[i] for i in range(d)], X, Y):
                right = mid2
            else:
                left = mid1
            mid1 = left * 2 / 3 + right / 3
            mid2 = left / 3 + right * 2 / 3
        # Converged: the accepted step moves w by less than step_min.
        if left*mod(dw) < step_min:
            break
        w = [w[i] + left * dw[i] for i in range(d)]
    return w
  53. if __name__ == "__main__":
  54. X, Y = sample_generate(n = 1000, low = -10, sup = 10, w = [1,2,3,4,5], e = 5)
  55. # X = [[8.413821508016959, 1], [8.899549892638564, 1], [-2.3498826552587904, 1], [9.602673657236384, 1], [9.195780070565458, 1], [-5.545278156033005, 1], [-6.34749638138797, 1], [5.392609937972598, 1], [-8.072733305295914, 1], [-7.228355148125498, 1]]
  56. # Y = [1, 1, 0, 1, 1, 0, 0, 1, 0, 0]
  57. print(X)
  58. print(Y)
  59. #样本画图,仅限w为2维
  60. # LR_2_plot(X, Y)
  61. # 梯度下降法
  62. w = Gradient(X, Y)
  63. print("\nw: ", w)
  64. Y1 = [round(sigmoid(sum([w[j]*X[i][j] for j in range(len(w))]))) for i in range(len(X))]
  65. print("回归准确率:", sum([1 if Y[i] == Y1[i] else 0 for i in range(len(Y))])/len(Y))

 

声明:本文内容由网友自发贡献,不代表【wpsshop博客】立场,版权归原作者所有,本站不承担相应法律责任。如您发现有侵权的内容,请联系我们。转载请注明出处:https://www.wpsshop.cn/w/小丑西瓜9/article/detail/145148
推荐阅读
相关标签
  

闽ICP备14008679号