import random as rd
import matplotlib.pyplot as plt
import math

# Numerically safe sigmoid: return the limiting values directly for large |x|
# so that math.exp is never evaluated with a huge argument.
def sigmoid(x):
    if x >= 500:
        return 1
    if x <= -500:
        return 0
    return 1/(1+math.exp(-x))
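
The two guard clauses simply return the sigmoid's limits, so `math.exp` never overflows (in double precision `math.exp(x)` overflows once x exceeds roughly 709; the cutoff at ±500 is conservative):

$$
\sigma(x)=\frac{1}{1+e^{-x}},\qquad \lim_{x\to+\infty}\sigma(x)=1,\qquad \lim_{x\to-\infty}\sigma(x)=0
$$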

# Generate n samples: each row of X holds len(w)-1 uniform features in [low, sup)
# plus a trailing constant 1 for the bias term; labels are the rounded sigmoid of
# w·x with uniform noise of width e added.
def sample_generate(n, low, sup, w, e):
    X = [[(sup-low) * rd.random() + low if j < len(w)-1 else 1 for j in range(len(w))] for i in range(n)]
    Y = [round(sigmoid(sum([w[j]*X[i][j] for j in range(len(w))])+e*(rd.random()-0.5))) for i in range(n)]
    return X, Y

# Scatter plot of label vs. the single feature (only meaningful when w is
# 2-dimensional: one feature plus the bias column).
def LR_2_plot(X, Y):
    X = [X[i][0] for i in range(len(X))]
    c = ["r" if Y[i] == 1 else "b" for i in range(len(Y))]
    # plot the points, colored by label
    plt.scatter(X, Y, edgecolors=c)
    plt.show()
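
A minimal way to exercise the plot is to generate data with a 2-component weight vector, i.e. one feature plus the bias column that `sample_generate` appends (the weights and sample count below are illustrative, not from the original run):

```python
# Assumes sample_generate and LR_2_plot from this script are in scope.
X2, Y2 = sample_generate(n=200, low=-10, sup=10, w=[1, -2], e=5)  # hypothetical weights
LR_2_plot(X2, Y2)  # labels (0/1) against the single feature, colored by class
```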

# Squared-error loss of the sigmoid outputs against the labels.
def f(w, X, Y):
    return sum([math.pow(sigmoid(sum([X[i][j]*w[j] for j in range(len(w))])) - Y[i], 2) for i in range(len(X))])
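
f is the squared-error loss of the sigmoid output. Its gradient, which `Gradient` below negates (dropping the constant factor 2) to obtain the descent direction dw, follows from the chain rule with σ'(z) = σ(z)(1 − σ(z)):

$$
f(w)=\sum_{i}\bigl(\sigma(x_i\cdot w)-y_i\bigr)^2,\qquad
\frac{\partial f}{\partial w_k}=\sum_{i}2\bigl(\sigma(x_i\cdot w)-y_i\bigr)\,\sigma(x_i\cdot w)\bigl(1-\sigma(x_i\cdot w)\bigr)x_{ik}
$$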

# Euclidean norm of the direction vector dw.
def mod(dw):
    return math.sqrt(sum([dw[i]*dw[i] for i in range(len(dw))]))

# r controls the bracket-expansion factor (the interval grows by 2 - r each time);
# step_min is the minimum step length used as the stopping criterion.
def Gradient(X, Y, r=0.8, step_min=0.00001):
    n = len(X)
    d = len(X[0])
    w = [1 for i in range(d)]

    while True:
        print(w)
        left = 0
        right = 1
        # Descent direction: the negative gradient of f (up to the constant factor 2),
        # using sigmoid'(z) = sigmoid(z) * (1 - sigmoid(z)).
        s = [sigmoid(sum([X[j][k]*w[k] for k in range(d)])) for j in range(n)]
        dw = [sum([X[j][i]*(s[j] - Y[j])*s[j]*(s[j] - 1) for j in range(n)]) for i in range(d)]
        # First bracket the search interval by repeated expansion.
        while f([w[i]+right*dw[i] for i in range(d)], X, Y) < f(w, X, Y):
            right = right*(2-r)
        # Then use ternary search to pin down the step length.
        mid1 = left*2/3 + right/3
        mid2 = left/3 + right*2/3
        while abs(left - right)*mod(dw) > step_min:
            if f([w[i]+mid1*dw[i] for i in range(d)], X, Y) < f([w[i]+mid2*dw[i] for i in range(d)], X, Y):
                right = mid2
            else:
                left = mid1
            mid1 = left*2/3 + right/3
            mid2 = left/3 + right*2/3

        # Stop once the accepted step is shorter than step_min; otherwise take it.
        if left*mod(dw) < step_min:
            break
        w = [w[i] + left*dw[i] for i in range(d)]

    return w
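
`Gradient` performs an exact line search along `dw` on every outer iteration: the bracket `[left, right]` is grown by a factor of `2 - r` while a step of size `right` still lowers the loss, then ternary search shrinks the bracket, which works because the loss is assumed unimodal along the search ray. A standalone sketch of the same bracket-then-ternary-search idea on a one-dimensional function (a hypothetical helper, not part of the script above):

```python
def line_search(phi, r=0.8, tol=1e-5):
    """Minimize a unimodal 1-D function phi(t) for t >= 0 (illustrative sketch)."""
    left, right = 0.0, 1.0
    # Expand the bracket while a step of size `right` still decreases phi.
    while phi(right) < phi(0.0):
        right *= (2 - r)
    # Ternary search: discard the third whose interior point is worse.
    while right - left > tol:
        mid1 = left + (right - left) / 3
        mid2 = right - (right - left) / 3
        if phi(mid1) < phi(mid2):
            right = mid2
        else:
            left = mid1
    return left

print(line_search(lambda t: (t - 2.0) ** 2))  # ~2.0
```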


if __name__ == "__main__":

    X, Y = sample_generate(n=1000, low=-10, sup=10, w=[1, 2, 3, 4, 5], e=5)
    # X = [[8.413821508016959, 1], [8.899549892638564, 1], [-2.3498826552587904, 1], [9.602673657236384, 1], [9.195780070565458, 1], [-5.545278156033005, 1], [-6.34749638138797, 1], [5.392609937972598, 1], [-8.072733305295914, 1], [-7.228355148125498, 1]]
    # Y = [1, 1, 0, 1, 1, 0, 0, 1, 0, 0]
    print(X)
    print(Y)

    # Plot the samples (only when w is 2-dimensional)
    # LR_2_plot(X, Y)

    # Gradient descent
    w = Gradient(X, Y)
    print("\nw: ", w)

    Y1 = [round(sigmoid(sum([w[j]*X[i][j] for j in range(len(w))]))) for i in range(len(X))]
    print("accuracy:", sum([1 if Y[i] == Y1[i] else 0 for i in range(len(Y))])/len(Y))