Newton's Method for Logistic Regression, with DFP and BFGS Quasi-Newton Solutions
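The script below fits a logistic regression classifier to synthetic two-dimensional data in three ways: plain Newton's method (newtonMethod) and the DFP and BFGS quasi-Newton methods (DFP, BFGS). For reference, these are the quantities the gradientSum and hessianMatSum loops in newtonMethod accumulate for the sigmoid hypothesis $h_\theta(x) = \sigma(\theta^{\mathsf T} x)$, followed by the Newton update itself:

$$\nabla J(\theta) = \sum_{i=1}^{m} \big(h_\theta(x_i) - y_i\big)\, x_i, \qquad H(\theta) = \sum_{i=1}^{m} h_\theta(x_i)\big(1 - h_\theta(x_i)\big)\, x_i x_i^{\mathsf T}, \qquad \theta \leftarrow \theta - H^{-1}\nabla J(\theta).$$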
# coding: utf-8
import matplotlib.pyplot as plt
import numpy as np

def dataN(length):  # generate synthetic two-class data (bias term in column 0)
    x = np.ones(shape=(length, 3))
    y = np.zeros(length)
    for i in np.arange(0, length / 100, 0.02):
        k = int(round(100 * i))  # even index: positive class, odd index: negative class
        x[k][0] = 1
        x[k][1] = i
        x[k][2] = i + 1 + np.random.uniform(0, 1.2)
        y[k] = 1
        x[k + 1][0] = 1
        x[k + 1][1] = i + 0.01
        x[k + 1][2] = i + 0.01 + np.random.uniform(0, 1.2)
        y[k + 1] = 0
    return x, y

def sigmoid(x):  # sigmoid function
    return 1.0 / (1 + np.exp(-x))

def DFP(x, y, iter):  # DFP quasi-Newton method
    n = len(x[0])
    theta = np.ones((n, 1))
    y = np.mat(y).T
    Gk = np.eye(n, n)  # approximation of the inverse Hessian
    grad_last = np.dot(x.T, sigmoid(np.dot(x, theta)) - y)
    cost = []
    for it in range(iter):
        pk = -1 * Gk.dot(grad_last)  # search direction
        rate = alphA(x, y, theta, pk)
        theta = theta + rate * pk
        grad = np.dot(x.T, sigmoid(np.dot(x, theta)) - y)
        delta_k = rate * pk
        y_k = grad - grad_last
        Pk = delta_k.dot(delta_k.T) / (delta_k.T.dot(y_k))
        Qk = Gk.dot(y_k).dot(y_k.T).dot(Gk) / (y_k.T.dot(Gk).dot(y_k)) * (-1)
        Gk = Gk + Pk + Qk  # DFP rank-two update
        grad_last = grad
        cost.append(np.sum(grad_last))
    return theta, cost

def BFGS(x, y, iter):  # BFGS quasi-Newton method
    n = len(x[0])
    theta = np.ones((n, 1))
    y = np.mat(y).T
    Bk = np.eye(n, n)  # approximation of the Hessian
    grad_last = np.dot(x.T, sigmoid(np.dot(x, theta)) - y)
    cost = []
    for it in range(iter):
        pk = -1 * np.linalg.solve(Bk, grad_last)  # solve Bk * pk = -grad
        rate = alphA(x, y, theta, pk)
        theta = theta + rate * pk
        grad = np.dot(x.T, sigmoid(np.dot(x, theta)) - y)
        delta_k = rate * pk
        y_k = grad - grad_last
        Pk = y_k.dot(y_k.T) / (y_k.T.dot(delta_k))
        Qk = Bk.dot(delta_k).dot(delta_k.T).dot(Bk) / (delta_k.T.dot(Bk).dot(delta_k)) * (-1)
        Bk = Bk + Pk + Qk  # BFGS rank-two update
        grad_last = grad
        cost.append(np.sum(grad_last))
    return theta, cost

def alphA(x, y, theta, pk):  # crude line search: pick the step a = 1/k^2 with the lowest |cost|
    c = float("inf")
    t = theta
    for k in range(1, 200):
        a = 1.0 / k ** 2
        theta = t + a * pk
        f = np.sum(np.dot(x.T, sigmoid(np.dot(x, theta)) - y))
        if abs(f) > c:
            break
        c = abs(f)
        alpha = a
    return alpha

def newtonMethod(x, y, iter):  # Newton's method
    m = len(x)
    n = len(x[0])
    theta = np.zeros(n)
    cost = []
    for it in range(iter):
        gradientSum = np.zeros(n)
        hessianMatSum = np.zeros(shape=(n, n))
        for i in range(m):
            hypothesis = sigmoid(np.dot(x[i], theta))
            loss = hypothesis - y[i]
            gradient = loss * x[i]
            gradientSum = gradientSum + gradient
            hessian = [b * x[i] * (1 - hypothesis) * hypothesis for b in x[i]]  # x_i x_i^T * h(1-h)
            hessianMatSum = np.add(hessianMatSum, hessian)
        hessianMatInv = np.mat(hessianMatSum).I
        for k in range(n):
            theta[k] -= float(np.dot(hessianMatInv[k], gradientSum))
        cost.append(np.sum(gradientSum))
    return theta, cost

def tesT(theta, x, y):  # accuracy rate
    length = len(x)
    count = 0
    for i in range(length):
        predict = sigmoid(x[i, :] * np.reshape(theta, (3, 1)))[0] > 0.5
        if predict == bool(y[i]):
            count += 1
    accuracy = float(count) / length
    return accuracy

def showP(x, y, theta, cost, iter):  # plot cost curve and decision boundary
    plt.figure(1)
    plt.plot(range(iter), cost)
    plt.figure(2)
    color = ['or', 'ob']
    for i in range(length):
        plt.plot(x[i, 1], x[i, 2], color[int(y[i])])
    plt.plot([0, length / 100], [-theta[0], -theta[0] - theta[1] * length / 100] / theta[2])
    plt.show()

length = 200
iter = 5
x, y = dataN(length)

theta, cost = BFGS(x, y, iter)
print(theta)                      # [[-18.93768161] [-16.52178427] [ 16.95779981]]
print(tesT(theta, np.mat(x), y))  # 0.935
showP(x, y, theta.getA(), cost, iter)

theta, cost = DFP(x, y, iter)
print(theta)                      # [[-18.51841028] [-16.17880599] [ 16.59649161]]
print(tesT(theta, np.mat(x), y))  # 0.935
showP(x, y, theta.getA(), cost, iter)

theta, cost = newtonMethod(x, y, iter)
print(theta)                      # [-14.49650536 -12.78692552  13.05843361]
print(tesT(theta, np.mat(x), y))  # 0.935
showP(x, y, theta, cost, iter)
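The Pk and Qk terms in DFP and BFGS above are the standard rank-two updates, written here with $\delta_k = \theta_{k+1} - \theta_k$ and $y_k = \nabla J_{k+1} - \nabla J_k$. DFP maintains an approximation $G_k$ of the inverse Hessian, while BFGS maintains an approximation $B_k$ of the Hessian itself, which is why the BFGS search direction comes from np.linalg.solve rather than a matrix product:

$$G_{k+1} = G_k + \frac{\delta_k \delta_k^{\mathsf T}}{\delta_k^{\mathsf T} y_k} - \frac{G_k y_k y_k^{\mathsf T} G_k}{y_k^{\mathsf T} G_k y_k}, \qquad B_{k+1} = B_k + \frac{y_k y_k^{\mathsf T}}{y_k^{\mathsf T} \delta_k} - \frac{B_k \delta_k \delta_k^{\mathsf T} B_k}{\delta_k^{\mathsf T} B_k \delta_k}.$$

With length = 200 points and iter = 5 iterations, all three solvers reach roughly 0.935 training accuracy, matching the values recorded in the comments at the end of the script.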