# coding: utf-8
"""Compare a linear fit and a logistic fit on a 1-D two-class toy dataset.

Draws 40 points from each of two normal distributions (class 0 and class 1),
fits sklearn's LinearRegression and LogisticRegression on the single feature,
and saves two figures: a scatter of the raw data, and a side-by-side plot of
the linear vs. logistic decision curves.
"""
import numpy as np
import matplotlib.pyplot as plt
from scipy.stats import norm
# NOTE(review): fft / wavfile / datasets were imported in the original but are
# never used — kept for fidelity; safe to delete if nothing else needs them.
from scipy import fft
from scipy.io import wavfile
from sklearn.linear_model import LinearRegression, LogisticRegression

# Number of samples per class (total 2*n points).
n = 40

# Simulate the feature x with two normal distributions (rvs = random variates):
# class 0 centered at 2 (scale 2), class 1 centered at 8 (scale 3).
# hstack stitches the two samples into one 1-D array of length 2*n.
x = np.hstack((norm.rvs(loc=2, size=n, scale=2),
               norm.rvs(loc=8, size=n, scale=3)))
# Labels: n zeros (class 0) followed by n ones (class 1).
y = np.hstack((np.zeros(n), np.ones(n)))

# Scatter plot of the raw data: 10x4-inch figure at 80 dpi.
plt.figure(figsize=(10, 4), dpi=80)
# Lower bound of the horizontal axis; the original upper bound was lost in
# transcription — TODO confirm against the source article.
plt.xlim(-5, 20)
plt.scatter(x, y, c=y)
plt.xlabel("feature value")
plt.ylabel("class")
plt.grid(True, linestyle='-', color='0.75')
plt.savefig("C:/users/zhen/desktop/logistic_classify.png", bbox_inches="tight")

# 10 evenly spaced query points for drawing the fitted curves; the original
# comment says "10 numbers in a range of -5 to 15" — TODO confirm bounds.
xs = np.linspace(-5, 15, 10)

# ---- Linear regression -------------------------------------------------
clf = LinearRegression()
# Reshape the 1-D feature into the (2*n, 1) column expected by sklearn.
clf.fit(x.reshape(n * 2, 1), y)


def lin_model(clf, x):
    """Evaluate the fitted linear model at x (intercept + slope * x)."""
    return clf.intercept_ + clf.coef_ * x


# ---- Logistic regression -----------------------------------------------
logclf = LogisticRegression()
# Same (2*n, 1) reshape as above.
logclf.fit(x.reshape(n * 2, 1), y)


def lr_model(clf, x):
    """Evaluate the fitted logistic model at x: sigmoid of the linear score."""
    return 1.0 / (1.0 + np.exp(-(clf.intercept_ + clf.coef_ * x)))


# ---- Plot both fits side by side ---------------------------------------
plt.figure(figsize=(10, 5))

# First of two sub-plots in one row: the linear fit.
plt.subplot(1, 2, 1)
plt.scatter(x, y, c=y)
plt.plot(x, lin_model(clf, x), "o", color="orange")
plt.plot(xs, lin_model(clf, xs), "-", color="green")
plt.xlabel("feature value")
plt.ylabel("class")
plt.title("linear fit")
plt.grid(True, linestyle='-', color='0.75')

# Second sub-plot: the logistic fit. ravel() flattens the (k, 1) output of
# the model so pyplot receives 1-D data.
plt.subplot(1, 2, 2)
plt.scatter(x, y, c=y)
plt.plot(x, lr_model(logclf, x).ravel(), "o", color="c")
plt.plot(xs, lr_model(logclf, xs).ravel(), "-", color="green")
plt.xlabel("feature value")
plt.ylabel("class")
plt.title("logistic fit")
plt.grid(True, linestyle='-', color='0.75')

plt.tight_layout(pad=0.4, w_pad=0, h_pad=1.0)
plt.savefig("C:/users/zhen/desktop/logistic_classify2.png", bbox_inches="tight")
Result:
Logistic regression in Python: the linear fit and the logistic (sigmoid) fit plotted side by side.