Spectral clustering first computes the pairwise similarity between samples. From the similarity matrix it derives the Laplacian matrix, maps each sample into the space spanned by the Laplacian's eigenvectors, and finally applies k-means to cluster the mapped points.
Scikit-learn Open Source package already has a ready-made interface to use, detailed see
http://scikit-learn.org/dev/modules/generated/sklearn.cluster.SpectralClustering.html#sklearn.cluster.SpectralClustering
Below is a worked example on a synthetic test set.
First, construct a set of sample points in two-dimensional space.
#!/usr/bin/env python
"""Build a toy 2-D data set for the spectral-clustering example.

Writes two tab-separated files:
  * points.txt   -- "index<TAB>x<TAB>y" for every sample point
  * sim_pnts.txt -- "i<TAB>j<TAB>similarity" for every ordered pair,
                    where similarity = 1/distance, capped at 10 for
                    (near-)coincident points to avoid division by zero.
"""
import math

import numpy as np


def get_dist(pnt1, pnt2):
    """Euclidean distance between two (index, (x, y)) point records."""
    return math.sqrt((pnt1[1][0] - pnt2[1][0]) ** 2 +
                     (pnt1[1][1] - pnt2[1][1]) ** 2)


def generate_points():
    """Create three well-separated point clusters and write points.txt.

    Returns the list of (index, (x, y)) records.

    NOTE(review): the arange bounds in the original post were garbled by
    extraction; these ranges keep its structure -- one cluster in the
    positive quadrant, two in the negative-x half plane. TODO confirm.
    """
    pointlist = []
    index = 0
    with open("points.txt", "w") as fd:
        # Cluster 1: positive quadrant near the origin.
        for x in np.arange(0.1, 1.0, 0.1):
            for y in np.arange(0.1, 1.0, 0.1):
                print("%d\t%s\t%s" % (index, x, y), file=fd)
                pointlist.append((index, (x, y)))
                index += 1
        # Cluster 2: negative x, positive y.
        for x in np.arange(-10.0, -0.1, 0.5):
            for y in np.arange(0.0, 10.0, 0.5):
                print("%d\t%s\t%s" % (index, x, y), file=fd)
                pointlist.append((index, (x, y)))
                index += 1
        # Cluster 3: negative x, negative y.  (The original used a
        # bare "index + 1" here -- a no-op that froze the counter.)
        for x in np.arange(-10.0, -0.1, 0.5):
            for y in np.arange(-10.0, 0.0, 0.5):
                print("%d\t%s\t%s" % (index, x, y), file=fd)
                pointlist.append((index, (x, y)))
                index += 1
    return pointlist


def write_similarity(pointlist):
    """Write the dense pairwise similarity matrix to sim_pnts.txt."""
    with open("sim_pnts.txt", "w") as simfd:
        for pnt1 in pointlist:
            for pnt2 in pointlist:
                index1, index2 = pnt1[0], pnt2[0]
                dist = get_dist(pnt1, pnt2)
                if dist <= 0.00001:
                    # Identical (or nearly so): emit the cap, not 1/0.
                    print("%d\t%d\t10" % (index1, index2), file=simfd)
                    continue
                print("%d\t%d\t%s" % (index1, index2, 1.0 / dist),
                      file=simfd)


if __name__ == "__main__":
    write_similarity(generate_points())
Using Spectral clustering:
#!/usr/bin/env python
# Adapted from a scikit-learn example by Emmanuelle Gouillart and
# Gael Varoquaux.  License: BSD 3 clause.
"""Spectral clustering of the points produced by the generator script.

Reads points.txt (index, x, y) and sim_pnts.txt (i, j, similarity),
builds a sparse affinity matrix, runs sklearn's spectral_clustering
with 3 clusters, and writes each cluster's points to cluster_<k>.
"""
import numpy as np
from scipy.sparse import coo_matrix


def load_points(path="points.txt"):
    """Return {index: (x, y)} parsed from the tab-separated points file."""
    fid2fname = {}
    with open(path) as f:
        for line in f:
            fields = line.strip().split("\t")
            fid2fname.setdefault(int(fields[0]),
                                 (float(fields[1]), float(fields[2])))
    return fid2fname


def load_affinity(fid2fname, path="sim_pnts.txt"):
    """Build the (n, n) sparse affinity matrix from the similarity file.

    A unit self-similarity is appended for every known point index so
    the diagonal is populated (the generator writes only off-diagonal
    entries for distinct points).
    """
    rowlist, collist, datalist = [], [], []
    with open(path) as f:
        for line in f:
            fields = line.strip().split("\t")
            if len(fields) < 3:
                continue  # skip malformed lines
            f1, f2, sim = fields[:3]
            rowlist.append(int(f1))
            collist.append(int(f2))
            datalist.append(float(sim))
    for fid in fid2fname:
        rowlist.append(int(fid))
        collist.append(int(fid))
        datalist.append(1.0)
    n = len(fid2fname)
    # The original post had mismatched parentheses here; coo_matrix
    # takes a single (data, (row, col)) tuple plus the shape.
    return coo_matrix((np.array(datalist),
                       (np.array(rowlist), np.array(collist))),
                      shape=(n, n))


def group_by_label(labels):
    """Map cluster label -> sorted list of point indices with that label."""
    cluster2fid = {}
    for index, lab in enumerate(labels):
        cluster2fid.setdefault(lab, []).append(index)
    return cluster2fid


def main():
    # Imported here so the stdlib-only helpers above stay importable
    # even where scikit-learn is not installed.
    from sklearn.cluster import spectral_clustering

    fid2fname = load_points()
    graph = load_affinity(fid2fname)
    # Force the solver to be arpack, since AMG is numerically
    # unstable on this example.
    labels = spectral_clustering(graph, n_clusters=3,
                                 eigen_solver="arpack")
    cluster2fid = group_by_label(labels)
    for index, lab in enumerate(cluster2fid):
        with open("cluster_%s" % index, "w") as fd:
            for fid in cluster2fid[lab]:
                print(fid2fname[fid], file=fd)


if __name__ == "__main__":
    main()
Visualize the samples after clustering:
#!/usr/bin/env Pythonimport Matplotlib.pyplot as Pltplt.figure (figsize= (12,6)) Cluster_list = []cluster_0_x = []cluster _0_y = []for line in open ("Cluster_0"): Line = Line.strip (). Split (', ') x = float (Line[0][1:].strip ()) y = Flo At (Line[1][:-1].strip ()) cluster_0_x.append (x) cluster_0_y.append (y) plt.plot (cluster_0_x, cluster_0_y, ' or ') cluster_1_x = []cluster_1_y = []for line in open ("cluster_1"): Line = Line.strip (). Split (', ') x = float (line[ 0][1:].strip ()) y = float (Line[1][:-1].strip ()) cluster_1_x.append (x) cluster_1_y.append (y) Plt.plot ( Cluster_1_x, cluster_1_y, ' xb ') cluster_2_x = []cluster_2_y = []for] in open ("cluster_2"): Line = Line.strip (). SP Lit (', ') x = float (Line[0][1:].strip ()) y = float (Line[1][:-1].strip ()) cluster_2_x.append (x) Cluster_2_y.append (y) plt.plot (cluster_2_x, cluster_2_y, ' +g ') plt.show ()
Different colors represent different clusters, and the clustering effect is clearly visible.
Spectral Clustering--spectralclustering