psoindividual.py
import numpy as np
import ObjFunction
import copy


class PSOIndividual:

    '''
    individual of PSO
    '''

    def __init__(self, vardim, bound):
        '''
        vardim: dimension of variables
        bound: boundaries of variables
        '''
        self.vardim = vardim
        self.bound = bound
        self.fitness = 0.

    def generate(self):
        '''
        generate a random chromosome and a random initial velocity
        '''
        rnd = np.random.random(size=self.vardim)
        self.chrom = np.zeros(self.vardim)
        self.velocity = np.random.random(size=self.vardim)
        for i in range(0, self.vardim):
            self.chrom[i] = self.bound[0, i] + \
                (self.bound[1, i] - self.bound[0, i]) * rnd[i]
        self.bestPosition = np.zeros(self.vardim)
        self.bestFitness = 0.

    def calculateFitness(self):
        '''
        calculate the fitness of the chromosome
        '''
        self.fitness = ObjFunction.GrieFunc(
            self.vardim, self.chrom, self.bound)
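The bound argument is expected to be a 2 x vardim array whose first row holds the lower bounds and whose second row holds the upper bounds. A minimal usage sketch of a single individual, with illustrative values and assuming the ObjFunction module referenced below is importable:

    import numpy as np
    from psoindividual import PSOIndividual

    # 2 x 3 bound array: row 0 = lower bounds, row 1 = upper bounds (illustrative)
    bound = np.tile([[-600], [600]], 3)
    ind = PSOIndividual(3, bound)
    ind.generate()        # random position inside the bounds, random velocity in [0, 1)
    print(ind.chrom)      # e.g. three values in [-600, 600]
    print(ind.velocity)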
pso.py
import numpy as np
from psoindividual import PSOIndividual
import random
import copy
import matplotlib.pyplot as plt


class ParticleSwarmOptimization:

    '''
    the class for particle swarm optimization
    '''

    def __init__(self, sizepop, vardim, bound, MAXGEN, params):
        '''
        sizepop: population size
        vardim: dimension of variables
        bound: boundaries of variables
        MAXGEN: termination condition
        params: algorithm parameters, a list consisting of [w, c1, c2]
        '''
        self.sizepop = sizepop
        self.vardim = vardim
        self.bound = bound
        self.MAXGEN = MAXGEN
        self.params = params
        self.population = []
        self.fitness = np.zeros((self.sizepop, 1))
        self.trace = np.zeros((self.MAXGEN, 2))

    def initialize(self):
        '''
        initialize the population of PSO
        '''
        for i in range(0, self.sizepop):
            ind = PSOIndividual(self.vardim, self.bound)
            ind.generate()
            self.population.append(ind)

    def evaluation(self):
        '''
        evaluate the fitness of the population and update each personal best
        '''
        for i in range(0, self.sizepop):
            self.population[i].calculateFitness()
            self.fitness[i] = self.population[i].fitness
            if self.population[i].fitness > self.population[i].bestFitness:
                self.population[i].bestFitness = self.population[i].fitness
                self.population[i].bestPosition = copy.deepcopy(
                    self.population[i].chrom)

    def update(self):
        '''
        update velocity and position of the population of PSO
        '''
        for i in range(0, self.sizepop):
            self.population[i].velocity = self.params[0] * self.population[i].velocity + \
                self.params[1] * np.random.random(self.vardim) * \
                (self.population[i].bestPosition - self.population[i].chrom) + \
                self.params[2] * np.random.random(self.vardim) * \
                (self.best.chrom - self.population[i].chrom)
            self.population[i].chrom = self.population[i].chrom + \
                self.population[i].velocity

    def solve(self):
        '''
        the evolution process of the PSO algorithm
        '''
        self.t = 0
        self.initialize()
        self.evaluation()
        best = np.max(self.fitness)
        bestIndex = np.argmax(self.fitness)
        self.best = copy.deepcopy(self.population[bestIndex])
        self.avefitness = np.mean(self.fitness)
        self.trace[self.t, 0] = (1 - self.best.fitness) / self.best.fitness
        self.trace[self.t, 1] = (1 - self.avefitness) / self.avefitness
        print("Generation %d: optimal function value is: %f; average function value is %f" % (
            self.t, self.trace[self.t, 0], self.trace[self.t, 1]))
        while self.t < self.MAXGEN - 1:
            self.t += 1
            self.update()
            self.evaluation()
            best = np.max(self.fitness)
            bestIndex = np.argmax(self.fitness)
            if best > self.best.fitness:
                self.best = copy.deepcopy(self.population[bestIndex])
            self.avefitness = np.mean(self.fitness)
            self.trace[self.t, 0] = (1 - self.best.fitness) / self.best.fitness
            self.trace[self.t, 1] = (1 - self.avefitness) / self.avefitness
            print("Generation %d: optimal function value is: %f; average function value is %f" % (
                self.t, self.trace[self.t, 0], self.trace[self.t, 1]))

        print("Optimal function value is: %f" % self.trace[self.t, 0])
        print("Optimal solution is:")
        print(self.best.chrom)
        self.printResult()

    def printResult(self):
        '''
        plot the result of the PSO algorithm
        '''
        x = np.arange(0, self.MAXGEN)
        y1 = self.trace[:, 0]
        y2 = self.trace[:, 1]
        plt.plot(x, y1, 'r', label='optimal value')
        plt.plot(x, y2, 'g', label='average value')
        plt.xlabel("Iteration")
        plt.ylabel("function value")
        plt.title("Particle Swarm Optimization algorithm for function optimization")
        plt.legend()
        plt.show()
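The update() method implements the standard inertia-weight PSO rule, with w = params[0] and acceleration coefficients c1 = params[1], c2 = params[2]. As an isolated sketch of that rule (the function and argument names below are illustrative only and are not part of the module above):

    import numpy as np

    def pso_step(position, velocity, pbest, gbest, w=0.7298, c1=1.4962, c2=1.4962):
        # v <- w*v + c1*r1*(pbest - x) + c2*r2*(gbest - x), then x <- x + v
        r1 = np.random.random(position.shape)
        r2 = np.random.random(position.shape)
        velocity = w * velocity + c1 * r1 * (pbest - position) + c2 * r2 * (gbest - position)
        return position + velocity, velocity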
To run the program:
if __name__ == "__main__":
    # 25-dimensional Griewank problem on [-600, 600]; population size and
    # generation count below are representative settings, adjust as needed
    bound = np.tile([[-600], [600]], 25)
    pso = ParticleSwarmOptimization(60, 25, bound, 1000, [0.7298, 1.4962, 1.4962])
    pso.solve()
For the ObjFunction module, see the companion article "Simple Genetic Algorithm: A Python Implementation".
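The PSOIndividual class above calls ObjFunction.GrieFunc(vardim, chrom, bound). If that article is not at hand, here is a minimal sketch of such a module, assuming the objective is the Griewank benchmark mapped to a fitness in (0, 1] (larger is better); the exact scaling is an assumption and may differ from the original:

    # ObjFunction.py -- minimal sketch of the objective module assumed above
    import math


    def GrieFunc(vardim, x, bound):
        '''
        Griewank benchmark mapped to a fitness in (0, 1], larger is better;
        bound is accepted for interface compatibility but is not used here
        '''
        s1 = 0.
        s2 = 1.
        for i in range(1, vardim + 1):
            s1 += x[i - 1] ** 2
            s2 *= math.cos(x[i - 1] / math.sqrt(i))
        y = (1. / 4000.) * s1 - s2 + 1
        return 1. / (1. + y)    # global minimum y = 0 maps to fitness 1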