码迷,mamicode.com
首页 > 编程语言 > 详细

差分进化算法-python实现

时间:2015-10-06 23:41:30      阅读:1372      评论:0      收藏:0      [点我收藏+]

标签:

DEIndividual.py

 1 import numpy as np
 2 import ObjFunction
 3 
 4 
 5 class DEIndividual:
 6 
 7     ‘‘‘
 8     individual of differential evolution algorithm
 9     ‘‘‘
10 
11     def __init__(self,  vardim, bound):
12         ‘‘‘
13         vardim: dimension of variables
14         bound: boundaries of variables
15         ‘‘‘
16         self.vardim = vardim
17         self.bound = bound
18         self.fitness = 0.
19 
20     def generate(self):
21         ‘‘‘
22         generate a random chromsome for differential evolution algorithm
23         ‘‘‘
24         len = self.vardim
25         rnd = np.random.random(size=len)
26         self.chrom = np.zeros(len)
27         for i in xrange(0, len):
28             self.chrom[i] = self.bound[0, i] + 29                 (self.bound[1, i] - self.bound[0, i]) * rnd[i]
30 
31     def calculateFitness(self):
32         ‘‘‘
33         calculate the fitness of the chromsome
34         ‘‘‘
35         self.fitness = ObjFunction.GrieFunc(
36             self.vardim, self.chrom, self.bound)

DE.py

  1 import numpy as np
  2 from DEIndividual import DEIndividual
  3 import random
  4 import copy
  5 import matplotlib.pyplot as plt
  6 
  7 
  8 class DifferentialEvolutionAlgorithm:
  9 
 10     ‘‘‘
 11     The class for differential evolution algorithm
 12     ‘‘‘
 13 
 14     def __init__(self, sizepop, vardim, bound, MAXGEN, params):
 15         ‘‘‘
 16         sizepop: population sizepop
 17         vardim: dimension of variables
 18         bound: boundaries of variables
 19         MAXGEN: termination condition
 20         param: algorithm required parameters, it is a list which is consisting of [crossover rate CR, scaling factor F]
 21         ‘‘‘
 22         self.sizepop = sizepop
 23         self.MAXGEN = MAXGEN
 24         self.vardim = vardim
 25         self.bound = bound
 26         self.population = []
 27         self.fitness = np.zeros((self.sizepop, 1))
 28         self.trace = np.zeros((self.MAXGEN, 2))
 29         self.params = params
 30 
 31     def initialize(self):
 32         ‘‘‘
 33         initialize the population
 34         ‘‘‘
 35         for i in xrange(0, self.sizepop):
 36             ind = DEIndividual(self.vardim, self.bound)
 37             ind.generate()
 38             self.population.append(ind)
 39 
 40     def evaluate(self, x):
 41         ‘‘‘
 42         evaluation of the population fitnesses
 43         ‘‘‘
 44         x.calculateFitness()
 45 
 46     def solve(self):
 47         ‘‘‘
 48         evolution process of differential evolution algorithm
 49         ‘‘‘
 50         self.t = 0
 51         self.initialize()
 52         for i in xrange(0, self.sizepop):
 53             self.evaluate(self.population[i])
 54             self.fitness[i] = self.population[i].fitness
 55         best = np.max(self.fitness)
 56         bestIndex = np.argmax(self.fitness)
 57         self.best = copy.deepcopy(self.population[bestIndex])
 58         self.avefitness = np.mean(self.fitness)
 59         self.trace[self.t, 0] = (1 - self.best.fitness) / self.best.fitness
 60         self.trace[self.t, 1] = (1 - self.avefitness) / self.avefitness
 61         print("Generation %d: optimal function value is: %f; average function value is %f" % (
 62             self.t, self.trace[self.t, 0], self.trace[self.t, 1]))
 63         while (self.t < self.MAXGEN - 1):
 64             self.t += 1
 65             for i in xrange(0, self.sizepop):
 66                 vi = self.mutationOperation(i)
 67                 ui = self.crossoverOperation(i, vi)
 68                 xi_next = self.selectionOperation(i, ui)
 69                 self.population[i] = xi_next
 70             for i in xrange(0, self.sizepop):
 71                 self.evaluate(self.population[i])
 72                 self.fitness[i] = self.population[i].fitness
 73             best = np.max(self.fitness)
 74             bestIndex = np.argmax(self.fitness)
 75             if best > self.best.fitness:
 76                 self.best = copy.deepcopy(self.population[bestIndex])
 77             self.avefitness = np.mean(self.fitness)
 78             self.trace[self.t, 0] = (1 - self.best.fitness) / self.best.fitness
 79             self.trace[self.t, 1] = (1 - self.avefitness) / self.avefitness
 80             print("Generation %d: optimal function value is: %f; average function value is %f" % (
 81                 self.t, self.trace[self.t, 0], self.trace[self.t, 1]))
 82 
 83         print("Optimal function value is: %f; " %
 84               self.trace[self.t, 0])
 85         print "Optimal solution is:"
 86         print self.best.chrom
 87         self.printResult()
 88 
 89     def selectionOperation(self, i, ui):
 90         ‘‘‘
 91         selection operation for differential evolution algorithm
 92         ‘‘‘
 93         xi_next = copy.deepcopy(self.population[i])
 94         xi_next.chrom = ui
 95         self.evaluate(xi_next)
 96         if xi_next.fitness > self.population[i].fitness:
 97             return xi_next
 98         else:
 99             return self.population[i]
100 
101     def crossoverOperation(self, i, vi):
102         ‘‘‘
103         crossover operation for differential evolution algorithm
104         ‘‘‘
105         k = np.random.random_integers(0, self.vardim - 1)
106         ui = np.zeros(self.vardim)
107         for j in xrange(0, self.vardim):
108             pick = random.random()
109             if pick < self.params[0] or j == k:
110                 ui[j] = vi[j]
111             else:
112                 ui[j] = self.population[i].chrom[j]
113         return ui
114 
115     def mutationOperation(self, i):
116         ‘‘‘
117         mutation operation for differential evolution algorithm
118         ‘‘‘
119         a = np.random.random_integers(0, self.sizepop - 1)
120         while a == i:
121             a = np.random.random_integers(0, self.sizepop - 1)
122         b = np.random.random_integers(0, self.sizepop - 1)
123         while b == i or b == a:
124             b = np.random.random_integers(0, self.sizepop - 1)
125         c = np.random.random_integers(0, self.sizepop - 1)
126         while c == i or c == b or c == a:
127             c = np.random.random_integers(0, self.sizepop - 1)
128         vi = self.population[c].chrom + self.params[1] * 129             (self.population[a].chrom - self.population[b].chrom)
130         for j in xrange(0, self.vardim):
131             if vi[j] < self.bound[0, j]:
132                 vi[j] = self.bound[0, j]
133             if vi[j] > self.bound[1, j]:
134                 vi[j] = self.bound[1, j]
135         return vi
136 
137     def printResult(self):
138         ‘‘‘
139         plot the result of the differential evolution algorithm
140         ‘‘‘
141         x = np.arange(0, self.MAXGEN)
142         y1 = self.trace[:, 0]
143         y2 = self.trace[:, 1]
144         plt.plot(x, y1, r, label=optimal value)
145         plt.plot(x, y2, g, label=average value)
146         plt.xlabel("Iteration")
147         plt.ylabel("function value")
148         plt.title("Differential Evolution Algorithm for function optimization")
149         plt.legend()
150         plt.show()

 运行程序:

if __name__ == "__main__":

    # 25-dimensional Griewank search space, each variable in [-600, 600];
    # bound row 0 = lower limits, row 1 = upper limits.
    bound = np.tile([[-600], [600]], 25)
    # Original called an undefined name `DEA`; the class defined above is
    # DifferentialEvolutionAlgorithm. Population 60, 1000 generations,
    # CR = 0.8, F = 0.6.
    dea = DifferentialEvolutionAlgorithm(60, 25, bound, 1000, [0.8, 0.6])
    dea.solve()

 

ObjFunction见简单遗传算法-python实现

差分进化算法-python实现

标签:

原文地址:http://www.cnblogs.com/biaoyu/p/4857889.html

(0)
(0)
   
举报
评论 一句话评论(0
登录后才能评论!
© 2014 mamicode.com 版权所有  联系我们:gaon5@hotmail.com
迷上了代码!