
Clone Selection Algorithm - Python Implementation



CSAIndividual.py

import numpy as np
import ObjFunction


class CSAIndividual:

    '''
    individual of the clone selection algorithm
    '''

    def __init__(self, vardim, bound):
        '''
        vardim: dimension of variables
        bound: boundaries of variables, a 2 x vardim array
               (row 0: lower bounds, row 1: upper bounds)
        '''
        self.vardim = vardim
        self.bound = bound
        self.fitness = 0.
        self.trials = 0

    def generate(self):
        '''
        generate a random chromosome for the clone selection algorithm
        '''
        rnd = np.random.random(size=self.vardim)
        self.chrom = np.zeros(self.vardim)
        for i in range(0, self.vardim):
            # scale the random number into [lower bound, upper bound]
            self.chrom[i] = self.bound[0, i] + \
                (self.bound[1, i] - self.bound[0, i]) * rnd[i]

    def calculateFitness(self):
        '''
        calculate the fitness of the chromosome
        '''
        self.fitness = ObjFunction.GrieFunc(
            self.vardim, self.chrom, self.bound)
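A single individual can be exercised on its own before running the full algorithm. The following is a minimal sketch, assuming the ObjFunction module from the referenced genetic-algorithm post is importable; it mainly illustrates the expected bound layout (lower bounds in row 0, upper bounds in row 1):

import numpy as np
from CSAIndividual import CSAIndividual

# 5 variables, each bounded in [-600, 600]
bound = np.tile([[-600], [600]], 5)
ind = CSAIndividual(5, bound)
ind.generate()           # random chromosome inside the bounds
ind.calculateFitness()   # requires ObjFunction.GrieFunc on the path
print(ind.chrom, ind.fitness)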

CSA.py

import numpy as np
from CSAIndividual import CSAIndividual
import random
import copy
import matplotlib.pyplot as plt


class CloneSelectionAlgorithm:

    '''
    the class for the clone selection algorithm
    '''

    def __init__(self, sizepop, vardim, bound, MAXGEN, params):
        '''
        sizepop: population size
        vardim: dimension of variables
        bound: boundaries of variables
        MAXGEN: termination condition (maximum number of generations)
        params: algorithm parameters, a list of the form [beta, pm, alpha_max, alpha_min]
        '''
        self.sizepop = sizepop
        self.vardim = vardim
        self.bound = bound
        self.MAXGEN = MAXGEN
        self.params = params
        self.population = []
        self.fitness = np.zeros(self.sizepop)
        self.trace = np.zeros((self.MAXGEN, 2))

    def initialize(self):
        '''
        initialize the population of the clone selection algorithm
        '''
        for i in range(0, self.sizepop):
            ind = CSAIndividual(self.vardim, self.bound)
            ind.generate()
            self.population.append(ind)

    def evaluation(self):
        '''
        evaluate the fitness of the population
        '''
        for i in range(0, self.sizepop):
            self.population[i].calculateFitness()
            self.fitness[i] = self.population[i].fitness

    def solve(self):
        '''
        the evolution process of the clone selection algorithm
        '''
        self.t = 0
        self.initialize()
        self.evaluation()
        bestIndex = np.argmax(self.fitness)
        self.best = copy.deepcopy(self.population[bestIndex])
        while self.t < self.MAXGEN:
            self.t += 1
            tmpPop = self.reproduction()
            tmpPop = self.mutation(tmpPop)
            self.selection(tmpPop)
            best = np.max(self.fitness)
            bestIndex = np.argmax(self.fitness)
            if best > self.best.fitness:
                self.best = copy.deepcopy(self.population[bestIndex])

            self.avefitness = np.mean(self.fitness)
            # convert the (0, 1] fitness back to the underlying function value
            self.trace[self.t - 1, 0] = \
                (1 - self.best.fitness) / self.best.fitness
            self.trace[self.t - 1, 1] = (1 - self.avefitness) / self.avefitness
            print("Generation %d: optimal function value is: %f; average function value is %f" % (
                self.t, self.trace[self.t - 1, 0], self.trace[self.t - 1, 1]))
        print("Optimal function value is: %f; " % self.trace[self.t - 1, 0])
        print("Optimal solution is:")
        print(self.best.chrom)
        self.printResult()

    def reproduction(self):
        '''
        reproduction: clone each antibody nc times
        '''
        tmpPop = []
        for i in range(0, self.sizepop):
            nc = int(self.params[1] * self.sizepop)
            for j in range(0, nc):
                ind = copy.deepcopy(self.population[i])
                tmpPop.append(ind)
        return tmpPop

    def mutation(self, tmpPop):
        '''
        hypermutation: perturb the clones with Gaussian noise whose step size
        decays from alpha_max to alpha_min over the generations
        '''
        for i in range(0, self.sizepop):
            nc = int(self.params[1] * self.sizepop)
            # j starts at 1, so the first clone of each parent is left unmutated
            for j in range(1, nc):
                rnd = np.random.random(1)
                if rnd < self.params[0]:
                    delta = self.params[2] + self.t * \
                        (self.params[3] - self.params[2]) / self.MAXGEN
                    tmpPop[i * nc + j].chrom += np.random.normal(0.0, delta, self.vardim)
                    # clip the mutated chromosome back into the feasible region
                    for k in range(0, self.vardim):
                        if tmpPop[i * nc + j].chrom[k] < self.bound[0, k]:
                            tmpPop[i * nc + j].chrom[k] = self.bound[0, k]
                        if tmpPop[i * nc + j].chrom[k] > self.bound[1, k]:
                            tmpPop[i * nc + j].chrom[k] = self.bound[1, k]
                    tmpPop[i * nc + j].calculateFitness()
        return tmpPop

    def selection(self, tmpPop):
        '''
        re-selection: replace each parent with its best clone if that clone is better
        '''
        for i in range(0, self.sizepop):
            nc = int(self.params[1] * self.sizepop)
            best = 0.0
            bestIndex = -1
            for j in range(0, nc):
                if tmpPop[i * nc + j].fitness > best:
                    best = tmpPop[i * nc + j].fitness
                    bestIndex = i * nc + j
            if self.fitness[i] < best:
                self.population[i] = copy.deepcopy(tmpPop[bestIndex])
                self.fitness[i] = best

    def printResult(self):
        '''
        plot the result of the clone selection algorithm
        '''
        x = np.arange(0, self.MAXGEN)
        y1 = self.trace[:, 0]
        y2 = self.trace[:, 1]
        plt.plot(x, y1, 'r', label='optimal value')
        plt.plot(x, y2, 'g', label='average value')
        plt.xlabel("Iteration")
        plt.ylabel("function value")
        plt.title("Clone selection algorithm for function optimization")
        plt.legend()
        plt.show()

Run the program:

import numpy as np
from CSA import CloneSelectionAlgorithm

if __name__ == "__main__":

    # 25-dimensional search space, each variable bounded in [-600, 600]
    bound = np.tile([[-600], [600]], 25)
    csa = CloneSelectionAlgorithm(50, 25, bound, 500, [0.3, 0.4, 5, 0.1])
    csa.solve()
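With these settings, params[0] = 0.3 is used as the per-clone mutation probability, params[1] = 0.4 sets the clone count per antibody to nc = int(0.4 * 50) = 20, and params[2] / params[3] control the hypermutation step size, which decays linearly from 5 to 0.1 over the 500 generations. A small check of that schedule, using only the values passed above:

beta, pm, alpha_max, alpha_min = [0.3, 0.4, 5, 0.1]
MAXGEN = 500
for t in [1, 250, 500]:
    # same formula as in mutation(): coarse steps early, fine steps late
    delta = alpha_max + t * (alpha_min - alpha_max) / MAXGEN
    print(t, delta)   # roughly 4.99, 2.55, 0.1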

 

For ObjFunction, see "Simple Genetic Algorithm - Python Implementation".
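Since that module is not reproduced in this post, a minimal sketch of a GrieFunc consistent with how it is called above might look like the following: the Griewank value y >= 0 is mapped to a fitness 1 / (1 + y) in (0, 1], so larger fitness is better and the (1 - fitness) / fitness entries in the trace recover the raw function value.

import numpy as np

def GrieFunc(vardim, x, bound):
    '''
    Griewank benchmark, rescaled so the global optimum (x = 0) gives fitness 1;
    bound is accepted only to match the call signature and is not used here
    '''
    s1 = 0.
    s2 = 1.
    for i in range(1, vardim + 1):
        s1 += x[i - 1] ** 2
        s2 *= np.cos(x[i - 1] / np.sqrt(i))
    y = (1. / 4000.) * s1 - s2 + 1
    return 1. / (1. + y)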



Original source: http://www.cnblogs.com/biaoyu/p/4857928.html
