I tried to reproduce this code in Python: https://github.com/Code-Bullet/Smart-Dots-Genetic-Algorithm-Tutorial/tree/master/BestTutorialEver, but it doesn't work: the best dot keeps mutating every generation, and each generation starts with fewer and fewer dots. Here is the code (I'm using pygame for the graphics):
The Brain class:
class Brain(object):
    def __init__(self, size):
        self.size = size
        self.step = 0
        self.directions = [[0.0, 0.0] for j in range(size)]
        for i in range(len(self.directions)):
            randomAngle = random.uniform(0, 2 * math.pi)
            self.directions[i][0] = math.sin(randomAngle)
            self.directions[i][1] = math.cos(randomAngle)

    def mutate(self):
        mutationRate = 1
        for i in range(self.size):
            rand = random.random()
            if rand < mutationRate:
                dirAngle = math.acos(self.directions[i][1]) * (1.0 + random.random())
                self.directions[i][0] = math.sin(dirAngle)
                self.directions[i][1] = math.cos(dirAngle)
The Population class:
class Population(object):
    def __init__(self, size, win):
        self.bestDot = 0
        self.fitnessSum = 0.0
        self.win = win
        self.size = size
        self.dots = [Dot(win) for i in range(size)]

    def show(self):
        for i in range(self.size - 1):
            self.dots[i + 1].show()
        self.dots[0].show()

    def updt(self):
        for i in range(self.size):
            self.dots[i].updt()

    def calculateFitness(self):
        for i in range(self.size):
            self.dots[i].calculateFitness()

    def allDotsDead(self):
        for i in range(self.size):
            if not self.dots[i].dead and not self.dots[i].reachGoal:
                return False
        return True

    def naturalSelection(self):
        newDots = [Dot(self.win) for i in range(self.size)]
        self.setBestDot()
        self.calculateFitnessSum()
        newDots[0] = self.dots[self.bestDot].baby()
        newDots[0].isBest = True
        for i in range(self.size - 1):
            parent = self.selectParent()
            newDots[i + 1] = parent.baby()
        print(newDots[1])
        self.dots = newDots

    def calculateFitnessSum(self):
        self.fitnessSum = 0.0
        for i in range(self.size):
            self.fitnessSum += self.dots[i].fitness
        print(self.fitnessSum)

    def selectParent(self):
        rand = random.uniform(0, self.fitnessSum)
        runningSum = 0.0
        for i in range(self.size):
            runningSum += self.dots[i].fitness
            if runningSum > rand:
                return self.dots[i]
        return None

    def mutate(self):
        for i in range(self.size):
            if not self.dots[i].isBest:
                self.dots[i].brain.mutate()

    def setBestDot(self):
        max = 0.0
        maxIndex = 0
        for i in range(len(self.dots)):
            if self.dots[i].fitness > max:
                max = self.dots[i].fitness
                maxIndex = i
        self.bestDot = maxIndex
The Dot class:
WIDTH, HEIGHT = 720, 640
GOAL = (WIDTH / 2, 50)

class Dot(object):
    def __init__(self, win):
        self.win = win
        self.fitness = 0
        self.reachGoal = False
        self.dead = False
        self.brain = Brain(200)
        self.pos = [WIDTH / 2, HEIGHT - 50]
        self.vel = [0, 0]
        self.acc = [0, 0]
        self.isBest = False

    def move(self):
        if len(self.brain.directions) > self.brain.step:
            self.acc = self.brain.directions[self.brain.step]
            self.brain.step += 1
        else:
            self.dead = True
        for i in range(len(self.vel)):
            self.vel[i] += self.acc[i]
        if self.vel[0] >= 5:
            self.vel[0] = 5
        if self.vel[1] >= 5:
            self.vel[1] = 5
        for i in range(len(self.pos)):
            self.pos[i] += self.vel[i]

    def show(self):
        if self.isBest:
            pygame.draw.circle(self.win, (0, 255, 0), self.pos, 4)
        else:
            pygame.draw.circle(self.win, (200, 100, 0), self.pos, 2)

    def updt(self):
        if not self.dead and not self.reachGoal:
            self.move()
            if self.pos[0] < 4 or self.pos[1] < 4 or self.pos[0] > WIDTH - 4 or self.pos[1] > HEIGHT - 4:
                self.dead = True
            elif math.hypot(self.pos[0] - GOAL[0], self.pos[1] - GOAL[1]) < 5:
                self.reachGoal = True

    def calculateFitness(self):
        distToGoal = math.hypot(self.pos[0] - GOAL[0], self.pos[1] - GOAL[1])
        self.fitness = 1.0 / 16.0 + 10000.0 / (distToGoal * distToGoal)

    def baby(self):
        baby = Dot(self.win)
        baby.brain.directions = self.brain.directions
        return baby
The problem is that even though I specified that the best dot should not mutate, it still mutates or gets replaced by a worse dot. On top of that, I don't know why, but fewer and fewer dots appear each generation (or the dots' brains are completely identical and nothing seems to mutate at all). The mutation rate is 100%, yet the number of dots keeps shrinking with every run. Here are screenshots of the first and fifth generations: https://i.sstatic.net/cGUJG.jpg
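To make the symptom concrete, here is a small diagnostic sketch (illustration only; it assumes a Population named pop built from the classes above and an existing pygame surface win). It counts how many distinct directions lists the population actually holds after one round of selection, since baby() copies only the list reference:

# Diagnostic sketch (illustrative): baby() assigns self.brain.directions by
# reference, so every child shares its parent's list, and mutating any of
# those children also rewrites the shared list used by the "best" dot.
pop = Population(100, win)   # assumes an existing pygame surface `win`
pop.calculateFitness()
pop.naturalSelection()

distinct = len({id(d.brain.directions) for d in pop.dots})
print("distinct directions lists in the population:", distinct)

best = next(d for d in pop.dots if d.isBest)
sharing = sum(d.brain.directions is best.brain.directions for d in pop.dots)
print("dots sharing the best dot's directions list:", sharing)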
Also, if anyone has a Python genetic algorithm model I could use as a reference, that would be very helpful.
Answer:
I haven't tried the project you mentioned. You could try PyGAD, a Python 3 library for building genetic algorithms and training machine learning algorithms. It is open source and the code is available on GitHub.
PyGAD is simple to use and lets you easily control the crossover, mutation, and parent selection operators. You can also control many other parameters of the genetic algorithm through PyGAD.
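For instance, the selection, crossover, and mutation operators are all chosen through arguments of the pygad.GA constructor. A minimal sketch (the toy fitness function and the specific values here are only illustrative):

import pygad

def fitness_func(solution, solution_idx):
    # Toy fitness: reward solutions whose genes sum close to 10.
    return 1.0 / (abs(sum(solution) - 10) + 0.000001)

# Illustrative values; each operator is selected by name.
ga_instance = pygad.GA(num_generations=50,
                       sol_per_pop=8,
                       num_genes=3,
                       num_parents_mating=4,
                       fitness_func=fitness_func,
                       parent_selection_type="rws",    # roulette-wheel parent selection
                       crossover_type="single_point",  # single-point crossover
                       mutation_type="random",         # random-value mutation
                       mutation_percent_genes=10,      # mutate 10% of the genes
                       keep_parents=1)                 # carry 1 parent into the next generation
ga_instance.run()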
PyGAD also supports user-defined fitness functions, so you can adapt it to a wide range of problem types.
After installing PyGAD (pip install pygad), here is a simple example to get you started. It tries to find the best values of W1, W2, and W3 that satisfy the following equation:
44 = 4*W1 - 2*W2 + 1.2*W3
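(For reference, one exact solution is W1 = 10, W2 = -2, W3 = 0, since 4*10 - 2*(-2) + 1.2*0 = 40 + 4 + 0 = 44; the fitness function below simply rewards solutions by how close their output gets to 44.)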
import pygad
import numpy

function_inputs = [4, -2, 1.2]
desired_output = 44

def fitness_func(solution, solution_idx):
    output = numpy.sum(solution * function_inputs)
    fitness = 1.0 / (numpy.abs(output - desired_output) + 0.000001)
    return fitness

def on_generation(ga_instance):
    print(ga_instance.population)

ga_instance = pygad.GA(num_generations=50,
                       num_parents_mating=2,
                       fitness_func=fitness_func,
                       num_genes=3,
                       sol_per_pop=5,
                       on_generation=on_generation)

ga_instance.run()
ga_instance.plot_result()

solution, solution_fitness, _ = ga_instance.best_solution()
print("Parameters of the best solution : {solution}".format(solution=solution))
print("Fitness value of the best solution = {solution_fitness}".format(solution_fitness=solution_fitness))
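If you want to stay with your smart-dots setup, the same library can drive it. Below is a rough sketch only, not the tutorial's method: it assumes each gene is one steering angle and scores a solution by the final distance to the goal, reusing the constants and movement rules from your Dot class; everything else (population size, mutation rate, step count) is just an illustrative choice.

import math
import pygad

WIDTH, HEIGHT = 720, 640
GOAL = (WIDTH / 2, 50)
NUM_STEPS = 200  # same number of moves as Brain(200)

def simulate(angles):
    # Replays the Dot physics from the question: each angle becomes an
    # acceleration vector, velocity is capped at 5, and the walk stops
    # at a wall or when the goal is reached.
    pos = [WIDTH / 2, HEIGHT - 50]
    vel = [0.0, 0.0]
    for angle in angles:
        acc = (math.sin(angle), math.cos(angle))
        vel = [min(v + a, 5) for v, a in zip(vel, acc)]
        pos = [p + v for p, v in zip(pos, vel)]
        if pos[0] < 4 or pos[1] < 4 or pos[0] > WIDTH - 4 or pos[1] > HEIGHT - 4:
            break
        if math.hypot(pos[0] - GOAL[0], pos[1] - GOAL[1]) < 5:
            break
    return math.hypot(pos[0] - GOAL[0], pos[1] - GOAL[1])

def fitness_func(solution, solution_idx):
    # Same shape as the question's fitness: closer to the goal -> higher fitness.
    dist = simulate(solution)
    return 1.0 / 16.0 + 10000.0 / (dist * dist + 0.000001)

ga_instance = pygad.GA(num_generations=100,
                       num_parents_mating=10,
                       sol_per_pop=50,
                       num_genes=NUM_STEPS,
                       init_range_low=0.0,
                       init_range_high=2 * math.pi,
                       mutation_percent_genes=5,
                       fitness_func=fitness_func)
ga_instance.run()

best_angles, best_fitness, _ = ga_instance.best_solution()
print("Final distance to goal for the best solution:", simulate(best_angles))

PyGAD handles selection, crossover, mutation, and elitism internally here, so there is no hand-written baby()/mutate() bookkeeping to get wrong.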