2020/11/06 13:56

# 一种混合均值中心反向学习粒子群优化算法的python程序实现

## 主体程序

import numpy as np
import math
import random
import matplotlib.pyplot as plt
import function

class Particles:
    """Particle swarm container.

    Holds the PSO hyper-parameters and the position/velocity matrices
    for the whole swarm; `init()` fills the matrices with uniform random
    values inside the configured bounds.
    """

    def __init__(self, num, d, iterMax, c1, c2, wMax, wMin, xMax, xMin, vMax, vMin, x=0, v=0):
        """
        @param num: number of particles
        @param d: dimensionality of each particle
        @param iterMax: maximum number of iterations
        @param c1: cognitive learning factor
        @param c2: social learning factor
        @param wMax: maximum inertia weight
        @param wMin: minimum inertia weight
        @param xMax: upper position bound
        @param xMin: lower position bound
        @param vMax: upper velocity bound
        @param vMin: lower velocity bound
        @param x: placeholder for the position matrix (overwritten by init())
        @param v: placeholder for the velocity matrix (overwritten by init())
        """
        self.num = num  # number of particles
        self.d = d  # particle dimensionality
        self.iterMax = iterMax  # maximum iteration count
        self.c1 = c1  # cognitive learning factor
        self.c2 = c2  # social learning factor
        self.wMax = wMax  # maximum inertia weight
        self.wMin = wMin  # minimum inertia weight
        self.xMax = xMax  # upper position bound
        self.xMin = xMin  # lower position bound
        self.vMax = vMax  # upper velocity bound
        self.vMin = vMin  # lower velocity bound
        self.x = x  # position matrix, shape (num, d) after init()
        self.v = v  # velocity matrix, shape (num, d) after init()
        self.init()

    def init(self):
        """Randomly initialise positions in [xMin, xMax] and velocities in [vMin, vMax]."""
        # Fix: removed leftover debug statement `print(type(self.x))`.
        self.x = np.random.rand(self.num, self.d) * (self.xMax - self.xMin) + self.xMin
        self.v = np.random.rand(self.num, self.d) * (self.vMax - self.vMin) + self.vMin

particles = Particles(20, 2, 200, 2, 2, 1, 0.4, 20, -20, 1, -1)
p_best = np.ones((particles.num, 1))  # per-particle historical best fitness
p_Position = np.ones((particles.num, particles.d))  # per-particle historical best position
g_best = float('inf')  # swarm-wide historical best fitness
g_Position = np.ones((1, particles.d))  # swarm-wide historical best position
g_best_curve = np.ones((1, particles.iterMax))  # points for the "Fitness evolution curve"
max_HMC = np.zeros((1, particles.d))  # per-dimension historical maximum of the hybrid mean center
min_HMC = np.zeros((1, particles.d))  # per-dimension historical minimum of the hybrid mean center

def g_best_find():
    """Update the swarm-wide best (g_best / g_Position) from the per-particle bests.

    Reads the module-level p_best / p_Position arrays and writes the
    module-level g_best / g_Position.
    """
    global g_best
    global g_Position
    for i in range(particles.num):
        if p_best[i] < g_best:
            g_best = p_best[i]
            # Fix: copy() so g_Position does not alias the row of p_Position,
            # which p_best_find() later overwrites in place — without the copy
            # the recorded global best position silently changes afterwards.
            g_Position = p_Position[i].copy()

def p_best_find():
    """Refresh each particle's personal best fitness and position."""
    global p_best
    global p_Position
    for idx in range(particles.num):
        fitness = function.function0(particles.x[idx][0], particles.x[idx][1])
        if fitness < p_best[idx]:
            p_best[idx] = fitness
            p_Position[idx] = particles.x[idx]

def MC():
    """
    Compute the mean center of the swarm.
    @return: (1, d) array holding the average particle position
    """
    total = np.zeros((1, particles.d))
    for row in range(particles.num):
        total += particles.x[row]
    return total / particles.num

def MVF():
    """
    Compute the mean fitness value of the swarm.
    @return: average fitness over all particles
    """
    total = 0
    for idx in range(particles.num):
        total += function.function0(particles.x[idx][0], particles.x[idx][1])
    return total / particles.num

def PMC(fitness_average):
    """
    Compute the partial mean center: the average position of the particles
    whose fitness is strictly below the swarm's mean fitness.
    @param fitness_average: mean fitness value of the swarm
    @return: (1, d) partial mean center
    """
    x_P_average = np.zeros((1, particles.d))
    n = 0
    for i in range(particles.num):
        if function.function0(particles.x[i][0], particles.x[i][1]) < fitness_average:
            x_P_average = x_P_average + particles.x[i]
            n += 1
    if n == 0:
        # Fix: if no particle is strictly better than the mean fitness
        # (e.g. all fitness values equal), the original divided by zero.
        # Fall back to the plain mean center of all particles.
        return MC()
    return x_P_average / n

def HMC(x_average, x_P_average):
    """
    Pick the hybrid mean center: whichever of the mean center and the
    partial mean center has the better (lower) fitness.
    @param x_average: (1, d) mean center
    @param x_P_average: (1, d) partial mean center
    @return: the center with the smaller fitness value
    """
    fit_mc = function.function0(x_average[0][0], x_average[0][1])
    fit_pmc = function.function0(x_P_average[0][0], x_P_average[0][1])
    return x_P_average if fit_mc < fit_pmc else x_average

def OBL_HMC(a, b, HMC):
    """
    Compute the opposition-based (reflected) hybrid mean center.
    @param a: historical minimum of the hybrid mean center
    @param b: historical maximum of the hybrid mean center
    @param HMC: current hybrid mean center
    @return: opposition-based hybrid mean center
    """
    # NOTE(review): the third parameter deliberately keeps its original name,
    # even though it shadows the module-level HMC() function.
    scale = random.random()
    return scale * (a + b) - HMC

def mutation():
    """
    Hybrid-mean-center evolution and opposition-based hybrid-mean-center
    evolution: evaluate both candidate centers and adopt whichever improves
    the swarm-wide best.
    """
    global g_best
    global g_Position
    global max_HMC
    global min_HMC
    mc = MC()
    mvf = MVF()
    pmc = PMC(mvf)
    hmc = HMC(mc, pmc)
    # Track the per-dimension historical range of the hybrid mean center;
    # this range drives the opposition-based reflection below.
    for i in range(particles.d):
        if max_HMC[0][i] < hmc[0][i]:
            max_HMC[0][i] = hmc[0][i]
        if min_HMC[0][i] > hmc[0][i]:
            min_HMC[0][i] = hmc[0][i]
    g = function.function0(hmc[0][0], hmc[0][1])
    if g < g_best:
        g_best = g
        g_Position = hmc
    obl_hmc = OBL_HMC(min_HMC, max_HMC, hmc)
    g = function.function0(obl_hmc[0][0], obl_hmc[0][1])
    if g < g_best:
        g_best = g
        # Fix: record the opposition-based center that achieved the
        # improvement — the original assigned `hmc` here.
        g_Position = obl_hmc

def main():
    """
    Entry point: run the hybrid-mean-center opposition-based-learning PSO
    on function.function0 and plot the fitness evolution curve.
    @return: None
    """
    global g_best
    global max_HMC
    global min_HMC
    """ Initialise the per-particle bests from the starting positions """
    for i in range(particles.num):
        p_best[i] = function.function0(particles.x[i][0], particles.x[i][1])
        p_Position[i] = particles.x[i]
    """  Iteration starts  """
    """  Iteration initialisation  """
    p_best_find()
    g_best_find()
    """ Initialise the hybrid-mean-center range """
    mc = MC()
    mvf = MVF()
    pmc = PMC(mvf)
    hmc = HMC(mc, pmc)
    max_HMC = min_HMC = hmc
    """
    Main optimisation loop
    """
    w = 1
    for i in range(particles.iterMax):
        """
        Exponentially decaying inertia weight for this iteration
        """
        w = 0.25 * math.exp(-0.5 * i / particles.iterMax)
        for j in range(particles.num):
            particles.v[j] = w * particles.v[j] + particles.c1 * random.random() \
                * (p_Position[j] - particles.x[j]) + particles.c2 \
                * random.random() * (g_Position - particles.x[j])
            for n in range(particles.d):  # clamp velocity to its bounds
                if particles.v[j][n] > particles.vMax:
                    particles.v[j][n] = particles.vMax
                if particles.v[j][n] < particles.vMin:
                    particles.v[j][n] = particles.vMin
            particles.x[j] = particles.x[j] + particles.v[j]  # position update
            """
            Keep the updated position inside the search bounds
            """
            for n in range(particles.d):  # clamp position to its bounds
                if particles.x[j][n] > particles.xMax:
                    particles.x[j][n] = particles.xMax
                if particles.x[j][n] < particles.xMin:
                    particles.x[j][n] = particles.xMin
        p_best_find()
        g_best_find()
        mutation()  # hybrid-mean-center & opposition-based-center evolution
        g_best_curve[0][i] = g_best
    """
    Report results and plot the fitness evolution curve
    """
    print("函数最优值：" + str(g_best))
    print(g_Position)
    c = np.arange(0, particles.iterMax, 1).tolist()
    gb_curve_list = g_best_curve.tolist()
    plt.plot(c, gb_curve_list[0], "r")
    plt.title("Fitness evolution curve")
    #plt.savefig('./H图/HCOPSO_Himm10.jpg')
    plt.show()

# Script entry point.
if __name__ == "__main__":
    main()
    # print("hello")


## 测试函数（以下程序文件取名function.py,把要测试的函数改名为function0()）

import math

def function1(x1, x2):
    """
    Ackley benchmark function (2-D).
    @param x1: first coordinate
    @param x2: second coordinate
    @return: function value (0 at the global minimum x1 = x2 = 0)
    """
    radial = math.sqrt((x1 ** 2 + x2 ** 2) / 2)
    cosine = (math.cos(2 * math.pi * x1) + math.cos(2 * math.pi * x2)) / 2
    return -20 * math.exp(-0.2 * radial) - math.exp(cosine) + 20 + math.e

def function2(x1, x2):
    """
    Himmelblau benchmark function.
    @param x1: first coordinate
    @param x2: second coordinate
    @return: function value (0 at each of the four global minima)
    """
    first = x1 ** 2 + x2 - 11
    second = x1 + x2 ** 2 - 7
    return first * first + second * second

def function0(x1, x2):
    """
    Rastrigin benchmark function (2-D); the optimisation target of the PSO.
    @param x1: first coordinate
    @param x2: second coordinate
    @return: function value (0 at the global minimum x1 = x2 = 0)
    """
    def term(x):
        # Per-dimension Rastrigin contribution: x^2 - 10*cos(2*pi*x).
        return x ** 2 - 10 * math.cos(2 * math.pi * x)

    return 2 * 10 + term(x1) + term(x2)


0
0 收藏

0 评论
0 收藏
0