CI-Based Optimization Algorithm Code Repository
I highly encourage you to visit my GitHub page; I will gradually post some of the Python codes I am working on there. All of it, and much more, can be found in my upcoming book "Computational Intelligence-Based Optimization Algorithms: From Theory to Practice".
Pattern Search Algorithm
The pattern search algorithm is arguably one of the earliest, if not the first, meta-heuristic optimization algorithms. The ideas and novelties used in this algorithm would help shape the next generation of optimization algorithms.
import numpy as np
def init_generator(num_variable, min_val, max_val):
    """Draw a random starting point for the search.

    Returns a 1-D array of ``num_variable`` coordinates, each sampled
    uniformly from the half-open interval [min_val, max_val).
    """
    lower_bound, upper_bound = min_val, max_val
    start_point = np.random.uniform(low=lower_bound,
                                    high=upper_bound,
                                    size=num_variable)
    return start_point
def exploratory_move(X, mu, func, method='GPS'):
    """Evaluate the objective on a poll mesh around the base point ``X``.

    Parameters
    ----------
    X : array-like, shape (n,)
        Current base point.
    mu : float
        Mesh (step) size.
    func : callable
        Objective function taking a 1-D array and returning a scalar.
    method : {'GPS', 'MADS'}, optional
        'GPS' polls +mu and -mu along every coordinate axis (2n points).
        'MADS' polls +mu along each axis plus a single point shifted by
        -mu in ALL coordinates at once (n+1 points).

    Returns
    -------
    mesh : ndarray
        Polled points, one per row.
    eval_results : ndarray
        ``func`` evaluated on each row of ``mesh``.

    Raises
    ------
    ValueError
        If ``method`` is neither 'GPS' nor 'MADS' (the original code
        silently returned None here, which crashed later on unpacking).
    """
    n = len(X)
    if method == 'GPS':
        # 2n poll points: one step forward and one step back per axis.
        mesh = np.concatenate((mu * np.eye(n) + X,
                               -mu * np.eye(n) + X), axis=0)
    elif method == 'MADS':
        # n positive axis steps plus one combined negative step.
        neg_point = np.reshape(-mu * np.ones(n) + X, (1, n))
        mesh = np.concatenate((mu * np.eye(n) + X, neg_point), axis=0)
    else:
        raise ValueError(f"unknown meshing method: {method!r}")
    eval_results = np.apply_along_axis(func1d=func, axis=1, arr=mesh)
    return mesh, eval_results
# NOTE(review): this is a verbatim duplicate of the exploratory_move defined
# earlier in this file — consider removing one of the two copies.
def exploratory_move(X, mu, func, method='GPS'):
    """Evaluate the objective on a poll mesh around the base point ``X``.

    Parameters
    ----------
    X : array-like, shape (n,)
        Current base point.
    mu : float
        Mesh (step) size.
    func : callable
        Objective function taking a 1-D array and returning a scalar.
    method : {'GPS', 'MADS'}, optional
        'GPS' polls +mu and -mu along every coordinate axis (2n points).
        'MADS' polls +mu along each axis plus a single point shifted by
        -mu in ALL coordinates at once (n+1 points).

    Returns
    -------
    mesh : ndarray
        Polled points, one per row.
    eval_results : ndarray
        ``func`` evaluated on each row of ``mesh``.

    Raises
    ------
    ValueError
        If ``method`` is neither 'GPS' nor 'MADS' (the original code
        silently returned None here, which crashed later on unpacking).
    """
    n = len(X)
    if method == 'GPS':
        # 2n poll points: one step forward and one step back per axis.
        mesh = np.concatenate((mu * np.eye(n) + X,
                               -mu * np.eye(n) + X), axis=0)
    elif method == 'MADS':
        # n positive axis steps plus one combined negative step.
        neg_point = np.reshape(-mu * np.ones(n) + X, (1, n))
        mesh = np.concatenate((mu * np.eye(n) + X, neg_point), axis=0)
    else:
        raise ValueError(f"unknown meshing method: {method!r}")
    eval_results = np.apply_along_axis(func1d=func, axis=1, arr=mesh)
    return mesh, eval_results
def pattern_move(current_base, previous_base, alpha, func):
    """Accelerated (pattern) move along the most recent search direction.

    The new point is ``previous + alpha * (current - previous)``; with
    ``alpha = 2`` this is the classic Hooke-Jeeves reflection
    ``current + (current - previous)``.

    Returns the new point and its objective value ``func(X_new)``.
    """
    cur = np.array(current_base)
    prev = np.array(previous_base)
    direction = cur - prev
    X_new = prev + alpha * direction
    return X_new, func(X_new)
def PS_algorithem(mu_const, alpha, delta, obj_func,
                  final_step_size, num_variable, min_val,
                  max_val, meshing_method='GPS', minimizing=True,
                  full_result=False):
    """Pattern Search (Hooke-Jeeves style) optimization driver.

    Alternates exploratory moves (polling a mesh around the base point)
    with accelerated pattern moves, shrinking the step size ``mu`` by
    ``delta`` whenever no improvement is found, until ``mu`` drops to
    ``final_step_size``.

    Parameters
    ----------
    mu_const : float
        Initial (and reset) mesh step size.
    alpha : float
        Acceleration factor for the pattern move.
    delta : float
        Amount subtracted from ``mu`` after each failed step.
    obj_func : callable
        Objective function taking a 1-D array, returning a scalar.
    final_step_size : float
        Termination threshold on ``mu``.
    num_variable : int
        Problem dimensionality.
    min_val, max_val : float
        Bounds for the random starting point.
    meshing_method : {'GPS', 'MADS'}, optional
        Poll pattern used by ``exploratory_move``.
    minimizing : bool, optional
        Minimize (True) or maximize (False) the objective.
    full_result : bool, optional
        If True, also return the improvement history and NFE counts.

    Returns
    -------
    X, best_of                              (if not full_result)
    X, best_of, results_list, NFE_list      (if full_result)

    Notes
    -----
    Fixes over the original:
    * ``pattern_move`` was called as ``(current, alpha, previous, func)``
      but its signature is ``(current, previous, alpha, func)`` —
      ``alpha`` and ``previous_base`` were swapped at both call sites.
    * The inner pattern-move loop had no exit when the pattern move
      itself failed to improve, so it could spin forever; it now shrinks
      ``mu``, keeps the current base, and returns to exploratory polling.
    * The near-identical minimizing/maximizing branches are unified via
      min/max selector functions.
    (The function name keeps the original spelling for compatibility.)
    """
    # Select the extremum helpers once so min/max code paths stay unified.
    if minimizing:
        extreme, arg_extreme = np.min, np.argmin
        improved = lambda new, old: new < old
    else:
        extreme, arg_extreme = np.max, np.argmax
        improved = lambda new, old: new > old

    # Objective evaluations consumed by one exploratory poll.
    poll_cost = 2 * num_variable if meshing_method == 'GPS' else num_variable + 1

    results_list = []
    NFE_list = []
    NFE_value = 0

    X = init_generator(num_variable, min_val, max_val)
    NFE_value += 1
    best_of = obj_func(X)
    results_list.append(best_of)
    NFE_list.append(NFE_value)

    mu = mu_const
    while mu > final_step_size:
        mesh, values = exploratory_move(X, mu, obj_func,
                                        method=meshing_method)
        NFE_value += poll_cost
        if improved(extreme(values), best_of):
            # Exploratory success: reset the step size and start a run of
            # pattern moves from the best polled point.
            mu = mu_const
            best_of = extreme(values)
            results_list.append(best_of)
            NFE_list.append(NFE_value)
            current_base = mesh[arg_extreme(values)]
            previous_base = X
            while True:
                # BUG FIX: arguments are now in signature order
                # (current, previous, alpha, func).
                X_new, of_new = pattern_move(current_base, previous_base,
                                             alpha, obj_func)
                NFE_value += 1
                if improved(of_new, best_of):
                    previous_base = current_base
                    current_base, best_of = X_new, of_new
                    results_list.append(best_of)
                    NFE_list.append(NFE_value)
                    mesh, values = exploratory_move(current_base, mu,
                                                    obj_func,
                                                    method=meshing_method)
                    NFE_value += poll_cost
                    if improved(extreme(values), best_of):
                        best_of = extreme(values)
                        results_list.append(best_of)
                        NFE_list.append(NFE_value)
                        previous_base = current_base
                        current_base = mesh[arg_extreme(values)]
                    else:
                        # Poll around the new base failed: shrink the step
                        # and restart exploration from the current base.
                        mu -= delta
                        X = current_base
                        results_list.append(best_of)
                        NFE_list.append(NFE_value)
                        break
                else:
                    # BUG FIX: the pattern move failed to improve — the
                    # original had no break here and looped forever.
                    mu -= delta
                    X = current_base
                    results_list.append(best_of)
                    NFE_list.append(NFE_value)
                    break
        else:
            # Exploratory failure at the base point: shrink the step.
            mu -= delta
            results_list.append(best_of)
            NFE_list.append(NFE_value)

    if not full_result:
        return X, best_of
    return X, best_of, results_list, NFE_list
- Please note that this is for educational purposes and not necessarily the fastest option to execute your code.
- A lot of time and effort has been put into preparing these codes, so I would appreciate it if you could support me by purchasing the book and acknowledging the source material should you decide to use it yourself. Thanks.
Computational Intelligence-based Optimization Algorithms: From Theory to Practice
Computational intelligence-based optimization methods, also known as meta-heuristic optimization algorithms, are a popular topic in mathematical programming. These methods have bridged the gap between various approaches and created a new school of thought to solve real-world optimization problems. In this book, we have selected some of the most effective and renowned algorithms in the literature. These algorithms are not only practical, but they also provide thought-provoking theoretical ideas to help readers understand how they solve optimization problems. Each chapter includes a brief review of the algorithm’s background and the fields it has been used in. Additionally, Python code is provided for all algorithms at the end of each chapter, making this book a valuable resource for beginner and intermediate programmers looking to understand these algorithms.