Combinatorial
These algorithms can be used for solving combinatorial optimization.
BRKGA
Biased Random Key Genetic Algorithm.
Metaheuristics.BRKGA
— Function
BRKGA(num_elites = 20, num_mutants = 10, num_offsprings = 70, bias = 0.7)
Biased Random Key Genetic Algorithm (BRKGA).
Example
julia> target_perm = collect(reverse(1:10))
10-element Vector{Int64}:
10
9
8
7
6
5
4
3
2
1
julia> decode(rk) = sortperm(rk);
julia> f(rk) = sum(abs.(decode(rk) - target_perm));
julia> res = optimize(f, [zeros(10) ones(10)], BRKGA(num_elites=70))
Optimization Result
===================
Iteration: 22
Minimum: 0
Minimizer: [0.938595, 0.851247, 0.823736, …, 0.375321]
Function calls: 2200
Total time: 0.0238 s
Stop reason: Due to Convergence Termination criterion.
julia> decode(minimizer(res))
10-element Vector{Int64}:
10
9
8
7
6
5
4
3
2
1
GRASP
Greedy randomized adaptive search procedure.
Metaheuristics.GRASP
— Type
GRASP(;initial, constructor, local_search,...)
Greedy Randomized Adaptive Search Procedure.
Allowed parameters
- `initial`: an initial solution if necessary.
- `constructor`: parameters for the greedy constructor.
- `local_search`: the local search strategy, `BestImproveSearch()` (default) or `FirstImproveSearch()`.
See GreedyRandomizedConstructor
Example: Knapsack Problem
import Metaheuristics as MH
# define type for saving knapsack problem instance
# Knapsack problem instance: per-item profits and weights, plus the knapsack capacity.
struct KPInstance
    profit    # profit of each item
    weight    # weight of each item
    capacity  # total weight capacity of the knapsack
end
# Cost of each candidate item for the greedy randomized constructor.
# The constructor minimizes non-negative costs, so the profit/weight ratio is
# inverted: the most profitable-per-weight item receives cost zero.
function MH.compute_cost(candidates, constructor, instance::KPInstance)
    # Ratio profit / weight
    ratio = instance.profit[candidates] ./ instance.weight[candidates]
    # It is assumed minimizing non-negative costs
    maximum(ratio) .- ratio
end
# Solve a small knapsack instance with GRASP and report the optimality GAP.
function main()
    # problem instance (10 items)
    profit = [55, 10, 47, 5, 4, 50, 8, 61, 85, 87]
    weight = [95, 4, 60, 32, 23, 72, 80, 62, 65, 46]
    capacity = 269
    optimum = 295  # known optimal profit for this instance
    instance = KPInstance(profit, weight, capacity)
    # objective function and search space
    f, search_space, _ = MH.TestProblems.knapsack(profit, weight, capacity)
    candidates = rand(search_space)
    # define each GRASP component
    # (removed an unused `MH.TwoOptNeighborhood()` that was never passed to GRASP)
    constructor = MH.GreedyRandomizedConstructor(; candidates, instance, α = 0.95)
    local_search = MH.BestImproveSearch()
    grasp = MH.GRASP(; constructor, local_search)
    # optimize and display results
    result = MH.optimize(f, search_space, grasp)
    display(result)
    # compute GAP: relative distance to the known optimum (0 means optimal)
    fx = -minimum(result)  # knapsack maximizes profit, so the minimized value is negated
    GAP = (optimum - fx) / optimum
    println("The GAP is ", GAP)
end
main()
Metaheuristics.GreedyRandomizedConstructor
— Type
GreedyRandomizedConstructor(;candidates, instance, α, rng)
This structure implements the Greedy Randomized Constructor.
- `candidates`: vector of candidates (items to choose from to form a solution).
- `instance`: a user-defined structure for saving data on the problem instance.
- `α`: controls the randomness (0 for the deterministic greedy heuristic, 1 for pure random).
- `rng`: stores the random number generator.
This constructor assumes that the user overloads the compute_cost
function:
import Metaheuristics as MH
# Placeholder type representing a user-defined problem instance.
struct MyInstance end

# User-provided method computing the cost of each candidate for MyInstance.
function MH.compute_cost(candidates, constructor, instance::MyInstance)
    # ... return a non-negative cost per candidate (the constructor minimizes cost)
end
See also compute_cost
and GRASP
Metaheuristics.construct
— Function
construct(constructor)
Constructor procedure for GRASP.
See also GreedyRandomizedConstructor
, compute_cost
and GRASP
Metaheuristics.compute_cost
— Function
compute_cost(candidates, constructor, instance)
Compute the cost for each candidate in candidates
, for given constructor
and provided instance
.
See also GreedyRandomizedConstructor
and GRASP
VND
Variable Neighborhood Descent.
Metaheuristics.VND
— Type
VND(;initial, neighborhood, local_search,...)
Variable Neighborhood Descent.
Allowed parameters
- `initial`: use this parameter to provide an initial solution (optional).
- `neighborhood`: the neighborhood structure.
- `local_search`: the local search strategy, `BestImproveSearch()` (default) or `FirstImproveSearch()`.
Example: Knapsack Problem
import Metaheuristics as MH
# Neighborhood structure for the knapsack problem; `k` controls the length of
# the segment that is reversed to generate a neighbor.
struct MyKPNeighborhood <: MH.Neighborhood
    k::Int  # segment-length parameter of the neighborhood
end
function MH.neighborhood_structure(x, s::MyKPNeighborhood, i::Integer)
    # return the i-th neighbor around x, regarding s.k structure
    # no more neighbors once i runs past the length of x
    i > length(x) && return nothing
    # reverse, in place, the segment starting at i spanning at most s.k+1 elements
    reverse!(view(x, i:min(length(x), i+s.k)))
    x
end
# Solve a small knapsack instance with Variable Neighborhood Descent.
function main()
    # knapsack instance data (10 items)
    profit = [55, 10,47, 5, 4, 50, 8, 61, 85, 87]
    weight = [95, 4, 60, 32, 23, 72, 80, 62, 65, 46]
    capacity = 269
    # objective function and search space
    f, search_space, _ = MH.TestProblems.knapsack(profit, weight, capacity)
    # list the neighborhood structures (explored in the given order)
    neighborhood = [MyKPNeighborhood(1), MyKPNeighborhood(2), MyKPNeighborhood(3)]
    local_search = MH.BestImproveSearch()
    # instantiate VND
    vnd = MH.VND(;neighborhood, local_search)
    res = MH.optimize(f, search_space, vnd)
    display(res)
end
main()
VNS
General Variable Neighborhood Search.
Metaheuristics.VNS
— Type
VNS(;initial, neighborhood_shaking, neighborhood_local, local_search, neighborhood_change)
General Variable Neighborhood Search.
Allowed parameters
- `initial`: use this parameter to provide an initial solution (optional).
- `neighborhood_shaking`: neighborhood structure for the shaking step.
- `neighborhood_local`: neighborhood structure for the local search.
- `local_search`: the local search strategy, `BestImproveSearch()` (default) or `FirstImproveSearch()`.
- `neighborhood_change`: the procedure for changing among neighborhood structures (default `SequentialChange()`).
Example: Knapsack Problem
import Metaheuristics as MH
# Neighborhood structure for the knapsack problem; `k` controls the length of
# the segment that is reversed to generate a neighbor.
struct MyKPNeighborhood <: MH.Neighborhood
    k::Int  # segment-length parameter of the neighborhood
end
function MH.neighborhood_structure(x, s::MyKPNeighborhood, rng)
    # this method is required because the shaking procedure requests a random
    # neighbor, not the i-th one
    i = rand(rng, 1:length(x))
    # reverse, in place, the segment starting at i spanning at most s.k+1 elements
    reverse!(view(x, i:min(length(x), i+s.k)))
    x
end
function MH.neighborhood_structure(x, s::MyKPNeighborhood, i::Integer)
    # return the i-th neighbor around x, regarding s.k structure
    # no more neighbors once i runs past the length of x
    i > length(x) && return nothing
    # reverse, in place, the segment starting at i spanning at most s.k+1 elements
    reverse!(view(x, i:min(length(x), i+s.k)))
    x
end
# Solve a small knapsack instance with General Variable Neighborhood Search.
function main()
    # knapsack instance data (10 items)
    profit = [55, 10, 47, 5, 4, 50, 8, 61, 85, 87]
    weight = [95, 4, 60, 32, 23, 72, 80, 62, 65, 46]
    capacity = 269
    # objective function and search space
    f, search_space, _ = MH.TestProblems.knapsack(profit, weight, capacity)
    # neighborhood structures: larger segments for shaking, smaller for local search
    # (removed an unused local `nitems = length(weight)`)
    neighborhood_shaking = [MyKPNeighborhood(6), MyKPNeighborhood(5), MyKPNeighborhood(4)]
    neighborhood_local = [MyKPNeighborhood(3), MyKPNeighborhood(2), MyKPNeighborhood(1)]
    local_search = MH.BestImproveSearch()
    # instantiate VNS (local renamed from `vnd` to `vns` to match the algorithm)
    vns = MH.VNS(; neighborhood_shaking, neighborhood_local, local_search,
                 options = MH.Options(verbose = true))
    res = MH.optimize(f, search_space, vns)
    display(res)
end
main()
Mixed integer
A wrapper that enables an existing algorithm to handle mixed-integer problems.
Metaheuristics.MixedInteger
— Type
MixedInteger(algorithm; information = Information(), options = Options())
Create a wrapper to enable an algorithm
to handle mixed-integer problems. When MixedInteger
is used, the objective function will receive a dictionary with values in corresponding search space.
Example
# objective function: receives a Dict mapping each subspace name to its value vector
# (scalar arithmetic here, so the stray broadcast dot in `1 .- ...` was dropped)
f(solution) = sum(solution[:integer]) * (1 - sum(solution[:continuous])^2)
# search spaces
integer_space = BoxConstrainedSpace(-10ones(Int, 6), 10ones(Int, 6))
continuous_space = BoxConstrainedSpace(-ones(3), ones(3))
# mix spaces to form a mixed space
mixed_space = MixedSpace(:integer => integer_space, :continuous => continuous_space)
# wrap ECA to handle mixed-integer variables
mip_eca = MixedInteger(ECA(N = 100), options = Options(seed = 1))
result = optimize(f, mixed_space, mip_eca)
# convert resulting minimizer (which is a vector) into a dictionary
solution = Metaheuristics.vec_to_dict(minimizer(result), mixed_space)
display(solution)
output
Dict{Symbol, Vector} with 2 entries:
:continuous => [-1.0, -1.0, -1.0]
:integer => [10, 10, 10, 10, 10, 10]