From 70aae75978a13d53f6a569f8196efbcd0509fac3 Mon Sep 17 00:00:00 2001 From: ParasPuneetSingh Date: Thu, 10 Jul 2025 22:31:12 +0530 Subject: [PATCH 01/23] DAE based solvers --- docs/src/optimization_packages/ode.md | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/docs/src/optimization_packages/ode.md b/docs/src/optimization_packages/ode.md index f89d348dc..b3ab031bc 100644 --- a/docs/src/optimization_packages/ode.md +++ b/docs/src/optimization_packages/ode.md @@ -28,8 +28,8 @@ p = [] f_manual = OptimizationFunction(f, SciMLBase.NoAD(); grad = g!) prob_manual = OptimizationProblem(f_manual, x0) -opt = ODEGradientDescent(dt=0.01) -sol = solve(prob_manual, opt; maxiters=50_000) +opt = ODEGradientDescent() +sol = solve(prob_manual, opt; dt=0.01, maxiters=50_000) @show sol.u @show sol.objective @@ -39,7 +39,7 @@ sol = solve(prob_manual, opt; maxiters=50_000) All provided optimizers are **gradient-based local optimizers** that solve optimization problems by integrating gradient-based ODEs to convergence: -* `ODEGradientDescent(dt=...)` — performs basic gradient descent using the explicit Euler method. This is a simple and efficient method suitable for small-scale or well-conditioned problems. +* `ODEGradientDescent()` — performs basic gradient descent using the explicit Euler method. This is a simple and efficient method suitable for small-scale or well-conditioned problems. * `RKChebyshevDescent()` — uses the ROCK2 solver, a stabilized explicit Runge-Kutta method suitable for stiff problems. It allows larger step sizes while maintaining stability. @@ -47,7 +47,15 @@ All provided optimizers are **gradient-based local optimizers** that solve optim * `HighOrderDescent()` — applies Vern7, a high-order (7th-order) explicit Runge-Kutta method for even more accurate integration. This can be beneficial for problems requiring high precision. -You can also define a custom optimizer using the generic `ODEOptimizer(solver; dt=nothing)` constructor by supplying any ODE solver supported by [OrdinaryDiffEq.jl](https://docs.sciml.ai/DiffEqDocs/stable/solvers/ode_solve/). +## DAE-based Optimizers + +In addition to ODE-based optimizers, OptimizationODE.jl provides optimizers for differential-algebraic equation (DAE) constrained problems: + +* `DAEMassMatrix()` — uses the Rodas5 solver (from OrdinaryDiffEq.jl) for DAE problems with a mass matrix formulation. + +* `DAEIndexing()` — uses the IDA solver (from Sundials.jl) for DAE problems with index variable support. + +You can also define a custom optimizer using the generic `ODEOptimizer(solver)` or `DAEOptimizer(solver)` constructor by supplying any ODE or DAE solver supported by [OrdinaryDiffEq.jl](https://docs.sciml.ai/DiffEqDocs/stable/solvers/ode_solve/) or [Sundials.jl](https://github.com/SciML/Sundials.jl). 
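For reference, the patch above documents the `ODEOptimizer(solver)` and `DAEOptimizer(solver)` constructors but stops short of showing them in use. Below is a minimal sketch of how they might be called, mirroring the `ODEGradientDescent` example from the same page; the objective, gradient, starting point, and the `Tsit5`/`Rodas5` solver choices are illustrative assumptions, not taken from the patch.

```julia
using Optimization, OptimizationODE, OrdinaryDiffEq, SciMLBase

# Illustrative quadratic objective with a hand-written gradient.
f(x, p) = sum(abs2, x)
g!(G, x, p) = (G .= 2 .* x)

x0 = [2.0, -3.0]
optf = OptimizationFunction(f, SciMLBase.NoAD(); grad = g!)
prob = OptimizationProblem(optf, x0)

# Generic ODE-based optimizer: wrap any OrdinaryDiffEq ODE solver.
opt = ODEOptimizer(Tsit5())
sol = solve(prob, opt; dt = 0.01, maxiters = 50_000)

# The DAE-based constructors are assumed to follow the same call pattern:
# DAEOptimizer(Rodas5()), or the preconfigured DAEMassMatrix() / DAEIndexing().
```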
## DAE-based Optimizers From 433e2b2eb2211ff48e53e128abeb20a7f9dbdead Mon Sep 17 00:00:00 2001 From: ParasPuneetSingh Date: Fri, 11 Jul 2025 01:17:34 +0530 Subject: [PATCH 02/23] MOO tests and code updates --- lib/OptimizationBBO/src/OptimizationBBO.jl | 34 +++++++++++--- lib/OptimizationBBO/test/runtests.jl | 44 +++++++++++++++++++ lib/OptimizationEvolutionary/test/runtests.jl | 38 ++++++++++++++++ 3 files changed, 111 insertions(+), 5 deletions(-) diff --git a/lib/OptimizationBBO/src/OptimizationBBO.jl b/lib/OptimizationBBO/src/OptimizationBBO.jl index 57f874356..3c707f3da 100644 --- a/lib/OptimizationBBO/src/OptimizationBBO.jl +++ b/lib/OptimizationBBO/src/OptimizationBBO.jl @@ -59,9 +59,25 @@ function __map_optimizer_args(prob::Optimization.OptimizationCache, opt::BBO; if !isnothing(reltol) @warn "common reltol is currently not used by $(opt)" end - mapped_args = (; kwargs...) - mapped_args = (; mapped_args..., Method = opt.method, - SearchRange = [(prob.lb[i], prob.ub[i]) for i in 1:length(prob.lb)]) + + # Determine number of objectives for multi-objective problems + if isa(prob.f, MultiObjectiveOptimizationFunction) + num_objectives = length(prob.f.cost_prototype) + mapped_args = (; kwargs...) + mapped_args = (; mapped_args..., Method = opt.method, + SearchRange = [(prob.lb[i], prob.ub[i]) for i in 1:length(prob.lb)], + NumDimensions = length(prob.lb), + NumObjectives = num_objectives) + # FitnessScheme should be in opt, not the function + if hasproperty(opt, :FitnessScheme) + mapped_args = (; mapped_args..., FitnessScheme = opt.FitnessScheme) + end + else + mapped_args = (; kwargs...) + mapped_args = (; mapped_args..., Method = opt.method, + SearchRange = [(prob.lb[i], prob.ub[i]) for i in 1:length(prob.lb)], + NumDimensions = length(prob.lb)) + end if !isnothing(callback) mapped_args = (; mapped_args..., CallbackFunction = callback, @@ -151,8 +167,16 @@ function SciMLBase.__solve(cache::Optimization.OptimizationCache{ maxiters = Optimization._check_and_convert_maxiters(cache.solver_args.maxiters) maxtime = Optimization._check_and_convert_maxtime(cache.solver_args.maxtime) - _loss = function (θ) - cache.f(θ, cache.p) + + # Multi-objective: use out-of-place or in-place as appropriate + if isa(cache.f, MultiObjectiveOptimizationFunction) + if cache.f.iip + _loss = θ -> (cost = similar(cache.f.cost_prototype); cache.f.f(cost, θ, cache.p); cost) + else + _loss = θ -> cache.f.f(θ, cache.p) + end + else + _loss = θ -> cache.f(θ, cache.p) end opt_args = __map_optimizer_args(cache, cache.opt; diff --git a/lib/OptimizationBBO/test/runtests.jl b/lib/OptimizationBBO/test/runtests.jl index 1295465fc..a6aab5af2 100644 --- a/lib/OptimizationBBO/test/runtests.jl +++ b/lib/OptimizationBBO/test/runtests.jl @@ -34,6 +34,50 @@ using Test push!(loss_history, fitness) return false end + + @testset "In-place Multi-Objective Optimization" begin + function inplace_multi_obj!(cost, x, p) + cost[1] = sum(x .^ 2) + cost[2] = sum(x .^ 2 .- 10 .* cos.(2π .* x) .+ 10) + return nothing + end + u0 = [0.25, 0.25] + lb = [0.0, 0.0] + ub = [2.0, 2.0] + cost_prototype = zeros(2) + mof_inplace = MultiObjectiveOptimizationFunction(inplace_multi_obj!; cost_prototype=cost_prototype) + prob_inplace = Optimization.OptimizationProblem(mof_inplace, u0; lb=lb, ub=ub) + sol_inplace = solve(prob_inplace, opt, NumDimensions=2, FitnessScheme=ParetoFitnessScheme{2}(is_minimizing=true)) + @test sol_inplace ≠ nothing + @test length(sol_inplace.objective) == 2 + @test sol_inplace.objective[1] ≈ 6.9905986e-18 atol=1e-3 + @test 
sol_inplace.objective[2] ≈ 1.7763568e-15 atol=1e-3 + end + + @testset "Custom coalesce for Multi-Objective" begin + function multi_obj_tuple(x, p) + f1 = sum(x .^ 2) + f2 = sum(x .^ 2 .- 10 .* cos.(2π .* x) .+ 10) + return (f1, f2) + end + coalesce_sum(cost, x, p) = sum(cost) + mof_coalesce = MultiObjectiveOptimizationFunction(multi_obj_tuple; coalesce=coalesce_sum) + prob_coalesce = Optimization.OptimizationProblem(mof_coalesce, u0; lb=lb, ub=ub) + sol_coalesce = solve(prob_coalesce, opt, NumDimensions=2, FitnessScheme=ParetoFitnessScheme{2}(is_minimizing=true)) + @test sol_coalesce ≠ nothing + @test sol_coalesce.objective[1] ≈ 6.9905986e-18 atol=1e-3 + @test sol_coalesce.objective[2] ≈ 1.7763568e-15 atol=1e-3 + @test mof_coalesce.coalesce([1.0, 2.0], [0.0, 0.0], nothing) == 3.0 + end + + @testset "Error if in-place MultiObjectiveOptimizationFunction without cost_prototype" begin + function inplace_multi_obj_err!(cost, x, p) + cost[1] = sum(x .^ 2) + cost[2] = sum(x .^ 2 .- 10 .* cos.(2π .* x) .+ 10) + return nothing + end + @test_throws ArgumentError MultiObjectiveOptimizationFunction(inplace_multi_obj_err!) + end sol = solve(prob, BBO_adaptive_de_rand_1_bin_radiuslimited(), callback = cb) # println(fitness_progress_history) @test !isempty(fitness_progress_history) diff --git a/lib/OptimizationEvolutionary/test/runtests.jl b/lib/OptimizationEvolutionary/test/runtests.jl index 1bd810664..13c0f4fa3 100644 --- a/lib/OptimizationEvolutionary/test/runtests.jl +++ b/lib/OptimizationEvolutionary/test/runtests.jl @@ -40,6 +40,44 @@ Random.seed!(1234) if state.iter % 10 == 0 println(state.u) end + + @testset "In-place Multi-Objective Optimization" begin + function inplace_multi_obj!(cost, x, p) + cost[1] = sum(x .^ 2) + cost[2] = sum(x .^ 2 .- 10 .* cos.(2π .* x) .+ 10) + return nothing + end + u0 = [0.25, 0.25] + cost_prototype = zeros(2) + mof_inplace = MultiObjectiveOptimizationFunction(inplace_multi_obj!; cost_prototype=cost_prototype) + prob_inplace = OptimizationProblem(mof_inplace, u0) + sol_inplace = solve(prob_inplace, NSGA2()) + @test sol_inplace ≠ nothing + @test length(sol_inplace.objective) == 2 + end + + @testset "Custom coalesce for Multi-Objective" begin + function multi_obj_tuple(x, p) + f1 = sum(x .^ 2) + f2 = sum(x .^ 2 .- 10 .* cos.(2π .* x) .+ 10) + return (f1, f2) + end + coalesce_sum(cost, x, p) = sum(cost) + mof_coalesce = MultiObjectiveOptimizationFunction(multi_obj_tuple; coalesce=coalesce_sum) + prob_coalesce = OptimizationProblem(mof_coalesce, u0) + sol_coalesce = solve(prob_coalesce, NSGA2()) + @test sol_coalesce ≠ nothing + @test mof_coalesce.coalesce([1.0, 2.0], [0.0, 0.0], nothing) == 3.0 + end + + @testset "Error if in-place MultiObjectiveOptimizationFunction without cost_prototype" begin + function inplace_multi_obj_err!(cost, x, p) + cost[1] = sum(x .^ 2) + cost[2] = sum(x .^ 2 .- 10 .* cos.(2π .* x) .+ 10) + return nothing + end + @test_throws ArgumentError MultiObjectiveOptimizationFunction(inplace_multi_obj_err!) 
+ end return false end solve(prob, CMAES(μ = 40, λ = 100), callback = cb, maxiters = 100) From 4e1a5f397438f02b5f4b26d09048541c75573416 Mon Sep 17 00:00:00 2001 From: ParasPuneetSingh Date: Sun, 20 Jul 2025 19:22:49 +0530 Subject: [PATCH 03/23] import changes --- lib/OptimizationBBO/src/OptimizationBBO.jl | 5 ++- lib/OptimizationBBO/test/runtests.jl | 40 ++++++++-------------- 2 files changed, 16 insertions(+), 29 deletions(-) diff --git a/lib/OptimizationBBO/src/OptimizationBBO.jl b/lib/OptimizationBBO/src/OptimizationBBO.jl index 3c707f3da..8b44618f4 100644 --- a/lib/OptimizationBBO/src/OptimizationBBO.jl +++ b/lib/OptimizationBBO/src/OptimizationBBO.jl @@ -132,8 +132,7 @@ function SciMLBase.__solve(cache::Optimization.OptimizationCache{ LC, UC, S, - O <: - BBO, + O <: BBO, D, P, C @@ -170,7 +169,7 @@ function SciMLBase.__solve(cache::Optimization.OptimizationCache{ # Multi-objective: use out-of-place or in-place as appropriate if isa(cache.f, MultiObjectiveOptimizationFunction) - if cache.f.iip + if is_inplace(cache.f) _loss = θ -> (cost = similar(cache.f.cost_prototype); cache.f.f(cost, θ, cache.p); cost) else _loss = θ -> cache.f.f(θ, cache.p) diff --git a/lib/OptimizationBBO/test/runtests.jl b/lib/OptimizationBBO/test/runtests.jl index a6aab5af2..20b23da19 100644 --- a/lib/OptimizationBBO/test/runtests.jl +++ b/lib/OptimizationBBO/test/runtests.jl @@ -1,4 +1,5 @@ using OptimizationBBO, Optimization, BlackBoxOptim +using Optimization.SciMLBase using Optimization.SciMLBase: MultiObjectiveOptimizationFunction using Test @@ -35,17 +36,20 @@ using Test return false end + # Define the initial guess and bounds ONCE for all tests + u0 = [0.25, 0.25] + lb = [0.0, 0.0] + ub = [2.0, 2.0] + opt = OptimizationBBO.BBO_borg_moea() + @testset "In-place Multi-Objective Optimization" begin function inplace_multi_obj!(cost, x, p) cost[1] = sum(x .^ 2) cost[2] = sum(x .^ 2 .- 10 .* cos.(2π .* x) .+ 10) return nothing end - u0 = [0.25, 0.25] - lb = [0.0, 0.0] - ub = [2.0, 2.0] cost_prototype = zeros(2) - mof_inplace = MultiObjectiveOptimizationFunction(inplace_multi_obj!; cost_prototype=cost_prototype) + mof_inplace = MultiObjectiveOptimizationFunction{true}(inplace_multi_obj!, SciMLBase.NoAD(); cost_prototype=cost_prototype) prob_inplace = Optimization.OptimizationProblem(mof_inplace, u0; lb=lb, ub=ub) sol_inplace = solve(prob_inplace, opt, NumDimensions=2, FitnessScheme=ParetoFitnessScheme{2}(is_minimizing=true)) @test sol_inplace ≠ nothing @@ -60,24 +64,14 @@ using Test f2 = sum(x .^ 2 .- 10 .* cos.(2π .* x) .+ 10) return (f1, f2) end - coalesce_sum(cost, x, p) = sum(cost) - mof_coalesce = MultiObjectiveOptimizationFunction(multi_obj_tuple; coalesce=coalesce_sum) + mof_coalesce = MultiObjectiveOptimizationFunction{false}(multi_obj_tuple, SciMLBase.NoAD(); cost_prototype=zeros(2)) prob_coalesce = Optimization.OptimizationProblem(mof_coalesce, u0; lb=lb, ub=ub) sol_coalesce = solve(prob_coalesce, opt, NumDimensions=2, FitnessScheme=ParetoFitnessScheme{2}(is_minimizing=true)) @test sol_coalesce ≠ nothing @test sol_coalesce.objective[1] ≈ 6.9905986e-18 atol=1e-3 @test sol_coalesce.objective[2] ≈ 1.7763568e-15 atol=1e-3 - @test mof_coalesce.coalesce([1.0, 2.0], [0.0, 0.0], nothing) == 3.0 end - @testset "Error if in-place MultiObjectiveOptimizationFunction without cost_prototype" begin - function inplace_multi_obj_err!(cost, x, p) - cost[1] = sum(x .^ 2) - cost[2] = sum(x .^ 2 .- 10 .* cos.(2π .* x) .+ 10) - return nothing - end - @test_throws ArgumentError 
MultiObjectiveOptimizationFunction(inplace_multi_obj_err!) - end sol = solve(prob, BBO_adaptive_de_rand_1_bin_radiuslimited(), callback = cb) # println(fitness_progress_history) @test !isempty(fitness_progress_history) @@ -99,13 +93,7 @@ using Test maxtime = 5) end - # Define the initial guess and bounds - u0 = [0.25, 0.25] - lb = [0.0, 0.0] - ub = [2.0, 2.0] - - # Define the optimizer - opt = OptimizationBBO.BBO_borg_moea() + # ...existing code... @testset "Multi-Objective Optimization Tests" begin @@ -117,7 +105,7 @@ using Test return (f1, f2) end - mof_1 = MultiObjectiveOptimizationFunction(multi_obj_func_1) + mof_1 = MultiObjectiveOptimizationFunction{false}(multi_obj_func_1, SciMLBase.NoAD(); cost_prototype=zeros(2)) prob_1 = Optimization.OptimizationProblem(mof_1, u0; lb = lb, ub = ub) sol_1 = solve(prob_1, opt, NumDimensions = 2, FitnessScheme = ParetoFitnessScheme{2}(is_minimizing = true)) @@ -144,7 +132,7 @@ using Test return false end - mof_1 = MultiObjectiveOptimizationFunction(multi_obj_func_1) + mof_1 = MultiObjectiveOptimizationFunction{false}(multi_obj_func_1, SciMLBase.NoAD(); cost_prototype=zeros(2)) prob_1 = Optimization.OptimizationProblem(mof_1, u0; lb = lb, ub = ub) sol_1 = solve(prob_1, opt, NumDimensions = 2, FitnessScheme = ParetoFitnessScheme{2}(is_minimizing = true), @@ -170,7 +158,7 @@ using Test return (f1, f2) end - mof_2 = MultiObjectiveOptimizationFunction(multi_obj_func_2) + mof_2 = MultiObjectiveOptimizationFunction{false}(multi_obj_func_2, SciMLBase.NoAD(); cost_prototype=zeros(2)) prob_2 = Optimization.OptimizationProblem(mof_2, u0; lb = lb, ub = ub) sol_2 = solve(prob_2, opt, NumDimensions = 2, FitnessScheme = ParetoFitnessScheme{2}(is_minimizing = true)) @@ -190,7 +178,7 @@ using Test return (f1, f2) end - mof_3 = MultiObjectiveOptimizationFunction(multi_obj_func_3) + mof_3 = SciMLBase.MultiObjectiveOptimizationFunction{false}(multi_obj_func_3, SciMLBase.NoAD(); cost_prototype=zeros(2)) prob_3 = Optimization.OptimizationProblem(mof_3, u0; lb = lb, ub = ub) sol_3 = solve(prob_3, opt, NumDimensions = 2, FitnessScheme = ParetoFitnessScheme{2}(is_minimizing = true)) From 59c2254be26fc108953e13c57c7e670757931cca Mon Sep 17 00:00:00 2001 From: Paras Puneet Singh <136245940+ParasPuneetSingh@users.noreply.github.com> Date: Sat, 7 Sep 2024 17:16:09 +0530 Subject: [PATCH 04/23] MOO Docs updated blackboxoptim.md Added documentation for MOO in BBO --- .../src/optimization_packages/blackboxoptim.md | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/docs/src/optimization_packages/blackboxoptim.md b/docs/src/optimization_packages/blackboxoptim.md index ca5b2385b..738add663 100644 --- a/docs/src/optimization_packages/blackboxoptim.md +++ b/docs/src/optimization_packages/blackboxoptim.md @@ -67,3 +67,21 @@ prob = Optimization.OptimizationProblem(f, x0, p, lb = [-1.0, -1.0], ub = [1.0, sol = solve(prob, BBO_adaptive_de_rand_1_bin_radiuslimited(), maxiters = 100000, maxtime = 1000.0) ``` + +## Multi-objective optimization +The optimizer for Multi-Objective Optimization is `BBO_borg_moea()`. Your fitness function should return a tuple of the objective values and you should indicate the fitness scheme to be (typically) Pareto fitness and specify the number of objectives. 
Otherwise, the use is similar, here is an example: + +```@example MOO-BBO +using OptimizationBBO, Optimization, BlackBoxOptim +using SciMLBase: MultiObjectiveOptimizationFunction +u0 = [0.25, 0.25] +opt = OptimizationBBO.BBO_borg_moea() +function multi_obj_func_2(x, p) + f1 = (1.0 - x[1])^2 + 100.0 * (x[2] - x[1]^2)^2 # Rosenbrock function + f2 = -20.0 * exp(-0.2 * sqrt(0.5 * (x[1]^2 + x[2]^2))) - exp(0.5 * (cos(2π * x[1]) + cos(2π * x[2]))) + exp(1) + 20.0 # Ackley function + return (f1, f2) +end +mof = MultiObjectiveOptimizationFunction(multi_obj_func_2) +prob = Optimization.OptimizationProblem(mof_2, u0; lb = [0.0, 0.0], ub = [2.0, 2.0]) +sol = solve(prob_2, opt, NumDimensions=2, FitnessScheme=ParetoFitnessScheme{2}(is_minimizing=true)) +``` From 8b347fccadf097df3f1c7870898cc35616fb4e75 Mon Sep 17 00:00:00 2001 From: Paras Puneet Singh <136245940+ParasPuneetSingh@users.noreply.github.com> Date: Fri, 20 Sep 2024 21:52:51 +0530 Subject: [PATCH 05/23] Update evolutionary.md MOO docs update. --- .../src/optimization_packages/evolutionary.md | 20 +++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/docs/src/optimization_packages/evolutionary.md b/docs/src/optimization_packages/evolutionary.md index 9fa582c74..9ed2a2645 100644 --- a/docs/src/optimization_packages/evolutionary.md +++ b/docs/src/optimization_packages/evolutionary.md @@ -41,3 +41,23 @@ f = OptimizationFunction(rosenbrock) prob = Optimization.OptimizationProblem(f, x0, p, lb = [-1.0, -1.0], ub = [1.0, 1.0]) sol = solve(prob, Evolutionary.CMAES(μ = 40, λ = 100)) ``` + +## Multi-objective optimization +The Rosenbrock and Ackley functions can be optimized using the `Evolutionary.NSGA2()` as follows: + +```@example MOO-Evolutionary +using Optimization, OptimizationEvolutionary +function func(x, p=nothing)::Vector{Float64} + f1 = (1.0 - x[1])^2 + 100.0 * (x[2] - x[1]^2)^2 # Rosenbrock function + f2 = -20.0 * exp(-0.2 * sqrt(0.5 * (x[1]^2 + x[2]^2))) - exp(0.5 * (cos(2π * x[1]) + cos(2π * x[2]))) + exp(1) + 20.0 # Ackley function + return [f1, f2] +end +initial_guess = [1.0, 1.0] +function gradient_multi_objective(x, p=nothing) + ForwardDiff.jacobian(func, x) +end +obj_func = MultiObjectiveOptimizationFunction(func, jac=gradient_multi_objective) +algorithm = OptimizationEvolutionary.NSGA2() +problem = OptimizationProblem(obj_func, initial_guess) +result = solve(problem, algorithm) +``` From 8f46ec098234a9338c401ebf4df3bd40716f8e65 Mon Sep 17 00:00:00 2001 From: Paras Puneet Singh <136245940+ParasPuneetSingh@users.noreply.github.com> Date: Fri, 20 Sep 2024 21:59:09 +0530 Subject: [PATCH 06/23] Update metaheuristics.md MOO docs update. --- .../optimization_packages/metaheuristics.md | 51 +++++++++++++++++++ 1 file changed, 51 insertions(+) diff --git a/docs/src/optimization_packages/metaheuristics.md b/docs/src/optimization_packages/metaheuristics.md index ae1694bcc..53b6750ad 100644 --- a/docs/src/optimization_packages/metaheuristics.md +++ b/docs/src/optimization_packages/metaheuristics.md @@ -70,3 +70,54 @@ sol = solve(prob, ECA(), use_initial = true, maxiters = 100000, maxtime = 1000.0 ### With Constraint Equations While `Metaheuristics.jl` supports such constraints, `Optimization.jl` currently does not relay these constraints. 
+ + +## Multi-objective optimization +The zdt1 functions can be optimized using the `Metaheuristics.jl` as follows: + +```@example MOO-Evolutionary +using Optimization, OptimizationEvolutionary +function zdt1(x) + f1 = x[1] + g = 1 + 9 * mean(x[2:end]) + h = 1 - sqrt(f1 / g) + f2 = g * h + # In this example, we have no constraints + gx = [0.0] # Inequality constraints (not used) + hx = [0.0] # Equality constraints (not used) + return [f1, f2], gx, hx +end +multi_obj_fun = MultiObjectiveOptimizationFunction((x, p) -> zdt1(x)) + +# Define the problem bounds +lower_bounds = [0.0, 0.0, 0.0] +upper_bounds = [1.0, 1.0, 1.0] + +# Define the initial guess +initial_guess = [0.5, 0.5, 0.5] + +# Create the optimization problem +prob = OptimizationProblem(multi_obj_fun, initial_guess; lb = lower_bounds, ub = upper_bounds) + +nobjectives = 2 +npartitions = 100 + +# reference points (Das and Dennis's method) +weights = gen_ref_dirs(nobjectives, npartitions) + +# Choose the algorithm as required. +alg1 = NSGA2() +alg2 = NSGA3() +alg3 = SPEA2() +alg4 = CCMO(NSGA2(N=100, p_m=0.001)) +alg5 = MOEAD_DE(weights, options=Options(debug=false, iterations = 250)) +alg6 = SMS_EMOA() + +# Solve the problem +sol1 = solve(prob, alg1; maxiters = 100, use_initial = true) +sol2 = solve(prob, alg2; maxiters = 100, use_initial = true) +sol3 = solve(prob, alg3; maxiters = 100, use_initial = true) +sol4 = solve(prob, alg4) +sol5 = solve(prob, alg5; maxiters = 100, use_initial = true) +sol6 = solve(prob, alg6; maxiters = 100, use_initial = true) +``` From d5f3ea582a6f45754648e88cb5cbfa6e0a25e2dc Mon Sep 17 00:00:00 2001 From: Vaibhav Kumar Dixit Date: Sat, 21 Sep 2024 21:10:29 -0400 Subject: [PATCH 07/23] Update docs/src/optimization_packages/blackboxoptim.md --- docs/src/optimization_packages/blackboxoptim.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/optimization_packages/blackboxoptim.md b/docs/src/optimization_packages/blackboxoptim.md index 738add663..010bd63fe 100644 --- a/docs/src/optimization_packages/blackboxoptim.md +++ b/docs/src/optimization_packages/blackboxoptim.md @@ -69,7 +69,7 @@ sol = solve(prob, BBO_adaptive_de_rand_1_bin_radiuslimited(), maxiters = 100000, ``` ## Multi-objective optimization -The optimizer for Multi-Objective Optimization is `BBO_borg_moea()`. Your fitness function should return a tuple of the objective values and you should indicate the fitness scheme to be (typically) Pareto fitness and specify the number of objectives. Otherwise, the use is similar, here is an example: +The optimizer for Multi-Objective Optimization is `BBO_borg_moea()`. Your objective function should return a tuple of the objective values and you should indicate the fitness scheme to be (typically) Pareto fitness and specify the number of objectives. Otherwise, the use is similar, here is an example: ```@example MOO-BBO using OptimizationBBO, Optimization, BlackBoxOptim From 716a62646b59a6179795be767f761f38d57cd28f Mon Sep 17 00:00:00 2001 From: Paras Puneet Singh <136245940+ParasPuneetSingh@users.noreply.github.com> Date: Sun, 22 Sep 2024 10:22:10 +0530 Subject: [PATCH 08/23] Update Project.toml updated project.toml for the docs. 
--- docs/Project.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/Project.toml b/docs/Project.toml index 5363260e7..ae06a11e5 100644 --- a/docs/Project.toml +++ b/docs/Project.toml @@ -46,6 +46,7 @@ SymbolicAnalysis = "4297ee4d-0239-47d8-ba5d-195ecdf594fe" Symbolics = "0c5d862f-8b57-4792-8d23-62f2024744c7" Tracker = "9f7883ad-71c0-57eb-9f7f-b5c9e6d3789c" Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f" +BlackBoxOptim = "a134a8b2-14d6-55f6-9291-3336d3ab0209" [compat] AmplNLWriter = "1" From 2aadfdd27d508b4136fb1abd624fa950d4637ccc Mon Sep 17 00:00:00 2001 From: Paras Puneet Singh <136245940+ParasPuneetSingh@users.noreply.github.com> Date: Sun, 22 Sep 2024 10:23:06 +0530 Subject: [PATCH 09/23] Update Project.toml Added compat for BBO. --- docs/Project.toml | 1 + 1 file changed, 1 insertion(+) diff --git a/docs/Project.toml b/docs/Project.toml index ae06a11e5..b18fd76fe 100644 --- a/docs/Project.toml +++ b/docs/Project.toml @@ -94,3 +94,4 @@ SymbolicAnalysis = "0.3" Symbolics = "6" Tracker = ">= 0.2" Zygote = ">= 0.5" +BlackBoxOptim = "0.6" From 01c2957a90916993dcf8bff3cc5ca634e4fa4db3 Mon Sep 17 00:00:00 2001 From: Vaibhav Kumar Dixit Date: Sun, 22 Sep 2024 13:28:57 -0400 Subject: [PATCH 10/23] Update docs/src/optimization_packages/blackboxoptim.md --- docs/src/optimization_packages/blackboxoptim.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/optimization_packages/blackboxoptim.md b/docs/src/optimization_packages/blackboxoptim.md index 010bd63fe..ae512ca9f 100644 --- a/docs/src/optimization_packages/blackboxoptim.md +++ b/docs/src/optimization_packages/blackboxoptim.md @@ -82,6 +82,6 @@ function multi_obj_func_2(x, p) return (f1, f2) end mof = MultiObjectiveOptimizationFunction(multi_obj_func_2) -prob = Optimization.OptimizationProblem(mof_2, u0; lb = [0.0, 0.0], ub = [2.0, 2.0]) +prob = Optimization.OptimizationProblem(mof, u0; lb = [0.0, 0.0], ub = [2.0, 2.0]) sol = solve(prob_2, opt, NumDimensions=2, FitnessScheme=ParetoFitnessScheme{2}(is_minimizing=true)) ``` From 2caa0c153b2cb1cc7e2a1c3bc1792cef920b05eb Mon Sep 17 00:00:00 2001 From: Paras Puneet Singh <136245940+ParasPuneetSingh@users.noreply.github.com> Date: Tue, 24 Sep 2024 02:06:07 +0530 Subject: [PATCH 11/23] Update metaheuristics.md Added required packages for MOO docs. --- docs/src/optimization_packages/metaheuristics.md | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/src/optimization_packages/metaheuristics.md b/docs/src/optimization_packages/metaheuristics.md index 53b6750ad..13fd60651 100644 --- a/docs/src/optimization_packages/metaheuristics.md +++ b/docs/src/optimization_packages/metaheuristics.md @@ -76,7 +76,7 @@ While `Metaheuristics.jl` supports such constraints, `Optimization.jl` currently The zdt1 functions can be optimized using the `Metaheuristics.jl` as follows: ```@example MOO-Evolutionary -using Optimization, OptimizationEvolutionary +using Optimization, OptimizationEvolutionary,OptimizationMetaheuristics, Metaheuristics function zdt1(x) f1 = x[1] g = 1 + 9 * mean(x[2:end]) @@ -103,7 +103,7 @@ nobjectives = 2 npartitions = 100 # reference points (Das and Dennis's method) -weights = gen_ref_dirs(nobjectives, npartitions) +weights = Metaheuristics.gen_ref_dirs(nobjectives, npartitions) # Choose the algorithm as required. 
alg1 = NSGA2() From 968e81abb0e90ee5166934f9e4835a98637e1f4c Mon Sep 17 00:00:00 2001 From: Paras Puneet Singh <136245940+ParasPuneetSingh@users.noreply.github.com> Date: Tue, 24 Sep 2024 02:07:13 +0530 Subject: [PATCH 12/23] Update Project.toml added required packages for MOO --- docs/Project.toml | 3 +++ 1 file changed, 3 insertions(+) diff --git a/docs/Project.toml b/docs/Project.toml index b18fd76fe..6b09dd362 100644 --- a/docs/Project.toml +++ b/docs/Project.toml @@ -47,6 +47,7 @@ Symbolics = "0c5d862f-8b57-4792-8d23-62f2024744c7" Tracker = "9f7883ad-71c0-57eb-9f7f-b5c9e6d3789c" Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f" BlackBoxOptim = "a134a8b2-14d6-55f6-9291-3336d3ab0209" +Metaheuristics = "bcdb8e00-2c21-11e9-3065-2b553b22f898" [compat] AmplNLWriter = "1" @@ -95,3 +96,5 @@ Symbolics = "6" Tracker = ">= 0.2" Zygote = ">= 0.5" BlackBoxOptim = "0.6" +Metaheuristics = "3" + From a5f2efef45d948d2b6f597ddb3137c9e5b503003 Mon Sep 17 00:00:00 2001 From: Paras Puneet Singh <136245940+ParasPuneetSingh@users.noreply.github.com> Date: Tue, 24 Sep 2024 02:08:41 +0530 Subject: [PATCH 13/23] Update blackboxoptim.md Corrected function names for MOO docs. --- docs/src/optimization_packages/blackboxoptim.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/src/optimization_packages/blackboxoptim.md b/docs/src/optimization_packages/blackboxoptim.md index ae512ca9f..ce17f3b1b 100644 --- a/docs/src/optimization_packages/blackboxoptim.md +++ b/docs/src/optimization_packages/blackboxoptim.md @@ -76,12 +76,12 @@ using OptimizationBBO, Optimization, BlackBoxOptim using SciMLBase: MultiObjectiveOptimizationFunction u0 = [0.25, 0.25] opt = OptimizationBBO.BBO_borg_moea() -function multi_obj_func_2(x, p) +function multi_obj_func(x, p) f1 = (1.0 - x[1])^2 + 100.0 * (x[2] - x[1]^2)^2 # Rosenbrock function f2 = -20.0 * exp(-0.2 * sqrt(0.5 * (x[1]^2 + x[2]^2))) - exp(0.5 * (cos(2π * x[1]) + cos(2π * x[2]))) + exp(1) + 20.0 # Ackley function return (f1, f2) end -mof = MultiObjectiveOptimizationFunction(multi_obj_func_2) +mof = MultiObjectiveOptimizationFunction(multi_obj_func) prob = Optimization.OptimizationProblem(mof, u0; lb = [0.0, 0.0], ub = [2.0, 2.0]) -sol = solve(prob_2, opt, NumDimensions=2, FitnessScheme=ParetoFitnessScheme{2}(is_minimizing=true)) +sol = solve(prob, opt, NumDimensions=2, FitnessScheme=ParetoFitnessScheme{2}(is_minimizing=true)) ``` From ce74e9c5f833530b6a8cf30d9758aca78d5813d1 Mon Sep 17 00:00:00 2001 From: Paras Puneet Singh <136245940+ParasPuneetSingh@users.noreply.github.com> Date: Thu, 26 Sep 2024 00:13:37 +0530 Subject: [PATCH 14/23] Update evolutionary.md Removed unnecessary FowardDiff function. 
--- docs/src/optimization_packages/evolutionary.md | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/docs/src/optimization_packages/evolutionary.md b/docs/src/optimization_packages/evolutionary.md index 9ed2a2645..03da1985a 100644 --- a/docs/src/optimization_packages/evolutionary.md +++ b/docs/src/optimization_packages/evolutionary.md @@ -53,10 +53,7 @@ function func(x, p=nothing)::Vector{Float64} return [f1, f2] end initial_guess = [1.0, 1.0] -function gradient_multi_objective(x, p=nothing) - ForwardDiff.jacobian(func, x) -end -obj_func = MultiObjectiveOptimizationFunction(func, jac=gradient_multi_objective) +obj_func = MultiObjectiveOptimizationFunction(func) algorithm = OptimizationEvolutionary.NSGA2() problem = OptimizationProblem(obj_func, initial_guess) result = solve(problem, algorithm) From d7b7819a7d401529902e7a81be02ebee8612c161 Mon Sep 17 00:00:00 2001 From: Vaibhav Kumar Dixit Date: Thu, 26 Sep 2024 20:17:50 -0400 Subject: [PATCH 15/23] Update docs/src/optimization_packages/metaheuristics.md --- docs/src/optimization_packages/metaheuristics.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/optimization_packages/metaheuristics.md b/docs/src/optimization_packages/metaheuristics.md index 13fd60651..3a6f37fdf 100644 --- a/docs/src/optimization_packages/metaheuristics.md +++ b/docs/src/optimization_packages/metaheuristics.md @@ -75,7 +75,7 @@ While `Metaheuristics.jl` supports such constraints, `Optimization.jl` currently ## Multi-objective optimization The zdt1 functions can be optimized using the `Metaheuristics.jl` as follows: -```@example MOO-Evolutionary +```@example MOO-Metaheuristics using Optimization, OptimizationEvolutionary,OptimizationMetaheuristics, Metaheuristics function zdt1(x) f1 = x[1] From 42542788844a1653d2235473bbc34eeb8d0c8cbf Mon Sep 17 00:00:00 2001 From: Vaibhav Kumar Dixit Date: Fri, 25 Oct 2024 23:13:53 -0400 Subject: [PATCH 16/23] Update docs/src/optimization_packages/evolutionary.md --- docs/src/optimization_packages/evolutionary.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/src/optimization_packages/evolutionary.md b/docs/src/optimization_packages/evolutionary.md index 03da1985a..86cf27b71 100644 --- a/docs/src/optimization_packages/evolutionary.md +++ b/docs/src/optimization_packages/evolutionary.md @@ -46,7 +46,7 @@ sol = solve(prob, Evolutionary.CMAES(μ = 40, λ = 100)) The Rosenbrock and Ackley functions can be optimized using the `Evolutionary.NSGA2()` as follows: ```@example MOO-Evolutionary -using Optimization, OptimizationEvolutionary +using Optimization, OptimizationEvolutionary, Evolutionary function func(x, p=nothing)::Vector{Float64} f1 = (1.0 - x[1])^2 + 100.0 * (x[2] - x[1]^2)^2 # Rosenbrock function f2 = -20.0 * exp(-0.2 * sqrt(0.5 * (x[1]^2 + x[2]^2))) - exp(0.5 * (cos(2π * x[1]) + cos(2π * x[2]))) + exp(1) + 20.0 # Ackley function From 37357a19d17cb33c655f43046f56799d61ffe4f5 Mon Sep 17 00:00:00 2001 From: Paras Puneet Singh <136245940+ParasPuneetSingh@users.noreply.github.com> Date: Sun, 10 Nov 2024 00:54:47 +0530 Subject: [PATCH 17/23] Update metaheuristics.md Added the package for the algorithms. 
--- docs/src/optimization_packages/metaheuristics.md | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/docs/src/optimization_packages/metaheuristics.md b/docs/src/optimization_packages/metaheuristics.md index 3a6f37fdf..2a7445ad2 100644 --- a/docs/src/optimization_packages/metaheuristics.md +++ b/docs/src/optimization_packages/metaheuristics.md @@ -106,12 +106,12 @@ npartitions = 100 weights = Metaheuristics.gen_ref_dirs(nobjectives, npartitions) # Choose the algorithm as required. -alg1 = NSGA2() -alg2 = NSGA3() -alg3 = SPEA2() -alg4 = CCMO(NSGA2(N=100, p_m=0.001)) -alg5 = MOEAD_DE(weights, options=Options(debug=false, iterations = 250)) -alg6 = SMS_EMOA() +alg1 = Metaheuristics.NSGA2() +alg2 = Metaheuristics.NSGA3() +alg3 = Metaheuristics.SPEA2() +alg4 = Metaheuristics.CCMO(NSGA2(N=100, p_m=0.001)) +alg5 = Metaheuristics.MOEAD_DE(weights, options=Options(debug=false, iterations = 250)) +alg6 = Metaheuristics.SMS_EMOA() # Solve the problem sol1 = solve(prob, alg1; maxiters = 100, use_initial = true) From beaa8a1f3eaf84ccf41371cb19f6541470037aa3 Mon Sep 17 00:00:00 2001 From: Paras Puneet Singh <136245940+ParasPuneetSingh@users.noreply.github.com> Date: Sun, 10 Nov 2024 01:43:48 +0530 Subject: [PATCH 18/23] Update Project.toml Added evolutionary to the package. --- docs/Project.toml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/docs/Project.toml b/docs/Project.toml index 6b09dd362..1a2caad91 100644 --- a/docs/Project.toml +++ b/docs/Project.toml @@ -48,6 +48,7 @@ Tracker = "9f7883ad-71c0-57eb-9f7f-b5c9e6d3789c" Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f" BlackBoxOptim = "a134a8b2-14d6-55f6-9291-3336d3ab0209" Metaheuristics = "bcdb8e00-2c21-11e9-3065-2b553b22f898" +Evolutionary = "86b6b26d-c046-49b6-aa0b-5f0f74682bd6" [compat] AmplNLWriter = "1" @@ -97,4 +98,5 @@ Tracker = ">= 0.2" Zygote = ">= 0.5" BlackBoxOptim = "0.6" Metaheuristics = "3" +Evolutionary = "0.11" From 3ca4208d657fe8e6ddc0cb242ce76d34bed2ef80 Mon Sep 17 00:00:00 2001 From: Paras Puneet Singh <136245940+ParasPuneetSingh@users.noreply.github.com> Date: Sun, 10 Nov 2024 02:04:28 +0530 Subject: [PATCH 19/23] Update metaheuristics.md updated algorithm call. --- .../optimization_packages/metaheuristics.md | 22 ++++++------------- 1 file changed, 7 insertions(+), 15 deletions(-) diff --git a/docs/src/optimization_packages/metaheuristics.md b/docs/src/optimization_packages/metaheuristics.md index 2a7445ad2..891d4514c 100644 --- a/docs/src/optimization_packages/metaheuristics.md +++ b/docs/src/optimization_packages/metaheuristics.md @@ -105,19 +105,11 @@ npartitions = 100 # reference points (Das and Dennis's method) weights = Metaheuristics.gen_ref_dirs(nobjectives, npartitions) -# Choose the algorithm as required. 
-alg1 = Metaheuristics.NSGA2()
-alg2 = Metaheuristics.NSGA3()
-alg3 = Metaheuristics.SPEA2()
-alg4 = Metaheuristics.CCMO(NSGA2(N=100, p_m=0.001))
-alg5 = Metaheuristics.MOEAD_DE(weights, options=Options(debug=false, iterations = 250))
-alg6 = Metaheuristics.SMS_EMOA()
-
-# Solve the problem
-sol1 = solve(prob, alg1; maxiters = 100, use_initial = true)
-sol2 = solve(prob, alg2; maxiters = 100, use_initial = true)
-sol3 = solve(prob, alg3; maxiters = 100, use_initial = true)
-sol4 = solve(prob, alg4)
-sol5 = solve(prob, alg5; maxiters = 100, use_initial = true)
-sol6 = solve(prob, alg6; maxiters = 100, use_initial = true)
+# Choose the algorithm and solve the problem
+sol1 = solve(prob, NSGA2(); maxiters = 100, use_initial = true)
+sol2 = solve(prob, NSGA3(); maxiters = 100, use_initial = true)
+sol3 = solve(prob, SPEA2(); maxiters = 100, use_initial = true)
+sol4 = solve(prob, CCMO(NSGA2(N=100, p_m=0.001)))
+sol5 = solve(prob, MOEAD_DE(weights, options=Options(debug=false, iterations = 250)); maxiters = 100, use_initial = true)
+sol6 = solve(prob, SMS_EMOA(); maxiters = 100, use_initial = true)
 ```

From abad46a73000bf380122988870c5c7ba41bed0b0 Mon Sep 17 00:00:00 2001
From: Paras Puneet Singh <136245940+ParasPuneetSingh@users.noreply.github.com>
Date: Sun, 10 Nov 2024 02:06:09 +0530
Subject: [PATCH 20/23] Update blackboxoptim.md

Correction of changing tuple to vector.

---
 docs/src/optimization_packages/blackboxoptim.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/src/optimization_packages/blackboxoptim.md b/docs/src/optimization_packages/blackboxoptim.md
index 010bd63fe..85f17cf93 100644
--- a/docs/src/optimization_packages/blackboxoptim.md
+++ b/docs/src/optimization_packages/blackboxoptim.md
@@ -69,7 +69,7 @@ sol = solve(prob, BBO_adaptive_de_rand_1_bin_radiuslimited(), maxiters = 100000,
 ```
 
 ## Multi-objective optimization
-The optimizer for Multi-Objective Optimization is `BBO_borg_moea()`. Your objective function should return a tuple of the objective values and you should indicate the fitness scheme to be (typically) Pareto fitness and specify the number of objectives. Otherwise, the use is similar, here is an example:
+The optimizer for Multi-Objective Optimization is `BBO_borg_moea()`. Your objective function should return a vector of the objective values and you should indicate the fitness scheme to be (typically) Pareto fitness and specify the number of objectives. Otherwise, the use is similar, here is an example:

From 9df333efb53bb96bff5df5aacf833a22401874f2 Mon Sep 17 00:00:00 2001
From: Paras Puneet Singh <136245940+ParasPuneetSingh@users.noreply.github.com>
Date: Sun, 10 Nov 2024 10:09:50 +0530
Subject: [PATCH 21/23] Update metaheuristics.md

corrected algorithm calls.
--- docs/src/optimization_packages/metaheuristics.md | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/docs/src/optimization_packages/metaheuristics.md b/docs/src/optimization_packages/metaheuristics.md index 891d4514c..f95ad505a 100644 --- a/docs/src/optimization_packages/metaheuristics.md +++ b/docs/src/optimization_packages/metaheuristics.md @@ -106,10 +106,10 @@ npartitions = 100 weights = Metaheuristics.gen_ref_dirs(nobjectives, npartitions) # Choose the algorithm and solve the problem -sol1 = solve(prob, NSGA2(); maxiters = 100, use_initial = true) -sol2 = solve(prob, NSGA3(); maxiters = 100, use_initial = true) -sol3 = solve(prob, SPEA2(); maxiters = 100, use_initial = true) -sol4 = solve(prob, CCMO(NSGA2(N=100, p_m=0.001))) -sol5 = solve(prob, MOEAD_DE(weights, options=Options(debug=false, iterations = 250)); maxiters = 100, use_initial = true) -sol6 = solve(prob, SMS_EMOA(); maxiters = 100, use_initial = true) +sol1 = solve(prob, Metaheuristics.NSGA2(); maxiters = 100, use_initial = true) +sol2 = solve(prob, Metaheuristics.NSGA3(); maxiters = 100, use_initial = true) +sol3 = solve(prob, Metaheuristics.SPEA2(); maxiters = 100, use_initial = true) +sol4 = solve(prob, Metaheuristics.CCMO(NSGA2(N=100, p_m=0.001))) +sol5 = solve(prob, Metaheuristics.MOEAD_DE(weights, options=Options(debug=false, iterations = 250)); maxiters = 100, use_initial = true) +sol6 = solve(prob, Metaheuristics.SMS_EMOA(); maxiters = 100, use_initial = true) ``` From ce656a5c28bb52845b49acbc5ae15c6ad5cda4da Mon Sep 17 00:00:00 2001 From: Paras Puneet Singh <136245940+ParasPuneetSingh@users.noreply.github.com> Date: Thu, 30 Jan 2025 00:05:48 +0530 Subject: [PATCH 22/23] Update OptimizationBBO.jl Adding argument mapping for num_dimensions and fitness_scheme. --- lib/OptimizationBBO/src/OptimizationBBO.jl | 37 ++++++++++++++++++---- 1 file changed, 31 insertions(+), 6 deletions(-) diff --git a/lib/OptimizationBBO/src/OptimizationBBO.jl b/lib/OptimizationBBO/src/OptimizationBBO.jl index 8b44618f4..c39465edf 100644 --- a/lib/OptimizationBBO/src/OptimizationBBO.jl +++ b/lib/OptimizationBBO/src/OptimizationBBO.jl @@ -55,6 +55,8 @@ function __map_optimizer_args(prob::Optimization.OptimizationCache, opt::BBO; abstol::Union{Number, Nothing} = nothing, reltol::Union{Number, Nothing} = nothing, verbose::Bool = false, + num_dimensions::Union{Number, Nothing} = nothing, + fitness_scheme::Union{String, Nothing} = nothing, kwargs...) if !isnothing(reltol) @warn "common reltol is currently not used by $(opt)" @@ -102,6 +104,16 @@ function __map_optimizer_args(prob::Optimization.OptimizationCache, opt::BBO; mapped_args = (; mapped_args..., TraceMode = :silent) end + if isa(prob.f, MultiObjectiveOptimizationFunction) + if isnothing(num_dimensions) && isnothing(fitness_scheme) + mapped_args = (; mapped_args..., NumDimensions = 2, FitnessScheme = BlackBoxOptim.ParetoFitnessScheme{2}(is_minimizing=true)) + elseif isnothing(num_dimensions) + mapped_args = (; mapped_args..., NumDimensions = 2, FitnessScheme = fitness_scheme) + elseif isnothing(fitness_scheme) + mapped_args = (; mapped_args..., NumDimensions = num_dimensions, FitnessScheme = BlackBoxOptim.ParetoFitnessScheme{2}(is_minimizing=true)) + end + end + return mapped_args end @@ -178,12 +190,25 @@ function SciMLBase.__solve(cache::Optimization.OptimizationCache{ _loss = θ -> cache.f(θ, cache.p) end - opt_args = __map_optimizer_args(cache, cache.opt; - callback = cache.callback === Optimization.DEFAULT_CALLBACK ? 
- nothing : _cb, - cache.solver_args..., - maxiters = maxiters, - maxtime = maxtime) + if isa(cache.f, MultiObjectiveOptimizationFunction) + opt_args = __map_optimizer_args(cache, cache.opt; + callback = cache.callback === Optimization.DEFAULT_CALLBACK && + cache.data === Optimization.DEFAULT_DATA ? + nothing : _cb, + cache.solver_args..., + maxiters = maxiters, + maxtime = maxtime, + num_dimensions = isnothing(cache.num_dimensions) ? nothing : cache.num_dimensions, + fitness_scheme = isnothing(cache.fitness_scheme) ? nothing : cache.fitness_scheme) + else + opt_args = __map_optimizer_args(cache, cache.opt; + callback = cache.callback === Optimization.DEFAULT_CALLBACK && + cache.data === Optimization.DEFAULT_DATA ? + nothing : _cb, + cache.solver_args..., + maxiters = maxiters, + maxtime = maxtime) + end opt_setup = BlackBoxOptim.bbsetup(_loss; opt_args...) From 55a4fb98da9050e8f48d6992067c55fcace38b37 Mon Sep 17 00:00:00 2001 From: Paras Puneet Singh <136245940+ParasPuneetSingh@users.noreply.github.com> Date: Thu, 30 Jan 2025 00:08:17 +0530 Subject: [PATCH 23/23] Update runtests.jl syntax change for num_dimensions and fitness_scheme passing in solve(). --- lib/OptimizationBBO/test/runtests.jl | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/lib/OptimizationBBO/test/runtests.jl b/lib/OptimizationBBO/test/runtests.jl index 20b23da19..1a6938d48 100644 --- a/lib/OptimizationBBO/test/runtests.jl +++ b/lib/OptimizationBBO/test/runtests.jl @@ -107,8 +107,8 @@ using Test mof_1 = MultiObjectiveOptimizationFunction{false}(multi_obj_func_1, SciMLBase.NoAD(); cost_prototype=zeros(2)) prob_1 = Optimization.OptimizationProblem(mof_1, u0; lb = lb, ub = ub) - sol_1 = solve(prob_1, opt, NumDimensions = 2, - FitnessScheme = ParetoFitnessScheme{2}(is_minimizing = true)) + sol_1 = solve(prob_1, opt, num_dimensions = 2, + fitness_scheme = ParetoFitnessScheme{2}(is_minimizing = true)) @test sol_1 ≠ nothing println("Solution for Sphere and Rastrigin: ", sol_1) @@ -160,8 +160,8 @@ using Test mof_2 = MultiObjectiveOptimizationFunction{false}(multi_obj_func_2, SciMLBase.NoAD(); cost_prototype=zeros(2)) prob_2 = Optimization.OptimizationProblem(mof_2, u0; lb = lb, ub = ub) - sol_2 = solve(prob_2, opt, NumDimensions = 2, - FitnessScheme = ParetoFitnessScheme{2}(is_minimizing = true)) + sol_2 = solve(prob_2, opt, num_dimensions = 2, + fitness_scheme = ParetoFitnessScheme{2}(is_minimizing = true)) @test sol_2 ≠ nothing println("Solution for Rosenbrock and Ackley: ", sol_2) @@ -180,8 +180,8 @@ using Test mof_3 = SciMLBase.MultiObjectiveOptimizationFunction{false}(multi_obj_func_3, SciMLBase.NoAD(); cost_prototype=zeros(2)) prob_3 = Optimization.OptimizationProblem(mof_3, u0; lb = lb, ub = ub) - sol_3 = solve(prob_3, opt, NumDimensions = 2, - FitnessScheme = ParetoFitnessScheme{2}(is_minimizing = true)) + sol_3 = solve(prob_3, opt, num_dimensions = 2, + fitness_scheme = ParetoFitnessScheme{2}(is_minimizing = true)) @test sol_3 ≠ nothing println("Solution for ZDT1: ", sol_3)
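Taken together, [PATCH 22/23] and [PATCH 23/23] change how the multi-objective options reach BlackBoxOptim: `num_dimensions` and `fitness_scheme` are passed through `solve` and mapped onto `NumDimensions`/`FitnessScheme`, while in-place objectives must carry a `cost_prototype`. A minimal end-to-end sketch assembled from the test code in these patches follows; the constructor and keyword names come from this branch rather than a released API, and the sphere/Rastrigin objectives are the same pair used in the tests.

```julia
using OptimizationBBO, Optimization, BlackBoxOptim
using Optimization.SciMLBase
using Optimization.SciMLBase: MultiObjectiveOptimizationFunction

# Two objectives (sphere and Rastrigin), written in-place as in the tests.
function multi_obj!(cost, x, p)
    cost[1] = sum(x .^ 2)
    cost[2] = sum(x .^ 2 .- 10 .* cos.(2π .* x) .+ 10)
    return nothing
end

u0 = [0.25, 0.25]
lb = [0.0, 0.0]
ub = [2.0, 2.0]

# In-place objectives require a cost_prototype so the number of objectives is known.
mof = MultiObjectiveOptimizationFunction{true}(multi_obj!, SciMLBase.NoAD();
    cost_prototype = zeros(2))
prob = Optimization.OptimizationProblem(mof, u0; lb = lb, ub = ub)

# num_dimensions / fitness_scheme are forwarded to BlackBoxOptim's
# NumDimensions / FitnessScheme by the updated __map_optimizer_args.
sol = solve(prob, OptimizationBBO.BBO_borg_moea(),
    num_dimensions = 2,
    fitness_scheme = ParetoFitnessScheme{2}(is_minimizing = true))

sol.objective  # two-element vector of objective values at the returned point
```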