Skip to content
1 change: 1 addition & 0 deletions .github/workflows/CI.yml
Original file line number Diff line number Diff line change
Expand Up @@ -24,6 +24,7 @@ jobs:
- OptimizationCMAEvolutionStrategy
- OptimizationEvolutionary
- OptimizationGCMAES
- OptimizationLBFGSB
- OptimizationIpopt
- OptimizationManopt
- OptimizationMetaheuristics
Expand Down
7 changes: 3 additions & 4 deletions Project.toml
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
name = "Optimization"
uuid = "7f7a1694-90dd-40f0-9382-eb1efda571ba"
version = "5.0.0"
version = "4.8.0"

[deps]
ADTypes = "47edcb42-4c32-4615-8424-f2b9edc5f35b"
Expand Down Expand Up @@ -39,7 +39,6 @@ Flux = "0.13, 0.14, 0.15, 0.16"
ForwardDiff = "0.10, 1"
Ipopt = "1"
IterTools = "1.3"
LBFGSB = "0.4.1"
LinearAlgebra = "1.10"
Logging = "1.10"
LoggingExtras = "0.4, 1"
Expand All @@ -48,6 +47,7 @@ MLUtils = "0.4"
ModelingToolkit = "10.23"
Mooncake = "0.4.138"
Optim = ">= 1.4.1"
Optimisers = ">= 0.2.5"
OptimizationBase = "2"
OptimizationMOI = "0.5"
OptimizationOptimJL = "0.4"
Expand All @@ -56,7 +56,7 @@ OrdinaryDiffEqTsit5 = "1"
Pkg = "1"
Printf = "1.10"
ProgressLogging = "0.1"
Random = "1.10"
Random = "1.10"
Reexport = "1.2"
ReverseDiff = "1"
SafeTestsets = "0.1"
Expand All @@ -67,7 +67,6 @@ Symbolics = "6"
TerminalLoggers = "0.1"
Test = "1.10"
Tracker = "0.2"
Optimisers = ">= 0.2.5"
Zygote = "0.6, 0.7"
julia = "1.10"

Expand Down
28 changes: 28 additions & 0 deletions lib/OptimizationLBFGSB/Project.toml
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
name = "OptimizationLBFGSB"
uuid = "22f7324a-a79d-40f2-bebe-3af60c77bd15"
authors = ["paramthakkar123 <paramthakkar864@gmail.com>"]
version = "0.1.0"

[deps]
DocStringExtensions = "ffbed154-4ef7-542d-bbb7-c09d3a79fcae"
LBFGSB = "5be7bae1-8223-5378-bac3-9e7378a2f6e6"
Optimization = "7f7a1694-90dd-40f0-9382-eb1efda571ba"
OptimizationBase = "bca83a33-5cc9-4baa-983d-23429ab6bcbb"

[extras]
ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210"
MLUtils = "f1d291b0-491e-4a28-83b9-f70985020b54"
Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
Zygote = "e88e6eb3-aa80-5325-afca-941959d7151f"

[compat]
DocStringExtensions = "0.9.5"
ForwardDiff = "1.0.1"
LBFGSB = "0.4.1"
MLUtils = "0.4.8"
Optimization = "4.4.0"
OptimizationBase = "2.10.0"
Zygote = "0.7.10"

[targets]
test = ["Test", "ForwardDiff", "MLUtils", "Zygote"]
46 changes: 29 additions & 17 deletions src/lbfgsb.jl → ...imizationLBFGSB/src/OptimizationLBFGSB.jl
Original file line number Diff line number Diff line change
@@ -1,4 +1,12 @@
using Optimization.SciMLBase, LBFGSB
module OptimizationLBFGSB

using Optimization
using DocStringExtensions
import LBFGSB as LBFGSBJL
using OptimizationBase.SciMLBase: OptimizationStats, OptimizationFunction
using OptimizationBase: ReturnCode
using OptimizationBase.LinearAlgebra: norm
using Optimization: deduce_retcode

"""
$(TYPEDEF)
Expand All @@ -12,7 +20,7 @@ References
- C. Zhu, R. H. Byrd and J. Nocedal. L-BFGS-B: Algorithm 778: L-BFGS-B, FORTRAN routines for large scale bound constrained optimization (1997), ACM Transactions on Mathematical Software, Vol 23, Num. 4, pp. 550 - 560.
- J.L. Morales and J. Nocedal. L-BFGS-B: Remark on Algorithm 778: L-BFGS-B, FORTRAN routines for large scale bound constrained optimization (2011), to appear in ACM Transactions on Mathematical Software.
"""
@kwdef struct LBFGS
@kwdef struct LBFGSB
m::Int = 10
τ = 0.5
γ = 10.0
Expand All @@ -24,21 +32,21 @@ References
end

@static if isdefined(SciMLBase, :supports_opt_cache_interface)
SciMLBase.supports_opt_cache_interface(::LBFGS) = true
SciMLBase.supports_opt_cache_interface(::LBFGSB) = true
end
@static if isdefined(OptimizationBase, :supports_opt_cache_interface)
OptimizationBase.supports_opt_cache_interface(::LBFGS) = true
OptimizationBase.supports_opt_cache_interface(::LBFGSB) = true
end
SciMLBase.allowsbounds(::LBFGS) = true
SciMLBase.requiresgradient(::LBFGS) = true
SciMLBase.allowsconstraints(::LBFGS) = true
SciMLBase.requiresconsjac(::LBFGS) = true
SciMLBase.allowsbounds(::LBFGSB) = true
SciMLBase.requiresgradient(::LBFGSB) = true
SciMLBase.allowsconstraints(::LBFGSB) = true
SciMLBase.requiresconsjac(::LBFGSB) = true

"""
    task_message_to_string(task::Vector{UInt8})

Convert the raw L-BFGS-B Fortran `task` character buffer into a `String`.

Note: `String(::Vector{UInt8})` takes ownership of the byte vector and
truncates it to zero length, so callers must not reuse `task` afterwards.
"""
task_message_to_string(task::Vector{UInt8}) = String(task)

function __map_optimizer_args(cache::Optimization.OptimizationCache, opt::LBFGS;
function __map_optimizer_args(cache::OptimizationCache, opt::LBFGSB;
callback = nothing,
maxiters::Union{Number, Nothing} = nothing,
maxtime::Union{Number, Nothing} = nothing,
Expand Down Expand Up @@ -91,7 +99,7 @@ function SciMLBase.__solve(cache::OptimizationCache{
UC,
S,
O <:
LBFGS,
LBFGSB,
D,
P,
C
Expand Down Expand Up @@ -130,7 +138,7 @@ function SciMLBase.__solve(cache::OptimizationCache{
cons_tmp[eq_inds] .= cons_tmp[eq_inds] - cache.lcons[eq_inds]
cons_tmp[ineq_inds] .= cons_tmp[ineq_inds] .- cache.ucons[ineq_inds]
opt_state = Optimization.OptimizationState(
u = θ, objective = x[1], p = cache.p, iter = iter_count[])
u = θ, objective = x[1])
if cache.callback(opt_state, x...)
error("Optimization halted by callback.")
end
Expand Down Expand Up @@ -172,11 +180,11 @@ function SciMLBase.__solve(cache::OptimizationCache{

if cache.lb === nothing
optimizer,
bounds = LBFGSB._opt_bounds(
bounds = LBFGSBJL._opt_bounds(
n, cache.opt.m, [-Inf for i in 1:n], [Inf for i in 1:n])
else
optimizer,
bounds = LBFGSB._opt_bounds(
bounds = LBFGSBJL._opt_bounds(
n, cache.opt.m, solver_kwargs.lb, solver_kwargs.ub)
end

Expand Down Expand Up @@ -209,7 +217,7 @@ function SciMLBase.__solve(cache::OptimizationCache{
end
end

stats = Optimization.OptimizationStats(; iterations = maxiters,
stats = OptimizationStats(; iterations = maxiters,
time = 0.0, fevals = maxiters, gevals = maxiters)
return SciMLBase.build_solution(
cache, cache.opt, res[2], cache.f(res[2], cache.p)[1],
Expand All @@ -220,7 +228,7 @@ function SciMLBase.__solve(cache::OptimizationCache{
x = cache.f(θ, cache.p)
iter_count[] += 1
opt_state = Optimization.OptimizationState(
u = θ, objective = x[1], p = cache.p, iter = iter_count[])
u = θ, objective = x[1])
if cache.callback(opt_state, x...)
error("Optimization halted by callback.")
end
Expand All @@ -231,11 +239,11 @@ function SciMLBase.__solve(cache::OptimizationCache{

if cache.lb === nothing
optimizer,
bounds = LBFGSB._opt_bounds(
bounds = LBFGSBJL._opt_bounds(
n, cache.opt.m, [-Inf for i in 1:n], [Inf for i in 1:n])
else
optimizer,
bounds = LBFGSB._opt_bounds(
bounds = LBFGSBJL._opt_bounds(
n, cache.opt.m, solver_kwargs.lb, solver_kwargs.ub)
end

Expand All @@ -261,3 +269,7 @@ function SciMLBase.__solve(cache::OptimizationCache{
retcode = opt_ret, original = optimizer)
end
end

export LBFGSB

end
57 changes: 57 additions & 0 deletions lib/OptimizationLBFGSB/test/runtests.jl
Original file line number Diff line number Diff line change
@@ -0,0 +1,57 @@
using OptimizationBase
using OptimizationBase: ReturnCode
using OptimizationBase.SciMLBase: OptimizationFunction, OptimizationProblem
using ForwardDiff, Zygote
using OptimizationLBFGSB
using MLUtils
using LBFGSB
using Test

@testset "OptimizationLBFGSB.jl" begin
    x0 = zeros(2)
    rosenbrock(x, p = nothing) = (1 - x[1])^2 + 100 * (x[2] - x[1]^2)^2
    l1 = rosenbrock(x0)

    # Unconstrained, unbounded solve with ForwardDiff-generated gradients.
    optf = OptimizationFunction(rosenbrock, OptimizationBase.AutoForwardDiff())
    prob = OptimizationProblem(optf, x0)
    @time res = solve(prob, OptimizationLBFGSB.LBFGSB(), maxiters = 100)
    @test res.retcode == ReturnCode.Success

    # Box-constrained solve on the same objective.
    prob = OptimizationProblem(optf, x0, lb = [-1.0, -1.0], ub = [1.0, 1.0])
    @time res = solve(prob, OptimizationLBFGSB.LBFGSB(), maxiters = 100)
    @test res.retcode == ReturnCode.Success

    # Nonlinear constraints, written in the in-place residual form
    # expected by OptimizationFunction's `cons` keyword.
    function con2_c(res, x, p)
        res .= [x[1]^2 + x[2]^2, (x[2] * sin(x[1]) + x[1]) - 5]
    end

    optf = OptimizationFunction(rosenbrock, OptimizationBase.AutoZygote(), cons = con2_c)
    prob = OptimizationProblem(optf, x0, lcons = [1.0, -Inf],
        ucons = [1.0, 0.0], lb = [-1.0, -1.0],
        ub = [1.0, 1.0])
    @time res = solve(prob, OptimizationLBFGSB.LBFGSB(), maxiters = 100)
    # FIX: this file never does `using SciMLBase`, so the previous
    # `SciMLBase.ReturnCode.Success` relied on a name not imported here.
    # Use the `ReturnCode` imported from OptimizationBase, consistent with
    # the earlier retcode checks in this testset.
    @test res.retcode == ReturnCode.Success

    # Polynomial fit: minimize a least-squares loss over polynomial
    # coefficients with one nonlinear constraint and box bounds.
    x0 = (-pi):0.001:pi
    y0 = sin.(x0)
    data = MLUtils.DataLoader((x0, y0), batchsize = 126)
    function loss(coeffs, data)
        ypred = [evalpoly(data[1][i], coeffs) for i in eachindex(data[1])]
        return sum(abs2, ypred .- data[2])
    end

    # Single equality-style constraint on the product of the first and
    # last coefficients.
    function cons1(res, coeffs, p = nothing)
        res[1] = coeffs[1] * coeffs[5] - 1
        return nothing
    end

    optf = OptimizationFunction(loss, AutoSparseForwardDiff(), cons = cons1)
    callback = (st, l) -> (@show l; return false)

    initpars = rand(5)
    l0 = optf(initpars, (x0, y0))
    prob = OptimizationProblem(optf, initpars, (x0, y0), lcons = [-Inf], ucons = [0.5],
        lb = [-10.0, -10.0, -10.0, -10.0, -10.0], ub = [10.0, 10.0, 10.0, 10.0, 10.0])
    opt1 = solve(prob, OptimizationLBFGSB.LBFGSB(), maxiters = 1000, callback = callback)
    # The solver should at least improve on the random initial loss.
    @test opt1.objective < l0
end
1 change: 0 additions & 1 deletion src/Optimization.jl
Original file line number Diff line number Diff line change
Expand Up @@ -22,7 +22,6 @@ export ObjSense, MaxSense, MinSense

include("utils.jl")
include("state.jl")
include("lbfgsb.jl")
include("sophia.jl")

export solve
Expand Down
29 changes: 0 additions & 29 deletions test/native.jl
Original file line number Diff line number Diff line change
@@ -1,31 +1,6 @@
using Optimization
using ForwardDiff, Zygote, ReverseDiff, FiniteDiff
using Test

x0 = zeros(2)
rosenbrock(x, p = nothing) = (1 - x[1])^2 + 100 * (x[2] - x[1]^2)^2
l1 = rosenbrock(x0)

optf = OptimizationFunction(rosenbrock, AutoForwardDiff())
prob = OptimizationProblem(optf, x0)
@time res = solve(prob, Optimization.LBFGS(), maxiters = 100)
@test res.retcode == Optimization.SciMLBase.ReturnCode.Success

prob = OptimizationProblem(optf, x0, lb = [-1.0, -1.0], ub = [1.0, 1.0])
@time res = solve(prob, Optimization.LBFGS(), maxiters = 100)
@test res.retcode == Optimization.SciMLBase.ReturnCode.Success

function con2_c(res, x, p)
res .= [x[1]^2 + x[2]^2, (x[2] * sin(x[1]) + x[1]) - 5]
end

optf = OptimizationFunction(rosenbrock, AutoZygote(), cons = con2_c)
prob = OptimizationProblem(optf, x0, lcons = [1.0, -Inf],
ucons = [1.0, 0.0], lb = [-1.0, -1.0],
ub = [1.0, 1.0])
@time res = solve(prob, Optimization.LBFGS(), maxiters = 100)
@test res.retcode == SciMLBase.ReturnCode.Success

using MLUtils, OptimizationOptimisers

x0 = (-pi):0.001:pi
Expand All @@ -46,10 +21,6 @@ callback = (st, l) -> (@show l; return false)

initpars = rand(5)
l0 = optf(initpars, (x0, y0))
prob = OptimizationProblem(optf, initpars, (x0, y0), lcons = [-Inf], ucons = [0.5],
lb = [-10.0, -10.0, -10.0, -10.0, -10.0], ub = [10.0, 10.0, 10.0, 10.0, 10.0])
opt1 = solve(prob, Optimization.LBFGS(), maxiters = 1000, callback = callback)
@test opt1.objective < l0

optf1 = OptimizationFunction(loss, AutoSparseForwardDiff())
prob1 = OptimizationProblem(optf1, rand(5), data)
Expand Down
Loading