
maximize(only_fg!(...), ...) doesn't work

Open · cossio opened this issue on Oct 15 '21 · 2 comments

using LinearAlgebra
using Optim: Optim, only_fg!, maximize, LBFGS

function fg!(F, G, x)
    # Objective f(x) = -dot(x, x) / 2, maximized at x = 0.
    if G !== nothing
        G .= -x  # in-place gradient of f
    end
    if F !== nothing
        return -dot(x, x) / 2  # value of f
    end
end
sol = maximize(only_fg!(fg!), randn(5), LBFGS())

MethodError: objects of type NLSolversBase.InplaceObjective{Nothing, typeof(fg!), Nothing, Nothing, Nothing} are not callable

Stacktrace:
 [1] (::Optim.var"#133#134"{NLSolversBase.InplaceObjective{Nothing, typeof(fg!), Nothing, Nothing, Nothing}})(x::Vector{Float64})
   @ Optim ~/.julia/packages/Optim/3K7JI/src/maximize.jl:29
 [2] finite_difference_gradient!(df::Vector{Float64}, f::Optim.var"#133#134"{NLSolversBase.InplaceObjective{Nothing, typeof(fg!), Nothing, Nothing, Nothing}}, x::Vector{Float64}, cache::FiniteDiff.GradientCache{Nothing, Nothing, Nothing, Vector{Float64}, Val{:central}(), Float64, Val{true}()}; relstep::Float64, absstep::Float64, dir::Bool)
   @ FiniteDiff ~/.julia/packages/FiniteDiff/msXcU/src/gradients.jl:273
 [3] finite_difference_gradient!(df::Vector{Float64}, f::Function, x::Vector{Float64}, cache::FiniteDiff.GradientCache{Nothing, Nothing, Nothing, Vector{Float64}, Val{:central}(), Float64, Val{true}()})
   @ FiniteDiff ~/.julia/packages/FiniteDiff/msXcU/src/gradients.jl:224
 [4] (::NLSolversBase.var"#g!#15"{Optim.var"#133#134"{NLSolversBase.InplaceObjective{Nothing, typeof(fg!), Nothing, Nothing, Nothing}}, FiniteDiff.GradientCache{Nothing, Nothing, Nothing, Vector{Float64}, Val{:central}(), Float64, Val{true}()}})(storage::Vector{Float64}, x::Vector{Float64})
   @ NLSolversBase ~/.julia/packages/NLSolversBase/GRQ1x/src/objective_types/oncedifferentiable.jl:57
 [5] (::NLSolversBase.var"#fg!#16"{Optim.var"#133#134"{NLSolversBase.InplaceObjective{Nothing, typeof(fg!), Nothing, Nothing, Nothing}}})(storage::Vector{Float64}, x::Vector{Float64})
   @ NLSolversBase ~/.julia/packages/NLSolversBase/GRQ1x/src/objective_types/oncedifferentiable.jl:61
 [6] value_gradient!!(obj::OnceDifferentiable{Float64, Vector{Float64}, Vector{Float64}}, x::Vector{Float64})
   @ NLSolversBase ~/.julia/packages/NLSolversBase/GRQ1x/src/interface.jl:82
 [7] initial_state(method::LBFGS{Nothing, LineSearches.InitialStatic{Float64}, LineSearches.HagerZhang{Float64, Base.RefValue{Bool}}, Optim.var"#17#19"}, options::Optim.Options{Float64, Nothing}, d::OnceDifferentiable{Float64, Vector{Float64}, Vector{Float64}}, initial_x::Vector{Float64})
   @ Optim ~/.julia/packages/Optim/3K7JI/src/multivariate/solvers/first_order/l_bfgs.jl:164
 [8] optimize
   @ ~/.julia/packages/Optim/3K7JI/src/multivariate/optimize/optimize.jl:35 [inlined]
 [9] #optimize#87
   @ ~/.julia/packages/Optim/3K7JI/src/multivariate/optimize/interface.jl:142 [inlined]
 [10] optimize
   @ ~/.julia/packages/Optim/3K7JI/src/multivariate/optimize/interface.jl:141 [inlined]
 [11] #maximize#132
   @ ~/.julia/packages/Optim/3K7JI/src/maximize.jl:30 [inlined]
 [12] maximize (repeats 2 times)
   @ ~/.julia/packages/Optim/3K7JI/src/maximize.jl:29 [inlined]
 [13] top-level scope
   @ In[35]:12
 [14] eval
   @ ./boot.jl:360 [inlined]
 [15] include_string(mapexpr::typeof(REPL.softscope), mod::Module, code::String, filename::String)
   @ Base ./loading.jl:1116
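
Frame [1] points at maximize.jl:29: maximize appears to wrap the objective in a closure and call it directly (to negate its value), but the InplaceObjective returned by only_fg!(fg!) is not a callable function, hence the MethodError.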

cossio commented on Oct 15 '21

In contrast, calling optimize on the hand-negated objective, instead of maximize, works:

using LinearAlgebra
using Optim: Optim, only_fg!, optimize, LBFGS

function fg!(F, G, x)
    # Negated objective -f(x) = dot(x, x) / 2; minimizing it maximizes f.
    if G !== nothing
        G .= x  # in-place gradient of -f
    end
    if F !== nothing
        return dot(x, x) / 2  # value of -f
    end
end
sol = optimize(only_fg!(fg!), randn(5), LBFGS()) # works fine
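
So a workaround for maximization is to minimize the hand-negated objective, as above, and flip signs when reading off the result; a minimal sketch:

xmax = Optim.minimizer(sol)  # the maximizer of the original f
fmax = -Optim.minimum(sol)   # the maximum of the original f (sign flipped back)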

cossio commented on Oct 15 '21

True, if you look at the code, it only supports plain f, g, etc. callables as input... I guess only_fg! support could be added: https://github.com/JuliaNLSolvers/Optim.jl/blob/master/src/maximize.jl
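
For reference, a minimal sketch of what such support might look like (negate_fg! is a hypothetical helper, not part of Optim's API): wrap the user's fg! so that both the returned value and the in-place gradient are negated, then hand the wrapped function to optimize as usual.

# Hypothetical helper, not part of Optim's API: wraps an fg! so that
# maximizing the original objective becomes minimizing the wrapped one.
function negate_fg!(fg!)
    return function (F, G, x)
        v = fg!(F, G, x)
        if G !== nothing
            G .*= -1  # flip the in-place gradient
        end
        return v === nothing ? nothing : -v
    end
end

# maximize could then dispatch along the lines of:
# optimize(only_fg!(negate_fg!(user_fg!)), x0, method)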

pkofod commented on Oct 20 '21