MLJLinearModels.jl
Test proxgrad with gamma=0
Following the discussion in https://github.com/JuliaAI/MLJLinearModels.jl/pull/138 (specifically https://github.com/JuliaAI/MLJLinearModels.jl/pull/138#issuecomment-1411411410), there shouldn't be an error when ISTA/FISTA is used with gamma=0, but it would be good to check.
It might be even better to add a shortcut for that case, so that the prox operator is just the identity (and we effectively just do gradient descent); see the sketch below.
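
For reference, here is a minimal, self-contained sketch of ISTA with such a shortcut. This is not the package's implementation, and the names (soft_threshold, ista, gamma, eta) are made up for illustration: when gamma == 0 the soft-thresholding prox is skipped, and the iteration is exactly plain gradient descent on the smooth loss.

using LinearAlgebra

# soft-thresholding, the prox operator of t * ||.||_1
soft_threshold(z, t) = sign.(z) .* max.(abs.(z) .- t, 0.0)

# hypothetical ISTA for 0.5 * ||X*theta - y||^2 + gamma * ||theta||_1
function ista(X, y; gamma=0.0, eta=1e-3, niter=10_000)
    theta = zeros(size(X, 2))
    for _ in 1:niter
        grad = X' * (X * theta .- y)          # gradient of the smooth part
        z = theta .- eta .* grad              # plain gradient step
        # proposed shortcut: with gamma == 0 the prox is the identity
        theta = gamma == 0 ? z : soft_threshold(z, eta * gamma)
    end
    return theta
end

X = randn(100, 5); y = X * [1.0, -2.0, 0.0, 0.5, 0.0] .+ 0.1 .* randn(100)
ista(X, y; gamma=0.0)   # reduces to gradient descent on the least-squares loss
ista(X, y; gamma=5.0)   # genuine lasso-style iteration with soft-thresholding
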
What is certainly true is that FISTA cannot be used with the option penalty=:l2 in any model that supports that option:
using MLJLinearModels
using MLJBase
models_with_penalty_option = filter(models("MLJLinearModels")) do m
    :penalty in m.hyperparameters
end
# (name = HuberRegressor, package_name = MLJLinearModels, ... )
# (name = LADRegressor, package_name = MLJLinearModels, ... )
# (name = LogisticClassifier, package_name = MLJLinearModels, ... )
# (name = MultinomialClassifier, package_name = MLJLinearModels, ... )
# (name = QuantileRegressor, package_name = MLJLinearModels, ... )
# (name = RobustRegressor, package_name = MLJLinearModels, ... )
model = RobustRegressor(penalty=:l2, solver=FISTA())
MLJBase.fit(model, 0, make_regression()...)
# ERROR: MethodError: no method matching smooth_fg!(::GeneralizedLinearRegression{RobustLoss{HuberRho{0.1}}, ScaledPenalty{L2Penalty}}, ::Matrix{Float64}, ::Vector{Float64}, ::NamedTuple{(:n, :n2, :n3, :p, :dims), Tuple{Vector{Float64}, Vector{Float64}, Vector{Float64}, Vector{Float64}, Tuple{Int64, Int64, Int64}}})
# Closest candidates are:
# smooth_fg!(::GeneralizedLinearRegression{L2Loss, <:Union{CompositePenalty, ScaledPenalty{L1Penalty}}}, ::Any, ::Any, ::Any) at ~/.julia/packages/MLJLinearModels/TXgHx/src/glr/d_l2loss.jl:63
# smooth_fg!(::GeneralizedLinearRegression{LogisticLoss, <:Union{CompositePenalty, ScaledPenalty{L1Penalty}}}, ::Any, ::Any, ::Any) at ~/.julia/packages/MLJLinearModels/TXgHx/src/glr/d_logistic.jl:136
# smooth_fg!(::GeneralizedLinearRegression{<:MultinomialLoss, <:Union{CompositePenalty, ScaledPenalty{L1Penalty}}}, ::Any, ::Any, ::Any) at ~/.julia/packages/MLJLinearModels/TXgHx/src/glr/d_logistic.jl:253
# ...
# Stacktrace:
# [1] _fit(glr::GeneralizedLinearRegression{RobustLoss{HuberRho{0.1}}, ScaledPenalty{L2Penalty}}, solver::ProxGrad, X::Matrix{Float64}, y::Vector{Float64}, scratch::NamedTuple{(:n, :n2, :n3, :p, :dims), Tuple{Vector{Float64}, Vector{Float64}, Vector{Float64}, Vector{Float64}, Tuple{Int64, Int64, Int64}}})
# @ MLJLinearModels ~/.julia/packages/MLJLinearModels/TXgHx/src/fit/proxgrad.jl:23
# [2] fit(glr::GeneralizedLinearRegression{RobustLoss{HuberRho{0.1}}, ScaledPenalty{L2Penalty}}, X::Matrix{Float64}, y::Vector{Float64}; solver::ProxGrad)
# @ MLJLinearModels ~/.julia/packages/MLJLinearModels/TXgHx/src/fit/default.jl:41
# [3] fit(m::RobustRegressor, verb::Int64, X::Tables.MatrixTable{Matrix{Float64}}, y::Vector{Float64})
# @ MLJLinearModels ~/.julia/packages/MLJLinearModels/TXgHx/src/mlj/interface.jl:41
# [4] top-level scope
# @ REPL[53]:1
All the listed models throw the same error.
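
As for the gamma=0 check requested above, a test along the following lines might do, using the package's direct (non-MLJ) API. This is only a sketch and assumes that a zero lambda in LassoRegression is the case the proxgrad gamma=0 discussion refers to: the FISTA fit should run without error and agree with the unpenalized least-squares fit.

using MLJLinearModels, Test, Random

Random.seed!(1)
X = randn(50, 3)
y = X * [1.0, -1.0, 0.5] .+ 0.1 .* randn(50)

lasso0 = LassoRegression(0.0)                     # zero L1 strength, i.e. gamma = 0
theta_fista = fit(lasso0, X, y; solver=FISTA())   # should not error
theta_ols   = fit(LinearRegression(), X, y)       # unpenalized reference fit
@test theta_fista ≈ theta_ols atol = 1e-3
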