From 03c27085ee5db6c582337f8a8b093d8f9ad193f5 Mon Sep 17 00:00:00 2001
From: Vaibhav Dixit
Date: Wed, 11 Sep 2024 13:46:30 -0400
Subject: [PATCH] more fixes

---
 docs/src/index.md                                |  4 ----
 lib/OptimizationManopt/src/OptimizationManopt.jl | 12 +++++++++---
 lib/OptimizationOptimJL/test/runtests.jl         |  2 +-
 lib/OptimizationPRIMA/src/OptimizationPRIMA.jl   | 13 +++++++++----
 src/sophia.jl                                    |  2 +-
 test/minibatch.jl                                |  6 +++---
 6 files changed, 23 insertions(+), 16 deletions(-)

diff --git a/docs/src/index.md b/docs/src/index.md
index a905e5439..c1cda2d47 100644
--- a/docs/src/index.md
+++ b/docs/src/index.md
@@ -203,20 +203,16 @@ versioninfo() # hide
 ```@raw html
 </details>
 ```
-
 ```@raw html
 <details><summary>A more complete overview of all dependencies and their versions is also provided.</summary>
 ```
-
 ```@example
 using Pkg # hide
 Pkg.status(; mode = PKGMODE_MANIFEST) # hide
 ```
-
 ```@raw html
 </details>
 ```
-
 ```@eval
 using TOML
 using Markdown
diff --git a/lib/OptimizationManopt/src/OptimizationManopt.jl b/lib/OptimizationManopt/src/OptimizationManopt.jl
index 7ec58d004..7a2027920 100644
--- a/lib/OptimizationManopt/src/OptimizationManopt.jl
+++ b/lib/OptimizationManopt/src/OptimizationManopt.jl
@@ -13,9 +13,6 @@ internal state.
 abstract type AbstractManoptOptimizer end
 
 SciMLBase.supports_opt_cache_interface(opt::AbstractManoptOptimizer) = true
 
-SciMLBase.requiresgradient(opt::Union{GradientDescentOptimizer, ConjugateGradientDescentOptimizer, QuasiNewtonOptimizer, ConvexBundleOptimizer, FrankWolfeOptimizer}) = true
-SciMLBase.requireshessian(opt::Union{AdaptiveRegularizationCubicOptimizer, TrustRegionsOptimizer}) = true
-
 function __map_optimizer_args!(cache::OptimizationCache,
         opt::AbstractManoptOptimizer;
@@ -329,6 +326,15 @@ function call_manopt_optimizer(M::ManifoldsBase.AbstractManifold,
 end
 
 ## Optimization.jl stuff
+function SciMLBase.requiresgradient(opt::Union{
+        GradientDescentOptimizer, ConjugateGradientDescentOptimizer,
+        QuasiNewtonOptimizer, ConvexBundleOptimizer, FrankWolfeOptimizer})
+    true
+end
+function SciMLBase.requireshessian(opt::Union{
+        AdaptiveRegularizationCubicOptimizer, TrustRegionsOptimizer})
+    true
+end
 
 function build_loss(f::OptimizationFunction, prob, cb)
     function (::AbstractManifold, θ)
diff --git a/lib/OptimizationOptimJL/test/runtests.jl b/lib/OptimizationOptimJL/test/runtests.jl
index 06c9c10dc..20bb0176f 100644
--- a/lib/OptimizationOptimJL/test/runtests.jl
+++ b/lib/OptimizationOptimJL/test/runtests.jl
@@ -1,6 +1,6 @@
 using OptimizationOptimJL, OptimizationOptimJL.Optim, Optimization, ForwardDiff, Zygote, ReverseDiff.
-    Random, ModelingToolkit, Optimization.OptimizationBase.DifferentiationInterface
+Random, ModelingToolkit, Optimization.OptimizationBase.DifferentiationInterface
 
 using Test
 
 struct CallbackTester
diff --git a/lib/OptimizationPRIMA/src/OptimizationPRIMA.jl b/lib/OptimizationPRIMA/src/OptimizationPRIMA.jl
index e928b4401..a9ce1f0f5 100644
--- a/lib/OptimizationPRIMA/src/OptimizationPRIMA.jl
+++ b/lib/OptimizationPRIMA/src/OptimizationPRIMA.jl
@@ -15,8 +15,7 @@ SciMLBase.supports_opt_cache_interface(::PRIMASolvers) = true
 SciMLBase.allowsconstraints(::Union{LINCOA, COBYLA}) = true
 SciMLBase.allowsbounds(opt::Union{BOBYQA, LINCOA, COBYLA}) = true
 SciMLBase.requiresconstraints(opt::COBYLA) = true
-SciMLBase.requiresgradient(opt::Union{BOBYQA, LINCOA, COBYLA}) = true
-SciMLBase.requiresconsjac(opt::Union{LINCOA, COBYLA}) = true
+SciMLBase.requiresconsjac(opt::COBYLA) = true
 SciMLBase.requiresconshess(opt::COBYLA) = true
 
 function Optimization.OptimizationCache(prob::SciMLBase.OptimizationProblem,
@@ -34,8 +33,14 @@ function Optimization.OptimizationCache(prob::SciMLBase.OptimizationProblem,
         throw("We evaluate the jacobian and hessian of the constraints once to automatically detect
              linear and nonlinear constraints, please provide a valid AD backend for using COBYLA.")
     else
-        f = Optimization.instantiate_function(
-            prob.f, reinit_cache.u0, prob.f.adtype, reinit_cache.p, num_cons)
+        if opt isa COBYLA
+            f = Optimization.instantiate_function(
+                prob.f, reinit_cache.u0, prob.f.adtype, reinit_cache.p, num_cons,
+                cons_j = true, cons_h = true)
+        else
+            f = Optimization.instantiate_function(
+                prob.f, reinit_cache.u0, prob.f.adtype, reinit_cache.p, num_cons)
+        end
     end
 
     return Optimization.OptimizationCache(f, reinit_cache, prob.lb, prob.ub, prob.lcons,
diff --git a/src/sophia.jl b/src/sophia.jl
index 00b6b9ebe..cd17e0f69 100644
--- a/src/sophia.jl
+++ b/src/sophia.jl
@@ -78,7 +78,7 @@ function SciMLBase.__solve(cache::OptimizationCache{
     for _ in 1:maxiters
         for (i, d) in enumerate(data)
             f.grad(gₜ, θ, d)
-            x = cache.f(θ, cache.p, d...)
+            x = cache.f(θ, d)
             opt_state = Optimization.OptimizationState(; iter = i,
                 u = θ,
                 objective = first(x),
diff --git a/test/minibatch.jl b/test/minibatch.jl
index 2a755e36f..a5317e4a3 100644
--- a/test/minibatch.jl
+++ b/test/minibatch.jl
@@ -59,9 +59,9 @@ optfun = OptimizationFunction(loss_adjoint,
 optprob = OptimizationProblem(optfun, pp, train_loader)
 
 sol = Optimization.solve(optprob,
-Optimization.Sophia(; η = 0.5,
-    λ = 0.0),
-maxiters = 1000)
+    Optimization.Sophia(; η = 0.5,
+        λ = 0.0),
+    maxiters = 1000)
 @test 10res1.objective < l1
 
 optfun = OptimizationFunction(loss_adjoint,
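
The src/sophia.jl and test/minibatch.jl hunks together pin down the minibatch calling convention: the data iterator is passed as the problem's `p`, and Sophia evaluates the user objective per batch as `cache.f(θ, d)` (and the gradient as `f.grad(gₜ, θ, d)`), so loss functions should accept `(θ, batch)`. A minimal sketch of that usage follows; the toy data, `loss`, and `loader` names are illustrative assumptions rather than code from this patch, and only the solver call mirrors the updated test.

```julia
using Optimization, Zygote, MLUtils

# Toy data streamed in minibatches; each batch is an (x, y) tuple from the loader.
x = rand(100)
y = 2 .* x .+ 0.1 .* randn(100)
loader = MLUtils.DataLoader((x, y); batchsize = 10)

# The objective takes (parameters, batch), matching the `cache.f(θ, d)` call in sophia.jl.
loss(θ, batch) = sum(abs2, batch[2] .- θ[1] .* batch[1])

optf = OptimizationFunction(loss, Optimization.AutoZygote())
prob = OptimizationProblem(optf, [0.0], loader)  # the data loader sits in the `p` slot
sol = solve(prob, Optimization.Sophia(; η = 0.5, λ = 0.0), maxiters = 1000)
```

Under this convention Sophia pulls one batch per inner iteration and calls the gradient and the objective with the same batch, which is what the corrected test/minibatch.jl exercises.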