Commit

MTK doesn't have lag_h
Vaibhavdixit02 committed Sep 17, 2024
1 parent 6654e4b commit 5a177c6
Showing 3 changed files with 15 additions and 6 deletions.
15 changes: 12 additions & 3 deletions lib/OptimizationMOI/src/nlp.jl
@@ -113,8 +113,16 @@ function MOIOptimizationNLPCache(prob::OptimizationProblem,
     reinit_cache = OptimizationBase.ReInitCache(prob.u0, prob.p) # everything that can be changed via `reinit`

     num_cons = prob.ucons === nothing ? 0 : length(prob.ucons)
-    f = Optimization.instantiate_function(prob.f, reinit_cache, prob.f.adtype, num_cons;
-        g = true, h = true, cons_j = true, cons_vjp = true, lag_h = true)
+    if prob.f.adtype isa ADTypes.AutoSymbolics || (prob.f.adtype isa ADTypes.AutoSparse &&
+        prob.f.adtype.dense_ad isa ADTypes.AutoSymbolics)
+        f = Optimization.instantiate_function(
+            prob.f, reinit_cache, prob.f.adtype, num_cons;
+            g = true, h = true, cons_j = true, cons_h = true)
+    else
+        f = Optimization.instantiate_function(
+            prob.f, reinit_cache, prob.f.adtype, num_cons;
+            g = true, h = true, cons_j = true, cons_vjp = true, lag_h = true)
+    end
     T = eltype(prob.u0)
     n = length(prob.u0)
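The new branch above keeps symbolic backends off the Lagrangian-Hessian path: the ModelingToolkit/Symbolics backend does not provide lag_h, so for AutoSymbolics (plain or wrapped in AutoSparse) the cache requests per-constraint Hessians (cons_h) instead, while every other backend still gets cons_vjp and lag_h. A minimal sketch of that predicate, assuming only ADTypes.jl is loaded; use_cons_h is a hypothetical helper name, not part of OptimizationMOI:

```julia
using ADTypes

# Hypothetical helper mirroring the condition in MOIOptimizationNLPCache above.
use_cons_h(ad) =
    ad isa ADTypes.AutoSymbolics ||
    (ad isa ADTypes.AutoSparse && ad.dense_ad isa ADTypes.AutoSymbolics)

use_cons_h(ADTypes.AutoSymbolics())                      # true  -> request g, h, cons_j, cons_h
use_cons_h(ADTypes.AutoSparse(ADTypes.AutoSymbolics()))  # true
use_cons_h(ADTypes.AutoForwardDiff())                    # false -> also request cons_vjp, lag_h
```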

@@ -290,7 +298,8 @@ function MOI.eval_constraint_jacobian(evaluator::MOIOptimizationNLPEvaluator, j,
     return
 end

-function MOI.eval_constraint_jacobian_product(evaluator::MOIOptimizationNLPEvaluator, y, x, w)
+function MOI.eval_constraint_jacobian_product(
+        evaluator::MOIOptimizationNLPEvaluator, y, x, w)
     if evaluator.f.cons_jvp !== nothing
         evaluator.f.cons_jvp(y, x, w)
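The second hunk only re-wraps the eval_constraint_jacobian_product signature across two lines; its body still delegates to the cons_jvp callback, which fills y with the constraint Jacobian-vector product J_c(x) * w. A self-contained illustration of that contract, using a made-up two-constraint function rather than anything from this repository:

```julia
using ForwardDiff, LinearAlgebra

cons(x) = [x[1]^2 + x[2]^2, x[1] * x[2]]   # example constraints c: R^2 -> R^2

# y <- J_c(x) * w: directional derivative of the constraints along w,
# the quantity MOI.eval_constraint_jacobian_product asks the evaluator for.
function cons_jvp!(y, x, w)
    J = ForwardDiff.jacobian(cons, x)
    mul!(y, J, w)
    return y
end

y = zeros(2)
cons_jvp!(y, [1.0, 2.0], [0.1, -0.3])   # == [-1.0, -0.1]
```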
2 changes: 1 addition & 1 deletion lib/OptimizationMultistartOptimization/test/runtests.jl
@@ -1,7 +1,7 @@
 using Pkg;
 Pkg.develop(path = joinpath(@__DIR__, "../../", "OptimizationNLopt"));
 using OptimizationMultistartOptimization, Optimization, ForwardDiff, OptimizationNLopt
-using Test
+using Test, ReverseDiff

 @testset "OptimizationMultistartOptimization.jl" begin
     rosenbrock(x, p) = (p[1] - x[1])^2 + p[2] * (x[2] - x[1]^2)^2
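The only change to this test file is the extra ReverseDiff import, presumably so the multistart tests can also run the rosenbrock problem with a reverse-mode backend. A hedged sketch of that usage, following the pattern visible in the hunk above; the TikTak/LBFGS pairing and the bounds are assumptions, not taken from this diff:

```julia
using OptimizationMultistartOptimization, OptimizationNLopt, Optimization, ReverseDiff

rosenbrock(x, p) = (p[1] - x[1])^2 + p[2] * (x[2] - x[1]^2)^2
x0 = zeros(2)
p = [1.0, 100.0]

# Same objective as the test set above, differentiated with reverse mode.
optf = OptimizationFunction(rosenbrock, Optimization.AutoReverseDiff())
prob = OptimizationProblem(optf, x0, p; lb = [-1.0, -1.0], ub = [1.5, 1.5])
sol = solve(prob, MultistartOptimization.TikTak(100), NLopt.LD_LBFGS())
```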
4 changes: 2 additions & 2 deletions test/diffeqfluxtests.jl
@@ -99,12 +99,12 @@ prob = Optimization.OptimizationProblem(optprob, pp)
 result_neuralode = Optimization.solve(prob,
     OptimizationOptimisers.ADAM(), callback = callback,
     maxiters = 300)
-@test result_neuralode.objective ≈ loss_neuralode(result_neuralode.u)[1] rtol = 1e-2
+@test result_neuralode.objective≈loss_neuralode(result_neuralode.u)[1] rtol=1e-2

 prob2 = remake(prob, u0 = result_neuralode.u)
 result_neuralode2 = Optimization.solve(prob2,
     BFGS(initial_stepnorm = 0.0001),
     callback = callback,
     maxiters = 100)
-@test result_neuralode2.objective ≈ loss_neuralode(result_neuralode2.u)[1] rtol = 1e-2
+@test result_neuralode2.objective≈loss_neuralode(result_neuralode2.u)[1] rtol=1e-2
 @test result_neuralode2.objective < 10
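The two @test edits in this file change only the whitespace around ≈ and the rtol keyword (presumably a formatter-style cleanup); both spellings parse to the same isapprox call with a relative tolerance, as a tiny standalone check shows:

```julia
using Test

a, b = 1.0, 1.005
@test a ≈ b rtol = 1e-2   # spacing before this commit
@test a≈b rtol=1e-2       # spacing after; identical meaning
```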
