From 608ca252572b4152f4c95afbe0bd09a5f63a31a1 Mon Sep 17 00:00:00 2001 From: Andrew Date: Sat, 22 Feb 2025 12:38:16 -0500 Subject: [PATCH 01/13] implement dual of parameter anywhere --- src/NonLinearProgram/NonLinearProgram.jl | 31 +++++++++++++++++++++--- src/NonLinearProgram/nlp_utilities.jl | 16 ++++++++++-- src/moi_wrapper.jl | 20 +++++++++++++++ test/nlp_program.jl | 4 +++ 4 files changed, 66 insertions(+), 5 deletions(-) diff --git a/src/NonLinearProgram/NonLinearProgram.jl b/src/NonLinearProgram/NonLinearProgram.jl index 6b816b4f..2130a7b8 100644 --- a/src/NonLinearProgram/NonLinearProgram.jl +++ b/src/NonLinearProgram/NonLinearProgram.jl @@ -27,10 +27,12 @@ end Base.@kwdef struct ForwCache primal_Δs::Dict{MOI.VariableIndex,Float64} # Sensitivity for primal variables (indexed by VariableIndex) dual_Δs::Vector{Float64} # Sensitivity for constraints and bounds (indexed by ConstraintIndex) + dual_p::Dict{MOI.ConstraintIndex,Float64} # Dual wrt parameters end Base.@kwdef struct ReverseCache Δp::Vector{Float64} # Sensitivity for parameters + dual_p::Dict{MOI.ConstraintIndex,Float64} # Dual wrt parameters end # Define the form of the NLP @@ -513,15 +515,21 @@ function DiffOpt.forward_differentiate!(model::Model; tol = 1e-6) end # Compute Jacobian - Δs = _compute_sensitivity(model; tol = tol) + Δs, df_dp = _compute_sensitivity(model; tol = tol) # Extract primal and dual sensitivities primal_Δs = Δs[1:length(model.cache.primal_vars), :] * Δp # Exclude slacks dual_Δs = Δs[cache.index_duals, :] * Δp # Includes constraints and bounds + # Dual wrt parameters + varorder = + sort(collect(keys(form.var2ci)); by = x -> form.var2ci[x].value) + dual_p = [df_dp[form.var2param[var_idx].value] for var_idx in varorder] + model.forw_grad_cache = ForwCache(; primal_Δs = Dict(model.cache.primal_vars .=> primal_Δs), dual_Δs = dual_Δs, + dual_p = Dict([form.var2ci[var_idx] for var_idx in varorder] .=> dual_p), ) end return nothing @@ -533,7 +541,7 @@ function DiffOpt.reverse_differentiate!(model::Model; tol = 1e-6) form = model.model # Compute Jacobian - Δs = _compute_sensitivity(model; tol = tol) + Δs, df_dp = _compute_sensitivity(model; tol = tol) num_primal = length(cache.primal_vars) # Fetch primal sensitivities Δx = zeros(num_primal) @@ -576,7 +584,12 @@ function DiffOpt.reverse_differentiate!(model::Model; tol = 1e-6) sort(collect(keys(form.var2ci)); by = x -> form.var2ci[x].value) Δp = [Δp[form.var2param[var_idx].value] for var_idx in varorder] - model.back_grad_cache = ReverseCache(; Δp = Δp) + # Dual wrt parameters + dual_p = [df_dp[form.var2param[var_idx].value] for var_idx in varorder] + + model.back_grad_cache = ReverseCache(; Δp = Δp, + dual_p = Dict([form.var2ci[var_idx] for var_idx in varorder] .=> dual_p), + ) end return nothing end @@ -610,4 +623,16 @@ function MOI.get( return MOI.Parameter{T}(model.back_grad_cache.Δp[ci.value]) end +function MOI.get( + model::Model, + ::MOI.ConstraintDual, + ci::MOI.ConstraintIndex{MOI.VariableIndex,MOI.Parameter{T}}, +) where {T} + if !isnothing(model.forw_grad_cache) + return model.forw_grad_cache.dual_p[ci] + else + return model.back_grad_cache.dual_p[ci] + end +end + end # module NonLinearProgram diff --git a/src/NonLinearProgram/nlp_utilities.jl b/src/NonLinearProgram/nlp_utilities.jl index b25e5aff..7fe144b0 100644 --- a/src/NonLinearProgram/nlp_utilities.jl +++ b/src/NonLinearProgram/nlp_utilities.jl @@ -27,6 +27,13 @@ function _fill_off_diagonal(H::SparseMatrixCSC) return ret end +function _compute_gradient(model::Model) + evaluator = 
model.cache.evaluator + grad = zeros(length(model.x)) + MOI.eval_objective_gradient(evaluator, grad, model.x) + return grad +end + """ _compute_optimal_hessian(evaluator::MOI.Nonlinear.Evaluator, rows::Vector{JuMP.ConstraintRef}, x::Vector{JuMP.VariableRef}) @@ -104,7 +111,7 @@ function _create_evaluator(form::Form) backend, MOI.VariableIndex.(1:form.num_variables), ) - MOI.initialize(evaluator, [:Hess, :Jac]) + MOI.initialize(evaluator, [:Hess, :Jac, :Grad]) return evaluator end @@ -496,5 +503,10 @@ function _compute_sensitivity(model::Model; tol = 1e-6) ∂s[num_w+num_cons+1:num_w+num_cons+num_lower, :] *= _sense_multiplier # Dual bounds upper ∂s[num_w+num_cons+num_lower+1:end, :] *= -_sense_multiplier - return ∂s + + # dual wrt parameter + primal_idx = [i.value for i in model.cache.primal_vars] + df_dx = _compute_gradient(model)[primal_idx] + df_dp = df_dx'∂s[1:num_vars, :] + return ∂s, df_dp end diff --git a/src/moi_wrapper.jl b/src/moi_wrapper.jl index 5c7c7be8..55f4971b 100644 --- a/src/moi_wrapper.jl +++ b/src/moi_wrapper.jl @@ -732,6 +732,26 @@ function MOI.get( ) end +function MOI.get( + model::Optimizer, + attr::MOI.ConstraintDual, + ci::MOI.ConstraintIndex{MOI.VariableIndex,MOI.Parameter{T}}, +) where {T} + try + return MOI.get( + _checked_diff(model, attr, :forward_differentiate!), + attr, + model.index_map[ci], + ) + catch + return MOI.get( + _checked_diff(model, attr, :reverse_differentiate!), + attr, + model.index_map[ci], + ) + end +end + function MOI.supports( ::Optimizer, ::ReverseVariablePrimal, diff --git a/test/nlp_program.jl b/test/nlp_program.jl index 86205003..75a77965 100644 --- a/test/nlp_program.jl +++ b/test/nlp_program.jl @@ -161,6 +161,8 @@ function test_analytical_simple(; P = 2) # Number of parameters # Compute derivatives DiffOpt.forward_differentiate!(m) + @test all(isapprox.(dual.(ParameterRef.(p)), dual.(con); atol = 1e-8)) + # Test sensitivities @test_throws ErrorException MOI.get( m.moi_backend.optimizer.model.diff.model, @@ -736,6 +738,8 @@ function test_ReverseConstraintDual() # Compute derivatives DiffOpt.reverse_differentiate!(m) + @test all(isapprox.(dual.(ParameterRef.(p)), dual.(con); atol = 1e-8)) + # Test sensitivities ReverseConstraintSet @test all( isapprox( From c96e2c0619c99e4afa6e2bfe260dc4c4d7851310 Mon Sep 17 00:00:00 2001 From: Andrew Date: Sat, 22 Feb 2025 15:47:42 -0500 Subject: [PATCH 02/13] fix api and more tests --- src/DiffOpt.jl | 1 + src/NonLinearProgram/NonLinearProgram.jl | 19 ++++-- src/jump_moi_overloads.jl | 25 ++++++++ test/nlp_program.jl | 81 +++++++++++++++++++++++- 4 files changed, 118 insertions(+), 8 deletions(-) diff --git a/src/DiffOpt.jl b/src/DiffOpt.jl index 0dce78d9..519eef5b 100644 --- a/src/DiffOpt.jl +++ b/src/DiffOpt.jl @@ -14,6 +14,7 @@ import MathOptInterface as MOI import MathOptSetDistances as MOSD import ParametricOptInterface as POI import SparseArrays +import JuMP: dual include("utils.jl") include("product_of_sets.jl") diff --git a/src/NonLinearProgram/NonLinearProgram.jl b/src/NonLinearProgram/NonLinearProgram.jl index 2130a7b8..dc3906d3 100644 --- a/src/NonLinearProgram/NonLinearProgram.jl +++ b/src/NonLinearProgram/NonLinearProgram.jl @@ -524,12 +524,15 @@ function DiffOpt.forward_differentiate!(model::Model; tol = 1e-6) # Dual wrt parameters varorder = sort(collect(keys(form.var2ci)); by = x -> form.var2ci[x].value) - dual_p = [df_dp[form.var2param[var_idx].value] for var_idx in varorder] + dual_p = + [df_dp[form.var2param[var_idx].value] for var_idx in varorder] model.forw_grad_cache = ForwCache(; 
primal_Δs = Dict(model.cache.primal_vars .=> primal_Δs), dual_Δs = dual_Δs, - dual_p = Dict([form.var2ci[var_idx] for var_idx in varorder] .=> dual_p), + dual_p = Dict( + [form.var2ci[var_idx] for var_idx in varorder] .=> dual_p, + ), ) end return nothing @@ -585,10 +588,14 @@ function DiffOpt.reverse_differentiate!(model::Model; tol = 1e-6) Δp = [Δp[form.var2param[var_idx].value] for var_idx in varorder] # Dual wrt parameters - dual_p = [df_dp[form.var2param[var_idx].value] for var_idx in varorder] - - model.back_grad_cache = ReverseCache(; Δp = Δp, - dual_p = Dict([form.var2ci[var_idx] for var_idx in varorder] .=> dual_p), + dual_p = + [df_dp[form.var2param[var_idx].value] for var_idx in varorder] + + model.back_grad_cache = ReverseCache(; + Δp = Δp, + dual_p = Dict( + [form.var2ci[var_idx] for var_idx in varorder] .=> dual_p, + ), ) end return nothing diff --git a/src/jump_moi_overloads.jl b/src/jump_moi_overloads.jl index f402ac35..1d237292 100644 --- a/src/jump_moi_overloads.jl +++ b/src/jump_moi_overloads.jl @@ -107,6 +107,31 @@ function MOI.get( return _moi_get_result(JuMP.backend(model), attr, JuMP.index(var_ref)) end +function MOI.set( + model::JuMP.Model, + attr::ReverseVariablePrimal, + var_ref::JuMP.VariableRef, + val::Number, +) + JuMP.check_belongs_to_model(var_ref, model) + return MOI.set(JuMP.backend(model), attr, JuMP.index(var_ref), val) +end + +function MOI.set( + model::JuMP.Model, + attr::ReverseConstraintDual, + con_ref::JuMP.ConstraintRef, + val::Number, +) + JuMP.check_belongs_to_model(con_ref, model) + return MOI.set(JuMP.backend(model), attr, JuMP.index(con_ref), val) +end + +function JuMP.dual(var_ref::JuMP.VariableRef; result::Int = 1) + JuMP.is_parameter(var_ref) || error("Variable is not a parameter") + return dual(ParameterRef(var_ref); result = result) +end + function MOI.get( model::JuMP.Model, attr::ReverseConstraintSet, diff --git a/test/nlp_program.jl b/test/nlp_program.jl index 75a77965..579de84e 100644 --- a/test/nlp_program.jl +++ b/test/nlp_program.jl @@ -161,7 +161,9 @@ function test_analytical_simple(; P = 2) # Number of parameters # Compute derivatives DiffOpt.forward_differentiate!(m) - @test all(isapprox.(dual.(ParameterRef.(p)), dual.(con); atol = 1e-8)) + # test dual wrt parameters + @test_throws ErrorException dual.(x) + @test all(isapprox.(dual.(p), dual.(con); atol = 1e-8)) # Test sensitivities @test_throws ErrorException MOI.get( @@ -643,6 +645,79 @@ function test_compute_derivatives_Finite_Diff(; end end +################################################ +#= +# Test Dual wrt Parameters +=# +################################################ + +function test_dual_wrt_parameters() + # Model 1 + model = Model(() -> DiffOpt.diff_optimizer(Ipopt.Optimizer)) + set_silent(model) + + # Parameters + @variable(model, p ∈ MOI.Parameter(1.5)) + @variable(model, p_prox) + + # Variables + @variable(model, x) + + # Constraints + @constraint(model, con, p_prox == p) # dual fishing :) + @constraint(model, x * sin(p_prox) == 1) + @objective(model, Min, sum(x)) + + optimize!(model) + @assert is_solved_and_feasible(model) + + # Set pertubations + MOI.set(model, DiffOpt.ForwardConstraintSet(), ParameterRef(p), Parameter(0.1)) + + # Compute derivatives + DiffOpt.forward_differentiate!(model) + + # Test dual wrt parameters + @test isapprox( + dual(p), + dual(con); + atol = 1e-4, + ) + + # Model 2 + model = Model(() -> DiffOpt.diff_optimizer(Ipopt.Optimizer)) + set_silent(model) + + # Parameters + @variable(model, p ∈ MOI.Parameter(1.5)) + @variable(model, p_prox) 
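+    # p_prox mirrors the parameter p through `con` below, so dual(con) exposes the parameter's dual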
+ + # Variables + @variable(model, x) + + # Constraints + @constraint(model, con, p_prox == p) # dual fishing :) + @constraint(model, x * sin(p_prox) >= 1) + @constraint(model, x + p_prox >= 3) + @objective(model, Min, sum(x.^2)) + + optimize!(model) + @assert is_solved_and_feasible(model) + + # Set pertubations + MOI.set(model, DiffOpt.ForwardConstraintSet(), ParameterRef(p), Parameter(0.1)) + + # Compute derivatives + DiffOpt.forward_differentiate!(model) + + # Test dual wrt parameters + @test isapprox( + dual(p), + dual(con); + atol = 1e-4, + ) +end + ################################################ #= # Test Sensitivity through Reverse Mode @@ -738,7 +813,9 @@ function test_ReverseConstraintDual() # Compute derivatives DiffOpt.reverse_differentiate!(m) - @test all(isapprox.(dual.(ParameterRef.(p)), dual.(con); atol = 1e-8)) + # Test dual wrt parameters + @test_throws ErrorException dual.(x) + @test all(isapprox.(dual.(p), dual.(con); atol = 1e-8)) # Test sensitivities ReverseConstraintSet @test all( From e459641d92b907570dae26e618d98e30fb81a3c9 Mon Sep 17 00:00:00 2001 From: Andrew Date: Sat, 22 Feb 2025 15:47:54 -0500 Subject: [PATCH 03/13] format --- test/nlp_program.jl | 28 +++++++++++++++------------- 1 file changed, 15 insertions(+), 13 deletions(-) diff --git a/test/nlp_program.jl b/test/nlp_program.jl index 579de84e..1044069b 100644 --- a/test/nlp_program.jl +++ b/test/nlp_program.jl @@ -672,17 +672,18 @@ function test_dual_wrt_parameters() @assert is_solved_and_feasible(model) # Set pertubations - MOI.set(model, DiffOpt.ForwardConstraintSet(), ParameterRef(p), Parameter(0.1)) + MOI.set( + model, + DiffOpt.ForwardConstraintSet(), + ParameterRef(p), + Parameter(0.1), + ) # Compute derivatives DiffOpt.forward_differentiate!(model) # Test dual wrt parameters - @test isapprox( - dual(p), - dual(con); - atol = 1e-4, - ) + @test isapprox(dual(p), dual(con); atol = 1e-4) # Model 2 model = Model(() -> DiffOpt.diff_optimizer(Ipopt.Optimizer)) @@ -699,23 +700,24 @@ function test_dual_wrt_parameters() @constraint(model, con, p_prox == p) # dual fishing :) @constraint(model, x * sin(p_prox) >= 1) @constraint(model, x + p_prox >= 3) - @objective(model, Min, sum(x.^2)) + @objective(model, Min, sum(x .^ 2)) optimize!(model) @assert is_solved_and_feasible(model) # Set pertubations - MOI.set(model, DiffOpt.ForwardConstraintSet(), ParameterRef(p), Parameter(0.1)) + MOI.set( + model, + DiffOpt.ForwardConstraintSet(), + ParameterRef(p), + Parameter(0.1), + ) # Compute derivatives DiffOpt.forward_differentiate!(model) # Test dual wrt parameters - @test isapprox( - dual(p), - dual(con); - atol = 1e-4, - ) + @test isapprox(dual(p), dual(con); atol = 1e-4) end ################################################ From f243eb8b692d43ccf08a6cb8bcc3dadcc3b5d206 Mon Sep 17 00:00:00 2001 From: Andrew Date: Sat, 22 Feb 2025 15:58:19 -0500 Subject: [PATCH 04/13] update docs --- docs/src/usage.md | 43 +++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 43 insertions(+) diff --git a/docs/src/usage.md b/docs/src/usage.md index ef6445c7..421cf29e 100644 --- a/docs/src/usage.md +++ b/docs/src/usage.md @@ -117,4 +117,47 @@ DiffOpt.reverse_differentiate!(model) @show MOI.get(model, DiffOpt.ReverseConstraintSet(), ParameterRef(p)) == MOI.Parameter(direction_x * 3 / pc_val) @show abs(MOI.get(model, DiffOpt.ReverseConstraintSet(), ParameterRef(pc)).value - -direction_x * 3 * p_val / pc_val^2) < 1e-5 +``` + +## Calculating dual with respect to parameters (currently only supported for Nonlinear Programs) + +Consider a 
differentiable model with parameters `p` and `pc` as in the previous example:

```julia
using JuMP, DiffOpt, Ipopt

model = Model(() -> DiffOpt.diff_optimizer(Ipopt.Optimizer))
set_silent(model)

p_val = 4.0
pc_val = 2.0
@variable(model, x)
@variable(model, p in Parameter(p_val))
@variable(model, pc in Parameter(pc_val))
@constraint(model, cons, pc * x >= 3 * p)
@objective(model, Min, x^4)
optimize!(model)

direction_p = 3.0
MOI.set(model, DiffOpt.ForwardConstraintSet(), ParameterRef(p), Parameter(direction_p))
DiffOpt.forward_differentiate!(model)

```

Using Lagrangian duality we can already calculate the dual variables with respect to the constraints.
So, if the parameter only appears in the RHS of the constraints, calculating the dual with respect to the parameter is equivalent to calculating the dual with respect to the constraints in which the parameter appears (e.g., `cons` in this case for parameter `p`).

On the other hand, if the parameter appears in the LHS of the constraints, we can calculate the dual with respect to the parameter using the sensitivities of the variables with respect to the parameter, \( \frac{\partial x}{\partial p} \), and the gradient of the objective with respect to the variables \( \frac{\partial f}{\partial x} \).

```math
\frac{\partial f}{\partial p} = \frac{\partial f}{\partial x} \frac{\partial x}{\partial p}
```

Both of these cases are supported in `DiffOpt`:

```julia

dual(p) # works

dual(pc) # also works
```
\ No newline at end of file
From 0d2ec33332efeeecf33fc14b351577862a26f3a6 Mon Sep 17 00:00:00 2001
From: Andrew
Date: Thu, 27 Feb 2025 10:48:56 -0500
Subject: [PATCH 05/13] update for appropriate API

---
 docs/src/usage.md                        | 16 +---
 src/NonLinearProgram/NonLinearProgram.jl | 98 ++++++++++--------------
 src/diff_opt.jl                          | 40 ++++++++++
 src/jump_moi_overloads.jl                | 15 ++++
 src/moi_wrapper.jl                       | 43 +++++++----
 test/nlp_program.jl                      | 90 ++++++++++++++++++----
 6 files changed, 202 insertions(+), 100 deletions(-)

diff --git a/docs/src/usage.md b/docs/src/usage.md
index 421cf29e..2f938611 100644
--- a/docs/src/usage.md
+++ b/docs/src/usage.md
@@ -119,7 +119,7 @@ DiffOpt.reverse_differentiate!(model)
    -direction_x * 3 * p_val / pc_val^2) < 1e-5
 ```
 
-## Calculating dual with respect to parameters (currently only supported for Nonlinear Programs)
+## Calculating objective sensitivity with respect to parameters (currently only supported for Nonlinear Programs)
 
 Consider a differentiable model with parameters `p` and `pc` as in the previous example:
 
@@ -144,20 +144,10 @@ DiffOpt.forward_differentiate!(model)
 
 ```
 
-Using Lagrangian duality we can already calculate the dual variables with respect to the constraints.
-So, if the parameter only appears in the RHS of the constraints, calculating the dual with respect to the parameter is equivalent to calculating the dual with respect to the constraints in which the parameter appears (e.g., `cons` in this case for parameter `p`).
+Using Lagrangian duality we can already calculate the objective sensitivity with respect to parameters that appear in the RHS of the constraints (e.g., `cons` in this case for parameter `p`).
 
-On the other hand, if the parameter appears in the LHS of the constraints, we can calculate the dual with respect to the parameter using the sensitivities of the variables with respect to the parameter, \( \frac{\partial x}{\partial p} \), and the gradient of the objective with respect to the variables \( \frac{\partial f}{\partial x} \).
+On the other hand, if the parameter appears in the LHS of the constraints, we can calculate the objective sensitivity with respect to the parameter using: the sensitivities of the variables with respect to the parameter, \( \frac{\partial x}{\partial p} \), and the gradient of the objective with respect to the variables \( \frac{\partial f}{\partial x} \): ```math \frac{\partial f}{\partial p} = \frac{\partial f}{\partial x} \frac{\partial x}{\partial p} ``` - -Both of these cases are supported in `DiffOpt`: - -```julia - -dual(p) # works - -dual(pc) # also works -``` \ No newline at end of file diff --git a/src/NonLinearProgram/NonLinearProgram.jl b/src/NonLinearProgram/NonLinearProgram.jl index dc3906d3..6499a710 100644 --- a/src/NonLinearProgram/NonLinearProgram.jl +++ b/src/NonLinearProgram/NonLinearProgram.jl @@ -27,12 +27,11 @@ end Base.@kwdef struct ForwCache primal_Δs::Dict{MOI.VariableIndex,Float64} # Sensitivity for primal variables (indexed by VariableIndex) dual_Δs::Vector{Float64} # Sensitivity for constraints and bounds (indexed by ConstraintIndex) - dual_p::Dict{MOI.ConstraintIndex,Float64} # Dual wrt parameters + dual_p::Float64 # Objective Sensitivity wrt parameters end Base.@kwdef struct ReverseCache Δp::Vector{Float64} # Sensitivity for parameters - dual_p::Dict{MOI.ConstraintIndex,Float64} # Dual wrt parameters end # Define the form of the NLP @@ -521,18 +520,13 @@ function DiffOpt.forward_differentiate!(model::Model; tol = 1e-6) primal_Δs = Δs[1:length(model.cache.primal_vars), :] * Δp # Exclude slacks dual_Δs = Δs[cache.index_duals, :] * Δp # Includes constraints and bounds - # Dual wrt parameters - varorder = - sort(collect(keys(form.var2ci)); by = x -> form.var2ci[x].value) - dual_p = - [df_dp[form.var2param[var_idx].value] for var_idx in varorder] + # obj sensitivity wrt parameters + dual_p = df_dp * Δp model.forw_grad_cache = ForwCache(; primal_Δs = Dict(model.cache.primal_vars .=> primal_Δs), dual_Δs = dual_Δs, - dual_p = Dict( - [form.var2ci[var_idx] for var_idx in varorder] .=> dual_p, - ), + dual_p = dual_p, ) end return nothing @@ -545,57 +539,54 @@ function DiffOpt.reverse_differentiate!(model::Model; tol = 1e-6) # Compute Jacobian Δs, df_dp = _compute_sensitivity(model; tol = tol) - num_primal = length(cache.primal_vars) - # Fetch primal sensitivities - Δx = zeros(num_primal) - for (i, var_idx) in enumerate(cache.primal_vars) - if haskey(model.input_cache.dx, var_idx) - Δx[i] = model.input_cache.dx[var_idx] + Δp = if !iszero(model.input_cache.dobj) + model.input_cache.dobj * df_dp + else + num_primal = length(cache.primal_vars) + # Fetch primal sensitivities + Δx = zeros(num_primal) + for (i, var_idx) in enumerate(cache.primal_vars) + if haskey(model.input_cache.dx, var_idx) + Δx[i] = model.input_cache.dx[var_idx] + end end - end - # Fetch dual sensitivities - num_constraints = length(cache.cons) - num_up = length(cache.has_up) - num_low = length(cache.has_low) - Δdual = zeros(num_constraints + num_up + num_low) - for (i, ci) in enumerate(cache.cons) - idx = form.nlp_index_2_constraint[ci] - if haskey(model.input_cache.dy, idx) - Δdual[i] = model.input_cache.dy[idx] + # Fetch dual sensitivities + num_constraints = length(cache.cons) + num_up = length(cache.has_up) + num_low = length(cache.has_low) + Δdual = zeros(num_constraints + num_up + num_low) + for (i, ci) in enumerate(cache.cons) + idx = form.nlp_index_2_constraint[ci] + if haskey(model.input_cache.dy, idx) + Δdual[i] = model.input_cache.dy[idx] + end end - end - for (i, var_idx) in 
enumerate(cache.primal_vars[cache.has_low]) - idx = form.constraint_lower_bounds[var_idx.value].value - if haskey(model.input_cache.dy, idx) - Δdual[num_constraints+i] = model.input_cache.dy[idx] + for (i, var_idx) in enumerate(cache.primal_vars[cache.has_low]) + idx = form.constraint_lower_bounds[var_idx.value].value + if haskey(model.input_cache.dy, idx) + Δdual[num_constraints+i] = model.input_cache.dy[idx] + end end - end - for (i, var_idx) in enumerate(cache.primal_vars[cache.has_up]) - idx = form.constraint_upper_bounds[var_idx.value].value - if haskey(model.input_cache.dy, idx) - Δdual[num_constraints+num_low+i] = model.input_cache.dy[idx] + for (i, var_idx) in enumerate(cache.primal_vars[cache.has_up]) + idx = form.constraint_upper_bounds[var_idx.value].value + if haskey(model.input_cache.dy, idx) + Δdual[num_constraints+num_low+i] = model.input_cache.dy[idx] + end end + # Extract Parameter sensitivities + Δw = zeros(size(Δs, 1)) + Δw[1:num_primal] = Δx + Δw[cache.index_duals] = Δdual + Δp = Δs' * Δw end - # Extract Parameter sensitivities - Δw = zeros(size(Δs, 1)) - Δw[1:num_primal] = Δx - Δw[cache.index_duals] = Δdual - Δp = Δs' * Δw # Order by ConstraintIndex varorder = sort(collect(keys(form.var2ci)); by = x -> form.var2ci[x].value) Δp = [Δp[form.var2param[var_idx].value] for var_idx in varorder] - # Dual wrt parameters - dual_p = - [df_dp[form.var2param[var_idx].value] for var_idx in varorder] - model.back_grad_cache = ReverseCache(; Δp = Δp, - dual_p = Dict( - [form.var2ci[var_idx] for var_idx in varorder] .=> dual_p, - ), ) end return nothing @@ -632,14 +623,9 @@ end function MOI.get( model::Model, - ::MOI.ConstraintDual, - ci::MOI.ConstraintIndex{MOI.VariableIndex,MOI.Parameter{T}}, -) where {T} - if !isnothing(model.forw_grad_cache) - return model.forw_grad_cache.dual_p[ci] - else - return model.back_grad_cache.dual_p[ci] - end + ::DiffOpt.ForwardObjectiveSensitivity, +) + return model.forw_grad_cache.dual_p end end # module NonLinearProgram diff --git a/src/diff_opt.jl b/src/diff_opt.jl index b850e72a..ae52ef66 100644 --- a/src/diff_opt.jl +++ b/src/diff_opt.jl @@ -16,6 +16,7 @@ Base.@kwdef mutable struct InputCache dp::Dict{MOI.ConstraintIndex,Float64} = Dict{MOI.ConstraintIndex,Float64}() # Specifically for NonLinearProgram dy::Dict{MOI.ConstraintIndex,Float64} = Dict{MOI.ConstraintIndex,Float64}() # Dual sensitivity currently only works for NonLinearProgram + dobj::Float64 = 0.0 # Objective input sensitivity for reverse differentiation # ds # dy #= [d\lambda, d\nu] for QP # FIXME Would it be possible to have a DoubleDict where the value depends @@ -35,6 +36,7 @@ function Base.empty!(cache::InputCache) empty!(cache.dx) empty!(cache.dp) empty!(cache.dy) + cache.dobj = 0.0 empty!(cache.scalar_constraints) empty!(cache.vector_constraints) cache.objective = nothing @@ -184,6 +186,20 @@ MOI.set(model, DiffOpt.ReverseConstraintDual(), ci, value) """ struct ReverseConstraintDual <: MOI.AbstractConstraintAttribute end +""" + ReverseObjectiveSensitivity <: MOI.AbstractModelAttribute + +A `MOI.AbstractModelAttribute` to set input data for reverse differentiation. 
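+It is the reverse-mode counterpart of `ForwardObjectiveSensitivity`: the value set here is
+propagated back to the parameters by `DiffOpt.reverse_differentiate!`.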
+ +For instance, to set the sensitivity of the parameter perturbation with respect to the +objective function perturbation, do the following: + +```julia +MOI.set(model, DiffOpt.ReverseObjectiveSensitivity(), value) +``` +""" +struct ReverseObjectiveSensitivity <: MOI.AbstractModelAttribute end + """ ForwardConstraintDual <: MOI.AbstractConstraintAttribute @@ -199,6 +215,21 @@ struct ForwardConstraintDual <: MOI.AbstractConstraintAttribute end MOI.is_set_by_optimize(::ForwardConstraintDual) = true +""" + ForwardObjectiveSensitivity <: MOI.AbstractModelAttribute + +A `MOI.AbstractModelAttribute` to get output objective sensitivity data from forward differentiation. + +For instance, to get the sensitivity of the objective function with respect to the parameter perturbation, do the following: + +```julia +MOI.get(model, DiffOpt.ForwardObjectiveSensitivity()) +``` +""" +struct ForwardObjectiveSensitivity <: MOI.AbstractModelAttribute end + +MOI.is_set_by_optimize(::ForwardObjectiveSensitivity) = true + """ ReverseObjectiveFunction <: MOI.AbstractModelAttribute @@ -403,6 +434,15 @@ function MOI.set( return end +function MOI.set( + model::AbstractModel, + ::ReverseObjectiveSensitivity, + val, +) + model.input_cache.dobj = val + return +end + function MOI.set( model::AbstractModel, ::ForwardConstraintFunction, diff --git a/src/jump_moi_overloads.jl b/src/jump_moi_overloads.jl index 1d237292..6cfc0d50 100644 --- a/src/jump_moi_overloads.jl +++ b/src/jump_moi_overloads.jl @@ -66,6 +66,13 @@ function MOI.get( return JuMP.jump_function(model, moi_func) end +function MOI.get( + model::JuMP.Model, + attr::ForwardObjectiveSensitivity, +) + return MOI.get(JuMP.backend(model), attr) +end + function MOI.get(model::JuMP.Model, attr::ReverseObjectiveFunction) func = MOI.get(JuMP.backend(model), attr) return JuMP.jump_function(model, func) @@ -127,6 +134,14 @@ function MOI.set( return MOI.set(JuMP.backend(model), attr, JuMP.index(con_ref), val) end +function MOI.set( + model::JuMP.Model, + attr::ReverseObjectiveSensitivity, + val::Number, +) + return MOI.set(JuMP.backend(model), attr, val) +end + function JuMP.dual(var_ref::JuMP.VariableRef; result::Int = 1) JuMP.is_parameter(var_ref) || error("Variable is not a parameter") return dual(ParameterRef(var_ref); result = result) diff --git a/src/moi_wrapper.jl b/src/moi_wrapper.jl index 55f4971b..8a61c1d7 100644 --- a/src/moi_wrapper.jl +++ b/src/moi_wrapper.jl @@ -520,6 +520,11 @@ function reverse_differentiate!(model::Optimizer) "Trying to compute the reverse differentiation on a model with termination status $(st)", ) end + if !iszero(model.input_cache.dobj) && (!isempty(model.input_cache.dx) || !isempty(model.input_cache.dy)) + error( + "Cannot compute the reverse differentiation with both solution sensitivities and objective sensitivities.", + ) + end diff = _diff(model) MOI.set( diff, @@ -532,6 +537,11 @@ function reverse_differentiate!(model::Optimizer) for (vi, value) in model.input_cache.dy MOI.set(diff, ReverseConstraintDual(), model.index_map[vi], value) end + MOI.set( + diff, + ReverseObjectiveSensitivity(), + model.input_cache.dobj, + ) return reverse_differentiate!(diff) end @@ -734,22 +744,12 @@ end function MOI.get( model::Optimizer, - attr::MOI.ConstraintDual, - ci::MOI.ConstraintIndex{MOI.VariableIndex,MOI.Parameter{T}}, -) where {T} - try - return MOI.get( - _checked_diff(model, attr, :forward_differentiate!), - attr, - model.index_map[ci], - ) - catch - return MOI.get( - _checked_diff(model, attr, :reverse_differentiate!), - attr, - 
model.index_map[ci], - ) - end + attr::ForwardObjectiveSensitivity, +) + return MOI.get( + _checked_diff(model, attr, :forward_differentiate!), + attr, + ) end function MOI.supports( @@ -786,6 +786,8 @@ function MOI.supports( return true end +MOI.supports(::Optimizer, ::ReverseObjectiveSensitivity) = true + function MOI.set( model::Optimizer, ::ReverseConstraintDual, @@ -796,6 +798,15 @@ function MOI.set( return end +function MOI.set( + model::Optimizer, + ::ReverseObjectiveSensitivity, + val, +) + model.input_cache.dobj = val + return +end + function MOI.get( model::Optimizer, ::ReverseConstraintDual, diff --git a/test/nlp_program.jl b/test/nlp_program.jl index 1044069b..333665b0 100644 --- a/test/nlp_program.jl +++ b/test/nlp_program.jl @@ -161,9 +161,9 @@ function test_analytical_simple(; P = 2) # Number of parameters # Compute derivatives DiffOpt.forward_differentiate!(m) - # test dual wrt parameters - @test_throws ErrorException dual.(x) - @test all(isapprox.(dual.(p), dual.(con); atol = 1e-8)) + # test Objective Sensitivity wrt parameters + df_dp = MOI.get(m, DiffOpt.ForwardObjectiveSensitivity()) + @test isapprox(df_dp, dot(dual.(con), Δp); atol = 1e-4) # Test sensitivities @test_throws ErrorException MOI.get( @@ -647,11 +647,11 @@ end ################################################ #= -# Test Dual wrt Parameters +# Test Objective Sensitivity wrt Parameters =# ################################################ -function test_dual_wrt_parameters() +function test_ObjectiveSensitivity() # Model 1 model = Model(() -> DiffOpt.diff_optimizer(Ipopt.Optimizer)) set_silent(model) @@ -672,18 +672,58 @@ function test_dual_wrt_parameters() @assert is_solved_and_feasible(model) # Set pertubations + Δp = 0.1 MOI.set( model, DiffOpt.ForwardConstraintSet(), ParameterRef(p), - Parameter(0.1), + Parameter(Δp), ) # Compute derivatives DiffOpt.forward_differentiate!(model) - # Test dual wrt parameters - @test isapprox(dual(p), dual(con); atol = 1e-4) + # Test Objective Sensitivity wrt parameters + df_dp = MOI.get(model, DiffOpt.ForwardObjectiveSensitivity()) + @test isapprox(df_dp, dual(con) * Δp; atol = 1e-4) + + # Clean up + DiffOpt.empty_input_sensitivities!(model) + + # Set Too Many Sensitivities + Δf = 0.5 + MOI.set( + model, + DiffOpt.ReverseObjectiveSensitivity(), + Δf, + ) + + MOI.set(model, DiffOpt.ReverseVariablePrimal(), x, 1.0) + + # Compute derivatives + @test_throws ErrorException DiffOpt.reverse_differentiate!(model) + + DiffOpt.empty_input_sensitivities!(model) + + # Set Reverse Objective Sensitivity + Δf = 0.5 + MOI.set( + model, + DiffOpt.ReverseObjectiveSensitivity(), + Δf, + ) + + # Compute derivatives + DiffOpt.reverse_differentiate!(model) + + # Test Objective Sensitivity wrt parameters + dp = MOI.get( + model, + DiffOpt.ReverseConstraintSet(), + ParameterRef(p), + ).value + + @test isapprox(dp, dual(con) * Δf; atol = 1e-4) # Model 2 model = Model(() -> DiffOpt.diff_optimizer(Ipopt.Optimizer)) @@ -710,14 +750,38 @@ function test_dual_wrt_parameters() model, DiffOpt.ForwardConstraintSet(), ParameterRef(p), - Parameter(0.1), + Parameter(Δp), ) # Compute derivatives DiffOpt.forward_differentiate!(model) - # Test dual wrt parameters - @test isapprox(dual(p), dual(con); atol = 1e-4) + # Test Objective Sensitivity wrt parameters + df_dp = MOI.get(model, DiffOpt.ForwardObjectiveSensitivity()) + @test isapprox(df_dp, dual(con) * Δp; atol = 1e-4) + + # Clean up + DiffOpt.empty_input_sensitivities!(model) + + # Set Reverse Objective Sensitivity + Δf = 0.5 + MOI.set( + model, + 
DiffOpt.ReverseObjectiveSensitivity(), + Δf, + ) + + # Compute derivatives + DiffOpt.reverse_differentiate!(model) + + # Test Objective Sensitivity wrt parameters + dp = MOI.get( + model, + DiffOpt.ReverseConstraintSet(), + ParameterRef(p), + ).value + + @test isapprox(dp, dual(con) * Δf; atol = 1e-4) end ################################################ @@ -815,10 +879,6 @@ function test_ReverseConstraintDual() # Compute derivatives DiffOpt.reverse_differentiate!(m) - # Test dual wrt parameters - @test_throws ErrorException dual.(x) - @test all(isapprox.(dual.(p), dual.(con); atol = 1e-8)) - # Test sensitivities ReverseConstraintSet @test all( isapprox( From 6d2efcf834e7d2f8c6d291298113afa2d8a66442 Mon Sep 17 00:00:00 2001 From: Andrew Date: Thu, 27 Feb 2025 10:58:53 -0500 Subject: [PATCH 06/13] update documemtation --- docs/src/usage.md | 33 +++++++++++++++++++++++++++++++++ 1 file changed, 33 insertions(+) diff --git a/docs/src/usage.md b/docs/src/usage.md index 2f938611..660a4444 100644 --- a/docs/src/usage.md +++ b/docs/src/usage.md @@ -151,3 +151,36 @@ On the other hand, if the parameter appears in the LHS of the constraints, we ca ```math \frac{\partial f}{\partial p} = \frac{\partial f}{\partial x} \frac{\partial x}{\partial p} ``` + +In order to calculate the objective perturbation with respect to the parameter perturbation vector, we can use the following code: + +```julia +# Always a good practice to clear previously set sensitivities +DiffOpt.empty_input_sensitivities!(model) + +MOI.set(model, DiffOpt.ForwardConstraintSet(), ParameterRef(p), Parameter(3.0)) +MOI.set(model, DiffOpt.ForwardConstraintSet(), ParameterRef(p_c), Parameter(3.0)) +DiffOpt.forward_differentiate!(model) + +MOI.get(model, DiffOpt.ForwardObjectiveSensitivity()) +``` + +In the backward mode, we can calculate the parameter perturbation with respect to the objective perturbation: + +```julia +# Always a good practice to clear previously set sensitivities +DiffOpt.empty_input_sensitivities!(model) + +MOI.set( + model, + DiffOpt.ReverseObjectiveSensitivity(), + 0.1, +) + +DiffOpt.reverse_differentiate!(model) + +MOI.get(model, DiffOpt.ReverseConstraintSet(), ParameterRef(p)) +``` + +It is important to note that the (reverse) parameter perturbation given an objective perturbation is somewhat equivalent to the perturbation with respect to solution (since one can be calculated from the other). Therefore, one cannot set both the objective sensitivity (`DiffOpt.ReverseObjectiveSensitivity`) and the solution sensitivity (e.g. `DiffOpt.ReverseVariablePrimal`) at the same time. 
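
As a quick sanity check, the two modes can be run back to back; for a unit perturbation each should recover the same scalar. A minimal sketch, reusing the model and parameter `p` from the example above:

```julia
# Forward mode: objective sensitivity for a unit perturbation of p
DiffOpt.empty_input_sensitivities!(model)
MOI.set(model, DiffOpt.ForwardConstraintSet(), ParameterRef(p), Parameter(1.0))
DiffOpt.forward_differentiate!(model)
fwd = MOI.get(model, DiffOpt.ForwardObjectiveSensitivity())

# Reverse mode: parameter sensitivity for a unit perturbation of the objective
DiffOpt.empty_input_sensitivities!(model)
MOI.set(model, DiffOpt.ReverseObjectiveSensitivity(), 1.0)
DiffOpt.reverse_differentiate!(model)
rev = MOI.get(model, DiffOpt.ReverseConstraintSet(), ParameterRef(p)).value

isapprox(fwd, rev; atol = 1e-6)  # both should equal df/dp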
+``` \ No newline at end of file From f366f2d90ccb692ee987835b731fc438fc58179d Mon Sep 17 00:00:00 2001 From: Andrew Date: Thu, 27 Feb 2025 11:01:11 -0500 Subject: [PATCH 07/13] format --- src/NonLinearProgram/NonLinearProgram.jl | 9 ++----- src/diff_opt.jl | 6 +---- src/jump_moi_overloads.jl | 5 +--- src/moi_wrapper.jl | 25 +++++--------------- test/nlp_program.jl | 30 ++++-------------------- 5 files changed, 15 insertions(+), 60 deletions(-) diff --git a/src/NonLinearProgram/NonLinearProgram.jl b/src/NonLinearProgram/NonLinearProgram.jl index 6499a710..d01f3196 100644 --- a/src/NonLinearProgram/NonLinearProgram.jl +++ b/src/NonLinearProgram/NonLinearProgram.jl @@ -585,9 +585,7 @@ function DiffOpt.reverse_differentiate!(model::Model; tol = 1e-6) sort(collect(keys(form.var2ci)); by = x -> form.var2ci[x].value) Δp = [Δp[form.var2param[var_idx].value] for var_idx in varorder] - model.back_grad_cache = ReverseCache(; - Δp = Δp, - ) + model.back_grad_cache = ReverseCache(; Δp = Δp) end return nothing end @@ -621,10 +619,7 @@ function MOI.get( return MOI.Parameter{T}(model.back_grad_cache.Δp[ci.value]) end -function MOI.get( - model::Model, - ::DiffOpt.ForwardObjectiveSensitivity, -) +function MOI.get(model::Model, ::DiffOpt.ForwardObjectiveSensitivity) return model.forw_grad_cache.dual_p end diff --git a/src/diff_opt.jl b/src/diff_opt.jl index ae52ef66..8940ba93 100644 --- a/src/diff_opt.jl +++ b/src/diff_opt.jl @@ -434,11 +434,7 @@ function MOI.set( return end -function MOI.set( - model::AbstractModel, - ::ReverseObjectiveSensitivity, - val, -) +function MOI.set(model::AbstractModel, ::ReverseObjectiveSensitivity, val) model.input_cache.dobj = val return end diff --git a/src/jump_moi_overloads.jl b/src/jump_moi_overloads.jl index 6cfc0d50..bf562f1c 100644 --- a/src/jump_moi_overloads.jl +++ b/src/jump_moi_overloads.jl @@ -66,10 +66,7 @@ function MOI.get( return JuMP.jump_function(model, moi_func) end -function MOI.get( - model::JuMP.Model, - attr::ForwardObjectiveSensitivity, -) +function MOI.get(model::JuMP.Model, attr::ForwardObjectiveSensitivity) return MOI.get(JuMP.backend(model), attr) end diff --git a/src/moi_wrapper.jl b/src/moi_wrapper.jl index 8a61c1d7..bafc2e65 100644 --- a/src/moi_wrapper.jl +++ b/src/moi_wrapper.jl @@ -520,7 +520,8 @@ function reverse_differentiate!(model::Optimizer) "Trying to compute the reverse differentiation on a model with termination status $(st)", ) end - if !iszero(model.input_cache.dobj) && (!isempty(model.input_cache.dx) || !isempty(model.input_cache.dy)) + if !iszero(model.input_cache.dobj) && + (!isempty(model.input_cache.dx) || !isempty(model.input_cache.dy)) error( "Cannot compute the reverse differentiation with both solution sensitivities and objective sensitivities.", ) @@ -537,11 +538,7 @@ function reverse_differentiate!(model::Optimizer) for (vi, value) in model.input_cache.dy MOI.set(diff, ReverseConstraintDual(), model.index_map[vi], value) end - MOI.set( - diff, - ReverseObjectiveSensitivity(), - model.input_cache.dobj, - ) + MOI.set(diff, ReverseObjectiveSensitivity(), model.input_cache.dobj) return reverse_differentiate!(diff) end @@ -742,14 +739,8 @@ function MOI.get( ) end -function MOI.get( - model::Optimizer, - attr::ForwardObjectiveSensitivity, -) - return MOI.get( - _checked_diff(model, attr, :forward_differentiate!), - attr, - ) +function MOI.get(model::Optimizer, attr::ForwardObjectiveSensitivity) + return MOI.get(_checked_diff(model, attr, :forward_differentiate!), attr) end function MOI.supports( @@ -798,11 +789,7 @@ function 
MOI.set( return end -function MOI.set( - model::Optimizer, - ::ReverseObjectiveSensitivity, - val, -) +function MOI.set(model::Optimizer, ::ReverseObjectiveSensitivity, val) model.input_cache.dobj = val return end diff --git a/test/nlp_program.jl b/test/nlp_program.jl index 333665b0..f0ba3d0d 100644 --- a/test/nlp_program.jl +++ b/test/nlp_program.jl @@ -692,11 +692,7 @@ function test_ObjectiveSensitivity() # Set Too Many Sensitivities Δf = 0.5 - MOI.set( - model, - DiffOpt.ReverseObjectiveSensitivity(), - Δf, - ) + MOI.set(model, DiffOpt.ReverseObjectiveSensitivity(), Δf) MOI.set(model, DiffOpt.ReverseVariablePrimal(), x, 1.0) @@ -707,21 +703,13 @@ function test_ObjectiveSensitivity() # Set Reverse Objective Sensitivity Δf = 0.5 - MOI.set( - model, - DiffOpt.ReverseObjectiveSensitivity(), - Δf, - ) + MOI.set(model, DiffOpt.ReverseObjectiveSensitivity(), Δf) # Compute derivatives DiffOpt.reverse_differentiate!(model) # Test Objective Sensitivity wrt parameters - dp = MOI.get( - model, - DiffOpt.ReverseConstraintSet(), - ParameterRef(p), - ).value + dp = MOI.get(model, DiffOpt.ReverseConstraintSet(), ParameterRef(p)).value @test isapprox(dp, dual(con) * Δf; atol = 1e-4) @@ -765,21 +753,13 @@ function test_ObjectiveSensitivity() # Set Reverse Objective Sensitivity Δf = 0.5 - MOI.set( - model, - DiffOpt.ReverseObjectiveSensitivity(), - Δf, - ) + MOI.set(model, DiffOpt.ReverseObjectiveSensitivity(), Δf) # Compute derivatives DiffOpt.reverse_differentiate!(model) # Test Objective Sensitivity wrt parameters - dp = MOI.get( - model, - DiffOpt.ReverseConstraintSet(), - ParameterRef(p), - ).value + dp = MOI.get(model, DiffOpt.ReverseConstraintSet(), ParameterRef(p)).value @test isapprox(dp, dual(con) * Δf; atol = 1e-4) end From 1e2ac7c0b28e0fa6e35f7c1c82c21945f883e092 Mon Sep 17 00:00:00 2001 From: Andrew Date: Thu, 27 Feb 2025 11:18:46 -0500 Subject: [PATCH 08/13] rm legacy function --- src/jump_moi_overloads.jl | 5 ----- 1 file changed, 5 deletions(-) diff --git a/src/jump_moi_overloads.jl b/src/jump_moi_overloads.jl index bf562f1c..3826a02e 100644 --- a/src/jump_moi_overloads.jl +++ b/src/jump_moi_overloads.jl @@ -139,11 +139,6 @@ function MOI.set( return MOI.set(JuMP.backend(model), attr, val) end -function JuMP.dual(var_ref::JuMP.VariableRef; result::Int = 1) - JuMP.is_parameter(var_ref) || error("Variable is not a parameter") - return dual(ParameterRef(var_ref); result = result) -end - function MOI.get( model::JuMP.Model, attr::ReverseConstraintSet, From d4abb623e1c9db9133458f5f894aefe570b334c1 Mon Sep 17 00:00:00 2001 From: Andrew Date: Thu, 27 Feb 2025 13:08:50 -0500 Subject: [PATCH 09/13] rm unecessary functions --- src/diff_opt.jl | 4 ---- 1 file changed, 4 deletions(-) diff --git a/src/diff_opt.jl b/src/diff_opt.jl index 8940ba93..15616b53 100644 --- a/src/diff_opt.jl +++ b/src/diff_opt.jl @@ -213,8 +213,6 @@ MOI.get(model, DiffOpt.ForwardConstraintDual(), ci) """ struct ForwardConstraintDual <: MOI.AbstractConstraintAttribute end -MOI.is_set_by_optimize(::ForwardConstraintDual) = true - """ ForwardObjectiveSensitivity <: MOI.AbstractModelAttribute @@ -228,8 +226,6 @@ MOI.get(model, DiffOpt.ForwardObjectiveSensitivity()) """ struct ForwardObjectiveSensitivity <: MOI.AbstractModelAttribute end -MOI.is_set_by_optimize(::ForwardObjectiveSensitivity) = true - """ ReverseObjectiveFunction <: MOI.AbstractModelAttribute From d0bd2c69f1b79fda34b406faba7a4fe22e5a417c Mon Sep 17 00:00:00 2001 From: Andrew Date: Thu, 27 Feb 2025 13:45:35 -0500 Subject: [PATCH 10/13] read necessary functions --- 
src/diff_opt.jl | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/src/diff_opt.jl b/src/diff_opt.jl index 15616b53..8940ba93 100644 --- a/src/diff_opt.jl +++ b/src/diff_opt.jl @@ -213,6 +213,8 @@ MOI.get(model, DiffOpt.ForwardConstraintDual(), ci) """ struct ForwardConstraintDual <: MOI.AbstractConstraintAttribute end +MOI.is_set_by_optimize(::ForwardConstraintDual) = true + """ ForwardObjectiveSensitivity <: MOI.AbstractModelAttribute @@ -226,6 +228,8 @@ MOI.get(model, DiffOpt.ForwardObjectiveSensitivity()) """ struct ForwardObjectiveSensitivity <: MOI.AbstractModelAttribute end +MOI.is_set_by_optimize(::ForwardObjectiveSensitivity) = true + """ ReverseObjectiveFunction <: MOI.AbstractModelAttribute From a1085993fa1033c7339441a402b137a41add5e4b Mon Sep 17 00:00:00 2001 From: Andrew Rosemberg Date: Fri, 28 Feb 2025 15:37:48 -0500 Subject: [PATCH 11/13] Update DiffOpt.jl --- src/DiffOpt.jl | 1 - 1 file changed, 1 deletion(-) diff --git a/src/DiffOpt.jl b/src/DiffOpt.jl index 519eef5b..0dce78d9 100644 --- a/src/DiffOpt.jl +++ b/src/DiffOpt.jl @@ -14,7 +14,6 @@ import MathOptInterface as MOI import MathOptSetDistances as MOSD import ParametricOptInterface as POI import SparseArrays -import JuMP: dual include("utils.jl") include("product_of_sets.jl") From ad3d72c3732871af5676aa0afd66c425faabdde8 Mon Sep 17 00:00:00 2001 From: Andrew Date: Thu, 6 Mar 2025 10:47:26 -0500 Subject: [PATCH 12/13] fix bug indexing --- src/NonLinearProgram/NonLinearProgram.jl | 14 ++++++-------- 1 file changed, 6 insertions(+), 8 deletions(-) diff --git a/src/NonLinearProgram/NonLinearProgram.jl b/src/NonLinearProgram/NonLinearProgram.jl index d01f3196..2952f25f 100644 --- a/src/NonLinearProgram/NonLinearProgram.jl +++ b/src/NonLinearProgram/NonLinearProgram.jl @@ -31,7 +31,7 @@ Base.@kwdef struct ForwCache end Base.@kwdef struct ReverseCache - Δp::Vector{Float64} # Sensitivity for parameters + Δp::Dict{MOI.ConstraintIndex,Float64} # Sensitivity for parameters end # Define the form of the NLP @@ -580,12 +580,10 @@ function DiffOpt.reverse_differentiate!(model::Model; tol = 1e-6) Δp = Δs' * Δw end - # Order by ConstraintIndex - varorder = - sort(collect(keys(form.var2ci)); by = x -> form.var2ci[x].value) - Δp = [Δp[form.var2param[var_idx].value] for var_idx in varorder] - - model.back_grad_cache = ReverseCache(; Δp = Δp) + Δp_dict = Dict{MOI.ConstraintIndex,Float64}( + form.var2ci[var_idx] => Δp[form.var2param[var_idx].value] for var_idx in keys(form.var2ci) + ) + model.back_grad_cache = ReverseCache(; Δp = Δp_dict) end return nothing end @@ -616,7 +614,7 @@ function MOI.get( ::DiffOpt.ReverseConstraintSet, ci::MOI.ConstraintIndex{MOI.VariableIndex,MOI.Parameter{T}}, ) where {T} - return MOI.Parameter{T}(model.back_grad_cache.Δp[ci.value]) + return MOI.Parameter{T}(model.back_grad_cache.Δp[ci]) end function MOI.get(model::Model, ::DiffOpt.ForwardObjectiveSensitivity) From a15f6f69d2280d90c02b7a1f2d8bf83b6e4d61bd Mon Sep 17 00:00:00 2001 From: Andrew Date: Thu, 6 Mar 2025 10:52:09 -0500 Subject: [PATCH 13/13] format --- src/NonLinearProgram/NonLinearProgram.jl | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/NonLinearProgram/NonLinearProgram.jl b/src/NonLinearProgram/NonLinearProgram.jl index 2952f25f..19309847 100644 --- a/src/NonLinearProgram/NonLinearProgram.jl +++ b/src/NonLinearProgram/NonLinearProgram.jl @@ -581,7 +581,8 @@ function DiffOpt.reverse_differentiate!(model::Model; tol = 1e-6) end Δp_dict = Dict{MOI.ConstraintIndex,Float64}( - form.var2ci[var_idx] => 
Δp[form.var2param[var_idx].value] for var_idx in keys(form.var2ci) + form.var2ci[var_idx] => Δp[form.var2param[var_idx].value] + for var_idx in keys(form.var2ci) ) model.back_grad_cache = ReverseCache(; Δp = Δp_dict) end