From fedad79fcf85e507d2074f55ccc1ea969de22237 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Legat?= Date: Tue, 6 Jan 2026 11:33:48 +0100 Subject: [PATCH 01/20] Remove need for the MOI fork --- src/ArrayDiff.jl | 2 + src/parse_expression.jl | 120 ++++++++++++++++++++++++++++++++++++++++ src/reverse_mode.jl | 2 +- test/ArrayDiff.jl | 4 +- 4 files changed, 125 insertions(+), 3 deletions(-) create mode 100644 src/parse_expression.jl diff --git a/src/ArrayDiff.jl b/src/ArrayDiff.jl index 1f08c1e..1c65db8 100644 --- a/src/ArrayDiff.jl +++ b/src/ArrayDiff.jl @@ -58,4 +58,6 @@ include("reverse_mode.jl") include("forward_over_reverse.jl") include("mathoptinterface_api.jl") +include("parse_expression.jl") + end # module diff --git a/src/parse_expression.jl b/src/parse_expression.jl new file mode 100644 index 0000000..e8b637f --- /dev/null +++ b/src/parse_expression.jl @@ -0,0 +1,120 @@ +# Inspired by MathOptInterface/src/Nonlinear/parse_expression.jl + +function set_objective(model::MOI.Nonlinear.Model, obj) + model.objective = parse_expression(model, obj) + return +end + +function model() + model = MOI.Nonlinear.Model() + append!(model.operators.multivariate_operators, [ + :vect, + :dot, + :hcat, + :vcat, + :norm, + :sum, + :row, + ]) + return moel +end + +function parse_expression(data::Model, input) + expr = Expression() + parse_expression(data, expr, input, -1) + return expr +end + +function parse_expression( + data::Model, + expr::Expression, + x::Expr, + parent_index::Int, +) + stack = Tuple{Int,Any}[] + push!(stack, (parent_index, x)) + while !isempty(stack) + parent, item = pop!(stack) + if item isa Expr + _parse_expression(stack, data, expr, item, parent) + else + parse_expression(data, expr, item, parent) + end + end + return +end + +function _parse_expression(stack, data, expr, x, parent_index) + if Meta.isexpr(x, :call) + if length(x.args) == 2 && !Meta.isexpr(x.args[2], :...) + MOI.Nonlinear._parse_univariate_expression(stack, data, expr, x, parent_index) + else + # The call is either n-ary, or it is a splat, in which case we + # cannot tell just yet whether the expression is unary or nary. + # Punt to multivariate and try to recover later. + MOI.Nonlinear._parse_multivariate_expression(stack, data, expr, x, parent_index) + end + elseif Meta.isexpr(x, :comparison) + MOI.Nonlinear._parse_comparison_expression(stack, data, expr, x, parent_index) + elseif Meta.isexpr(x, :...) 
+ MOI.Nonlinear._parse_splat_expression(stack, data, expr, x, parent_index) + elseif Meta.isexpr(x, :&&) || Meta.isexpr(x, :||) + MOI.Nonlinear._parse_logic_expression(stack, data, expr, x, parent_index) + elseif Meta.isexpr(x, :vect) + _parse_vect_expression(stack, data, expr, x, parent_index) + elseif Meta.isexpr(x, :hcat) + _parse_hcat_expression(stack, data, expr, x, parent_index) + elseif Meta.isexpr(x, :vcat) + _parse_vcat_expression(stack, data, expr, x, parent_index) + elseif Meta.isexpr(x, :row) + _parse_row_expression(stack, data, expr, x, parent_index) + elsval = @s f.forward_storage[ix] + @j f.forward_storage[k] = val + end + elseif node.index == 11 # dot + idx1e + error("Unsupported expression: $x") + end +end + +function eval_multivariate_function( + registry::OperatorRegistry, + op::Symbol, + x::AbstractVector{T}, +) where {T} + if op == :+ + return sum(x; init = zero(T)) + elseif op == :- + @assert length(x) == 2 + return x[1] - x[2] + elseif op == :* + return prod(x; init = one(T)) + elseif op == :^ + @assert length(x) == 2 + # Use _nan_pow here to avoid throwing an error in common situations like + # (-1.0)^1.5. + return _nan_pow(x[1], x[2]) + elseif op == :/ + @assert length(x) == 2 + return x[1] / x[2] + elseif op == :ifelse + @assert length(x) == 3 + return ifelse(Bool(x[1]), x[2], x[3]) + elseif op == :atan + @assert length(x) == 2 + return atan(x[1], x[2]) + elseif op == :min + return minimum(x) + elseif op == :max + return maximum(x) + elseif op == :vect + return x + end + id = registry.multivariate_operator_to_id[op] + offset = id - registry.multivariate_user_operator_start + operator = registry.registered_multivariate_operators[offset] + @assert length(x) == operator.N + ret = operator.f(x) + check_return_type(T, ret) + return ret::T +end diff --git a/src/reverse_mode.jl b/src/reverse_mode.jl index 2c7cfc7..1b13e17 100644 --- a/src/reverse_mode.jl +++ b/src/reverse_mode.jl @@ -331,7 +331,7 @@ function _forward_eval( f_input[r] = f.forward_storage[children_arr[i]] ∇f[r] = 0.0 end - f.forward_storage[k] = Nonlinear.eval_multivariate_function( + f.forward_storage[k] = eval_multivariate_function( operators, operators.multivariate_operators[node.index], f_input, diff --git a/test/ArrayDiff.jl b/test/ArrayDiff.jl index 5eab3cc..9e765e8 100644 --- a/test/ArrayDiff.jl +++ b/test/ArrayDiff.jl @@ -22,9 +22,9 @@ function runtests() end function test_objective_dot_univariate() - model = Nonlinear.Model() + model = ArrayDiff.model() x = MOI.VariableIndex(1) - Nonlinear.set_objective(model, :(dot([$x], [$x]))) + ArrayDiff.set_objective(model, :(dot([$x], [$x]))) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x]) MOI.initialize(evaluator, [:Grad]) sizes = evaluator.backend.objective.expr.sizes From d6b61560384d134cbeacfe19b47e02d8c829147a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Legat?= Date: Tue, 6 Jan 2026 11:34:05 +0100 Subject: [PATCH 02/20] Update ci --- .github/workflows/ci.yml | 7 ------- 1 file changed, 7 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 6aaa77f..aae738c 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -25,13 +25,6 @@ jobs: version: ${{ matrix.version }} arch: ${{ matrix.arch }} - uses: julia-actions/cache@v1 - - name: MOI - shell: julia --project=@. 
{0} - run: | - using Pkg - Pkg.add([ - PackageSpec(name="MathOptInterface", rev="bl/arraydiff"), - ]) - uses: julia-actions/julia-buildpkg@v1 - uses: julia-actions/julia-runtest@v1 with: From 06b3e00a9c551658e4b9e01e325c848a35f740b3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Legat?= Date: Tue, 6 Jan 2026 11:36:21 +0100 Subject: [PATCH 03/20] Rename --- src/ArrayDiff.jl | 2 +- src/{parse_expression.jl => MOI_Nonlinear_fork.jl} | 0 2 files changed, 1 insertion(+), 1 deletion(-) rename src/{parse_expression.jl => MOI_Nonlinear_fork.jl} (100%) diff --git a/src/ArrayDiff.jl b/src/ArrayDiff.jl index 1c65db8..3bb6c00 100644 --- a/src/ArrayDiff.jl +++ b/src/ArrayDiff.jl @@ -58,6 +58,6 @@ include("reverse_mode.jl") include("forward_over_reverse.jl") include("mathoptinterface_api.jl") -include("parse_expression.jl") +include("MOI_Nonlinear_fork.jl") end # module diff --git a/src/parse_expression.jl b/src/MOI_Nonlinear_fork.jl similarity index 100% rename from src/parse_expression.jl rename to src/MOI_Nonlinear_fork.jl From c871453408205cd0fa050a85b775b32b227b0845 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Legat?= Date: Tue, 6 Jan 2026 12:55:18 +0100 Subject: [PATCH 04/20] Fixes --- src/MOI_Nonlinear_fork.jl | 14 +++++--------- 1 file changed, 5 insertions(+), 9 deletions(-) diff --git a/src/MOI_Nonlinear_fork.jl b/src/MOI_Nonlinear_fork.jl index e8b637f..f0d36e9 100644 --- a/src/MOI_Nonlinear_fork.jl +++ b/src/MOI_Nonlinear_fork.jl @@ -19,15 +19,15 @@ function model() return moel end -function parse_expression(data::Model, input) +function parse_expression(data::MOI.Nonlinear.Model, input) expr = Expression() parse_expression(data, expr, input, -1) return expr end function parse_expression( - data::Model, - expr::Expression, + data::MOI.Nonlinear.Model, + expr::MOI.Nonlinear.Expression, x::Expr, parent_index::Int, ) @@ -68,17 +68,13 @@ function _parse_expression(stack, data, expr, x, parent_index) _parse_vcat_expression(stack, data, expr, x, parent_index) elseif Meta.isexpr(x, :row) _parse_row_expression(stack, data, expr, x, parent_index) - elsval = @s f.forward_storage[ix] - @j f.forward_storage[k] = val - end - elseif node.index == 11 # dot - idx1e + else error("Unsupported expression: $x") end end function eval_multivariate_function( - registry::OperatorRegistry, + registry::MOI.Nonlinear.OperatorRegistry, op::Symbol, x::AbstractVector{T}, ) where {T} From 8e67e16cca7d1949e925d52f49cfb4b2342b254d Mon Sep 17 00:00:00 2001 From: Sophie L Date: Tue, 6 Jan 2026 14:52:14 +0100 Subject: [PATCH 05/20] Correct typo and change model for Model --- src/MOI_Nonlinear_fork.jl | 6 ++--- test/ArrayDiff.jl | 54 +++++++++++++++++++-------------------- 2 files changed, 30 insertions(+), 30 deletions(-) diff --git a/src/MOI_Nonlinear_fork.jl b/src/MOI_Nonlinear_fork.jl index f0d36e9..3c3279a 100644 --- a/src/MOI_Nonlinear_fork.jl +++ b/src/MOI_Nonlinear_fork.jl @@ -5,7 +5,7 @@ function set_objective(model::MOI.Nonlinear.Model, obj) return end -function model() +function Model() model = MOI.Nonlinear.Model() append!(model.operators.multivariate_operators, [ :vect, @@ -16,11 +16,11 @@ function model() :sum, :row, ]) - return moel + return model end function parse_expression(data::MOI.Nonlinear.Model, input) - expr = Expression() + expr = MOI.Nonlinear.Expression() parse_expression(data, expr, input, -1) return expr end diff --git a/test/ArrayDiff.jl b/test/ArrayDiff.jl index 9e765e8..b9e3d8c 100644 --- a/test/ArrayDiff.jl +++ b/test/ArrayDiff.jl @@ -22,7 +22,7 @@ function runtests() end 
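# The hunks below move the tests onto `ArrayDiff.Model()` and
# `ArrayDiff.set_objective`: the array operators (:vect, :dot, :hcat, :vcat,
# :norm, :sum, :row) must be registered on the model before the objective is
# parsed, since `ArrayDiff.parse_expression` looks them up in the model's
# operator registry.  A minimal sketch of the pattern these tests share (the
# gradient value is illustrative, not taken from the test file):
#
#     model = ArrayDiff.Model()
#     x = MOI.VariableIndex(1)
#     ArrayDiff.set_objective(model, :(dot([$x], [$x])))
#     evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x])
#     MOI.initialize(evaluator, [:Grad])
#     g = zeros(1)
#     MOI.eval_objective_gradient(evaluator, g, [2.0])  # d(x ⋅ x)/dx = 2x, so g == [4.0]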
function test_objective_dot_univariate() - model = ArrayDiff.model() + model = ArrayDiff.Model() x = MOI.VariableIndex(1) ArrayDiff.set_objective(model, :(dot([$x], [$x]))) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x]) @@ -41,9 +41,9 @@ function test_objective_dot_univariate() end function test_objective_dot_univariate_and_scalar_mult() - model = Nonlinear.Model() + model = ArrayDiff.Model() x = MOI.VariableIndex(1) - Nonlinear.set_objective(model, :(2*(dot([$x], [$x])))) + ArrayDiff.set_objective(model, :(2*(dot([$x], [$x])))) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x]) MOI.initialize(evaluator, [:Grad]) sizes = evaluator.backend.objective.expr.sizes @@ -60,10 +60,10 @@ function test_objective_dot_univariate_and_scalar_mult() end function test_objective_dot_bivariate() - model = Nonlinear.Model() + model = ArrayDiff.Model() x = MOI.VariableIndex(1) y = MOI.VariableIndex(2) - Nonlinear.set_objective( + ArrayDiff.set_objective( model, :(dot([$x, $y] - [1, 2], -[1, 2] + [$x, $y])), ) @@ -84,12 +84,12 @@ function test_objective_dot_bivariate() end function test_objective_hcat_scalars() - model = Nonlinear.Model() + model = ArrayDiff.Model() x1 = MOI.VariableIndex(1) x2 = MOI.VariableIndex(2) x3 = MOI.VariableIndex(3) x4 = MOI.VariableIndex(4) - Nonlinear.set_objective(model, :(dot([$x1 $x3], [$x2 $x4]))) + ArrayDiff.set_objective(model, :(dot([$x1 $x3], [$x2 $x4]))) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x1, x2, x3, x4]) MOI.initialize(evaluator, [:Grad]) sizes = evaluator.backend.objective.expr.sizes @@ -109,12 +109,12 @@ function test_objective_hcat_scalars() end function test_objective_hcat_vectors() - model = Nonlinear.Model() + model = ArrayDiff.Model() x1 = MOI.VariableIndex(1) x2 = MOI.VariableIndex(2) x3 = MOI.VariableIndex(3) x4 = MOI.VariableIndex(4) - Nonlinear.set_objective( + ArrayDiff.set_objective( model, :(dot(hcat([$x1], [$x3]), hcat([$x2], [$x4]))), ) @@ -137,10 +137,10 @@ function test_objective_hcat_vectors() end function test_objective_dot_bivariate_on_rows() - model = Nonlinear.Model() + model = ArrayDiff.Model() x = MOI.VariableIndex(1) y = MOI.VariableIndex(2) - Nonlinear.set_objective(model, :(dot([$x $y] - [1 2], -[1 2] + [$x $y]))) + ArrayDiff.set_objective(model, :(dot([$x $y] - [1 2], -[1 2] + [$x $y]))) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x, y]) MOI.initialize(evaluator, [:Grad]) sizes = evaluator.backend.objective.expr.sizes @@ -159,9 +159,9 @@ function test_objective_dot_bivariate_on_rows() end function test_objective_norm_univariate() - model = Nonlinear.Model() + model = ArrayDiff.Model() x = MOI.VariableIndex(1) - Nonlinear.set_objective(model, :(norm([$x]))) + ArrayDiff.set_objective(model, :(norm([$x]))) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x]) MOI.initialize(evaluator, [:Grad]) sizes = evaluator.backend.objective.expr.sizes @@ -178,10 +178,10 @@ function test_objective_norm_univariate() end function test_objective_norm_bivariate() - model = Nonlinear.Model() + model = ArrayDiff.Model() x = MOI.VariableIndex(1) y = MOI.VariableIndex(2) - Nonlinear.set_objective(model, :(norm([$x, $y]))) + ArrayDiff.set_objective(model, :(norm([$x, $y]))) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x, y]) MOI.initialize(evaluator, [:Grad]) sizes = evaluator.backend.objective.expr.sizes @@ -203,10 +203,10 @@ function test_objective_norm_bivariate() end function test_objective_norm_of_row_vector() - model = Nonlinear.Model() + model = ArrayDiff.Model() x1 = 
MOI.VariableIndex(1) x2 = MOI.VariableIndex(2) - Nonlinear.set_objective(model, :(norm([$x1 $x2]))) + ArrayDiff.set_objective(model, :(norm([$x1 $x2]))) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x1, x2]) MOI.initialize(evaluator, [:Grad]) sizes = evaluator.backend.objective.expr.sizes @@ -224,12 +224,12 @@ function test_objective_norm_of_row_vector() end function test_objective_norm_of_vcat_vector() - model = Nonlinear.Model() + model = ArrayDiff.Model() x1 = MOI.VariableIndex(1) x2 = MOI.VariableIndex(2) x3 = MOI.VariableIndex(3) x4 = MOI.VariableIndex(4) - Nonlinear.set_objective(model, :(norm(vcat($x1, $x3)))) + ArrayDiff.set_objective(model, :(norm(vcat($x1, $x3)))) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x1, x2, x3, x4]) MOI.initialize(evaluator, [:Grad]) sizes = evaluator.backend.objective.expr.sizes @@ -249,12 +249,12 @@ function test_objective_norm_of_vcat_vector() end function test_objective_norm_of_vcat_matrix() - model = Nonlinear.Model() + model = ArrayDiff.Model() x1 = MOI.VariableIndex(1) x2 = MOI.VariableIndex(2) x3 = MOI.VariableIndex(3) x4 = MOI.VariableIndex(4) - Nonlinear.set_objective(model, :(norm(vcat([$x1 $x3], [$x2 $x4])))) + ArrayDiff.set_objective(model, :(norm(vcat([$x1 $x3], [$x2 $x4])))) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x1, x2, x3, x4]) MOI.initialize(evaluator, [:Grad]) sizes = evaluator.backend.objective.expr.sizes @@ -279,10 +279,10 @@ function test_objective_norm_of_vcat_matrix() end function test_objective_norm_of_row() - model = Nonlinear.Model() + model = ArrayDiff.Model() x1 = MOI.VariableIndex(1) x2 = MOI.VariableIndex(2) - Nonlinear.set_objective(model, :(norm(row($x1, $x2)))) + ArrayDiff.set_objective(model, :(norm(row($x1, $x2)))) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x1, x2]) MOI.initialize(evaluator, [:Grad]) sizes = evaluator.backend.objective.expr.sizes @@ -300,12 +300,12 @@ function test_objective_norm_of_row() end function test_objective_norm_of_matrix() - model = Nonlinear.Model() + model = ArrayDiff.Model() x1 = MOI.VariableIndex(1) x2 = MOI.VariableIndex(2) x3 = MOI.VariableIndex(3) x4 = MOI.VariableIndex(4) - Nonlinear.set_objective(model, :(norm([$x1 $x2; $x3 $x4]))) + ArrayDiff.set_objective(model, :(norm([$x1 $x2; $x3 $x4]))) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x1, x2, x3, x4]) MOI.initialize(evaluator, [:Grad]) sizes = evaluator.backend.objective.expr.sizes @@ -330,12 +330,12 @@ function test_objective_norm_of_matrix() end function test_objective_norm_of_matrix_with_sum() - model = Nonlinear.Model() + model = ArrayDiff.Model() x1 = MOI.VariableIndex(1) x2 = MOI.VariableIndex(2) x3 = MOI.VariableIndex(3) x4 = MOI.VariableIndex(4) - Nonlinear.set_objective(model, :(norm([$x1 $x2; $x3 $x4] - [1 1; 1 1]))) + ArrayDiff.set_objective(model, :(norm([$x1 $x2; $x3 $x4] - [1 1; 1 1]))) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x1, x2, x3, x4]) MOI.initialize(evaluator, [:Grad]) sizes = evaluator.backend.objective.expr.sizes From c7345b01edbc119f103c93f1fa4e7acaf40f304b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Legat?= Date: Tue, 6 Jan 2026 17:34:28 +0100 Subject: [PATCH 06/20] Fix tests --- src/MOI_Nonlinear_fork.jl | 68 ++++++++++++++++++++++++++++++++++++++- 1 file changed, 67 insertions(+), 1 deletion(-) diff --git a/src/MOI_Nonlinear_fork.jl b/src/MOI_Nonlinear_fork.jl index 3c3279a..d465b6c 100644 --- a/src/MOI_Nonlinear_fork.jl +++ b/src/MOI_Nonlinear_fork.jl @@ -25,6 +25,8 @@ function 
parse_expression(data::MOI.Nonlinear.Model, input) return expr end +parse_expression(data, expr, item, parent) = MOI.Nonlinear.parse_expression(data, expr, item, parent) + function parse_expression( data::MOI.Nonlinear.Model, expr::MOI.Nonlinear.Expression, @@ -111,6 +113,70 @@ function eval_multivariate_function( operator = registry.registered_multivariate_operators[offset] @assert length(x) == operator.N ret = operator.f(x) - check_return_type(T, ret) + MOI.Nonlinear.check_return_type(T, ret) return ret::T end + +function _parse_vect_expression( + stack::Vector{Tuple{Int,Any}}, + data::MOI.Nonlinear.Model, + expr::MOI.Nonlinear.Expression, + x::Expr, + parent_index::Int, +) + @assert Meta.isexpr(x, :vect) + id = get(data.operators.multivariate_operator_to_id, :vect, nothing) + push!(expr.nodes, MOI.Nonlinear.Node(MOI.Nonlinear.NODE_CALL_MULTIVARIATE, id, parent_index)) + for i in length(x.args):-1:1 + push!(stack, (length(expr.nodes), x.args[i])) + end + return +end + +function _parse_row_expression( + stack::Vector{Tuple{Int,Any}}, + data::MOI.Nonlinear.Model, + expr::MOI.Nonlinear.Expression, + x::Expr, + parent_index::Int, +) + @assert Meta.isexpr(x, :row) + id = get(data.operators.multivariate_operator_to_id, :row, nothing) + push!(expr.nodes, MOI.Nonlinear.Node(MOI.Nonlinear.NODE_CALL_MULTIVARIATE, id, parent_index)) + for i in length(x.args):-1:1 + push!(stack, (length(expr.nodes), x.args[i])) + end + return +end + +function _parse_hcat_expression( + stack::Vector{Tuple{Int,Any}}, + data::MOI.Nonlinear.Model, + expr::MOI.Nonlinear.Expression, + x::Expr, + parent_index::Int, +) + @assert Meta.isexpr(x, :hcat) + id = get(data.operators.multivariate_operator_to_id, :hcat, nothing) + push!(expr.nodes, MOI.Nonlinear.Node(MOI.Nonlinear.NODE_CALL_MULTIVARIATE, id, parent_index)) + for i in length(x.args):-1:1 + push!(stack, (length(expr.nodes), x.args[i])) + end + return +end + +function _parse_vcat_expression( + stack::Vector{Tuple{Int,Any}}, + data::MOI.Nonlinear.Model, + expr::MOI.Nonlinear.Expression, + x::Expr, + parent_index::Int, +) + @assert Meta.isexpr(x, :vcat) + id = get(data.operators.multivariate_operator_to_id, :vcat, nothing) + push!(expr.nodes, MOI.Nonlinear.Node(MOI.Nonlinear.NODE_CALL_MULTIVARIATE, id, parent_index)) + for i in length(x.args):-1:1 + push!(stack, (length(expr.nodes), x.args[i])) + end + return +end From e165a0e1fe58e09f0dfba4928018f465d698e6d8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Legat?= Date: Tue, 6 Jan 2026 17:34:43 +0100 Subject: [PATCH 07/20] Fix format --- src/MOI_Nonlinear_fork.jl | 93 +++++++++++++++++++++++++++++++-------- 1 file changed, 74 insertions(+), 19 deletions(-) diff --git a/src/MOI_Nonlinear_fork.jl b/src/MOI_Nonlinear_fork.jl index d465b6c..08e29dc 100644 --- a/src/MOI_Nonlinear_fork.jl +++ b/src/MOI_Nonlinear_fork.jl @@ -7,15 +7,10 @@ end function Model() model = MOI.Nonlinear.Model() - append!(model.operators.multivariate_operators, [ - :vect, - :dot, - :hcat, - :vcat, - :norm, - :sum, - :row, - ]) + append!( + model.operators.multivariate_operators, + [:vect, :dot, :hcat, :vcat, :norm, :sum, :row], + ) return model end @@ -25,7 +20,9 @@ function parse_expression(data::MOI.Nonlinear.Model, input) return expr end -parse_expression(data, expr, item, parent) = MOI.Nonlinear.parse_expression(data, expr, item, parent) +function parse_expression(data, expr, item, parent) + return MOI.Nonlinear.parse_expression(data, expr, item, parent) +end function parse_expression( data::MOI.Nonlinear.Model, @@ -49,19 +46,49 @@ end 
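# Rough shape of the node tape that `parse_expression` produces for
# :(norm([x, y])) — a sketch, not asserted anywhere in the tests:
#
#   1  NODE_CALL_MULTIVARIATE  :norm  parent = -1
#   2  NODE_CALL_MULTIVARIATE  :vect  parent =  1
#   3  NODE_MOI_VARIABLE       x      parent =  2
#   4  NODE_MOI_VARIABLE       y      parent =  2
#
# The work stack is LIFO, which is why the `_parse_*_expression` helpers push
# children in reverse (`length(x.args):-1:1`): popping then visits them
# left-to-right, so the tape stays in prefix order with parents before
# children.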
function _parse_expression(stack, data, expr, x, parent_index) if Meta.isexpr(x, :call) if length(x.args) == 2 && !Meta.isexpr(x.args[2], :...) - MOI.Nonlinear._parse_univariate_expression(stack, data, expr, x, parent_index) + MOI.Nonlinear._parse_univariate_expression( + stack, + data, + expr, + x, + parent_index, + ) else # The call is either n-ary, or it is a splat, in which case we # cannot tell just yet whether the expression is unary or nary. # Punt to multivariate and try to recover later. - MOI.Nonlinear._parse_multivariate_expression(stack, data, expr, x, parent_index) + MOI.Nonlinear._parse_multivariate_expression( + stack, + data, + expr, + x, + parent_index, + ) end elseif Meta.isexpr(x, :comparison) - MOI.Nonlinear._parse_comparison_expression(stack, data, expr, x, parent_index) + MOI.Nonlinear._parse_comparison_expression( + stack, + data, + expr, + x, + parent_index, + ) elseif Meta.isexpr(x, :...) - MOI.Nonlinear._parse_splat_expression(stack, data, expr, x, parent_index) + MOI.Nonlinear._parse_splat_expression( + stack, + data, + expr, + x, + parent_index, + ) elseif Meta.isexpr(x, :&&) || Meta.isexpr(x, :||) - MOI.Nonlinear._parse_logic_expression(stack, data, expr, x, parent_index) + MOI.Nonlinear._parse_logic_expression( + stack, + data, + expr, + x, + parent_index, + ) elseif Meta.isexpr(x, :vect) _parse_vect_expression(stack, data, expr, x, parent_index) elseif Meta.isexpr(x, :hcat) @@ -126,7 +153,14 @@ function _parse_vect_expression( ) @assert Meta.isexpr(x, :vect) id = get(data.operators.multivariate_operator_to_id, :vect, nothing) - push!(expr.nodes, MOI.Nonlinear.Node(MOI.Nonlinear.NODE_CALL_MULTIVARIATE, id, parent_index)) + push!( + expr.nodes, + MOI.Nonlinear.Node( + MOI.Nonlinear.NODE_CALL_MULTIVARIATE, + id, + parent_index, + ), + ) for i in length(x.args):-1:1 push!(stack, (length(expr.nodes), x.args[i])) end @@ -142,7 +176,14 @@ function _parse_row_expression( ) @assert Meta.isexpr(x, :row) id = get(data.operators.multivariate_operator_to_id, :row, nothing) - push!(expr.nodes, MOI.Nonlinear.Node(MOI.Nonlinear.NODE_CALL_MULTIVARIATE, id, parent_index)) + push!( + expr.nodes, + MOI.Nonlinear.Node( + MOI.Nonlinear.NODE_CALL_MULTIVARIATE, + id, + parent_index, + ), + ) for i in length(x.args):-1:1 push!(stack, (length(expr.nodes), x.args[i])) end @@ -158,7 +199,14 @@ function _parse_hcat_expression( ) @assert Meta.isexpr(x, :hcat) id = get(data.operators.multivariate_operator_to_id, :hcat, nothing) - push!(expr.nodes, MOI.Nonlinear.Node(MOI.Nonlinear.NODE_CALL_MULTIVARIATE, id, parent_index)) + push!( + expr.nodes, + MOI.Nonlinear.Node( + MOI.Nonlinear.NODE_CALL_MULTIVARIATE, + id, + parent_index, + ), + ) for i in length(x.args):-1:1 push!(stack, (length(expr.nodes), x.args[i])) end @@ -174,7 +222,14 @@ function _parse_vcat_expression( ) @assert Meta.isexpr(x, :vcat) id = get(data.operators.multivariate_operator_to_id, :vcat, nothing) - push!(expr.nodes, MOI.Nonlinear.Node(MOI.Nonlinear.NODE_CALL_MULTIVARIATE, id, parent_index)) + push!( + expr.nodes, + MOI.Nonlinear.Node( + MOI.Nonlinear.NODE_CALL_MULTIVARIATE, + id, + parent_index, + ), + ) for i in length(x.args):-1:1 push!(stack, (length(expr.nodes), x.args[i])) end From b8f31cf616f2af2af46418fbfbc4dd72c8067480 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Legat?= Date: Tue, 6 Jan 2026 17:45:44 +0100 Subject: [PATCH 08/20] Fixes --- src/reverse_mode.jl | 4 ++ test/ReverseAD.jl | 104 ++++++++++++++++++++++---------------------- 2 files changed, 56 insertions(+), 52 deletions(-) diff --git 
a/src/reverse_mode.jl b/src/reverse_mode.jl index 1b13e17..eaf85bb 100644 --- a/src/reverse_mode.jl +++ b/src/reverse_mode.jl @@ -29,6 +29,10 @@ single pass through the tree by iterating forwards through the vector of stored nodes. """ function _reverse_mode(d::NLPEvaluator, x) + # Because the operators are checked with `Int` and not `Symbol` + # if we get a model that didn't add our new operators but had user-defined + # operators, we will think that these are one of our new operators + @assert :vect in d.data.operators.multivariate_operators if d.last_x == x # Fail fast if the primal solution has not changed since last call. return diff --git a/test/ReverseAD.jl b/test/ReverseAD.jl index 552ff3f..bf88318 100644 --- a/test/ReverseAD.jl +++ b/test/ReverseAD.jl @@ -29,7 +29,7 @@ end function test_objective_quadratic_univariate() x = MOI.VariableIndex(1) - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.set_objective(model, :($x^2 + 1)) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x]) MOI.initialize(evaluator, [:Grad, :Jac, :Hess]) @@ -59,7 +59,7 @@ end function test_objective_and_constraints_quadratic_univariate() x = MOI.VariableIndex(1) - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.set_objective(model, :($x^2 + 1)) Nonlinear.add_constraint(model, :($x^2), MOI.LessThan(2.0)) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x]) @@ -96,7 +96,7 @@ end function test_objective_quadratic_multivariate() x = MOI.VariableIndex(1) y = MOI.VariableIndex(2) - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.set_objective(model, :($x^2 + $x * $y + $y^2)) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x, y]) MOI.initialize(evaluator, [:Grad, :Jac, :Hess]) @@ -130,7 +130,7 @@ end function test_objective_quadratic_multivariate_subexpressions() x = MOI.VariableIndex(1) y = MOI.VariableIndex(2) - model = Nonlinear.Model() + model = ArrayDiff.Model() ex = Nonlinear.add_expression(model, :($x^2)) ey = Nonlinear.add_expression(model, :($y^2)) exy = Nonlinear.add_expression(model, :($ex + $x * $y)) @@ -175,7 +175,7 @@ end function test_objective_ifelse_comparison() x = MOI.VariableIndex(1) y = MOI.VariableIndex(2) - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.set_objective(model, :(ifelse(1 <= $x <= 2, $x^2, $y^2))) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x, y]) MOI.initialize(evaluator, [:Grad, :Jac, :Hess]) @@ -192,7 +192,7 @@ end function test_objective_ifelse_logic() x = MOI.VariableIndex(1) y = MOI.VariableIndex(2) - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.set_objective(model, :(ifelse(1 <= $x && $x <= 2, $x^2, $y^2))) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x, y]) MOI.initialize(evaluator, [:Grad, :Jac, :Hess]) @@ -208,7 +208,7 @@ end function test_objective_parameter() x = MOI.VariableIndex(1) - model = Nonlinear.Model() + model = ArrayDiff.Model() p = Nonlinear.add_parameter(model, 1.2) Nonlinear.set_objective(model, :($p * $x)) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x]) @@ -221,7 +221,7 @@ function test_objective_parameter() end function test_objective_subexpression() - model = Nonlinear.Model() + model = ArrayDiff.Model() x = MOI.VariableIndex(1) input = :($x^2 + 1) expr = Nonlinear.add_expression(model, input) @@ -238,7 +238,7 @@ end function test_constraint_quadratic_univariate() x = MOI.VariableIndex(1) - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.add_constraint(model, :($x^2), MOI.LessThan(2.0)) 
evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x]) MOI.initialize(evaluator, [:Grad, :Jac, :Hess]) @@ -264,7 +264,7 @@ end function test_constraint_quadratic_multivariate() x = MOI.VariableIndex(1) y = MOI.VariableIndex(2) - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.add_constraint(model, :($x^2 + $x * $y + $y^2), MOI.LessThan(2.0)) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x, y]) MOI.initialize(evaluator, [:Grad, :Jac, :Hess]) @@ -287,7 +287,7 @@ end function test_constraint_quadratic_multivariate_subexpressions() x = MOI.VariableIndex(1) y = MOI.VariableIndex(2) - model = Nonlinear.Model() + model = ArrayDiff.Model() ex = Nonlinear.add_expression(model, :($x^2)) ey = Nonlinear.add_expression(model, :($y^2)) exy = Nonlinear.add_expression(model, :($ex + $x * $y)) @@ -336,7 +336,7 @@ function test_hessian_sparsity_registered_function() H[2, 2] = 2 return end - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.register_operator(model, :f, 2, f, ∇f, ∇²f) Nonlinear.set_objective(model, :(f($x, $z) + $y^2)) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x, y, z]) @@ -366,7 +366,7 @@ function test_hessian_sparsity_registered_rosenbrock() H[2, 2] = 200.0 return end - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.register_operator(model, :rosenbrock, 2, f, ∇f, ∇²f) Nonlinear.set_objective(model, :(rosenbrock($x, $y))) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x, y]) @@ -396,7 +396,7 @@ function test_hessian_registered_error() H[2, 2] = 200.0 return end - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.register_operator(model, :rosenbrock, 2, f, ∇f, ∇²f) Nonlinear.set_objective(model, :(rosenbrock($x, $y))) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x, y]) @@ -494,7 +494,7 @@ end function test_derivatives() a = MOI.VariableIndex(1) b = MOI.VariableIndex(2) - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.set_objective(model, :(sin($a^2) + cos($b * 4) / 5 - 2.0)) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [a, b]) MOI.initialize(evaluator, [:Grad, :Jac, :Hess]) @@ -516,7 +516,7 @@ function test_derivatives() end function test_NLPBlockData() - model = Nonlinear.Model() + model = ArrayDiff.Model() x = MOI.VariableIndex(1) Nonlinear.add_constraint(model, :($x - 1), MOI.LessThan(0.0)) Nonlinear.add_constraint(model, :($x - 2), MOI.GreaterThan(0.0)) @@ -540,7 +540,7 @@ function test_linearity() z = MOI.VariableIndex(3) variables = Dict(x => 1, y => 2, z => 3) function _test_linearity(input, test_value, IJ = [], indices = []) - model = Nonlinear.Model() + model = ArrayDiff.Model() ex = Nonlinear.add_expression(model, input) expr = model[ex] adj = Nonlinear.adjacency_matrix(expr.nodes) @@ -631,7 +631,7 @@ end function test_linearity_no_hess() x = MOI.VariableIndex(1) - model = Nonlinear.Model() + model = ArrayDiff.Model() ex = Nonlinear.add_expression(model, :($x + 1)) Nonlinear.set_objective(model, ex) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x]) @@ -647,7 +647,7 @@ function test_dual_forward() x = MOI.VariableIndex(1) y = MOI.VariableIndex(2) function _test_dual_forward(input, x_input, test_value) - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.set_objective(model, input) evaluator = Nonlinear.Evaluator( model, @@ -687,7 +687,7 @@ function test_gradient_registered_function() x = MOI.VariableIndex(1) y = MOI.VariableIndex(2) z = MOI.VariableIndex(3) - model = Nonlinear.Model() + model = ArrayDiff.Model() 
f(x, y) = (1 / 3) * y^3 - 2x^2 function ∇f(g, x, y) g[1] = -4x @@ -714,7 +714,7 @@ end function test_gradient_jump_855() x = MOI.VariableIndex(1) - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.set_objective( model, :(ifelse($x <= 3.0, ($x - 2.0)^2, 2 * log($x - 2.0) + 1.0)), @@ -732,7 +732,7 @@ end function test_gradient_abs() x = MOI.VariableIndex(1) - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.set_objective(model, :(abs($x))) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), MOI.VariableIndex[x]) @@ -748,7 +748,7 @@ end function test_gradient_trig() x = MOI.VariableIndex(1) y = MOI.VariableIndex(2) - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.set_objective(model, :(sin($x^2) + cos($y * 4) / 5 - 2.0)) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), MOI.VariableIndex[x, y]) @@ -761,7 +761,7 @@ end function test_gradient_logical() x = MOI.VariableIndex(1) - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.set_objective(model, :($x > 0.5 && $x < 0.9)) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), MOI.VariableIndex[x]) @@ -775,7 +775,7 @@ end function test_gradient_ifelse() x = MOI.VariableIndex(1) - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.set_objective(model, :(ifelse($x >= 0.5 || $x < 0.1, $x, 5))) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), MOI.VariableIndex[x]) @@ -795,7 +795,7 @@ end function test_gradient_sqrt_nan() x = MOI.VariableIndex(1) - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.set_objective(model, :(sqrt($x))) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), MOI.VariableIndex[x]) @@ -811,7 +811,7 @@ function test_gradient_variable_power() x = MOI.VariableIndex(1) y = MOI.VariableIndex(2) z = MOI.VariableIndex(3) - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.set_objective(model, :((1 / $x)^$y - $z)) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), MOI.VariableIndex[x, y, z]) @@ -830,7 +830,7 @@ end function test_single_parameter() x = MOI.VariableIndex(1) - model = Nonlinear.Model() + model = ArrayDiff.Model() p = Nonlinear.add_parameter(model, 105.2) Nonlinear.set_objective(model, :($p)) evaluator = @@ -843,7 +843,7 @@ end function test_gradient_nested_subexpressions() x = MOI.VariableIndex(1) y = MOI.VariableIndex(2) - model = Nonlinear.Model() + model = ArrayDiff.Model() ex1 = Nonlinear.add_expression(model, :(sin($x^2) + cos($y * 4) / 5 - 2.0)) ex2 = Nonlinear.add_expression(model, :($ex1)) Nonlinear.set_objective(model, ex2) @@ -859,7 +859,7 @@ end function test_gradient_view() x = MOI.VariableIndex(1) y = MOI.VariableIndex(2) - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.set_objective(model, :(($x - 1)^2 + 4 * ($y - $x^2)^2)) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), MOI.VariableIndex[x, y]) @@ -902,7 +902,7 @@ function test_odd_chunks_Hessian_products() end function _test_odd_chunks_Hessian_products(N) - model = Nonlinear.Model() + model = ArrayDiff.Model() x = MOI.VariableIndex.(1:N) Nonlinear.set_objective(model, Expr(:call, :*, x...)) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), x) @@ -929,7 +929,7 @@ function _dense_jacobian(jacobian_sparsity, V, m, n) end function test_jacobians_and_jacvec() - model = Nonlinear.Model() + model = ArrayDiff.Model() x = MOI.VariableIndex.(1:3) a, b, c = x Nonlinear.set_objective(model, :($a * $b + $c^2)) @@ -960,7 +960,7 @@ function test_jacobians_and_jacvec() end function 
test_jacobians_and_jacvec_with_subexpressions() - model = Nonlinear.Model() + model = ArrayDiff.Model() x = MOI.VariableIndex.(1:3) a, b, c = x bc = Nonlinear.add_expression(model, :($b * $c)) @@ -993,7 +993,7 @@ end function test_pow_complex_result() x = MOI.VariableIndex(1) - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.set_objective(model, :(ifelse($x > 0, $x^1.5, -(-$x)^1.5))) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x]) MOI.initialize(evaluator, [:Grad, :Jac, :Hess]) @@ -1012,7 +1012,7 @@ end function test_constraint_gradient() x = MOI.VariableIndex(1) y = MOI.VariableIndex(2) - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.add_constraint(model, :($x^2 + $x * $y + $y^2), MOI.LessThan(2.0)) Nonlinear.add_constraint(model, :(cos($y)), MOI.LessThan(2.0)) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x, y]) @@ -1032,7 +1032,7 @@ end function test_hessian_length() x = MOI.VariableIndex(1) - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.set_objective(model, :(log($x))) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x]) MOI.initialize(evaluator, [:Hess]) @@ -1050,7 +1050,7 @@ end function test_jacobian_length() x = MOI.VariableIndex(1) - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.add_constraint(model, :(sin($x)), MOI.LessThan(0.5)) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x]) MOI.initialize(evaluator, [:Jac]) @@ -1061,7 +1061,7 @@ end function test_timers() x = MOI.VariableIndex(1) - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.set_objective(model, :(log($x))) Nonlinear.add_constraint(model, :(sin($x)), MOI.LessThan(0.5)) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x]) @@ -1101,7 +1101,7 @@ function test_timers() end function test_varying_length_x() - model = MOI.Nonlinear.Model() + model = ArrayDiff.Model() x = MOI.VariableIndex(1) MOI.Nonlinear.set_objective(model, :(sin($x))) evaluator = MOI.Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x]) @@ -1116,7 +1116,7 @@ end function test_univariate_operator_with_no_second_order() f(x::Float64) = x^2 df(x::Float64) = 2 * x - model = MOI.Nonlinear.Model() + model = ArrayDiff.Model() MOI.Nonlinear.register_operator(model, :op_f, 1, f, df) x = MOI.VariableIndex(1) MOI.Nonlinear.add_constraint(model, :(op_f($x)), MOI.LessThan(2.0)) @@ -1130,7 +1130,7 @@ function test_univariate_operator_with_no_second_order() end function test_no_objective() - model = Nonlinear.Model() + model = ArrayDiff.Model() x = MOI.VariableIndex(1) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x]) MOI.initialize(evaluator, [:Grad]) @@ -1147,7 +1147,7 @@ function test_no_objective() end function test_x_power_1() - model = Nonlinear.Model() + model = ArrayDiff.Model() x = MOI.VariableIndex(1) MOI.Nonlinear.set_objective(model, :($x^1)) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x]) @@ -1160,7 +1160,7 @@ function test_x_power_1() end function test_variable_first_node_in_tape() - model = Nonlinear.Model() + model = ArrayDiff.Model() x = MOI.VariableIndex(1) expr = MOI.Nonlinear.add_expression(model, :($x)) MOI.Nonlinear.set_objective(model, :(sin($expr))) @@ -1173,7 +1173,7 @@ function test_variable_first_node_in_tape() end function test_subexpression_first_node_in_tape() - model = Nonlinear.Model() + model = ArrayDiff.Model() x = MOI.VariableIndex(1) expr = MOI.Nonlinear.add_expression(model, :($x)) expr2 = MOI.Nonlinear.add_expression(model, :($expr)) @@ -1187,7 +1187,7 @@ function 
test_subexpression_first_node_in_tape() end function test_parameter_in_hessian() - model = Nonlinear.Model() + model = ArrayDiff.Model() x = MOI.VariableIndex(1) p = MOI.Nonlinear.add_parameter(model, 3.0) MOI.Nonlinear.set_objective(model, :(sin($x + $p))) @@ -1213,7 +1213,7 @@ end function test_classify_linearity_ifelse() x = MOI.VariableIndex(1) y = MOI.VariableIndex(2) - model = MOI.Nonlinear.Model() + model = ArrayDiff.Model() MOI.Nonlinear.set_objective(model, :(ifelse($y, $x, 1))) evaluator = MOI.Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x, y]) MOI.initialize(evaluator, [:Grad, :Jac, :Hess]) @@ -1226,7 +1226,7 @@ end function test_classify_linearity_logic() x = MOI.VariableIndex(1) y = MOI.VariableIndex(2) - model = MOI.Nonlinear.Model() + model = ArrayDiff.Model() MOI.Nonlinear.set_objective(model, :($x && $y)) evaluator = MOI.Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x, y]) MOI.initialize(evaluator, [:Grad, :Jac, :Hess]) @@ -1241,7 +1241,7 @@ end function test_hessian_sparsity_with_subexpressions() x = MOI.VariableIndex(1) y = MOI.VariableIndex(2) - model = MOI.Nonlinear.Model() + model = ArrayDiff.Model() expr = MOI.Nonlinear.add_expression(model, :($x * $y)) expr2 = MOI.Nonlinear.add_expression(model, :($expr)) MOI.Nonlinear.set_objective(model, :(sin($expr2))) @@ -1253,7 +1253,7 @@ end function test_toposort_subexpressions() x = MOI.VariableIndex(1) - model = MOI.Nonlinear.Model() + model = ArrayDiff.Model() a = MOI.Nonlinear.add_expression(model, :($x)) b = MOI.Nonlinear.add_expression(model, :($x)) c = MOI.Nonlinear.add_expression(model, :($a + $b)) @@ -1269,7 +1269,7 @@ function test_toposort_subexpressions() end function test_eval_user_defined_operator_ForwardDiff_gradient!() - model = MOI.Nonlinear.Model() + model = ArrayDiff.Model() x = MOI.VariableIndex.(1:4) p = MOI.Nonlinear.add_parameter(model, 2.0) ex = MOI.Nonlinear.add_expression(model, :($p * $(x[1]))) @@ -1296,7 +1296,7 @@ function test_eval_user_defined_operator_ForwardDiff_gradient!() end function test_eval_user_defined_operator_type_mismatch() - model = MOI.Nonlinear.Model() + model = ArrayDiff.Model() x = MOI.VariableIndex.(1:4) p = MOI.Nonlinear.add_parameter(model, 2.0) ex = MOI.Nonlinear.add_expression(model, :($p * $(x[1]))) @@ -1342,7 +1342,7 @@ function test_generate_hessian_slice_inner() end function test_hessian_reinterpret_unsafe() - model = MOI.Nonlinear.Model() + model = ArrayDiff.Model() x = MOI.VariableIndex.(1:5) MOI.Nonlinear.add_constraint( model, From 69dbb4fc953a67fbb15e3eebbd78fa1f289dceec Mon Sep 17 00:00:00 2001 From: Sophie L Date: Thu, 8 Jan 2026 12:37:21 +0100 Subject: [PATCH 09/20] Fix problems * Add OperatorRegistry because immutable in MOI Nonlinear (and Int fields will need to be modified) * Add DEFAULT_MULTIVARIATE_OPERATORS to extend it from MOI Nonlinear * Add OrderedCollections that was used in Model --- Project.toml | 1 + src/ArrayDiff.jl | 1 + src/MOI_Nonlinear_fork.jl | 117 +++++++++++++++++++++++++++++++++++--- src/reverse_mode.jl | 5 +- src/sizes.jl | 7 +-- test/Project.toml | 1 + 6 files changed, 115 insertions(+), 17 deletions(-) diff --git a/Project.toml b/Project.toml index 014259e..e45e9b1 100644 --- a/Project.toml +++ b/Project.toml @@ -9,6 +9,7 @@ ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210" MathOptInterface = "b8f27783-ece8-5eb3-8dc8-9495eed66fee" NaNMath = "77ba4419-2d1f-58cd-9bb1-8ffee604a2e3" SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf" +OrderedCollections = "bac558e1-5e72-5ebc-8fee-abe8a469f55d" [compat] DataStructures = "0.18, 0.19" 
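The `OperatorRegistry` and `DEFAULT_MULTIVARIATE_OPERATORS` added to src/MOI_Nonlinear_fork.jl below exist because `MOI.Nonlinear.OperatorRegistry` is immutable, so its operator tables cannot be extended in place. The array operators are appended after MOI's default multivariate operators, which lets the evaluator keep dispatching on integer node indices; it is also why `_reverse_mode` asserts that `:vect` is registered, since a model built without these operators but with user-defined ones would reuse the same integer ids for something else. Assuming MOI's nine default multivariate operators (+, -, *, ^, /, ifelse, atan, min, max), the expected id layout is:

    model = ArrayDiff.Model()
    model.operators.multivariate_operator_to_id[:max]   # 9, last MOI default
    model.operators.multivariate_operator_to_id[:vect]  # 10, first array operator
    model.operators.multivariate_operator_to_id[:dot]   # 11
    model.operators.multivariate_operator_to_id[:row]   # 16, last array operator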
diff --git a/src/ArrayDiff.jl b/src/ArrayDiff.jl index 3bb6c00..7008b8b 100644 --- a/src/ArrayDiff.jl +++ b/src/ArrayDiff.jl @@ -10,6 +10,7 @@ import ForwardDiff import MathOptInterface as MOI const Nonlinear = MOI.Nonlinear import SparseArrays +import OrderedCollections: OrderedDict """ Mode() <: AbstractAutomaticDifferentiation diff --git a/src/MOI_Nonlinear_fork.jl b/src/MOI_Nonlinear_fork.jl index 08e29dc..112e443 100644 --- a/src/MOI_Nonlinear_fork.jl +++ b/src/MOI_Nonlinear_fork.jl @@ -1,19 +1,118 @@ # Inspired by MathOptInterface/src/Nonlinear/parse_expression.jl +const DEFAULT_MULTIVARIATE_OPERATORS = [ + :+, + :-, + :*, + :^, + :/, + :ifelse, + :atan, + :min, + :max, + :vect, + :dot, + :hcat, + :vcat, + :norm, + :sum, + :row, +] + +struct OperatorRegistry + # NODE_CALL_UNIVARIATE + univariate_operators::Vector{Symbol} + univariate_operator_to_id::Dict{Symbol,Int} + univariate_user_operator_start::Int + registered_univariate_operators::Vector{MOI.Nonlinear._UnivariateOperator} + # NODE_CALL_MULTIVARIATE + multivariate_operators::Vector{Symbol} + multivariate_operator_to_id::Dict{Symbol,Int} + multivariate_user_operator_start::Int + registered_multivariate_operators::Vector{ + MOI.Nonlinear._MultivariateOperator, + } + # NODE_LOGIC + logic_operators::Vector{Symbol} + logic_operator_to_id::Dict{Symbol,Int} + # NODE_COMPARISON + comparison_operators::Vector{Symbol} + comparison_operator_to_id::Dict{Symbol,Int} + function OperatorRegistry() + univariate_operators = copy(DEFAULT_UNIVARIATE_OPERATORS) + multivariate_operators = copy(DEFAULT_MULTIVARIATE_OPERATORS) + logic_operators = [:&&, :||] + comparison_operators = [:<=, :(==), :>=, :<, :>] + return new( + # NODE_CALL_UNIVARIATE + univariate_operators, + Dict{Symbol,Int}( + op => i for (i, op) in enumerate(univariate_operators) + ), + length(univariate_operators), + _UnivariateOperator[], + # NODE_CALL + multivariate_operators, + Dict{Symbol,Int}( + op => i for (i, op) in enumerate(multivariate_operators) + ), + length(multivariate_operators), + _MultivariateOperator[], + # NODE_LOGIC + logic_operators, + Dict{Symbol,Int}(op => i for (i, op) in enumerate(logic_operators)), + # NODE_COMPARISON + comparison_operators, + Dict{Symbol,Int}( + op => i for (i, op) in enumerate(comparison_operators) + ), + ) + end +end + +""" + Model() + +The core datastructure for representing a nonlinear optimization problem. + +It has the following fields: + * `objective::Union{Nothing,Expression}` : holds the nonlinear objective + function, if one exists, otherwise `nothing`. + * `expressions::Vector{Expression}` : a vector of expressions in the model. + * `constraints::OrderedDict{ConstraintIndex,Constraint}` : a map from + [`ConstraintIndex`](@ref) to the corresponding [`Constraint`](@ref). An + `OrderedDict` is used instead of a `Vector` to support constraint deletion. + * `parameters::Vector{Float64}` : holds the current values of the parameters. + * `operators::OperatorRegistry` : stores the operators used in the model. +""" +mutable struct Model + objective::Union{Nothing,MOI.Nonlinear.Expression} + expressions::Vector{MOI.Nonlinear.Expression} + constraints::OrderedDict{ + MOI.Nonlinear.ConstraintIndex, + MOI.Nonlinear.Constraint, + } + parameters::Vector{Float64} + operators::OperatorRegistry + # This is a private field, used only to increment the ConstraintIndex. 
+ last_constraint_index::Int64 + function Model() + model = MOI.Nonlinear.Model() + ops = [:vect, :dot, :hcat, :vcat, :norm, :sum, :row] + start = length(model.operators.multivariate_operators) + append!(model.operators.multivariate_operators, ops) + for (i, op) in enumerate(ops) + model.operators.multivariate_operator_to_id[op] = start + i + end + return model + end +end + function set_objective(model::MOI.Nonlinear.Model, obj) model.objective = parse_expression(model, obj) return end -function Model() - model = MOI.Nonlinear.Model() - append!( - model.operators.multivariate_operators, - [:vect, :dot, :hcat, :vcat, :norm, :sum, :row], - ) - return model -end - function parse_expression(data::MOI.Nonlinear.Model, input) expr = MOI.Nonlinear.Expression() parse_expression(data, expr, input, -1) diff --git a/src/reverse_mode.jl b/src/reverse_mode.jl index eaf85bb..2de62c8 100644 --- a/src/reverse_mode.jl +++ b/src/reverse_mode.jl @@ -430,9 +430,8 @@ function _reverse_eval(f::_SubexpressionStorage) node = f.nodes[k] children_indices = SparseArrays.nzrange(f.adj, k) if node.type == MOI.Nonlinear.NODE_CALL_MULTIVARIATE - if node.index in - eachindex(MOI.Nonlinear.DEFAULT_MULTIVARIATE_OPERATORS) - op = MOI.Nonlinear.DEFAULT_MULTIVARIATE_OPERATORS[node.index] + if node.index in eachindex(DEFAULT_MULTIVARIATE_OPERATORS) + op = DEFAULT_MULTIVARIATE_OPERATORS[node.index] if op == :vect @assert _eachindex(f.sizes, k) == eachindex(children_indices) diff --git a/src/sizes.jl b/src/sizes.jl index a747656..98c7f96 100644 --- a/src/sizes.jl +++ b/src/sizes.jl @@ -163,14 +163,11 @@ function _infer_sizes( children_indices = SparseArrays.nzrange(adj, k) N = length(children_indices) if node.type == Nonlinear.NODE_CALL_MULTIVARIATE - if !( - node.index in - eachindex(MOI.Nonlinear.DEFAULT_MULTIVARIATE_OPERATORS) - ) + if !(node.index in eachindex(DEFAULT_MULTIVARIATE_OPERATORS)) # TODO user-defined operators continue end - op = MOI.Nonlinear.DEFAULT_MULTIVARIATE_OPERATORS[node.index] + op = DEFAULT_MULTIVARIATE_OPERATORS[node.index] if op == :vect _assert_scalar_children( sizes, diff --git a/test/Project.toml b/test/Project.toml index 7ed96af..d66f113 100644 --- a/test/Project.toml +++ b/test/Project.toml @@ -4,3 +4,4 @@ LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e" MathOptInterface = "b8f27783-ece8-5eb3-8dc8-9495eed66fee" SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf" Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" +OrderedCollections = "bac558e1-5e72-5ebc-8fee-abe8a469f55d" \ No newline at end of file From 8cc4f33e55749c57d718014e5e9841a4d996a920 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Legat?= Date: Tue, 6 Jan 2026 11:33:48 +0100 Subject: [PATCH 10/20] Remove need for the MOI fork --- src/ArrayDiff.jl | 2 + src/parse_expression.jl | 120 ++++++++++++++++++++++++++++++++++++++++ src/reverse_mode.jl | 2 +- test/ArrayDiff.jl | 4 +- 4 files changed, 125 insertions(+), 3 deletions(-) create mode 100644 src/parse_expression.jl diff --git a/src/ArrayDiff.jl b/src/ArrayDiff.jl index 1f08c1e..1c65db8 100644 --- a/src/ArrayDiff.jl +++ b/src/ArrayDiff.jl @@ -58,4 +58,6 @@ include("reverse_mode.jl") include("forward_over_reverse.jl") include("mathoptinterface_api.jl") +include("parse_expression.jl") + end # module diff --git a/src/parse_expression.jl b/src/parse_expression.jl new file mode 100644 index 0000000..e8b637f --- /dev/null +++ b/src/parse_expression.jl @@ -0,0 +1,120 @@ +# Inspired by MathOptInterface/src/Nonlinear/parse_expression.jl + +function 
set_objective(model::MOI.Nonlinear.Model, obj) + model.objective = parse_expression(model, obj) + return +end + +function model() + model = MOI.Nonlinear.Model() + append!(model.operators.multivariate_operators, [ + :vect, + :dot, + :hcat, + :vcat, + :norm, + :sum, + :row, + ]) + return moel +end + +function parse_expression(data::Model, input) + expr = Expression() + parse_expression(data, expr, input, -1) + return expr +end + +function parse_expression( + data::Model, + expr::Expression, + x::Expr, + parent_index::Int, +) + stack = Tuple{Int,Any}[] + push!(stack, (parent_index, x)) + while !isempty(stack) + parent, item = pop!(stack) + if item isa Expr + _parse_expression(stack, data, expr, item, parent) + else + parse_expression(data, expr, item, parent) + end + end + return +end + +function _parse_expression(stack, data, expr, x, parent_index) + if Meta.isexpr(x, :call) + if length(x.args) == 2 && !Meta.isexpr(x.args[2], :...) + MOI.Nonlinear._parse_univariate_expression(stack, data, expr, x, parent_index) + else + # The call is either n-ary, or it is a splat, in which case we + # cannot tell just yet whether the expression is unary or nary. + # Punt to multivariate and try to recover later. + MOI.Nonlinear._parse_multivariate_expression(stack, data, expr, x, parent_index) + end + elseif Meta.isexpr(x, :comparison) + MOI.Nonlinear._parse_comparison_expression(stack, data, expr, x, parent_index) + elseif Meta.isexpr(x, :...) + MOI.Nonlinear._parse_splat_expression(stack, data, expr, x, parent_index) + elseif Meta.isexpr(x, :&&) || Meta.isexpr(x, :||) + MOI.Nonlinear._parse_logic_expression(stack, data, expr, x, parent_index) + elseif Meta.isexpr(x, :vect) + _parse_vect_expression(stack, data, expr, x, parent_index) + elseif Meta.isexpr(x, :hcat) + _parse_hcat_expression(stack, data, expr, x, parent_index) + elseif Meta.isexpr(x, :vcat) + _parse_vcat_expression(stack, data, expr, x, parent_index) + elseif Meta.isexpr(x, :row) + _parse_row_expression(stack, data, expr, x, parent_index) + elsval = @s f.forward_storage[ix] + @j f.forward_storage[k] = val + end + elseif node.index == 11 # dot + idx1e + error("Unsupported expression: $x") + end +end + +function eval_multivariate_function( + registry::OperatorRegistry, + op::Symbol, + x::AbstractVector{T}, +) where {T} + if op == :+ + return sum(x; init = zero(T)) + elseif op == :- + @assert length(x) == 2 + return x[1] - x[2] + elseif op == :* + return prod(x; init = one(T)) + elseif op == :^ + @assert length(x) == 2 + # Use _nan_pow here to avoid throwing an error in common situations like + # (-1.0)^1.5. 
+ return _nan_pow(x[1], x[2]) + elseif op == :/ + @assert length(x) == 2 + return x[1] / x[2] + elseif op == :ifelse + @assert length(x) == 3 + return ifelse(Bool(x[1]), x[2], x[3]) + elseif op == :atan + @assert length(x) == 2 + return atan(x[1], x[2]) + elseif op == :min + return minimum(x) + elseif op == :max + return maximum(x) + elseif op == :vect + return x + end + id = registry.multivariate_operator_to_id[op] + offset = id - registry.multivariate_user_operator_start + operator = registry.registered_multivariate_operators[offset] + @assert length(x) == operator.N + ret = operator.f(x) + check_return_type(T, ret) + return ret::T +end diff --git a/src/reverse_mode.jl b/src/reverse_mode.jl index 505421a..47cf391 100644 --- a/src/reverse_mode.jl +++ b/src/reverse_mode.jl @@ -357,7 +357,7 @@ function _forward_eval( f_input[r] = f.forward_storage[children_arr[i]] ∇f[r] = 0.0 end - f.forward_storage[k] = Nonlinear.eval_multivariate_function( + f.forward_storage[k] = eval_multivariate_function( operators, operators.multivariate_operators[node.index], f_input, diff --git a/test/ArrayDiff.jl b/test/ArrayDiff.jl index 4b90f3b..f888930 100644 --- a/test/ArrayDiff.jl +++ b/test/ArrayDiff.jl @@ -22,9 +22,9 @@ function runtests() end function test_objective_dot_univariate() - model = Nonlinear.Model() + model = ArrayDiff.model() x = MOI.VariableIndex(1) - Nonlinear.set_objective(model, :(dot([$x], [$x]))) + ArrayDiff.set_objective(model, :(dot([$x], [$x]))) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x]) MOI.initialize(evaluator, [:Grad, :Hess]) sizes = evaluator.backend.objective.expr.sizes From daecc2bebcfdb4c68bf30b8db5a3c32ef7da02a8 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Legat?= Date: Tue, 6 Jan 2026 11:34:05 +0100 Subject: [PATCH 11/20] Update ci --- .github/workflows/ci.yml | 7 ------- 1 file changed, 7 deletions(-) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 6aaa77f..aae738c 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -25,13 +25,6 @@ jobs: version: ${{ matrix.version }} arch: ${{ matrix.arch }} - uses: julia-actions/cache@v1 - - name: MOI - shell: julia --project=@. 
{0} - run: | - using Pkg - Pkg.add([ - PackageSpec(name="MathOptInterface", rev="bl/arraydiff"), - ]) - uses: julia-actions/julia-buildpkg@v1 - uses: julia-actions/julia-runtest@v1 with: From 678da5ee2eef8e20a61a021b72fe14a9ded4066d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Legat?= Date: Tue, 6 Jan 2026 11:36:21 +0100 Subject: [PATCH 12/20] Rename --- src/ArrayDiff.jl | 2 +- src/{parse_expression.jl => MOI_Nonlinear_fork.jl} | 0 2 files changed, 1 insertion(+), 1 deletion(-) rename src/{parse_expression.jl => MOI_Nonlinear_fork.jl} (100%) diff --git a/src/ArrayDiff.jl b/src/ArrayDiff.jl index 1c65db8..3bb6c00 100644 --- a/src/ArrayDiff.jl +++ b/src/ArrayDiff.jl @@ -58,6 +58,6 @@ include("reverse_mode.jl") include("forward_over_reverse.jl") include("mathoptinterface_api.jl") -include("parse_expression.jl") +include("MOI_Nonlinear_fork.jl") end # module diff --git a/src/parse_expression.jl b/src/MOI_Nonlinear_fork.jl similarity index 100% rename from src/parse_expression.jl rename to src/MOI_Nonlinear_fork.jl From e40b878710543d3f2d823c77dc0d43a427212d96 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Legat?= Date: Tue, 6 Jan 2026 12:55:18 +0100 Subject: [PATCH 13/20] Fixes --- src/MOI_Nonlinear_fork.jl | 14 +++++--------- 1 file changed, 5 insertions(+), 9 deletions(-) diff --git a/src/MOI_Nonlinear_fork.jl b/src/MOI_Nonlinear_fork.jl index e8b637f..f0d36e9 100644 --- a/src/MOI_Nonlinear_fork.jl +++ b/src/MOI_Nonlinear_fork.jl @@ -19,15 +19,15 @@ function model() return moel end -function parse_expression(data::Model, input) +function parse_expression(data::MOI.Nonlinear.Model, input) expr = Expression() parse_expression(data, expr, input, -1) return expr end function parse_expression( - data::Model, - expr::Expression, + data::MOI.Nonlinear.Model, + expr::MOI.Nonlinear.Expression, x::Expr, parent_index::Int, ) @@ -68,17 +68,13 @@ function _parse_expression(stack, data, expr, x, parent_index) _parse_vcat_expression(stack, data, expr, x, parent_index) elseif Meta.isexpr(x, :row) _parse_row_expression(stack, data, expr, x, parent_index) - elsval = @s f.forward_storage[ix] - @j f.forward_storage[k] = val - end - elseif node.index == 11 # dot - idx1e + else error("Unsupported expression: $x") end end function eval_multivariate_function( - registry::OperatorRegistry, + registry::MOI.Nonlinear.OperatorRegistry, op::Symbol, x::AbstractVector{T}, ) where {T} From ae806d2435d2ac916cb580b547812475058a5a4e Mon Sep 17 00:00:00 2001 From: Sophie L Date: Tue, 6 Jan 2026 14:52:14 +0100 Subject: [PATCH 14/20] Correct typo and change model for Model --- src/MOI_Nonlinear_fork.jl | 6 ++--- test/ArrayDiff.jl | 54 +++++++++++++++++++-------------------- 2 files changed, 30 insertions(+), 30 deletions(-) diff --git a/src/MOI_Nonlinear_fork.jl b/src/MOI_Nonlinear_fork.jl index f0d36e9..3c3279a 100644 --- a/src/MOI_Nonlinear_fork.jl +++ b/src/MOI_Nonlinear_fork.jl @@ -5,7 +5,7 @@ function set_objective(model::MOI.Nonlinear.Model, obj) return end -function model() +function Model() model = MOI.Nonlinear.Model() append!(model.operators.multivariate_operators, [ :vect, @@ -16,11 +16,11 @@ function model() :sum, :row, ]) - return moel + return model end function parse_expression(data::MOI.Nonlinear.Model, input) - expr = Expression() + expr = MOI.Nonlinear.Expression() parse_expression(data, expr, input, -1) return expr end diff --git a/test/ArrayDiff.jl b/test/ArrayDiff.jl index f888930..ffc039f 100644 --- a/test/ArrayDiff.jl +++ b/test/ArrayDiff.jl @@ -22,7 +22,7 @@ function runtests() end 
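# As in patch 05, these tests are moved onto `ArrayDiff.Model()` and
# `ArrayDiff.set_objective` so that parsing and evaluation agree on the
# operator ids.  Scalar nodes are still evaluated through the
# `eval_multivariate_function` defined in src/MOI_Nonlinear_fork.jl; two
# illustrative calls (`registry` stands for the model's operator registry,
# and the values follow from that function, not from this test file):
#
#     eval_multivariate_function(registry, :+, [1.0, 2.0, 3.0])  # 6.0
#     eval_multivariate_function(registry, :vect, [1.0, 2.0])    # returns its argument unchanged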
function test_objective_dot_univariate() - model = ArrayDiff.model() + model = ArrayDiff.Model() x = MOI.VariableIndex(1) ArrayDiff.set_objective(model, :(dot([$x], [$x]))) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x]) @@ -41,9 +41,9 @@ function test_objective_dot_univariate() end function test_objective_dot_univariate_and_scalar_mult() - model = Nonlinear.Model() + model = ArrayDiff.Model() x = MOI.VariableIndex(1) - Nonlinear.set_objective(model, :(2*(dot([$x], [$x])))) + ArrayDiff.set_objective(model, :(2*(dot([$x], [$x])))) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x]) MOI.initialize(evaluator, [:Grad]) sizes = evaluator.backend.objective.expr.sizes @@ -60,10 +60,10 @@ function test_objective_dot_univariate_and_scalar_mult() end function test_objective_dot_bivariate() - model = Nonlinear.Model() + model = ArrayDiff.Model() x = MOI.VariableIndex(1) y = MOI.VariableIndex(2) - Nonlinear.set_objective( + ArrayDiff.set_objective( model, :(dot([$x, $y] - [1, 2], -[1, 2] + [$x, $y])), ) @@ -84,12 +84,12 @@ function test_objective_dot_bivariate() end function test_objective_hcat_scalars() - model = Nonlinear.Model() + model = ArrayDiff.Model() x1 = MOI.VariableIndex(1) x2 = MOI.VariableIndex(2) x3 = MOI.VariableIndex(3) x4 = MOI.VariableIndex(4) - Nonlinear.set_objective(model, :(dot([$x1 $x3], [$x2 $x4]))) + ArrayDiff.set_objective(model, :(dot([$x1 $x3], [$x2 $x4]))) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x1, x2, x3, x4]) MOI.initialize(evaluator, [:Grad]) sizes = evaluator.backend.objective.expr.sizes @@ -109,12 +109,12 @@ function test_objective_hcat_scalars() end function test_objective_hcat_vectors() - model = Nonlinear.Model() + model = ArrayDiff.Model() x1 = MOI.VariableIndex(1) x2 = MOI.VariableIndex(2) x3 = MOI.VariableIndex(3) x4 = MOI.VariableIndex(4) - Nonlinear.set_objective( + ArrayDiff.set_objective( model, :(dot(hcat([$x1], [$x3]), hcat([$x2], [$x4]))), ) @@ -137,10 +137,10 @@ function test_objective_hcat_vectors() end function test_objective_dot_bivariate_on_rows() - model = Nonlinear.Model() + model = ArrayDiff.Model() x = MOI.VariableIndex(1) y = MOI.VariableIndex(2) - Nonlinear.set_objective(model, :(dot([$x $y] - [1 2], -[1 2] + [$x $y]))) + ArrayDiff.set_objective(model, :(dot([$x $y] - [1 2], -[1 2] + [$x $y]))) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x, y]) MOI.initialize(evaluator, [:Grad]) sizes = evaluator.backend.objective.expr.sizes @@ -159,9 +159,9 @@ function test_objective_dot_bivariate_on_rows() end function test_objective_norm_univariate() - model = Nonlinear.Model() + model = ArrayDiff.Model() x = MOI.VariableIndex(1) - Nonlinear.set_objective(model, :(norm([$x]))) + ArrayDiff.set_objective(model, :(norm([$x]))) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x]) MOI.initialize(evaluator, [:Grad]) sizes = evaluator.backend.objective.expr.sizes @@ -178,10 +178,10 @@ function test_objective_norm_univariate() end function test_objective_norm_bivariate() - model = Nonlinear.Model() + model = ArrayDiff.Model() x = MOI.VariableIndex(1) y = MOI.VariableIndex(2) - Nonlinear.set_objective(model, :(norm([$x, $y]))) + ArrayDiff.set_objective(model, :(norm([$x, $y]))) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x, y]) MOI.initialize(evaluator, [:Grad]) sizes = evaluator.backend.objective.expr.sizes @@ -203,10 +203,10 @@ function test_objective_norm_bivariate() end function test_objective_norm_of_row_vector() - model = Nonlinear.Model() + model = ArrayDiff.Model() x1 = 
MOI.VariableIndex(1) x2 = MOI.VariableIndex(2) - Nonlinear.set_objective(model, :(norm([$x1 $x2]))) + ArrayDiff.set_objective(model, :(norm([$x1 $x2]))) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x1, x2]) MOI.initialize(evaluator, [:Grad]) sizes = evaluator.backend.objective.expr.sizes @@ -224,12 +224,12 @@ function test_objective_norm_of_row_vector() end function test_objective_norm_of_vcat_vector() - model = Nonlinear.Model() + model = ArrayDiff.Model() x1 = MOI.VariableIndex(1) x2 = MOI.VariableIndex(2) x3 = MOI.VariableIndex(3) x4 = MOI.VariableIndex(4) - Nonlinear.set_objective(model, :(norm(vcat($x1, $x3)))) + ArrayDiff.set_objective(model, :(norm(vcat($x1, $x3)))) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x1, x2, x3, x4]) MOI.initialize(evaluator, [:Grad]) sizes = evaluator.backend.objective.expr.sizes @@ -249,12 +249,12 @@ function test_objective_norm_of_vcat_vector() end function test_objective_norm_of_vcat_matrix() - model = Nonlinear.Model() + model = ArrayDiff.Model() x1 = MOI.VariableIndex(1) x2 = MOI.VariableIndex(2) x3 = MOI.VariableIndex(3) x4 = MOI.VariableIndex(4) - Nonlinear.set_objective(model, :(norm(vcat([$x1 $x3], [$x2 $x4])))) + ArrayDiff.set_objective(model, :(norm(vcat([$x1 $x3], [$x2 $x4])))) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x1, x2, x3, x4]) MOI.initialize(evaluator, [:Grad]) sizes = evaluator.backend.objective.expr.sizes @@ -279,10 +279,10 @@ function test_objective_norm_of_vcat_matrix() end function test_objective_norm_of_row() - model = Nonlinear.Model() + model = ArrayDiff.Model() x1 = MOI.VariableIndex(1) x2 = MOI.VariableIndex(2) - Nonlinear.set_objective(model, :(norm(row($x1, $x2)))) + ArrayDiff.set_objective(model, :(norm(row($x1, $x2)))) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x1, x2]) MOI.initialize(evaluator, [:Grad]) sizes = evaluator.backend.objective.expr.sizes @@ -300,12 +300,12 @@ function test_objective_norm_of_row() end function test_objective_norm_of_matrix() - model = Nonlinear.Model() + model = ArrayDiff.Model() x1 = MOI.VariableIndex(1) x2 = MOI.VariableIndex(2) x3 = MOI.VariableIndex(3) x4 = MOI.VariableIndex(4) - Nonlinear.set_objective(model, :(norm([$x1 $x2; $x3 $x4]))) + ArrayDiff.set_objective(model, :(norm([$x1 $x2; $x3 $x4]))) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x1, x2, x3, x4]) MOI.initialize(evaluator, [:Grad]) sizes = evaluator.backend.objective.expr.sizes @@ -330,12 +330,12 @@ function test_objective_norm_of_matrix() end function test_objective_norm_of_matrix_with_sum() - model = Nonlinear.Model() + model = ArrayDiff.Model() x1 = MOI.VariableIndex(1) x2 = MOI.VariableIndex(2) x3 = MOI.VariableIndex(3) x4 = MOI.VariableIndex(4) - Nonlinear.set_objective(model, :(norm([$x1 $x2; $x3 $x4] - [1 1; 1 1]))) + ArrayDiff.set_objective(model, :(norm([$x1 $x2; $x3 $x4] - [1 1; 1 1]))) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x1, x2, x3, x4]) MOI.initialize(evaluator, [:Grad]) sizes = evaluator.backend.objective.expr.sizes From bb2d59da582ab39ea51572fe91371cf4c8e90730 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Legat?= Date: Tue, 6 Jan 2026 17:34:28 +0100 Subject: [PATCH 15/20] Fix tests --- src/MOI_Nonlinear_fork.jl | 68 ++++++++++++++++++++++++++++++++++++++- 1 file changed, 67 insertions(+), 1 deletion(-) diff --git a/src/MOI_Nonlinear_fork.jl b/src/MOI_Nonlinear_fork.jl index 3c3279a..d465b6c 100644 --- a/src/MOI_Nonlinear_fork.jl +++ b/src/MOI_Nonlinear_fork.jl @@ -25,6 +25,8 @@ function 
parse_expression(data::MOI.Nonlinear.Model, input) return expr end +parse_expression(data, expr, item, parent) = MOI.Nonlinear.parse_expression(data, expr, item, parent) + function parse_expression( data::MOI.Nonlinear.Model, expr::MOI.Nonlinear.Expression, @@ -111,6 +113,70 @@ function eval_multivariate_function( operator = registry.registered_multivariate_operators[offset] @assert length(x) == operator.N ret = operator.f(x) - check_return_type(T, ret) + MOI.Nonlinear.check_return_type(T, ret) return ret::T end + +function _parse_vect_expression( + stack::Vector{Tuple{Int,Any}}, + data::MOI.Nonlinear.Model, + expr::MOI.Nonlinear.Expression, + x::Expr, + parent_index::Int, +) + @assert Meta.isexpr(x, :vect) + id = get(data.operators.multivariate_operator_to_id, :vect, nothing) + push!(expr.nodes, MOI.Nonlinear.Node(MOI.Nonlinear.NODE_CALL_MULTIVARIATE, id, parent_index)) + for i in length(x.args):-1:1 + push!(stack, (length(expr.nodes), x.args[i])) + end + return +end + +function _parse_row_expression( + stack::Vector{Tuple{Int,Any}}, + data::MOI.Nonlinear.Model, + expr::MOI.Nonlinear.Expression, + x::Expr, + parent_index::Int, +) + @assert Meta.isexpr(x, :row) + id = get(data.operators.multivariate_operator_to_id, :row, nothing) + push!(expr.nodes, MOI.Nonlinear.Node(MOI.Nonlinear.NODE_CALL_MULTIVARIATE, id, parent_index)) + for i in length(x.args):-1:1 + push!(stack, (length(expr.nodes), x.args[i])) + end + return +end + +function _parse_hcat_expression( + stack::Vector{Tuple{Int,Any}}, + data::MOI.Nonlinear.Model, + expr::MOI.Nonlinear.Expression, + x::Expr, + parent_index::Int, +) + @assert Meta.isexpr(x, :hcat) + id = get(data.operators.multivariate_operator_to_id, :hcat, nothing) + push!(expr.nodes, MOI.Nonlinear.Node(MOI.Nonlinear.NODE_CALL_MULTIVARIATE, id, parent_index)) + for i in length(x.args):-1:1 + push!(stack, (length(expr.nodes), x.args[i])) + end + return +end + +function _parse_vcat_expression( + stack::Vector{Tuple{Int,Any}}, + data::MOI.Nonlinear.Model, + expr::MOI.Nonlinear.Expression, + x::Expr, + parent_index::Int, +) + @assert Meta.isexpr(x, :vcat) + id = get(data.operators.multivariate_operator_to_id, :vcat, nothing) + push!(expr.nodes, MOI.Nonlinear.Node(MOI.Nonlinear.NODE_CALL_MULTIVARIATE, id, parent_index)) + for i in length(x.args):-1:1 + push!(stack, (length(expr.nodes), x.args[i])) + end + return +end From 5ebb925ae1db496bf0aa2b9b26965493acd70df9 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Legat?= Date: Tue, 6 Jan 2026 17:34:43 +0100 Subject: [PATCH 16/20] Fix format --- src/MOI_Nonlinear_fork.jl | 93 +++++++++++++++++++++++++++++++-------- 1 file changed, 74 insertions(+), 19 deletions(-) diff --git a/src/MOI_Nonlinear_fork.jl b/src/MOI_Nonlinear_fork.jl index d465b6c..08e29dc 100644 --- a/src/MOI_Nonlinear_fork.jl +++ b/src/MOI_Nonlinear_fork.jl @@ -7,15 +7,10 @@ end function Model() model = MOI.Nonlinear.Model() - append!(model.operators.multivariate_operators, [ - :vect, - :dot, - :hcat, - :vcat, - :norm, - :sum, - :row, - ]) + append!( + model.operators.multivariate_operators, + [:vect, :dot, :hcat, :vcat, :norm, :sum, :row], + ) return model end @@ -25,7 +20,9 @@ function parse_expression(data::MOI.Nonlinear.Model, input) return expr end -parse_expression(data, expr, item, parent) = MOI.Nonlinear.parse_expression(data, expr, item, parent) +function parse_expression(data, expr, item, parent) + return MOI.Nonlinear.parse_expression(data, expr, item, parent) +end function parse_expression( data::MOI.Nonlinear.Model, @@ -49,19 +46,49 @@ end 
function _parse_expression(stack, data, expr, x, parent_index) if Meta.isexpr(x, :call) if length(x.args) == 2 && !Meta.isexpr(x.args[2], :...) - MOI.Nonlinear._parse_univariate_expression(stack, data, expr, x, parent_index) + MOI.Nonlinear._parse_univariate_expression( + stack, + data, + expr, + x, + parent_index, + ) else # The call is either n-ary, or it is a splat, in which case we # cannot tell just yet whether the expression is unary or nary. # Punt to multivariate and try to recover later. - MOI.Nonlinear._parse_multivariate_expression(stack, data, expr, x, parent_index) + MOI.Nonlinear._parse_multivariate_expression( + stack, + data, + expr, + x, + parent_index, + ) end elseif Meta.isexpr(x, :comparison) - MOI.Nonlinear._parse_comparison_expression(stack, data, expr, x, parent_index) + MOI.Nonlinear._parse_comparison_expression( + stack, + data, + expr, + x, + parent_index, + ) elseif Meta.isexpr(x, :...) - MOI.Nonlinear._parse_splat_expression(stack, data, expr, x, parent_index) + MOI.Nonlinear._parse_splat_expression( + stack, + data, + expr, + x, + parent_index, + ) elseif Meta.isexpr(x, :&&) || Meta.isexpr(x, :||) - MOI.Nonlinear._parse_logic_expression(stack, data, expr, x, parent_index) + MOI.Nonlinear._parse_logic_expression( + stack, + data, + expr, + x, + parent_index, + ) elseif Meta.isexpr(x, :vect) _parse_vect_expression(stack, data, expr, x, parent_index) elseif Meta.isexpr(x, :hcat) @@ -126,7 +153,14 @@ function _parse_vect_expression( ) @assert Meta.isexpr(x, :vect) id = get(data.operators.multivariate_operator_to_id, :vect, nothing) - push!(expr.nodes, MOI.Nonlinear.Node(MOI.Nonlinear.NODE_CALL_MULTIVARIATE, id, parent_index)) + push!( + expr.nodes, + MOI.Nonlinear.Node( + MOI.Nonlinear.NODE_CALL_MULTIVARIATE, + id, + parent_index, + ), + ) for i in length(x.args):-1:1 push!(stack, (length(expr.nodes), x.args[i])) end @@ -142,7 +176,14 @@ function _parse_row_expression( ) @assert Meta.isexpr(x, :row) id = get(data.operators.multivariate_operator_to_id, :row, nothing) - push!(expr.nodes, MOI.Nonlinear.Node(MOI.Nonlinear.NODE_CALL_MULTIVARIATE, id, parent_index)) + push!( + expr.nodes, + MOI.Nonlinear.Node( + MOI.Nonlinear.NODE_CALL_MULTIVARIATE, + id, + parent_index, + ), + ) for i in length(x.args):-1:1 push!(stack, (length(expr.nodes), x.args[i])) end @@ -158,7 +199,14 @@ function _parse_hcat_expression( ) @assert Meta.isexpr(x, :hcat) id = get(data.operators.multivariate_operator_to_id, :hcat, nothing) - push!(expr.nodes, MOI.Nonlinear.Node(MOI.Nonlinear.NODE_CALL_MULTIVARIATE, id, parent_index)) + push!( + expr.nodes, + MOI.Nonlinear.Node( + MOI.Nonlinear.NODE_CALL_MULTIVARIATE, + id, + parent_index, + ), + ) for i in length(x.args):-1:1 push!(stack, (length(expr.nodes), x.args[i])) end @@ -174,7 +222,14 @@ function _parse_vcat_expression( ) @assert Meta.isexpr(x, :vcat) id = get(data.operators.multivariate_operator_to_id, :vcat, nothing) - push!(expr.nodes, MOI.Nonlinear.Node(MOI.Nonlinear.NODE_CALL_MULTIVARIATE, id, parent_index)) + push!( + expr.nodes, + MOI.Nonlinear.Node( + MOI.Nonlinear.NODE_CALL_MULTIVARIATE, + id, + parent_index, + ), + ) for i in length(x.args):-1:1 push!(stack, (length(expr.nodes), x.args[i])) end From 29fc6031520d812a56aebf698c61b84f01ad8917 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Beno=C3=AEt=20Legat?= Date: Tue, 6 Jan 2026 17:45:44 +0100 Subject: [PATCH 17/20] Fixes --- src/reverse_mode.jl | 4 ++ test/ReverseAD.jl | 104 ++++++++++++++++++++++---------------------- 2 files changed, 56 insertions(+), 52 deletions(-) diff --git 
a/src/reverse_mode.jl b/src/reverse_mode.jl index 47cf391..c98e81a 100644 --- a/src/reverse_mode.jl +++ b/src/reverse_mode.jl @@ -29,6 +29,10 @@ single pass through the tree by iterating forwards through the vector of stored nodes. """ function _reverse_mode(d::NLPEvaluator, x) + # Because the operators are checked with `Int` and not `Symbol` + # if we get a model that didn't add our new operators but had user-defined + # operators, we will think that these are one of our new operators + @assert :vect in d.data.operators.multivariate_operators if d.last_x == x # Fail fast if the primal solution has not changed since last call. return diff --git a/test/ReverseAD.jl b/test/ReverseAD.jl index 552ff3f..bf88318 100644 --- a/test/ReverseAD.jl +++ b/test/ReverseAD.jl @@ -29,7 +29,7 @@ end function test_objective_quadratic_univariate() x = MOI.VariableIndex(1) - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.set_objective(model, :($x^2 + 1)) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x]) MOI.initialize(evaluator, [:Grad, :Jac, :Hess]) @@ -59,7 +59,7 @@ end function test_objective_and_constraints_quadratic_univariate() x = MOI.VariableIndex(1) - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.set_objective(model, :($x^2 + 1)) Nonlinear.add_constraint(model, :($x^2), MOI.LessThan(2.0)) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x]) @@ -96,7 +96,7 @@ end function test_objective_quadratic_multivariate() x = MOI.VariableIndex(1) y = MOI.VariableIndex(2) - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.set_objective(model, :($x^2 + $x * $y + $y^2)) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x, y]) MOI.initialize(evaluator, [:Grad, :Jac, :Hess]) @@ -130,7 +130,7 @@ end function test_objective_quadratic_multivariate_subexpressions() x = MOI.VariableIndex(1) y = MOI.VariableIndex(2) - model = Nonlinear.Model() + model = ArrayDiff.Model() ex = Nonlinear.add_expression(model, :($x^2)) ey = Nonlinear.add_expression(model, :($y^2)) exy = Nonlinear.add_expression(model, :($ex + $x * $y)) @@ -175,7 +175,7 @@ end function test_objective_ifelse_comparison() x = MOI.VariableIndex(1) y = MOI.VariableIndex(2) - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.set_objective(model, :(ifelse(1 <= $x <= 2, $x^2, $y^2))) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x, y]) MOI.initialize(evaluator, [:Grad, :Jac, :Hess]) @@ -192,7 +192,7 @@ end function test_objective_ifelse_logic() x = MOI.VariableIndex(1) y = MOI.VariableIndex(2) - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.set_objective(model, :(ifelse(1 <= $x && $x <= 2, $x^2, $y^2))) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x, y]) MOI.initialize(evaluator, [:Grad, :Jac, :Hess]) @@ -208,7 +208,7 @@ end function test_objective_parameter() x = MOI.VariableIndex(1) - model = Nonlinear.Model() + model = ArrayDiff.Model() p = Nonlinear.add_parameter(model, 1.2) Nonlinear.set_objective(model, :($p * $x)) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x]) @@ -221,7 +221,7 @@ function test_objective_parameter() end function test_objective_subexpression() - model = Nonlinear.Model() + model = ArrayDiff.Model() x = MOI.VariableIndex(1) input = :($x^2 + 1) expr = Nonlinear.add_expression(model, input) @@ -238,7 +238,7 @@ end function test_constraint_quadratic_univariate() x = MOI.VariableIndex(1) - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.add_constraint(model, :($x^2), MOI.LessThan(2.0)) 
evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x]) MOI.initialize(evaluator, [:Grad, :Jac, :Hess]) @@ -264,7 +264,7 @@ end function test_constraint_quadratic_multivariate() x = MOI.VariableIndex(1) y = MOI.VariableIndex(2) - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.add_constraint(model, :($x^2 + $x * $y + $y^2), MOI.LessThan(2.0)) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x, y]) MOI.initialize(evaluator, [:Grad, :Jac, :Hess]) @@ -287,7 +287,7 @@ end function test_constraint_quadratic_multivariate_subexpressions() x = MOI.VariableIndex(1) y = MOI.VariableIndex(2) - model = Nonlinear.Model() + model = ArrayDiff.Model() ex = Nonlinear.add_expression(model, :($x^2)) ey = Nonlinear.add_expression(model, :($y^2)) exy = Nonlinear.add_expression(model, :($ex + $x * $y)) @@ -336,7 +336,7 @@ function test_hessian_sparsity_registered_function() H[2, 2] = 2 return end - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.register_operator(model, :f, 2, f, ∇f, ∇²f) Nonlinear.set_objective(model, :(f($x, $z) + $y^2)) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x, y, z]) @@ -366,7 +366,7 @@ function test_hessian_sparsity_registered_rosenbrock() H[2, 2] = 200.0 return end - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.register_operator(model, :rosenbrock, 2, f, ∇f, ∇²f) Nonlinear.set_objective(model, :(rosenbrock($x, $y))) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x, y]) @@ -396,7 +396,7 @@ function test_hessian_registered_error() H[2, 2] = 200.0 return end - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.register_operator(model, :rosenbrock, 2, f, ∇f, ∇²f) Nonlinear.set_objective(model, :(rosenbrock($x, $y))) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x, y]) @@ -494,7 +494,7 @@ end function test_derivatives() a = MOI.VariableIndex(1) b = MOI.VariableIndex(2) - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.set_objective(model, :(sin($a^2) + cos($b * 4) / 5 - 2.0)) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [a, b]) MOI.initialize(evaluator, [:Grad, :Jac, :Hess]) @@ -516,7 +516,7 @@ function test_derivatives() end function test_NLPBlockData() - model = Nonlinear.Model() + model = ArrayDiff.Model() x = MOI.VariableIndex(1) Nonlinear.add_constraint(model, :($x - 1), MOI.LessThan(0.0)) Nonlinear.add_constraint(model, :($x - 2), MOI.GreaterThan(0.0)) @@ -540,7 +540,7 @@ function test_linearity() z = MOI.VariableIndex(3) variables = Dict(x => 1, y => 2, z => 3) function _test_linearity(input, test_value, IJ = [], indices = []) - model = Nonlinear.Model() + model = ArrayDiff.Model() ex = Nonlinear.add_expression(model, input) expr = model[ex] adj = Nonlinear.adjacency_matrix(expr.nodes) @@ -631,7 +631,7 @@ end function test_linearity_no_hess() x = MOI.VariableIndex(1) - model = Nonlinear.Model() + model = ArrayDiff.Model() ex = Nonlinear.add_expression(model, :($x + 1)) Nonlinear.set_objective(model, ex) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x]) @@ -647,7 +647,7 @@ function test_dual_forward() x = MOI.VariableIndex(1) y = MOI.VariableIndex(2) function _test_dual_forward(input, x_input, test_value) - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.set_objective(model, input) evaluator = Nonlinear.Evaluator( model, @@ -687,7 +687,7 @@ function test_gradient_registered_function() x = MOI.VariableIndex(1) y = MOI.VariableIndex(2) z = MOI.VariableIndex(3) - model = Nonlinear.Model() + model = ArrayDiff.Model() 
f(x, y) = (1 / 3) * y^3 - 2x^2 function ∇f(g, x, y) g[1] = -4x @@ -714,7 +714,7 @@ end function test_gradient_jump_855() x = MOI.VariableIndex(1) - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.set_objective( model, :(ifelse($x <= 3.0, ($x - 2.0)^2, 2 * log($x - 2.0) + 1.0)), @@ -732,7 +732,7 @@ end function test_gradient_abs() x = MOI.VariableIndex(1) - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.set_objective(model, :(abs($x))) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), MOI.VariableIndex[x]) @@ -748,7 +748,7 @@ end function test_gradient_trig() x = MOI.VariableIndex(1) y = MOI.VariableIndex(2) - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.set_objective(model, :(sin($x^2) + cos($y * 4) / 5 - 2.0)) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), MOI.VariableIndex[x, y]) @@ -761,7 +761,7 @@ end function test_gradient_logical() x = MOI.VariableIndex(1) - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.set_objective(model, :($x > 0.5 && $x < 0.9)) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), MOI.VariableIndex[x]) @@ -775,7 +775,7 @@ end function test_gradient_ifelse() x = MOI.VariableIndex(1) - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.set_objective(model, :(ifelse($x >= 0.5 || $x < 0.1, $x, 5))) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), MOI.VariableIndex[x]) @@ -795,7 +795,7 @@ end function test_gradient_sqrt_nan() x = MOI.VariableIndex(1) - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.set_objective(model, :(sqrt($x))) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), MOI.VariableIndex[x]) @@ -811,7 +811,7 @@ function test_gradient_variable_power() x = MOI.VariableIndex(1) y = MOI.VariableIndex(2) z = MOI.VariableIndex(3) - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.set_objective(model, :((1 / $x)^$y - $z)) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), MOI.VariableIndex[x, y, z]) @@ -830,7 +830,7 @@ end function test_single_parameter() x = MOI.VariableIndex(1) - model = Nonlinear.Model() + model = ArrayDiff.Model() p = Nonlinear.add_parameter(model, 105.2) Nonlinear.set_objective(model, :($p)) evaluator = @@ -843,7 +843,7 @@ end function test_gradient_nested_subexpressions() x = MOI.VariableIndex(1) y = MOI.VariableIndex(2) - model = Nonlinear.Model() + model = ArrayDiff.Model() ex1 = Nonlinear.add_expression(model, :(sin($x^2) + cos($y * 4) / 5 - 2.0)) ex2 = Nonlinear.add_expression(model, :($ex1)) Nonlinear.set_objective(model, ex2) @@ -859,7 +859,7 @@ end function test_gradient_view() x = MOI.VariableIndex(1) y = MOI.VariableIndex(2) - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.set_objective(model, :(($x - 1)^2 + 4 * ($y - $x^2)^2)) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), MOI.VariableIndex[x, y]) @@ -902,7 +902,7 @@ function test_odd_chunks_Hessian_products() end function _test_odd_chunks_Hessian_products(N) - model = Nonlinear.Model() + model = ArrayDiff.Model() x = MOI.VariableIndex.(1:N) Nonlinear.set_objective(model, Expr(:call, :*, x...)) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), x) @@ -929,7 +929,7 @@ function _dense_jacobian(jacobian_sparsity, V, m, n) end function test_jacobians_and_jacvec() - model = Nonlinear.Model() + model = ArrayDiff.Model() x = MOI.VariableIndex.(1:3) a, b, c = x Nonlinear.set_objective(model, :($a * $b + $c^2)) @@ -960,7 +960,7 @@ function test_jacobians_and_jacvec() end function 
test_jacobians_and_jacvec_with_subexpressions() - model = Nonlinear.Model() + model = ArrayDiff.Model() x = MOI.VariableIndex.(1:3) a, b, c = x bc = Nonlinear.add_expression(model, :($b * $c)) @@ -993,7 +993,7 @@ end function test_pow_complex_result() x = MOI.VariableIndex(1) - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.set_objective(model, :(ifelse($x > 0, $x^1.5, -(-$x)^1.5))) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x]) MOI.initialize(evaluator, [:Grad, :Jac, :Hess]) @@ -1012,7 +1012,7 @@ end function test_constraint_gradient() x = MOI.VariableIndex(1) y = MOI.VariableIndex(2) - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.add_constraint(model, :($x^2 + $x * $y + $y^2), MOI.LessThan(2.0)) Nonlinear.add_constraint(model, :(cos($y)), MOI.LessThan(2.0)) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x, y]) @@ -1032,7 +1032,7 @@ end function test_hessian_length() x = MOI.VariableIndex(1) - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.set_objective(model, :(log($x))) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x]) MOI.initialize(evaluator, [:Hess]) @@ -1050,7 +1050,7 @@ end function test_jacobian_length() x = MOI.VariableIndex(1) - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.add_constraint(model, :(sin($x)), MOI.LessThan(0.5)) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x]) MOI.initialize(evaluator, [:Jac]) @@ -1061,7 +1061,7 @@ end function test_timers() x = MOI.VariableIndex(1) - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.set_objective(model, :(log($x))) Nonlinear.add_constraint(model, :(sin($x)), MOI.LessThan(0.5)) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x]) @@ -1101,7 +1101,7 @@ function test_timers() end function test_varying_length_x() - model = MOI.Nonlinear.Model() + model = ArrayDiff.Model() x = MOI.VariableIndex(1) MOI.Nonlinear.set_objective(model, :(sin($x))) evaluator = MOI.Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x]) @@ -1116,7 +1116,7 @@ end function test_univariate_operator_with_no_second_order() f(x::Float64) = x^2 df(x::Float64) = 2 * x - model = MOI.Nonlinear.Model() + model = ArrayDiff.Model() MOI.Nonlinear.register_operator(model, :op_f, 1, f, df) x = MOI.VariableIndex(1) MOI.Nonlinear.add_constraint(model, :(op_f($x)), MOI.LessThan(2.0)) @@ -1130,7 +1130,7 @@ function test_univariate_operator_with_no_second_order() end function test_no_objective() - model = Nonlinear.Model() + model = ArrayDiff.Model() x = MOI.VariableIndex(1) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x]) MOI.initialize(evaluator, [:Grad]) @@ -1147,7 +1147,7 @@ function test_no_objective() end function test_x_power_1() - model = Nonlinear.Model() + model = ArrayDiff.Model() x = MOI.VariableIndex(1) MOI.Nonlinear.set_objective(model, :($x^1)) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x]) @@ -1160,7 +1160,7 @@ function test_x_power_1() end function test_variable_first_node_in_tape() - model = Nonlinear.Model() + model = ArrayDiff.Model() x = MOI.VariableIndex(1) expr = MOI.Nonlinear.add_expression(model, :($x)) MOI.Nonlinear.set_objective(model, :(sin($expr))) @@ -1173,7 +1173,7 @@ function test_variable_first_node_in_tape() end function test_subexpression_first_node_in_tape() - model = Nonlinear.Model() + model = ArrayDiff.Model() x = MOI.VariableIndex(1) expr = MOI.Nonlinear.add_expression(model, :($x)) expr2 = MOI.Nonlinear.add_expression(model, :($expr)) @@ -1187,7 +1187,7 @@ function 
test_subexpression_first_node_in_tape() end function test_parameter_in_hessian() - model = Nonlinear.Model() + model = ArrayDiff.Model() x = MOI.VariableIndex(1) p = MOI.Nonlinear.add_parameter(model, 3.0) MOI.Nonlinear.set_objective(model, :(sin($x + $p))) @@ -1213,7 +1213,7 @@ end function test_classify_linearity_ifelse() x = MOI.VariableIndex(1) y = MOI.VariableIndex(2) - model = MOI.Nonlinear.Model() + model = ArrayDiff.Model() MOI.Nonlinear.set_objective(model, :(ifelse($y, $x, 1))) evaluator = MOI.Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x, y]) MOI.initialize(evaluator, [:Grad, :Jac, :Hess]) @@ -1226,7 +1226,7 @@ end function test_classify_linearity_logic() x = MOI.VariableIndex(1) y = MOI.VariableIndex(2) - model = MOI.Nonlinear.Model() + model = ArrayDiff.Model() MOI.Nonlinear.set_objective(model, :($x && $y)) evaluator = MOI.Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x, y]) MOI.initialize(evaluator, [:Grad, :Jac, :Hess]) @@ -1241,7 +1241,7 @@ end function test_hessian_sparsity_with_subexpressions() x = MOI.VariableIndex(1) y = MOI.VariableIndex(2) - model = MOI.Nonlinear.Model() + model = ArrayDiff.Model() expr = MOI.Nonlinear.add_expression(model, :($x * $y)) expr2 = MOI.Nonlinear.add_expression(model, :($expr)) MOI.Nonlinear.set_objective(model, :(sin($expr2))) @@ -1253,7 +1253,7 @@ end function test_toposort_subexpressions() x = MOI.VariableIndex(1) - model = MOI.Nonlinear.Model() + model = ArrayDiff.Model() a = MOI.Nonlinear.add_expression(model, :($x)) b = MOI.Nonlinear.add_expression(model, :($x)) c = MOI.Nonlinear.add_expression(model, :($a + $b)) @@ -1269,7 +1269,7 @@ function test_toposort_subexpressions() end function test_eval_user_defined_operator_ForwardDiff_gradient!() - model = MOI.Nonlinear.Model() + model = ArrayDiff.Model() x = MOI.VariableIndex.(1:4) p = MOI.Nonlinear.add_parameter(model, 2.0) ex = MOI.Nonlinear.add_expression(model, :($p * $(x[1]))) @@ -1296,7 +1296,7 @@ function test_eval_user_defined_operator_ForwardDiff_gradient!() end function test_eval_user_defined_operator_type_mismatch() - model = MOI.Nonlinear.Model() + model = ArrayDiff.Model() x = MOI.VariableIndex.(1:4) p = MOI.Nonlinear.add_parameter(model, 2.0) ex = MOI.Nonlinear.add_expression(model, :($p * $(x[1]))) @@ -1342,7 +1342,7 @@ function test_generate_hessian_slice_inner() end function test_hessian_reinterpret_unsafe() - model = MOI.Nonlinear.Model() + model = ArrayDiff.Model() x = MOI.VariableIndex.(1:5) MOI.Nonlinear.add_constraint( model, From 98ee1031ab3b2b3a01657d8499c3f2df8b9ba908 Mon Sep 17 00:00:00 2001 From: Sophie L Date: Thu, 8 Jan 2026 12:37:21 +0100 Subject: [PATCH 18/20] Fix problems * Add OperatorRegistry because immutable in MOI Nonlinear (and Int fields will need to be modified) * Add DEFAULT_MULTIVARIATE_OPERATORS to extend it from MOI Nonlinear * Add OrderedCollections that was used in Model --- Project.toml | 1 + src/ArrayDiff.jl | 1 + src/MOI_Nonlinear_fork.jl | 117 +++++++++++++++++++++++++++++++++++--- src/reverse_mode.jl | 4 +- src/sizes.jl | 7 +-- test/Project.toml | 1 + 6 files changed, 115 insertions(+), 16 deletions(-) diff --git a/Project.toml b/Project.toml index 014259e..e45e9b1 100644 --- a/Project.toml +++ b/Project.toml @@ -9,6 +9,7 @@ ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210" MathOptInterface = "b8f27783-ece8-5eb3-8dc8-9495eed66fee" NaNMath = "77ba4419-2d1f-58cd-9bb1-8ffee604a2e3" SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf" +OrderedCollections = "bac558e1-5e72-5ebc-8fee-abe8a469f55d" [compat] DataStructures = "0.18, 0.19" 
diff --git a/src/ArrayDiff.jl b/src/ArrayDiff.jl index 3bb6c00..7008b8b 100644 --- a/src/ArrayDiff.jl +++ b/src/ArrayDiff.jl @@ -10,6 +10,7 @@ import ForwardDiff import MathOptInterface as MOI const Nonlinear = MOI.Nonlinear import SparseArrays +import OrderedCollections: OrderedDict """ Mode() <: AbstractAutomaticDifferentiation diff --git a/src/MOI_Nonlinear_fork.jl b/src/MOI_Nonlinear_fork.jl index 08e29dc..112e443 100644 --- a/src/MOI_Nonlinear_fork.jl +++ b/src/MOI_Nonlinear_fork.jl @@ -1,19 +1,118 @@ # Inspired by MathOptInterface/src/Nonlinear/parse_expression.jl +const DEFAULT_MULTIVARIATE_OPERATORS = [ + :+, + :-, + :*, + :^, + :/, + :ifelse, + :atan, + :min, + :max, + :vect, + :dot, + :hcat, + :vcat, + :norm, + :sum, + :row, +] + +struct OperatorRegistry + # NODE_CALL_UNIVARIATE + univariate_operators::Vector{Symbol} + univariate_operator_to_id::Dict{Symbol,Int} + univariate_user_operator_start::Int + registered_univariate_operators::Vector{MOI.Nonlinear._UnivariateOperator} + # NODE_CALL_MULTIVARIATE + multivariate_operators::Vector{Symbol} + multivariate_operator_to_id::Dict{Symbol,Int} + multivariate_user_operator_start::Int + registered_multivariate_operators::Vector{ + MOI.Nonlinear._MultivariateOperator, + } + # NODE_LOGIC + logic_operators::Vector{Symbol} + logic_operator_to_id::Dict{Symbol,Int} + # NODE_COMPARISON + comparison_operators::Vector{Symbol} + comparison_operator_to_id::Dict{Symbol,Int} + function OperatorRegistry() + univariate_operators = copy(DEFAULT_UNIVARIATE_OPERATORS) + multivariate_operators = copy(DEFAULT_MULTIVARIATE_OPERATORS) + logic_operators = [:&&, :||] + comparison_operators = [:<=, :(==), :>=, :<, :>] + return new( + # NODE_CALL_UNIVARIATE + univariate_operators, + Dict{Symbol,Int}( + op => i for (i, op) in enumerate(univariate_operators) + ), + length(univariate_operators), + _UnivariateOperator[], + # NODE_CALL + multivariate_operators, + Dict{Symbol,Int}( + op => i for (i, op) in enumerate(multivariate_operators) + ), + length(multivariate_operators), + _MultivariateOperator[], + # NODE_LOGIC + logic_operators, + Dict{Symbol,Int}(op => i for (i, op) in enumerate(logic_operators)), + # NODE_COMPARISON + comparison_operators, + Dict{Symbol,Int}( + op => i for (i, op) in enumerate(comparison_operators) + ), + ) + end +end + +""" + Model() + +The core datastructure for representing a nonlinear optimization problem. + +It has the following fields: + * `objective::Union{Nothing,Expression}` : holds the nonlinear objective + function, if one exists, otherwise `nothing`. + * `expressions::Vector{Expression}` : a vector of expressions in the model. + * `constraints::OrderedDict{ConstraintIndex,Constraint}` : a map from + [`ConstraintIndex`](@ref) to the corresponding [`Constraint`](@ref). An + `OrderedDict` is used instead of a `Vector` to support constraint deletion. + * `parameters::Vector{Float64}` : holds the current values of the parameters. + * `operators::OperatorRegistry` : stores the operators used in the model. +""" +mutable struct Model + objective::Union{Nothing,MOI.Nonlinear.Expression} + expressions::Vector{MOI.Nonlinear.Expression} + constraints::OrderedDict{ + MOI.Nonlinear.ConstraintIndex, + MOI.Nonlinear.Constraint, + } + parameters::Vector{Float64} + operators::OperatorRegistry + # This is a private field, used only to increment the ConstraintIndex. 
+ last_constraint_index::Int64 + function Model() + model = MOI.Nonlinear.Model() + ops = [:vect, :dot, :hcat, :vcat, :norm, :sum, :row] + start = length(model.operators.multivariate_operators) + append!(model.operators.multivariate_operators, ops) + for (i, op) in enumerate(ops) + model.operators.multivariate_operator_to_id[op] = start + i + end + return model + end +end + function set_objective(model::MOI.Nonlinear.Model, obj) model.objective = parse_expression(model, obj) return end -function Model() - model = MOI.Nonlinear.Model() - append!( - model.operators.multivariate_operators, - [:vect, :dot, :hcat, :vcat, :norm, :sum, :row], - ) - return model -end - function parse_expression(data::MOI.Nonlinear.Model, input) expr = MOI.Nonlinear.Expression() parse_expression(data, expr, input, -1) diff --git a/src/reverse_mode.jl b/src/reverse_mode.jl index c98e81a..f753506 100644 --- a/src/reverse_mode.jl +++ b/src/reverse_mode.jl @@ -457,8 +457,8 @@ function _reverse_eval(f::_SubexpressionStorage) children_indices = SparseArrays.nzrange(f.adj, k) if node.type == MOI.Nonlinear.NODE_CALL_MULTIVARIATE if node.index in - eachindex(MOI.Nonlinear.DEFAULT_MULTIVARIATE_OPERATORS) - op = MOI.Nonlinear.DEFAULT_MULTIVARIATE_OPERATORS[node.index] + eachindex(DEFAULT_MULTIVARIATE_OPERATORS) + op = DEFAULT_MULTIVARIATE_OPERATORS[node.index] if op == :* if f.sizes.ndims[k] != 0 # Node `k` is not scalar, so we do matrix multiplication diff --git a/src/sizes.jl b/src/sizes.jl index 067736b..819cf5b 100644 --- a/src/sizes.jl +++ b/src/sizes.jl @@ -163,14 +163,11 @@ function _infer_sizes( children_indices = SparseArrays.nzrange(adj, k) N = length(children_indices) if node.type == Nonlinear.NODE_CALL_MULTIVARIATE - if !( - node.index in - eachindex(MOI.Nonlinear.DEFAULT_MULTIVARIATE_OPERATORS) - ) + if !(node.index in eachindex(DEFAULT_MULTIVARIATE_OPERATORS)) # TODO user-defined operators continue end - op = MOI.Nonlinear.DEFAULT_MULTIVARIATE_OPERATORS[node.index] + op = DEFAULT_MULTIVARIATE_OPERATORS[node.index] if op == :vect _assert_scalar_children( sizes, diff --git a/test/Project.toml b/test/Project.toml index 7ed96af..d66f113 100644 --- a/test/Project.toml +++ b/test/Project.toml @@ -4,3 +4,4 @@ LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e" MathOptInterface = "b8f27783-ece8-5eb3-8dc8-9495eed66fee" SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf" Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" +OrderedCollections = "bac558e1-5e72-5ebc-8fee-abe8a469f55d" \ No newline at end of file From 41ca18784b82268a9762d08ec3f86170f1f32670 Mon Sep 17 00:00:00 2001 From: Sophie L Date: Thu, 8 Jan 2026 13:15:25 +0100 Subject: [PATCH 19/20] Change tests accordingly when merging with main --- test/ArrayDiff.jl | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/test/ArrayDiff.jl b/test/ArrayDiff.jl index ffc039f..db4a4d4 100644 --- a/test/ArrayDiff.jl +++ b/test/ArrayDiff.jl @@ -357,12 +357,12 @@ function test_objective_norm_of_matrix_with_sum() end function test_objective_norm_of_product_of_matrices() - model = Nonlinear.Model() + model = ArrayDiff.Model() x1 = MOI.VariableIndex(1) x2 = MOI.VariableIndex(2) x3 = MOI.VariableIndex(3) x4 = MOI.VariableIndex(4) - Nonlinear.set_objective(model, :(norm([$x1 $x2; $x3 $x4] * [1 0; 0 1]))) + ArrayDiff.set_objective(model, :(norm([$x1 $x2; $x3 $x4] * [1 0; 0 1]))) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x1, x2, x3, x4]) MOI.initialize(evaluator, [:Grad]) sizes = evaluator.backend.objective.expr.sizes @@ -389,12 +389,12 @@ 
function test_objective_norm_of_product_of_matrices() end function test_objective_norm_of_product_of_matrices_with_sum() - model = Nonlinear.Model() + model = ArrayDiff.Model() x1 = MOI.VariableIndex(1) x2 = MOI.VariableIndex(2) x3 = MOI.VariableIndex(3) x4 = MOI.VariableIndex(4) - Nonlinear.set_objective( + ArrayDiff.set_objective( model, :(norm(([$x1 $x2; $x3 $x4] + [1 1; 1 1]) * [1 0; 0 1])), ) @@ -499,12 +499,12 @@ function test_objective_norm_of_product_of_matrices_with_sum() end function test_objective_norm_of_mtx_vector_product() - model = Nonlinear.Model() + model = ArrayDiff.Model() x1 = MOI.VariableIndex(1) x2 = MOI.VariableIndex(2) x3 = MOI.VariableIndex(3) x4 = MOI.VariableIndex(4) - Nonlinear.set_objective(model, :(norm(([$x1 $x2; $x3 $x4] * [1; 1])))) + ArrayDiff.set_objective(model, :(norm([$x1 $x2; $x3 $x4] * [1; 1]))) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x1, x2, x3, x4]) MOI.initialize(evaluator, [:Grad]) sizes = evaluator.backend.objective.expr.sizes From 12399566ee1bead14bde7c98be323be2af0e6fd9 Mon Sep 17 00:00:00 2001 From: Sophie L Date: Thu, 8 Jan 2026 13:19:25 +0100 Subject: [PATCH 20/20] Correct format --- src/reverse_mode.jl | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/reverse_mode.jl b/src/reverse_mode.jl index f753506..e80897e 100644 --- a/src/reverse_mode.jl +++ b/src/reverse_mode.jl @@ -456,8 +456,7 @@ function _reverse_eval(f::_SubexpressionStorage) node = f.nodes[k] children_indices = SparseArrays.nzrange(f.adj, k) if node.type == MOI.Nonlinear.NODE_CALL_MULTIVARIATE - if node.index in - eachindex(DEFAULT_MULTIVARIATE_OPERATORS) + if node.index in eachindex(DEFAULT_MULTIVARIATE_OPERATORS) op = DEFAULT_MULTIVARIATE_OPERATORS[node.index] if op == :* if f.sizes.ndims[k] != 0