diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 6aaa77f..aae738c 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -25,13 +25,6 @@ jobs: version: ${{ matrix.version }} arch: ${{ matrix.arch }} - uses: julia-actions/cache@v1 - - name: MOI - shell: julia --project=@. {0} - run: | - using Pkg - Pkg.add([ - PackageSpec(name="MathOptInterface", rev="bl/arraydiff"), - ]) - uses: julia-actions/julia-buildpkg@v1 - uses: julia-actions/julia-runtest@v1 with: diff --git a/Project.toml b/Project.toml index 014259e..e45e9b1 100644 --- a/Project.toml +++ b/Project.toml @@ -9,6 +9,7 @@ ForwardDiff = "f6369f11-7733-5829-9624-2563aa707210" MathOptInterface = "b8f27783-ece8-5eb3-8dc8-9495eed66fee" NaNMath = "77ba4419-2d1f-58cd-9bb1-8ffee604a2e3" SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf" +OrderedCollections = "bac558e1-5e72-5ebc-8fee-abe8a469f55d" [compat] DataStructures = "0.18, 0.19" diff --git a/src/ArrayDiff.jl b/src/ArrayDiff.jl index 1f08c1e..7008b8b 100644 --- a/src/ArrayDiff.jl +++ b/src/ArrayDiff.jl @@ -10,6 +10,7 @@ import ForwardDiff import MathOptInterface as MOI const Nonlinear = MOI.Nonlinear import SparseArrays +import OrderedCollections: OrderedDict """ Mode() <: AbstractAutomaticDifferentiation @@ -58,4 +59,6 @@ include("reverse_mode.jl") include("forward_over_reverse.jl") include("mathoptinterface_api.jl") +include("MOI_Nonlinear_fork.jl") + end # module diff --git a/src/MOI_Nonlinear_fork.jl b/src/MOI_Nonlinear_fork.jl new file mode 100644 index 0000000..112e443 --- /dev/null +++ b/src/MOI_Nonlinear_fork.jl @@ -0,0 +1,336 @@ +# Inspired by MathOptInterface/src/Nonlinear/parse_expression.jl + +const DEFAULT_MULTIVARIATE_OPERATORS = [ + :+, + :-, + :*, + :^, + :/, + :ifelse, + :atan, + :min, + :max, + :vect, + :dot, + :hcat, + :vcat, + :norm, + :sum, + :row, +] + +struct OperatorRegistry + # NODE_CALL_UNIVARIATE + univariate_operators::Vector{Symbol} + univariate_operator_to_id::Dict{Symbol,Int} + 
univariate_user_operator_start::Int + registered_univariate_operators::Vector{MOI.Nonlinear._UnivariateOperator} + # NODE_CALL_MULTIVARIATE + multivariate_operators::Vector{Symbol} + multivariate_operator_to_id::Dict{Symbol,Int} + multivariate_user_operator_start::Int + registered_multivariate_operators::Vector{ + MOI.Nonlinear._MultivariateOperator, + } + # NODE_LOGIC + logic_operators::Vector{Symbol} + logic_operator_to_id::Dict{Symbol,Int} + # NODE_COMPARISON + comparison_operators::Vector{Symbol} + comparison_operator_to_id::Dict{Symbol,Int} + function OperatorRegistry() + univariate_operators = copy(MOI.Nonlinear.DEFAULT_UNIVARIATE_OPERATORS) + multivariate_operators = copy(DEFAULT_MULTIVARIATE_OPERATORS) + logic_operators = [:&&, :||] + comparison_operators = [:<=, :(==), :>=, :<, :>] + return new( + # NODE_CALL_UNIVARIATE + univariate_operators, + Dict{Symbol,Int}( + op => i for (i, op) in enumerate(univariate_operators) + ), + length(univariate_operators), + MOI.Nonlinear._UnivariateOperator[], + # NODE_CALL + multivariate_operators, + Dict{Symbol,Int}( + op => i for (i, op) in enumerate(multivariate_operators) + ), + length(multivariate_operators), + MOI.Nonlinear._MultivariateOperator[], + # NODE_LOGIC + logic_operators, + Dict{Symbol,Int}(op => i for (i, op) in enumerate(logic_operators)), + # NODE_COMPARISON + comparison_operators, + Dict{Symbol,Int}( + op => i for (i, op) in enumerate(comparison_operators) + ), + ) + end +end + +"""
 Model() + +The core datastructure for representing a nonlinear optimization problem. + +It has the following fields: + * `objective::Union{Nothing,Expression}` : holds the nonlinear objective + function, if one exists, otherwise `nothing`. + * `expressions::Vector{Expression}` : a vector of expressions in the model. + * `constraints::OrderedDict{ConstraintIndex,Constraint}` : a map from + [`ConstraintIndex`](@ref) to the corresponding [`Constraint`](@ref). An + `OrderedDict` is used instead of a `Vector` to support constraint deletion. 
+ * `parameters::Vector{Float64}` : holds the current values of the parameters. + * `operators::OperatorRegistry` : stores the operators used in the model. +""" +mutable struct Model + objective::Union{Nothing,MOI.Nonlinear.Expression} + expressions::Vector{MOI.Nonlinear.Expression} + constraints::OrderedDict{ + MOI.Nonlinear.ConstraintIndex, + MOI.Nonlinear.Constraint, + } + parameters::Vector{Float64} + operators::OperatorRegistry + # This is a private field, used only to increment the ConstraintIndex. + last_constraint_index::Int64 + function Model() + model = MOI.Nonlinear.Model() + ops = [:vect, :dot, :hcat, :vcat, :norm, :sum, :row] + start = length(model.operators.multivariate_operators) + append!(model.operators.multivariate_operators, ops) + for (i, op) in enumerate(ops) + model.operators.multivariate_operator_to_id[op] = start + i + end + return model + end +end + +function set_objective(model::MOI.Nonlinear.Model, obj) + model.objective = parse_expression(model, obj) + return +end + +function parse_expression(data::MOI.Nonlinear.Model, input) + expr = MOI.Nonlinear.Expression() + parse_expression(data, expr, input, -1) + return expr +end + +function parse_expression(data, expr, item, parent) + return MOI.Nonlinear.parse_expression(data, expr, item, parent) +end + +function parse_expression( + data::MOI.Nonlinear.Model, + expr::MOI.Nonlinear.Expression, + x::Expr, + parent_index::Int, +) + stack = Tuple{Int,Any}[] + push!(stack, (parent_index, x)) + while !isempty(stack) + parent, item = pop!(stack) + if item isa Expr + _parse_expression(stack, data, expr, item, parent) + else + parse_expression(data, expr, item, parent) + end + end + return +end + +function _parse_expression(stack, data, expr, x, parent_index) + if Meta.isexpr(x, :call) + if length(x.args) == 2 && !Meta.isexpr(x.args[2], :...) 
+ MOI.Nonlinear._parse_univariate_expression( + stack, + data, + expr, + x, + parent_index, + ) + else + # The call is either n-ary, or it is a splat, in which case we + # cannot tell just yet whether the expression is unary or nary. + # Punt to multivariate and try to recover later. + MOI.Nonlinear._parse_multivariate_expression( + stack, + data, + expr, + x, + parent_index, + ) + end + elseif Meta.isexpr(x, :comparison) + MOI.Nonlinear._parse_comparison_expression( + stack, + data, + expr, + x, + parent_index, + ) + elseif Meta.isexpr(x, :...) + MOI.Nonlinear._parse_splat_expression( + stack, + data, + expr, + x, + parent_index, + ) + elseif Meta.isexpr(x, :&&) || Meta.isexpr(x, :||) + MOI.Nonlinear._parse_logic_expression( + stack, + data, + expr, + x, + parent_index, + ) + elseif Meta.isexpr(x, :vect) + _parse_vect_expression(stack, data, expr, x, parent_index) + elseif Meta.isexpr(x, :hcat) + _parse_hcat_expression(stack, data, expr, x, parent_index) + elseif Meta.isexpr(x, :vcat) + _parse_vcat_expression(stack, data, expr, x, parent_index) + elseif Meta.isexpr(x, :row) + _parse_row_expression(stack, data, expr, x, parent_index) + else + error("Unsupported expression: $x") + end +end + +function eval_multivariate_function( + registry::MOI.Nonlinear.OperatorRegistry, + op::Symbol, + x::AbstractVector{T}, +) where {T} + if op == :+ + return sum(x; init = zero(T)) + elseif op == :- + @assert length(x) == 2 + return x[1] - x[2] + elseif op == :* + return prod(x; init = one(T)) + elseif op == :^ + @assert length(x) == 2 + # Use _nan_pow here to avoid throwing an error in common situations like + # (-1.0)^1.5. 
+ return _nan_pow(x[1], x[2]) + elseif op == :/ + @assert length(x) == 2 + return x[1] / x[2] + elseif op == :ifelse + @assert length(x) == 3 + return ifelse(Bool(x[1]), x[2], x[3]) + elseif op == :atan + @assert length(x) == 2 + return atan(x[1], x[2]) + elseif op == :min + return minimum(x) + elseif op == :max + return maximum(x) + elseif op == :vect + return x + end + id = registry.multivariate_operator_to_id[op] + offset = id - registry.multivariate_user_operator_start + operator = registry.registered_multivariate_operators[offset] + @assert length(x) == operator.N + ret = operator.f(x) + MOI.Nonlinear.check_return_type(T, ret) + return ret::T +end + +function _parse_vect_expression( + stack::Vector{Tuple{Int,Any}}, + data::MOI.Nonlinear.Model, + expr::MOI.Nonlinear.Expression, + x::Expr, + parent_index::Int, +) + @assert Meta.isexpr(x, :vect) + id = get(data.operators.multivariate_operator_to_id, :vect, nothing) + push!( + expr.nodes, + MOI.Nonlinear.Node( + MOI.Nonlinear.NODE_CALL_MULTIVARIATE, + id, + parent_index, + ), + ) + for i in length(x.args):-1:1 + push!(stack, (length(expr.nodes), x.args[i])) + end + return +end + +function _parse_row_expression( + stack::Vector{Tuple{Int,Any}}, + data::MOI.Nonlinear.Model, + expr::MOI.Nonlinear.Expression, + x::Expr, + parent_index::Int, +) + @assert Meta.isexpr(x, :row) + id = get(data.operators.multivariate_operator_to_id, :row, nothing) + push!( + expr.nodes, + MOI.Nonlinear.Node( + MOI.Nonlinear.NODE_CALL_MULTIVARIATE, + id, + parent_index, + ), + ) + for i in length(x.args):-1:1 + push!(stack, (length(expr.nodes), x.args[i])) + end + return +end + +function _parse_hcat_expression( + stack::Vector{Tuple{Int,Any}}, + data::MOI.Nonlinear.Model, + expr::MOI.Nonlinear.Expression, + x::Expr, + parent_index::Int, +) + @assert Meta.isexpr(x, :hcat) + id = get(data.operators.multivariate_operator_to_id, :hcat, nothing) + push!( + expr.nodes, + MOI.Nonlinear.Node( + MOI.Nonlinear.NODE_CALL_MULTIVARIATE, + id, + 
parent_index, + ), + ) + for i in length(x.args):-1:1 + push!(stack, (length(expr.nodes), x.args[i])) + end + return +end + +function _parse_vcat_expression( + stack::Vector{Tuple{Int,Any}}, + data::MOI.Nonlinear.Model, + expr::MOI.Nonlinear.Expression, + x::Expr, + parent_index::Int, +) + @assert Meta.isexpr(x, :vcat) + id = get(data.operators.multivariate_operator_to_id, :vcat, nothing) + push!( + expr.nodes, + MOI.Nonlinear.Node( + MOI.Nonlinear.NODE_CALL_MULTIVARIATE, + id, + parent_index, + ), + ) + for i in length(x.args):-1:1 + push!(stack, (length(expr.nodes), x.args[i])) + end + return +end diff --git a/src/reverse_mode.jl b/src/reverse_mode.jl index 505421a..e80897e 100644 --- a/src/reverse_mode.jl +++ b/src/reverse_mode.jl @@ -29,6 +29,10 @@ single pass through the tree by iterating forwards through the vector of stored nodes. """ function _reverse_mode(d::NLPEvaluator, x) + # Because the operators are checked with `Int` and not `Symbol` + # if we get a model that didn't add our new operators but had user-defined + # operators, we will think that these are one of our new operators + @assert :vect in d.data.operators.multivariate_operators if d.last_x == x # Fail fast if the primal solution has not changed since last call. 
return @@ -357,7 +361,7 @@ function _forward_eval( f_input[r] = f.forward_storage[children_arr[i]] ∇f[r] = 0.0 end - f.forward_storage[k] = Nonlinear.eval_multivariate_function( + f.forward_storage[k] = eval_multivariate_function( operators, operators.multivariate_operators[node.index], f_input, @@ -452,9 +456,8 @@ function _reverse_eval(f::_SubexpressionStorage) node = f.nodes[k] children_indices = SparseArrays.nzrange(f.adj, k) if node.type == MOI.Nonlinear.NODE_CALL_MULTIVARIATE - if node.index in - eachindex(MOI.Nonlinear.DEFAULT_MULTIVARIATE_OPERATORS) - op = MOI.Nonlinear.DEFAULT_MULTIVARIATE_OPERATORS[node.index] + if node.index in eachindex(DEFAULT_MULTIVARIATE_OPERATORS) + op = DEFAULT_MULTIVARIATE_OPERATORS[node.index] if op == :* if f.sizes.ndims[k] != 0 # Node `k` is not scalar, so we do matrix multiplication diff --git a/src/sizes.jl b/src/sizes.jl index 067736b..819cf5b 100644 --- a/src/sizes.jl +++ b/src/sizes.jl @@ -163,14 +163,11 @@ function _infer_sizes( children_indices = SparseArrays.nzrange(adj, k) N = length(children_indices) if node.type == Nonlinear.NODE_CALL_MULTIVARIATE - if !( - node.index in - eachindex(MOI.Nonlinear.DEFAULT_MULTIVARIATE_OPERATORS) - ) + if !(node.index in eachindex(DEFAULT_MULTIVARIATE_OPERATORS)) # TODO user-defined operators continue end - op = MOI.Nonlinear.DEFAULT_MULTIVARIATE_OPERATORS[node.index] + op = DEFAULT_MULTIVARIATE_OPERATORS[node.index] if op == :vect _assert_scalar_children( sizes, diff --git a/test/ArrayDiff.jl b/test/ArrayDiff.jl index 4b90f3b..db4a4d4 100644 --- a/test/ArrayDiff.jl +++ b/test/ArrayDiff.jl @@ -22,9 +22,9 @@ function runtests() end function test_objective_dot_univariate() - model = Nonlinear.Model() + model = ArrayDiff.Model() x = MOI.VariableIndex(1) - Nonlinear.set_objective(model, :(dot([$x], [$x]))) + ArrayDiff.set_objective(model, :(dot([$x], [$x]))) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x]) MOI.initialize(evaluator, [:Grad, :Hess]) sizes = 
evaluator.backend.objective.expr.sizes @@ -41,9 +41,9 @@ function test_objective_dot_univariate() end function test_objective_dot_univariate_and_scalar_mult() - model = Nonlinear.Model() + model = ArrayDiff.Model() x = MOI.VariableIndex(1) - Nonlinear.set_objective(model, :(2*(dot([$x], [$x])))) + ArrayDiff.set_objective(model, :(2*(dot([$x], [$x])))) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x]) MOI.initialize(evaluator, [:Grad]) sizes = evaluator.backend.objective.expr.sizes @@ -60,10 +60,10 @@ function test_objective_dot_univariate_and_scalar_mult() end function test_objective_dot_bivariate() - model = Nonlinear.Model() + model = ArrayDiff.Model() x = MOI.VariableIndex(1) y = MOI.VariableIndex(2) - Nonlinear.set_objective( + ArrayDiff.set_objective( model, :(dot([$x, $y] - [1, 2], -[1, 2] + [$x, $y])), ) @@ -84,12 +84,12 @@ function test_objective_dot_bivariate() end function test_objective_hcat_scalars() - model = Nonlinear.Model() + model = ArrayDiff.Model() x1 = MOI.VariableIndex(1) x2 = MOI.VariableIndex(2) x3 = MOI.VariableIndex(3) x4 = MOI.VariableIndex(4) - Nonlinear.set_objective(model, :(dot([$x1 $x3], [$x2 $x4]))) + ArrayDiff.set_objective(model, :(dot([$x1 $x3], [$x2 $x4]))) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x1, x2, x3, x4]) MOI.initialize(evaluator, [:Grad]) sizes = evaluator.backend.objective.expr.sizes @@ -109,12 +109,12 @@ function test_objective_hcat_scalars() end function test_objective_hcat_vectors() - model = Nonlinear.Model() + model = ArrayDiff.Model() x1 = MOI.VariableIndex(1) x2 = MOI.VariableIndex(2) x3 = MOI.VariableIndex(3) x4 = MOI.VariableIndex(4) - Nonlinear.set_objective( + ArrayDiff.set_objective( model, :(dot(hcat([$x1], [$x3]), hcat([$x2], [$x4]))), ) @@ -137,10 +137,10 @@ function test_objective_hcat_vectors() end function test_objective_dot_bivariate_on_rows() - model = Nonlinear.Model() + model = ArrayDiff.Model() x = MOI.VariableIndex(1) y = MOI.VariableIndex(2) - 
Nonlinear.set_objective(model, :(dot([$x $y] - [1 2], -[1 2] + [$x $y]))) + ArrayDiff.set_objective(model, :(dot([$x $y] - [1 2], -[1 2] + [$x $y]))) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x, y]) MOI.initialize(evaluator, [:Grad]) sizes = evaluator.backend.objective.expr.sizes @@ -159,9 +159,9 @@ function test_objective_dot_bivariate_on_rows() end function test_objective_norm_univariate() - model = Nonlinear.Model() + model = ArrayDiff.Model() x = MOI.VariableIndex(1) - Nonlinear.set_objective(model, :(norm([$x]))) + ArrayDiff.set_objective(model, :(norm([$x]))) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x]) MOI.initialize(evaluator, [:Grad]) sizes = evaluator.backend.objective.expr.sizes @@ -178,10 +178,10 @@ function test_objective_norm_univariate() end function test_objective_norm_bivariate() - model = Nonlinear.Model() + model = ArrayDiff.Model() x = MOI.VariableIndex(1) y = MOI.VariableIndex(2) - Nonlinear.set_objective(model, :(norm([$x, $y]))) + ArrayDiff.set_objective(model, :(norm([$x, $y]))) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x, y]) MOI.initialize(evaluator, [:Grad]) sizes = evaluator.backend.objective.expr.sizes @@ -203,10 +203,10 @@ function test_objective_norm_bivariate() end function test_objective_norm_of_row_vector() - model = Nonlinear.Model() + model = ArrayDiff.Model() x1 = MOI.VariableIndex(1) x2 = MOI.VariableIndex(2) - Nonlinear.set_objective(model, :(norm([$x1 $x2]))) + ArrayDiff.set_objective(model, :(norm([$x1 $x2]))) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x1, x2]) MOI.initialize(evaluator, [:Grad]) sizes = evaluator.backend.objective.expr.sizes @@ -224,12 +224,12 @@ function test_objective_norm_of_row_vector() end function test_objective_norm_of_vcat_vector() - model = Nonlinear.Model() + model = ArrayDiff.Model() x1 = MOI.VariableIndex(1) x2 = MOI.VariableIndex(2) x3 = MOI.VariableIndex(3) x4 = MOI.VariableIndex(4) - Nonlinear.set_objective(model, :(norm(vcat($x1, 
$x3)))) + ArrayDiff.set_objective(model, :(norm(vcat($x1, $x3)))) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x1, x2, x3, x4]) MOI.initialize(evaluator, [:Grad]) sizes = evaluator.backend.objective.expr.sizes @@ -249,12 +249,12 @@ function test_objective_norm_of_vcat_vector() end function test_objective_norm_of_vcat_matrix() - model = Nonlinear.Model() + model = ArrayDiff.Model() x1 = MOI.VariableIndex(1) x2 = MOI.VariableIndex(2) x3 = MOI.VariableIndex(3) x4 = MOI.VariableIndex(4) - Nonlinear.set_objective(model, :(norm(vcat([$x1 $x3], [$x2 $x4])))) + ArrayDiff.set_objective(model, :(norm(vcat([$x1 $x3], [$x2 $x4])))) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x1, x2, x3, x4]) MOI.initialize(evaluator, [:Grad]) sizes = evaluator.backend.objective.expr.sizes @@ -279,10 +279,10 @@ function test_objective_norm_of_vcat_matrix() end function test_objective_norm_of_row() - model = Nonlinear.Model() + model = ArrayDiff.Model() x1 = MOI.VariableIndex(1) x2 = MOI.VariableIndex(2) - Nonlinear.set_objective(model, :(norm(row($x1, $x2)))) + ArrayDiff.set_objective(model, :(norm(row($x1, $x2)))) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x1, x2]) MOI.initialize(evaluator, [:Grad]) sizes = evaluator.backend.objective.expr.sizes @@ -300,12 +300,12 @@ function test_objective_norm_of_row() end function test_objective_norm_of_matrix() - model = Nonlinear.Model() + model = ArrayDiff.Model() x1 = MOI.VariableIndex(1) x2 = MOI.VariableIndex(2) x3 = MOI.VariableIndex(3) x4 = MOI.VariableIndex(4) - Nonlinear.set_objective(model, :(norm([$x1 $x2; $x3 $x4]))) + ArrayDiff.set_objective(model, :(norm([$x1 $x2; $x3 $x4]))) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x1, x2, x3, x4]) MOI.initialize(evaluator, [:Grad]) sizes = evaluator.backend.objective.expr.sizes @@ -330,12 +330,12 @@ function test_objective_norm_of_matrix() end function test_objective_norm_of_matrix_with_sum() - model = Nonlinear.Model() + model = ArrayDiff.Model() 
x1 = MOI.VariableIndex(1) x2 = MOI.VariableIndex(2) x3 = MOI.VariableIndex(3) x4 = MOI.VariableIndex(4) - Nonlinear.set_objective(model, :(norm([$x1 $x2; $x3 $x4] - [1 1; 1 1]))) + ArrayDiff.set_objective(model, :(norm([$x1 $x2; $x3 $x4] - [1 1; 1 1]))) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x1, x2, x3, x4]) MOI.initialize(evaluator, [:Grad]) sizes = evaluator.backend.objective.expr.sizes @@ -357,12 +357,12 @@ function test_objective_norm_of_matrix_with_sum() end function test_objective_norm_of_product_of_matrices() - model = Nonlinear.Model() + model = ArrayDiff.Model() x1 = MOI.VariableIndex(1) x2 = MOI.VariableIndex(2) x3 = MOI.VariableIndex(3) x4 = MOI.VariableIndex(4) - Nonlinear.set_objective(model, :(norm([$x1 $x2; $x3 $x4] * [1 0; 0 1]))) + ArrayDiff.set_objective(model, :(norm([$x1 $x2; $x3 $x4] * [1 0; 0 1]))) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x1, x2, x3, x4]) MOI.initialize(evaluator, [:Grad]) sizes = evaluator.backend.objective.expr.sizes @@ -389,12 +389,12 @@ function test_objective_norm_of_product_of_matrices() end function test_objective_norm_of_product_of_matrices_with_sum() - model = Nonlinear.Model() + model = ArrayDiff.Model() x1 = MOI.VariableIndex(1) x2 = MOI.VariableIndex(2) x3 = MOI.VariableIndex(3) x4 = MOI.VariableIndex(4) - Nonlinear.set_objective( + ArrayDiff.set_objective( model, :(norm(([$x1 $x2; $x3 $x4] + [1 1; 1 1]) * [1 0; 0 1])), ) @@ -499,12 +499,12 @@ function test_objective_norm_of_product_of_matrices_with_sum() end function test_objective_norm_of_mtx_vector_product() - model = Nonlinear.Model() + model = ArrayDiff.Model() x1 = MOI.VariableIndex(1) x2 = MOI.VariableIndex(2) x3 = MOI.VariableIndex(3) x4 = MOI.VariableIndex(4) - Nonlinear.set_objective(model, :(norm(([$x1 $x2; $x3 $x4] * [1; 1])))) + ArrayDiff.set_objective(model, :(norm([$x1 $x2; $x3 $x4] * [1; 1]))) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x1, x2, x3, x4]) MOI.initialize(evaluator, [:Grad]) sizes = 
evaluator.backend.objective.expr.sizes diff --git a/test/Project.toml b/test/Project.toml index 7ed96af..d66f113 100644 --- a/test/Project.toml +++ b/test/Project.toml @@ -4,3 +4,4 @@ LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e" MathOptInterface = "b8f27783-ece8-5eb3-8dc8-9495eed66fee" SparseArrays = "2f01184e-e22b-5df5-ae63-d93ebab69eaf" Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" +OrderedCollections = "bac558e1-5e72-5ebc-8fee-abe8a469f55d" \ No newline at end of file diff --git a/test/ReverseAD.jl b/test/ReverseAD.jl index 552ff3f..bf88318 100644 --- a/test/ReverseAD.jl +++ b/test/ReverseAD.jl @@ -29,7 +29,7 @@ end function test_objective_quadratic_univariate() x = MOI.VariableIndex(1) - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.set_objective(model, :($x^2 + 1)) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x]) MOI.initialize(evaluator, [:Grad, :Jac, :Hess]) @@ -59,7 +59,7 @@ end function test_objective_and_constraints_quadratic_univariate() x = MOI.VariableIndex(1) - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.set_objective(model, :($x^2 + 1)) Nonlinear.add_constraint(model, :($x^2), MOI.LessThan(2.0)) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x]) @@ -96,7 +96,7 @@ end function test_objective_quadratic_multivariate() x = MOI.VariableIndex(1) y = MOI.VariableIndex(2) - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.set_objective(model, :($x^2 + $x * $y + $y^2)) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x, y]) MOI.initialize(evaluator, [:Grad, :Jac, :Hess]) @@ -130,7 +130,7 @@ end function test_objective_quadratic_multivariate_subexpressions() x = MOI.VariableIndex(1) y = MOI.VariableIndex(2) - model = Nonlinear.Model() + model = ArrayDiff.Model() ex = Nonlinear.add_expression(model, :($x^2)) ey = Nonlinear.add_expression(model, :($y^2)) exy = Nonlinear.add_expression(model, :($ex + $x * $y)) @@ -175,7 +175,7 @@ end function 
test_objective_ifelse_comparison() x = MOI.VariableIndex(1) y = MOI.VariableIndex(2) - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.set_objective(model, :(ifelse(1 <= $x <= 2, $x^2, $y^2))) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x, y]) MOI.initialize(evaluator, [:Grad, :Jac, :Hess]) @@ -192,7 +192,7 @@ end function test_objective_ifelse_logic() x = MOI.VariableIndex(1) y = MOI.VariableIndex(2) - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.set_objective(model, :(ifelse(1 <= $x && $x <= 2, $x^2, $y^2))) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x, y]) MOI.initialize(evaluator, [:Grad, :Jac, :Hess]) @@ -208,7 +208,7 @@ end function test_objective_parameter() x = MOI.VariableIndex(1) - model = Nonlinear.Model() + model = ArrayDiff.Model() p = Nonlinear.add_parameter(model, 1.2) Nonlinear.set_objective(model, :($p * $x)) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x]) @@ -221,7 +221,7 @@ function test_objective_parameter() end function test_objective_subexpression() - model = Nonlinear.Model() + model = ArrayDiff.Model() x = MOI.VariableIndex(1) input = :($x^2 + 1) expr = Nonlinear.add_expression(model, input) @@ -238,7 +238,7 @@ end function test_constraint_quadratic_univariate() x = MOI.VariableIndex(1) - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.add_constraint(model, :($x^2), MOI.LessThan(2.0)) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x]) MOI.initialize(evaluator, [:Grad, :Jac, :Hess]) @@ -264,7 +264,7 @@ end function test_constraint_quadratic_multivariate() x = MOI.VariableIndex(1) y = MOI.VariableIndex(2) - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.add_constraint(model, :($x^2 + $x * $y + $y^2), MOI.LessThan(2.0)) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x, y]) MOI.initialize(evaluator, [:Grad, :Jac, :Hess]) @@ -287,7 +287,7 @@ end function test_constraint_quadratic_multivariate_subexpressions() x = 
MOI.VariableIndex(1) y = MOI.VariableIndex(2) - model = Nonlinear.Model() + model = ArrayDiff.Model() ex = Nonlinear.add_expression(model, :($x^2)) ey = Nonlinear.add_expression(model, :($y^2)) exy = Nonlinear.add_expression(model, :($ex + $x * $y)) @@ -336,7 +336,7 @@ function test_hessian_sparsity_registered_function() H[2, 2] = 2 return end - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.register_operator(model, :f, 2, f, ∇f, ∇²f) Nonlinear.set_objective(model, :(f($x, $z) + $y^2)) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x, y, z]) @@ -366,7 +366,7 @@ function test_hessian_sparsity_registered_rosenbrock() H[2, 2] = 200.0 return end - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.register_operator(model, :rosenbrock, 2, f, ∇f, ∇²f) Nonlinear.set_objective(model, :(rosenbrock($x, $y))) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x, y]) @@ -396,7 +396,7 @@ function test_hessian_registered_error() H[2, 2] = 200.0 return end - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.register_operator(model, :rosenbrock, 2, f, ∇f, ∇²f) Nonlinear.set_objective(model, :(rosenbrock($x, $y))) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x, y]) @@ -494,7 +494,7 @@ end function test_derivatives() a = MOI.VariableIndex(1) b = MOI.VariableIndex(2) - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.set_objective(model, :(sin($a^2) + cos($b * 4) / 5 - 2.0)) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [a, b]) MOI.initialize(evaluator, [:Grad, :Jac, :Hess]) @@ -516,7 +516,7 @@ function test_derivatives() end function test_NLPBlockData() - model = Nonlinear.Model() + model = ArrayDiff.Model() x = MOI.VariableIndex(1) Nonlinear.add_constraint(model, :($x - 1), MOI.LessThan(0.0)) Nonlinear.add_constraint(model, :($x - 2), MOI.GreaterThan(0.0)) @@ -540,7 +540,7 @@ function test_linearity() z = MOI.VariableIndex(3) variables = Dict(x => 1, y => 2, z => 3) function 
_test_linearity(input, test_value, IJ = [], indices = []) - model = Nonlinear.Model() + model = ArrayDiff.Model() ex = Nonlinear.add_expression(model, input) expr = model[ex] adj = Nonlinear.adjacency_matrix(expr.nodes) @@ -631,7 +631,7 @@ end function test_linearity_no_hess() x = MOI.VariableIndex(1) - model = Nonlinear.Model() + model = ArrayDiff.Model() ex = Nonlinear.add_expression(model, :($x + 1)) Nonlinear.set_objective(model, ex) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x]) @@ -647,7 +647,7 @@ function test_dual_forward() x = MOI.VariableIndex(1) y = MOI.VariableIndex(2) function _test_dual_forward(input, x_input, test_value) - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.set_objective(model, input) evaluator = Nonlinear.Evaluator( model, @@ -687,7 +687,7 @@ function test_gradient_registered_function() x = MOI.VariableIndex(1) y = MOI.VariableIndex(2) z = MOI.VariableIndex(3) - model = Nonlinear.Model() + model = ArrayDiff.Model() f(x, y) = (1 / 3) * y^3 - 2x^2 function ∇f(g, x, y) g[1] = -4x @@ -714,7 +714,7 @@ end function test_gradient_jump_855() x = MOI.VariableIndex(1) - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.set_objective( model, :(ifelse($x <= 3.0, ($x - 2.0)^2, 2 * log($x - 2.0) + 1.0)), @@ -732,7 +732,7 @@ end function test_gradient_abs() x = MOI.VariableIndex(1) - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.set_objective(model, :(abs($x))) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), MOI.VariableIndex[x]) @@ -748,7 +748,7 @@ end function test_gradient_trig() x = MOI.VariableIndex(1) y = MOI.VariableIndex(2) - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.set_objective(model, :(sin($x^2) + cos($y * 4) / 5 - 2.0)) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), MOI.VariableIndex[x, y]) @@ -761,7 +761,7 @@ end function test_gradient_logical() x = MOI.VariableIndex(1) - model = Nonlinear.Model() + model = ArrayDiff.Model() 
Nonlinear.set_objective(model, :($x > 0.5 && $x < 0.9)) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), MOI.VariableIndex[x]) @@ -775,7 +775,7 @@ end function test_gradient_ifelse() x = MOI.VariableIndex(1) - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.set_objective(model, :(ifelse($x >= 0.5 || $x < 0.1, $x, 5))) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), MOI.VariableIndex[x]) @@ -795,7 +795,7 @@ end function test_gradient_sqrt_nan() x = MOI.VariableIndex(1) - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.set_objective(model, :(sqrt($x))) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), MOI.VariableIndex[x]) @@ -811,7 +811,7 @@ function test_gradient_variable_power() x = MOI.VariableIndex(1) y = MOI.VariableIndex(2) z = MOI.VariableIndex(3) - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.set_objective(model, :((1 / $x)^$y - $z)) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), MOI.VariableIndex[x, y, z]) @@ -830,7 +830,7 @@ end function test_single_parameter() x = MOI.VariableIndex(1) - model = Nonlinear.Model() + model = ArrayDiff.Model() p = Nonlinear.add_parameter(model, 105.2) Nonlinear.set_objective(model, :($p)) evaluator = @@ -843,7 +843,7 @@ end function test_gradient_nested_subexpressions() x = MOI.VariableIndex(1) y = MOI.VariableIndex(2) - model = Nonlinear.Model() + model = ArrayDiff.Model() ex1 = Nonlinear.add_expression(model, :(sin($x^2) + cos($y * 4) / 5 - 2.0)) ex2 = Nonlinear.add_expression(model, :($ex1)) Nonlinear.set_objective(model, ex2) @@ -859,7 +859,7 @@ end function test_gradient_view() x = MOI.VariableIndex(1) y = MOI.VariableIndex(2) - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.set_objective(model, :(($x - 1)^2 + 4 * ($y - $x^2)^2)) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), MOI.VariableIndex[x, y]) @@ -902,7 +902,7 @@ function test_odd_chunks_Hessian_products() end function 
_test_odd_chunks_Hessian_products(N) - model = Nonlinear.Model() + model = ArrayDiff.Model() x = MOI.VariableIndex.(1:N) Nonlinear.set_objective(model, Expr(:call, :*, x...)) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), x) @@ -929,7 +929,7 @@ function _dense_jacobian(jacobian_sparsity, V, m, n) end function test_jacobians_and_jacvec() - model = Nonlinear.Model() + model = ArrayDiff.Model() x = MOI.VariableIndex.(1:3) a, b, c = x Nonlinear.set_objective(model, :($a * $b + $c^2)) @@ -960,7 +960,7 @@ function test_jacobians_and_jacvec() end function test_jacobians_and_jacvec_with_subexpressions() - model = Nonlinear.Model() + model = ArrayDiff.Model() x = MOI.VariableIndex.(1:3) a, b, c = x bc = Nonlinear.add_expression(model, :($b * $c)) @@ -993,7 +993,7 @@ end function test_pow_complex_result() x = MOI.VariableIndex(1) - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.set_objective(model, :(ifelse($x > 0, $x^1.5, -(-$x)^1.5))) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x]) MOI.initialize(evaluator, [:Grad, :Jac, :Hess]) @@ -1012,7 +1012,7 @@ end function test_constraint_gradient() x = MOI.VariableIndex(1) y = MOI.VariableIndex(2) - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.add_constraint(model, :($x^2 + $x * $y + $y^2), MOI.LessThan(2.0)) Nonlinear.add_constraint(model, :(cos($y)), MOI.LessThan(2.0)) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x, y]) @@ -1032,7 +1032,7 @@ end function test_hessian_length() x = MOI.VariableIndex(1) - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.set_objective(model, :(log($x))) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x]) MOI.initialize(evaluator, [:Hess]) @@ -1050,7 +1050,7 @@ end function test_jacobian_length() x = MOI.VariableIndex(1) - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.add_constraint(model, :(sin($x)), MOI.LessThan(0.5)) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x]) 
MOI.initialize(evaluator, [:Jac]) @@ -1061,7 +1061,7 @@ end function test_timers() x = MOI.VariableIndex(1) - model = Nonlinear.Model() + model = ArrayDiff.Model() Nonlinear.set_objective(model, :(log($x))) Nonlinear.add_constraint(model, :(sin($x)), MOI.LessThan(0.5)) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x]) @@ -1101,7 +1101,7 @@ function test_timers() end function test_varying_length_x() - model = MOI.Nonlinear.Model() + model = ArrayDiff.Model() x = MOI.VariableIndex(1) MOI.Nonlinear.set_objective(model, :(sin($x))) evaluator = MOI.Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x]) @@ -1116,7 +1116,7 @@ end function test_univariate_operator_with_no_second_order() f(x::Float64) = x^2 df(x::Float64) = 2 * x - model = MOI.Nonlinear.Model() + model = ArrayDiff.Model() MOI.Nonlinear.register_operator(model, :op_f, 1, f, df) x = MOI.VariableIndex(1) MOI.Nonlinear.add_constraint(model, :(op_f($x)), MOI.LessThan(2.0)) @@ -1130,7 +1130,7 @@ function test_univariate_operator_with_no_second_order() end function test_no_objective() - model = Nonlinear.Model() + model = ArrayDiff.Model() x = MOI.VariableIndex(1) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x]) MOI.initialize(evaluator, [:Grad]) @@ -1147,7 +1147,7 @@ function test_no_objective() end function test_x_power_1() - model = Nonlinear.Model() + model = ArrayDiff.Model() x = MOI.VariableIndex(1) MOI.Nonlinear.set_objective(model, :($x^1)) evaluator = Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x]) @@ -1160,7 +1160,7 @@ function test_x_power_1() end function test_variable_first_node_in_tape() - model = Nonlinear.Model() + model = ArrayDiff.Model() x = MOI.VariableIndex(1) expr = MOI.Nonlinear.add_expression(model, :($x)) MOI.Nonlinear.set_objective(model, :(sin($expr))) @@ -1173,7 +1173,7 @@ function test_variable_first_node_in_tape() end function test_subexpression_first_node_in_tape() - model = Nonlinear.Model() + model = ArrayDiff.Model() x = MOI.VariableIndex(1) expr = 
MOI.Nonlinear.add_expression(model, :($x)) expr2 = MOI.Nonlinear.add_expression(model, :($expr)) @@ -1187,7 +1187,7 @@ function test_subexpression_first_node_in_tape() end function test_parameter_in_hessian() - model = Nonlinear.Model() + model = ArrayDiff.Model() x = MOI.VariableIndex(1) p = MOI.Nonlinear.add_parameter(model, 3.0) MOI.Nonlinear.set_objective(model, :(sin($x + $p))) @@ -1213,7 +1213,7 @@ end function test_classify_linearity_ifelse() x = MOI.VariableIndex(1) y = MOI.VariableIndex(2) - model = MOI.Nonlinear.Model() + model = ArrayDiff.Model() MOI.Nonlinear.set_objective(model, :(ifelse($y, $x, 1))) evaluator = MOI.Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x, y]) MOI.initialize(evaluator, [:Grad, :Jac, :Hess]) @@ -1226,7 +1226,7 @@ end function test_classify_linearity_logic() x = MOI.VariableIndex(1) y = MOI.VariableIndex(2) - model = MOI.Nonlinear.Model() + model = ArrayDiff.Model() MOI.Nonlinear.set_objective(model, :($x && $y)) evaluator = MOI.Nonlinear.Evaluator(model, ArrayDiff.Mode(), [x, y]) MOI.initialize(evaluator, [:Grad, :Jac, :Hess]) @@ -1241,7 +1241,7 @@ end function test_hessian_sparsity_with_subexpressions() x = MOI.VariableIndex(1) y = MOI.VariableIndex(2) - model = MOI.Nonlinear.Model() + model = ArrayDiff.Model() expr = MOI.Nonlinear.add_expression(model, :($x * $y)) expr2 = MOI.Nonlinear.add_expression(model, :($expr)) MOI.Nonlinear.set_objective(model, :(sin($expr2))) @@ -1253,7 +1253,7 @@ end function test_toposort_subexpressions() x = MOI.VariableIndex(1) - model = MOI.Nonlinear.Model() + model = ArrayDiff.Model() a = MOI.Nonlinear.add_expression(model, :($x)) b = MOI.Nonlinear.add_expression(model, :($x)) c = MOI.Nonlinear.add_expression(model, :($a + $b)) @@ -1269,7 +1269,7 @@ function test_toposort_subexpressions() end function test_eval_user_defined_operator_ForwardDiff_gradient!() - model = MOI.Nonlinear.Model() + model = ArrayDiff.Model() x = MOI.VariableIndex.(1:4) p = MOI.Nonlinear.add_parameter(model, 2.0) ex = 
MOI.Nonlinear.add_expression(model, :($p * $(x[1]))) @@ -1296,7 +1296,7 @@ function test_eval_user_defined_operator_ForwardDiff_gradient!() end function test_eval_user_defined_operator_type_mismatch() - model = MOI.Nonlinear.Model() + model = ArrayDiff.Model() x = MOI.VariableIndex.(1:4) p = MOI.Nonlinear.add_parameter(model, 2.0) ex = MOI.Nonlinear.add_expression(model, :($p * $(x[1]))) @@ -1342,7 +1342,7 @@ function test_generate_hessian_slice_inner() end function test_hessian_reinterpret_unsafe() - model = MOI.Nonlinear.Model() + model = ArrayDiff.Model() x = MOI.VariableIndex.(1:5) MOI.Nonlinear.add_constraint( model,