Skip to content
Open
Show file tree
Hide file tree
Changes from 2 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
10 changes: 5 additions & 5 deletions docs/src/tutorial.md
Original file line number Diff line number Diff line change
Expand Up @@ -23,15 +23,15 @@ Let's define the famous Rosenbrock function
```math
f(x) = (x_1 - 1)^2 + 100(x_2 - x_1^2)^2,
```
with starting point ``x^0 = (-1.2,1.0)``.
with starting point ``x_0 = (-1.2, 1.0)``.

```@example jumpnlp
using NLPModels, NLPModelsJuMP, JuMP

x0 = [-1.2; 1.0]
model = Model() # No solver is required
@variable(model, x[i=1:2], start=x0[i])
@NLobjective(model, Min, (x[1] - 1)^2 + 100 * (x[2] - x[1]^2)^2)
@objective(model, Min, (x[1] - 1)^2 + 100 * (x[2] - x[1]^2)^2)

nlp = MathOptNLPModel(model)
```
Expand Down Expand Up @@ -148,9 +148,9 @@ using NLPModels, NLPModelsJuMP, JuMP
model = Model()
x0 = [-1.2; 1.0]
@variable(model, x[i=1:2] >= 0.0, start=x0[i])
@NLobjective(model, Min, (x[1] - 1)^2 + 100 * (x[2] - x[1]^2)^2)
@objective(model, Min, (x[1] - 1)^2 + 100 * (x[2] - x[1]^2)^2)
@constraint(model, x[1] + x[2] == 3.0)
@NLconstraint(model, x[1] * x[2] >= 1.0)
@constraint(model, x[1] * x[2] >= 1.0)

nlp = MathOptNLPModel(model)

Expand Down Expand Up @@ -184,7 +184,7 @@ using NLPModels, NLPModelsJuMP, JuMP
model = Model()
x0 = [-1.2; 1.0]
@variable(model, x[i=1:2], start=x0[i])
@NLexpression(model, F1, x[1] - 1)
@expression(model, F1, x[1] - 1)
@NLexpression(model, F2, 10 * (x[2] - x[1]^2))

nls = MathOptNLSModel(model, [F1, F2], name="rosen-nls")
Expand Down
4 changes: 2 additions & 2 deletions src/MOI_wrapper.jl
Original file line number Diff line number Diff line change
Expand Up @@ -92,9 +92,9 @@ function MOI.copy_to(dest::Optimizer, src::MOI.ModelLike)
"No solver specified, use for instance `using Percival; JuMP.set_attribute(model, \"solver\", PercivalSolver)`",
)
end
dest.nlp, index_map = nlp_model(src)
dest.nlp = nlp_model(src)
dest.solver = dest.options["solver"](dest.nlp)
return index_map
return dest
Copy link
Copy Markdown
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

MOI.copy_to should return the index_map, per the MathOptInterface API contract.

Copy link
Copy Markdown
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

This is what is making the tests fail

Copy link
Copy Markdown
Member Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The index_map was just a dummy dict mapping VariableRef(i) => VariableRef(i).
Why do you need it?
Is it because of bridges?

Comment thread
amontoison marked this conversation as resolved.
Outdated
end

function MOI.optimize!(model::Optimizer)
Expand Down
33 changes: 25 additions & 8 deletions src/moi_nlp_model.jl
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,8 @@ export MathOptNLPModel
mutable struct MathOptNLPModel <: AbstractNLPModel{Float64, Vector{Float64}}
meta::NLPModelMeta{Float64, Vector{Float64}}
eval::MOI.Nonlinear.Evaluator
jump_variables::Dict{String,Int}
jump_constraints::Dict{String,Int}
lincon::LinearConstraints
quadcon::QuadraticConstraints
nlcon::NonLinearStructure
Expand All @@ -26,15 +28,14 @@ function MathOptNLPModel(jmodel::JuMP.Model; kws...)
end

function MathOptNLPModel(moimodel::MOI.ModelLike; kws...)
return nlp_model(moimodel; kws...)[1]
return nlp_model(moimodel; kws...)
end

function nlp_model(moimodel::MOI.ModelLike; hessian::Bool = true, name::String = "Generic")
index_map, nvar, lvar, uvar, x0 = parser_variables(moimodel)
nlin, lincon, lin_lcon, lin_ucon, quadcon, quad_lcon, quad_ucon =
parser_MOI(moimodel, index_map, nvar)
jump_variables, variables, nvar, lvar, uvar, x0 = parser_variables(moimodel)
nlin, lincon, lin_lcon, lin_ucon, quadcon, quad_lcon, quad_ucon, jump_constraints_linear, jump_constraints_quadratic, valid_label = parser_MOI(moimodel, variables)

nlp_data = _nlp_block(moimodel)
nlp_data, valid_label2, jump_constraints_nonlinear = _nlp_block(moimodel)
nlcon = parser_NL(nlp_data, hessian = hessian)
oracles = parser_oracles(moimodel)
counters = Counters()
Expand All @@ -44,7 +45,7 @@ function nlp_model(moimodel::MOI.ModelLike; hessian::Bool = true, name::String =
if nlp_data.has_objective
obj = Objective("NONLINEAR", 0.0, spzeros(Float64, nvar), COO(), 0)
else
obj = parser_objective_MOI(moimodel, nvar, index_map)
obj = parser_objective_MOI(moimodel, variables)
end

# Total counts
Expand Down Expand Up @@ -74,9 +75,26 @@ function nlp_model(moimodel::MOI.ModelLike; hessian::Bool = true, name::String =
hess_available = hessian && oracles.hessian_oracles_supported,
)

# Label of the constraints
jump_constraints = Dict{String, Int}()
if valid_label && valid_label2 && (oracles.ncon == 0)
sizehint!(jump_constraints, ncon)
for (key, val) in jump_constraints_linear
jump_constraints[key] = val
end
for (key, val) in jump_constraints_quadratic
jump_constraints[key] = val + nlin
end
for (key, val) in jump_constraints_nonlinear
jump_constraints[key] = val + nlin + quadcon.nquad
end
end

return MathOptNLPModel(
meta,
nlp_data.evaluator,
jump_variables,
jump_constraints,
lincon,
quadcon,
nlcon,
Expand All @@ -85,8 +103,7 @@ function nlp_model(moimodel::MOI.ModelLike; hessian::Bool = true, name::String =
hv,
obj,
counters,
),
index_map
)
end

function NLPModels.obj(nlp::MathOptNLPModel, x::AbstractVector)
Expand Down
30 changes: 24 additions & 6 deletions src/moi_nls_model.jl
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,8 @@ mutable struct MathOptNLSModel <: AbstractNLSModel{Float64, Vector{Float64}}
nls_meta::NLSMeta{Float64, Vector{Float64}}
Feval::MOI.Nonlinear.Evaluator
ceval::MOI.Nonlinear.Evaluator
jump_variables::Dict{String,Int}
jump_constraints::Dict{String,Int}
lls::Objective
linequ::LinearEquations
nlequ::NonLinearStructure
Expand All @@ -26,17 +28,16 @@ Construct a `MathOptNLSModel` from a `JuMP` model and a container of JuMP
"""
function MathOptNLSModel(cmodel::JuMP.Model, F; hessian::Bool = true, name::String = "Generic")
moimodel = backend(cmodel)
index_map, nvar, lvar, uvar, x0 = parser_variables(moimodel)
jump_variables, variables, nvar, lvar, uvar, x0 = parser_variables(moimodel)

lls, linequ, nlinequ = parser_linear_expression(cmodel, nvar, index_map, F)
Feval, nlequ, nnlnequ = parser_nonlinear_expression(cmodel, nvar, F, hessian = hessian)
lls, linequ, nlinequ = parser_linear_expression(cmodel, variables, F)
Feval, nlequ, nnlnequ = parser_nonlinear_expression(cmodel, variables, F, hessian = hessian)

_nlp_sync!(cmodel)
moimodel = backend(cmodel)
nlin, lincon, lin_lcon, lin_ucon, quadcon, quad_lcon, quad_ucon =
parser_MOI(moimodel, index_map, nvar)
nlin, lincon, lin_lcon, lin_ucon, quadcon, quad_lcon, quad_ucon, jump_constraints_linear, jump_constraints_quadratic, valid_label = parser_MOI(moimodel, variables)

nlp_data = _nlp_block(moimodel)
nlp_data, valid_label2, jump_constraints_nonlinear = _nlp_block(moimodel)
nlcon = parser_NL(nlp_data, hessian = hessian)
oracles = parser_oracles(moimodel)
nls_counters = NLSCounters()
Expand Down Expand Up @@ -75,11 +76,28 @@ function MathOptNLSModel(cmodel::JuMP.Model, F; hessian::Bool = true, name::Stri

nls_meta = NLSMeta(nequ, nvar, nnzj = Fnnzj, nnzh = Fnnzh, lin = collect(1:nlinequ))

# Label of the constraints
jump_constraints = Dict{String, Int}()
if valid_label && valid_label2 && (oracles.ncon == 0)
sizehint!(jump_constraints, ncon)
for (key, val) in jump_constraints_linear
jump_constraints[key] = val
end
for (key, val) in jump_constraints_quadratic
jump_constraints[key] = val + nlin
end
for (key, val) in jump_constraints_nonlinear
jump_constraints[key] = val + nlin + quadcon.nquad
end
end

return MathOptNLSModel(
meta,
nls_meta,
Feval,
nlp_data.evaluator,
jump_variables,
jump_constraints,
lls,
linequ,
nlequ,
Expand Down
Loading
Loading