-
Notifications
You must be signed in to change notification settings - Fork 48
Expand file tree
/
Copy pathruntests.jl
More file actions
76 lines (64 loc) · 2.52 KB
/
runtests.jl
File metadata and controls
76 lines (64 loc) · 2.52 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
# Entry point for the OptimizationProblems.jl test suite.
using NLPModels, NLPModelsJuMP, OptimizationProblems, Test
# The ADNLPProblems submodule should not export any problem constructors;
# its only exported name is the module itself.
@test names(OptimizationProblems.ADNLPProblems) == [:ADNLPProblems]
import ADNLPModels
# Default number of variables used when instantiating scalable problems below.
ndef = OptimizationProblems.ADNLPProblems.default_nvar
# Test that every problem can be instantiated, and that the PureJuMP (JuMP)
# and ADNLPProblems (ADNLPModels) formulations of each problem agree on
# dimensions, bounds, starting point, objective values and constraint values.
for prob in names(OptimizationProblems.PureJuMP)
  prob == :PureJuMP && continue
  println(prob)
  # Look the constructor up by symbol instead of eval(Meta.parse(...)):
  # getfield resolves the binding directly and avoids evaluating built strings.
  prob_fn = getfield(OptimizationProblems.PureJuMP, prob)
  model = prob_fn(ndef)
  prob == :hs61 && continue # NLPModelsJuMP cannot convert hs61, see https://github.com/JuliaSmoothOptimizers/NLPModelsJuMP.jl/issues/84
  prob in [:clplatea, :clplateb, :clplatec, :fminsrf2] && continue # issue because variable is a matrix
  nlp_jump = MathOptNLPModel(model)
  nlp_ad = getfield(OptimizationProblems.ADNLPProblems, prob)()
  # Both formulations must describe the same problem.
  @test nlp_jump.meta.nvar == nlp_ad.meta.nvar
  @test nlp_jump.meta.x0 == nlp_ad.meta.x0
  @test nlp_jump.meta.ncon == nlp_ad.meta.ncon
  @test nlp_jump.meta.lvar == nlp_ad.meta.lvar
  @test nlp_jump.meta.uvar == nlp_ad.meta.uvar
  # Evaluate both models at two random perturbations of the starting point.
  x1 = nlp_ad.meta.x0 + rand(nlp_ad.meta.nvar) / 10
  x2 = nlp_ad.meta.x0 + rand(nlp_ad.meta.nvar) / 10
  # Scale the absolute tolerance by the objective magnitude at x0.
  n0 = max(abs(obj(nlp_ad, nlp_ad.meta.x0)), 1)
  @test isapprox(obj(nlp_ad, x1), obj(nlp_jump, x1), atol = 1e-14 * n0)
  @test isapprox(obj(nlp_ad, x2), obj(nlp_jump, x2), atol = 1e-14 * n0)
  if nlp_ad.meta.ncon > 0
    for xj in [x1, x2]
      vioad = cons(nlp_ad, xj)
      vioju = cons(nlp_jump, xj)
      @test nlp_ad.meta.lcon == nlp_jump.meta.lcon
      @test nlp_ad.meta.ucon == nlp_jump.meta.ucon
      @test isapprox(vioad, vioju)
    end
  end
end
# For every ADNLPProblems problem: check the meta accessors (when they exist)
# and verify the model can be instantiated in each supported float type.
for prob in names(OptimizationProblems.ADNLPProblems)
  prob == :ADNLPProblems && continue
  prob in [:clplatea, :clplateb, :clplatec, :fminsrf2] && continue # issue because variable is a matrix
  println(prob)
  try
    # Resolve get_<prob>_meta and <prob>_meta by symbol lookup instead of
    # eval(Meta.parse(...)) on a built string.
    n, m = getfield(OptimizationProblems.ADNLPProblems, Symbol("get_", prob, "_meta"))(n = 2 * ndef)
    meta_pb = getfield(OptimizationProblems.ADNLPProblems, Symbol(prob, "_meta"))
    # A variable-size problem must scale with n; a fixed-size one must not.
    if meta_pb[:variable_size]
      @test n != meta_pb[:nvar]
    else
      @test n == meta_pb[:nvar]
    end
    if meta_pb[:variable_con_size]
      @test m != meta_pb[:ncon]
    else
      @test m == meta_pb[:ncon]
    end
  catch
    # Deliberate best-effort: prob_meta and get_prob_meta are not defined for
    # all problems yet, so a missing binding is silently skipped here.
  end
  # Each problem must be instantiable in every supported floating-point type,
  # with x0, the objective value and the constraint values all in that type.
  for T in [Float32, Float64]
    nlp = getfield(OptimizationProblems.ADNLPProblems, prob)(type = Val(T))
    x0 = nlp.meta.x0
    @test eltype(x0) == T
    @test typeof(obj(nlp, x0)) == T
    if nlp.meta.ncon > 0
      @test eltype(cons(nlp, x0)) == T
    end
  end
end