
Add minsurf (#8)
tmigot authored Mar 10, 2024
1 parent 239fd7c commit 03813f2
Showing 18 changed files with 74,315 additions and 74,247 deletions.
18 changes: 9 additions & 9 deletions data/tetra.jl
@@ -1,16 +1,16 @@
xe_tetra = [
0 0 0
1 0 0
0 1 0
0 0 1
5//10 2//10 1//10
0 0 0
1 0 0
0 1 0
0 0 1
5//10 2//10 1//10
];

Tets_tetra = [
1 2 3 5
5 4 3 1
5 4 1 2
5 2 3 4
1 2 3 5
5 4 3 1
5 4 1 2
5 2 3 4
];

Tets_tetra = vec(reshape(Tets_tetra, 16, 1));
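Editorial aside (not part of the diff): vec(reshape(Tets_tetra, 16, 1)) flattens the 4x4 connectivity matrix column-major, so with E elements the k-th vertex of tetrahedron e lands at index e + (k - 1) * E, which is the indexing convention src/tetra.jl relies on (TETS[e], TETS[e + E], ...). A minimal sketch of the same idea with a hypothetical 2-element mesh:

# Hypothetical 2-tetrahedra connectivity (illustrative values only).
Tets = [1 2 3 5
        5 4 3 1]
E = size(Tets, 1)                    # number of elements
T = vec(reshape(Tets, 2 * 4, 1))     # column-major flattening, as in data/tetra.jl
# vertex k of element e is T[e + (k - 1) * E]
@assert [T[1 + (k - 1) * E] for k = 1:4] == [1, 2, 3, 5]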
50,636 changes: 25,318 additions & 25,318 deletions data/tetra_duct12.jl

Large diffs are not rendered by default.

24,626 changes: 12,313 additions & 12,313 deletions data/tetra_duct15.jl

Large diffs are not rendered by default.

11,712 changes: 5,856 additions & 5,856 deletions data/tetra_duct20.jl

Large diffs are not rendered by default.

14,464 changes: 7,232 additions & 7,232 deletions data/tetra_foam5.jl

Large diffs are not rendered by default.

9,176 changes: 4,588 additions & 4,588 deletions data/tetra_gear.jl

Large diffs are not rendered by default.

13,310 changes: 6,655 additions & 6,655 deletions data/tetra_hook.jl

Large diffs are not rendered by default.

14 changes: 7 additions & 7 deletions data/triangle.jl
@@ -1,14 +1,14 @@
xe = [
0 0
1 0
0 1
64//100 25//100
0 0
1 0
0 1
64//100 25//100
];

Tr = [
1 2 4
4 2 3
4 3 1
1 2 4
4 2 3
4 3 1
];

Tr = vec(reshape(Tr, 9, 1));
6,760 changes: 3,380 additions & 3,380 deletions data/triangle_deer.jl

Large diffs are not rendered by default.

4,102 changes: 2,051 additions & 2,051 deletions data/triangle_pacman.jl

Large diffs are not rendered by default.

13,360 changes: 6,680 additions & 6,680 deletions data/triangle_turtle.jl

Large diffs are not rendered by default.

2 changes: 1 addition & 1 deletion src/COPSBenchmark.jl
@@ -3,7 +3,6 @@ module COPSBenchmark
using Random
using JuMP


include("bearing.jl")
include("chain.jl")
include("camshape.jl")
@@ -14,6 +13,7 @@ include("gasoil.jl")
include("glider.jl")
include("marine.jl")
include("methanol.jl")
include("minsurf.jl")
include("pinene.jl")
include("polygon.jl")
include("robot.jl")
95 changes: 50 additions & 45 deletions src/chain.jl
@@ -11,49 +11,54 @@
# This file has been adapted from https://github.com/JuliaSmoothOptimizers/OptimizationProblems.jl

function chain_model(n::Int)
nh = max(2, div(n - 4, 4))

L = 4
a = 1
b = 3
tmin = b > a ? 1 / 4 : 3 / 4
tf = 1.0
h = tf / nh

nlp = Model()

@variable(nlp, u[k = 1:(nh + 1)], start = 4 * abs(b - a) * (k / nh - tmin))
@variable(nlp, x1[k = 1:(nh + 1)], start = 4 * abs(b - a) * k / nh * (1 / 2 * k / nh - tmin) + a)
@variable(
nlp,
x2[k = 1:(nh + 1)],
start =
(4 * abs(b - a) * k / nh * (1 / 2 * k / nh - tmin) + a) * (4 * abs(b - a) * (k / nh - tmin))
)
@variable(nlp, x3[k = 1:(nh + 1)], start = 4 * abs(b - a) * (k / nh - tmin))

@objective(nlp, Min, x2[nh + 1])

for j = 1:nh
@constraint(nlp, x1[j + 1] - x1[j] - 1 / 2 * h * (u[j] + u[j + 1]) == 0)
end
@constraint(nlp, x1[1] == a)
@constraint(nlp, x1[nh + 1] == b)
@constraint(nlp, x2[1] == 0)
@constraint(nlp, x3[1] == 0)
@constraint(nlp, x3[nh + 1] == L)

@constraint(
nlp,
[j = 1:nh],
x2[j + 1] - x2[j] - 1 / 2 * h * (x1[j] * sqrt(1 + u[j]^2) + x1[j + 1] * sqrt(1 + u[j + 1]^2)) ==
0
)
@constraint(
nlp,
[j = 1:nh],
x3[j + 1] - x3[j] - 1 / 2 * h * (sqrt(1 + u[j]^2) + sqrt(1 + u[j + 1]^2)) == 0
)

return nlp
nh = max(2, div(n - 4, 4))

L = 4
a = 1
b = 3
tmin = b > a ? 1 / 4 : 3 / 4
tf = 1.0
h = tf / nh

nlp = Model()

@variable(nlp, u[k = 1:(nh + 1)], start = 4 * abs(b - a) * (k / nh - tmin))
@variable(
nlp,
x1[k = 1:(nh + 1)],
start = 4 * abs(b - a) * k / nh * (1 / 2 * k / nh - tmin) + a
)
@variable(
nlp,
x2[k = 1:(nh + 1)],
start =
(4 * abs(b - a) * k / nh * (1 / 2 * k / nh - tmin) + a) *
(4 * abs(b - a) * (k / nh - tmin))
)
@variable(nlp, x3[k = 1:(nh + 1)], start = 4 * abs(b - a) * (k / nh - tmin))

@objective(nlp, Min, x2[nh + 1])

for j = 1:nh
@constraint(nlp, x1[j + 1] - x1[j] - 1 / 2 * h * (u[j] + u[j + 1]) == 0)
end
@constraint(nlp, x1[1] == a)
@constraint(nlp, x1[nh + 1] == b)
@constraint(nlp, x2[1] == 0)
@constraint(nlp, x3[1] == 0)
@constraint(nlp, x3[nh + 1] == L)

@constraint(
nlp,
[j = 1:nh],
x2[j + 1] - x2[j] -
1 / 2 * h * (x1[j] * sqrt(1 + u[j]^2) + x1[j + 1] * sqrt(1 + u[j + 1]^2)) == 0
)
@constraint(
nlp,
[j = 1:nh],
x3[j + 1] - x3[j] - 1 / 2 * h * (sqrt(1 + u[j]^2) + sqrt(1 + u[j + 1]^2)) == 0
)

return nlp
end
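Editorial aside (not part of the diff): the x2 and x3 constraints above apply the trapezoidal rule x[j+1] - x[j] = (h/2) * (f[j] + f[j+1]) to the potential-energy and arc-length integrands of the hanging chain. A quick, self-contained illustration of that rule on an arbitrary integrand, u(t) = t on [0, 1]:

# Trapezoidal-rule illustration with an arbitrary integrand (not from the commit).
nh = 100
h = 1.0 / nh
u = [k / nh for k = 0:nh]            # u(t) = t sampled at nh + 1 grid points
arclen = sum(h / 2 * (sqrt(1 + u[j]^2) + sqrt(1 + u[j + 1]^2)) for j = 1:nh)
# exact value is (sqrt(2) + asinh(1)) / 2 ≈ 1.1478
println(arclen)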
59 changes: 59 additions & 0 deletions src/minsurf.jl
@@ -0,0 +1,59 @@
# Minimal surface with obstacle problem

# Find the surface with minimal area, given boundary conditions,
# and above an obstacle.

# This is problem 17 in the COPS (Version 3) collection of
# E. Dolan and J. More'
# see "Benchmarking Optimization Software with COPS"
# Argonne National Labs Technical Report ANL/MCS-246 (2004)
# classification OBR2-AN-V-V

# This file has been adapted from https://github.com/JuliaSmoothOptimizers/OptimizationProblems.jl

function minsurf_model(nx::Int, ny::Int)
x_mesh = LinRange(0, 1, nx + 2) # coordinates of the mesh points x

v0 = zeros(nx + 2, ny + 2) # Surface matrix initialization
for i = 1:(nx + 2), j = 1:(ny + 2)
v0[i, j] = 1 - (2 * x_mesh[i] - 1)^2
end

hx = 1 / (nx + 1)
hy = 1 / (ny + 1)
area = 1 // 2 * hx * hy

nlp = Model()

@variable(nlp, v[i = 1:(nx + 2), j = 1:(ny + 2)], start = v0[i, j])

@objective(
nlp,
Min,
sum(
area *
(1 + ((v[i + 1, j] - v[i, j]) / hx)^2 + ((v[i, j + 1] - v[i, j]) / hy)^2)^(1 / 2) for
i = 1:(nx + 1), j = 1:(ny + 1)
) + sum(
area *
(1 + ((v[i - 1, j] - v[i, j]) / hx)^2 + ((v[i, j - 1] - v[i, j]) / hy)^2)^(1 / 2) for
i = 2:(nx + 2), j = 2:(ny + 2)
)
)

@constraint(nlp, [j = 0:(ny + 1)], v[1, j + 1] == 0)
@constraint(nlp, [j = 0:(ny + 1)], v[nx + 2, j + 1] == 0)
@constraint(nlp, [i = 0:(nx + 1)], v[i + 1, 1] == 1 - (2 * i * hx - 1)^2)
@constraint(nlp, [i = 0:(nx + 1)], v[i + 1, ny + 2] == 1 - (2 * i * hx - 1)^2)
@constraint(nlp, [i = 0:(nx + 1), j = 0:(ny + 1)], v[i + 1, j + 1] >= 0)
@constraint(
nlp,
[
i = Int(floor(0.25 / hx)):Int(ceil(0.75 / hx)),
j = Int(floor(0.25 / hy)):Int(ceil(0.75 / hy)),
],
v[i + 1, j + 1] >= 1
)

return nlp
end
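For context, a minimal usage sketch of the new model builder (editorial addition, not part of the commit). It assumes the Ipopt solver is installed and calls minsurf_model through the module, since no export statement appears in this diff; the mesh size is illustrative.

# Hedged usage sketch: build and solve the minimal-surface model (assumes Ipopt).
using JuMP, Ipopt
using COPSBenchmark

nx, ny = 50, 50                                  # illustrative mesh size
model = COPSBenchmark.minsurf_model(nx, ny)
set_optimizer(model, Ipopt.Optimizer)
optimize!(model)

println(termination_status(model))
println("approximate surface area: ", objective_value(model))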
32 changes: 16 additions & 16 deletions src/polygon.jl
@@ -9,23 +9,23 @@
# This file has been adapted from https://github.com/JuliaSmoothOptimizers/OptimizationProblems.jl

function polygon_model(n::Int)
nlp = Model()
N = div(n, 2)
@variable(nlp, 0 <= r[1:N] <= 1, start = 1)
@variable(nlp, 0 <= θ[i = 1:N] <= π, start = i * π / (N - 1) - π / (N - 1))
nlp = Model()
N = div(n, 2)
@variable(nlp, 0 <= r[1:N] <= 1, start = 1)
@variable(nlp, 0 <= θ[i = 1:N] <= π, start = i * π / (N - 1) - π / (N - 1))

# impose an order to the angles
@constraint(nlp, θ[N] == π)
@constraint(nlp, r[N] == 0)
for i = 1:(N - 1)
@constraint(nlp, θ[i + 1] - θ[i] >= 0.0)
end
for i = 1:(N - 1)
for j = (i + 1):N
@constraint(nlp, r[i]^2 + r[j]^2 - 2 * r[i] * r[j] * cos(θ[i] - θ[j]) - 1 <= 0)
# impose an order to the angles
@constraint(nlp, θ[N] == π)
@constraint(nlp, r[N] == 0)
for i = 1:(N - 1)
@constraint(nlp, θ[i + 1] - θ[i] >= 0.0)
end
for i = 1:(N - 1)
for j = (i + 1):N
@constraint(nlp, r[i]^2 + r[j]^2 - 2 * r[i] * r[j] * cos(θ[i] - θ[j]) - 1 <= 0)
end
end
end

@objective(nlp, Min, -0.5 * sum(r[i] * r[i + 1] * sin(θ[i + 1] - θ[i]) for i = 1:(N - 1)))
return nlp
@objective(nlp, Min, -0.5 * sum(r[i] * r[i + 1] * sin(θ[i + 1] - θ[i]) for i = 1:(N - 1)))
return nlp
end
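Editorial aside (not part of the diff): the pairwise constraints above keep every vertex-to-vertex distance at most 1 (unit diameter), using the law of cosines for vertices given in polar form. A quick check with arbitrary sample values (illustrative only) that the polar expression matches the Cartesian squared distance:

# Law-of-cosines check with arbitrary sample values (not from the commit).
r1, θ1 = 0.8, 0.3
r2, θ2 = 0.6, 1.2
d2_polar = r1^2 + r2^2 - 2 * r1 * r2 * cos(θ1 - θ2)
d2_cart = (r1 * cos(θ1) - r2 * cos(θ2))^2 + (r1 * sin(θ1) - r2 * sin(θ2))^2
println(d2_polar ≈ d2_cart)   # true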
115 changes: 58 additions & 57 deletions src/tetra.jl
@@ -11,72 +11,73 @@
include(joinpath("..", "data", "tetra.jl"))

function tetra_model(
x0 = xe_tetra,
TETS::Vector{Int64} = Tets_tetra,
Const::Vector{Int64} = Constants_tetra
x0 = xe_tetra,
TETS::Vector{Int64} = Tets_tetra,
Const::Vector{Int64} = Constants_tetra,
)
τ = 0.0
n = length(x0)
N = round(Int, n / 3)
E = round(Int, length(TETS) / 4)
τ = 0.0
n = length(x0)
N = round(Int, n / 3)
E = round(Int, length(TETS) / 4)

lvar = fill(-Inf, n)
lvar[Const] .= x0[Const]
lvar[Const .+ N] .= x0[Const .+ N]
lvar[Const .+ 2 * N] .= x0[Const .+ 2 * N]
lvar = fill(-Inf, n)
lvar[Const] .= x0[Const]
lvar[Const .+ N] .= x0[Const .+ N]
lvar[Const .+ 2 * N] .= x0[Const .+ 2 * N]

uvar = fill(Inf, n)
uvar[Const] .= x0[Const]
uvar[Const .+ N] .= x0[Const .+ N]
uvar[Const .+ 2 * N] .= x0[Const .+ 2 * N]
uvar = fill(Inf, n)
uvar[Const] .= x0[Const]
uvar[Const .+ N] .= x0[Const .+ N]
uvar[Const .+ 2 * N] .= x0[Const .+ 2 * N]

nlp = Model()
nlp = Model()

@variable(nlp, lvar[i] <= x[i = 1:n] <= uvar[i], start = x0[i])
@variable(nlp, lvar[i] <= x[i = 1:n] <= uvar[i], start = x0[i])

@objective(
nlp,
Min,
sum(
(sum(
(1 * x[TETS[e + E] + N * i] - x[TETS[e] + N * i])^2 +
(2 * x[TETS[e + 2 * E] + N * i] - x[TETS[e + E] + N * i] - x[TETS[e] + N * i])^2 / 3 +
(
3 * x[TETS[e + 3 * E] + N * i] - x[TETS[e + 2 * E] + N * i] - x[TETS[e + E] + N * i] -
x[TETS[e] + N * i]
)^2 / 6 for i = 0:2
)) / (
3 *
(sum(
(x[TETS[e + E] + N * i] - x[TETS[e] + N * i]) *
(
(x[TETS[e + 2 * E] + N * mod(i + 1, 3)] - x[TETS[e] + N * mod(i + 1, 3)]) *
(x[TETS[e + 3 * E] + N * mod(i - 1, 3)] - x[TETS[e] + N * mod(i - 1, 3)]) -
(x[TETS[e + 2 * E] + N * mod(i - 1, 3)] - x[TETS[e] + N * mod(i - 1, 3)]) *
(x[TETS[e + 3 * E] + N * mod(i + 1, 3)] - x[TETS[e] + N * mod(i + 1, 3)])
) *
sqrt(2) for i = 0:2
))^(2 / 3)
) for e = 1:E
@objective(
nlp,
Min,
sum(
(sum(
(1 * x[TETS[e + E] + N * i] - x[TETS[e] + N * i])^2 +
(2 * x[TETS[e + 2 * E] + N * i] - x[TETS[e + E] + N * i] - x[TETS[e] + N * i])^2 /
3 +
(
3 * x[TETS[e + 3 * E] + N * i] - x[TETS[e + 2 * E] + N * i] -
x[TETS[e + E] + N * i] - x[TETS[e] + N * i]
)^2 / 6 for i = 0:2
)) / (
3 *
(sum(
(x[TETS[e + E] + N * i] - x[TETS[e] + N * i]) *
(
(x[TETS[e + 2 * E] + N * mod(i + 1, 3)] - x[TETS[e] + N * mod(i + 1, 3)]) *
(x[TETS[e + 3 * E] + N * mod(i - 1, 3)] - x[TETS[e] + N * mod(i - 1, 3)]) -
(x[TETS[e + 2 * E] + N * mod(i - 1, 3)] - x[TETS[e] + N * mod(i - 1, 3)]) *
(x[TETS[e + 3 * E] + N * mod(i + 1, 3)] - x[TETS[e] + N * mod(i + 1, 3)])
) *
sqrt(2) for i = 0:2
))^(2 / 3)
) for e = 1:E
)
)
)

@constraint(
nlp,
[e in 1:E],
sum(
(x[TETS[e + E] + N * i] - x[TETS[e] + N * i]) *
(
(x[TETS[e + 2 * E] + N * mod(i + 1, 3)] - x[TETS[e] + N * mod(i + 1, 3)]) *
(x[TETS[e + 3 * E] + N * mod(i - 1, 3)] - x[TETS[e] + N * mod(i - 1, 3)]) -
(x[TETS[e + 2 * E] + N * mod(i - 1, 3)] - x[TETS[e] + N * mod(i - 1, 3)]) *
(x[TETS[e + 3 * E] + N * mod(i + 1, 3)] - x[TETS[e] + N * mod(i + 1, 3)])
) *
sqrt(2) for i = 0:2
) >= τ,
)
@constraint(
nlp,
[e in 1:E],
sum(
(x[TETS[e + E] + N * i] - x[TETS[e] + N * i]) *
(
(x[TETS[e + 2 * E] + N * mod(i + 1, 3)] - x[TETS[e] + N * mod(i + 1, 3)]) *
(x[TETS[e + 3 * E] + N * mod(i - 1, 3)] - x[TETS[e] + N * mod(i - 1, 3)]) -
(x[TETS[e + 2 * E] + N * mod(i - 1, 3)] - x[TETS[e] + N * mod(i - 1, 3)]) *
(x[TETS[e + 3 * E] + N * mod(i + 1, 3)] - x[TETS[e] + N * mod(i + 1, 3)])
) *
sqrt(2) for i = 0:2
) >= τ,
)

return nlp
return nlp
end

tetra_duct12_model() = tetra_model(xe_duct12, TETS_duct12, Const_duct12)
