diff --git a/Project.toml b/Project.toml
index a987f1f..b63824c 100644
--- a/Project.toml
+++ b/Project.toml
@@ -3,6 +3,13 @@ uuid = "2aa259a0-4d4c-11e9-0909-ad9b1ef308f5"
 authors = ["florian oswald "]
 version = "0.1.0"
 
+[deps]
+DataFrames = "a93c6f00-e57d-5684-b7b6-d8193f3e46c0"
+Ipopt = "b6b21f68-93f8-5de0-b562-5493be1d77c9"
+JuMP = "4076af6c-e467-56ae-b986-b466b2749572"
+LinearAlgebra = "37e2e46d-f89d-539d-b4ee-838fcccc9c8e"
+NLopt = "76087f3c-5699-56af-9a33-bf431cd00edd"
+
 [extras]
 Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40"
diff --git a/src/HWconstrained.jl b/src/HWconstrained.jl
index 3b6faa4..3bff6ae 100644
--- a/src/HWconstrained.jl
+++ b/src/HWconstrained.jl
@@ -1,117 +1,81 @@
 module HWconstrained
-greet() = print("Hello World!")
-
-	using JuMP, NLopt, DataFrames, Ipopt
-	using LinearAlgebra
-
-	export data, table_NLopt, table_JuMP
-
-	function data(a=0.5)
-
-		return Dict("a"=>a,"na"=>na,"nc"=>nc,"ns"=>ns,"nss"=>nss,"e"=>e,"p"=>p,"z"=>z,"pi"=>pi)
-	end
-
-	function max_JuMP(a=0.5)
-
-		return Dict("obj"=>objective_value(m),"c"=>value(c),"omegas"=>[value(omega[i]) for i in 1:length(omega)])
-	end
-
-	function table_JuMP()
-		d = DataFrame(a=[0.5;1.0;5.0],c = zeros(3),omega1=zeros(3),omega2=zeros(3),omega3=zeros(3),fval=zeros(3))
-		for i in 1:nrow(d)
-			xx = max_JuMP(d[i,:a])
-			d[i,:c] = xx["c"]
-			d[i,:omega1] = xx["omegas"][1]
-			d[i,:omega2] = xx["omegas"][2]
-			d[i,:omega3] = xx["omegas"][3]
-			d[i,:fval] = xx["obj"]
-		end
-		return d
-	end
-
-	function obj(x::Vector,grad::Vector,data::Dict)
-
-	end
-
-	function constr(x::Vector,grad::Vector,data::Dict)
-
-	end
-
-	function max_NLopt(a=0.5)
-
-	end
-
-	function table_NLopt()
-		d = DataFrame(a=[0.5;1.0;5.0],c = zeros(3),omega1=zeros(3),omega2=zeros(3),omega3=zeros(3),fval=zeros(3))
-		for i in 1:nrow(d)
-			xx = max_NLopt(d[i,:a])
-			for j in 2:ncol(d)-1
-				d[i,j] = xx[2][j-1]
-			end
-			d[i,end] = xx[1]
-		end
-		return d
-	end
+using JuMP, NLopt, DataFrames, Ipopt
+using LinearAlgebra
+
+export data, table_NLopt, table_JuMP
+
+### Question 1
+function data(a=0.5)
+    na  = 3                          # number of assets
+    nc  = 4                          # payoff states per risky asset
+    ns  = 4
+    nss = 16                         # joint states: nc * ns
+    p   = [1.0, 1.0, 1.0]            # asset prices
+    e   = [2.0, 0.0, 0.0]            # endowments
+    z2  = [0.72, 0.92, 1.12, 1.32]   # payoffs of asset 2
+    z3  = [0.86, 0.96, 1.06, 1.16]   # payoffs of asset 3
+    # asset 1 is riskless (payoff 1 in every state); build the 16x3 payoff matrix
+    z   = vcat([[1.0 i j] for i in z2 for j in z3]...)
+    pi  = repeat([1/16], nss)        # uniform probability over the 16 joint states
+    return Dict("a"=>a,"na"=>na,"nc"=>nc,"ns"=>ns,"nss"=>nss,"e"=>e,"p"=>p,"z"=>z,"pi"=>pi)
+end
+
+d = data()   # module-level instance, reused by the smoke-test calls below
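+
+### JuMP version. The diff above removes `max_JuMP`/`table_JuMP`, yet both the
+### export list and the test suite still call `table_JuMP`, so they are restored
+### here. The model below is a sketch in the JuMP 0.19-style API: the objective
+### and budget constraint mirror `obj`/`constr` further down, but the start
+### values and the `print_level=0` solver setting are our own choices.
+function max_JuMP(a=0.5)
+    d = data(a)
+    Z, pi, p, e = d["z"], d["pi"], d["p"], d["e"]
+    nss = length(pi)
+    m = Model(with_optimizer(Ipopt.Optimizer, print_level=0))
+    @variable(m, c >= 0.0, start = 1.0)          # consumption
+    @variable(m, omega[1:3], start = 0.0)        # portfolio holdings
+    # expected utility with u(x) = -exp(-a*x)
+    @NLobjective(m, Max,
+        -exp(-a * c) - sum(pi[s] * exp(-a * sum(Z[s, j] * omega[j] for j in 1:3)) for s in 1:nss))
+    # budget constraint: c + p'(omega - e) <= 0
+    @constraint(m, c + sum(p[j] * (omega[j] - e[j]) for j in 1:3) <= 0.0)
+    JuMP.optimize!(m)
+    return Dict("obj"=>objective_value(m),"c"=>value(c),"omegas"=>[value(omega[i]) for i in 1:length(omega)])
+end
+
+function table_JuMP()
+    d = DataFrame(a=[0.5;1.0;5.0],c = zeros(3),omega1=zeros(3),omega2=zeros(3),omega3=zeros(3),fval=zeros(3))
+    for i in 1:nrow(d)
+        xx = max_JuMP(d[i,:a])
+        d[i,:c] = xx["c"]
+        d[i,:omega1] = xx["omegas"][1]
+        d[i,:omega2] = xx["omegas"][2]
+        d[i,:omega3] = xx["omegas"][3]
+        d[i,:fval] = xx["obj"]
+    end
+    return d
+end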
+
+#### Question 2
+function obj(x::Vector,grad::Vector,data::Dict)
+    A  = data["a"]
+    Z  = data["z"]
+    pi = data["pi"]
+    if length(grad) > 0
+        grad[1] = A * exp(-A * x[1])        # marginal utility of consumption
+        ez = exp.(-A .* (Z * x[2:4]))       # state-by-state exp(-a * z_s' * omega)
+        for i in 1:3
+            grad[i+1] = sum(pi .* Z[:,i] .* A .* ez)
+        end
+    end
+    return -exp(-A * x[1]) + sum(pi .* -exp.(-A .* (Z * x[2:4])))
+end
+
+obj(ones(4), zeros(4), d)    # smoke test: one evaluation at an arbitrary point
+
+function constr(x::Vector,grad::Vector,data::Dict)
+    p = data["p"]
+    e = data["e"]
+    if length(grad) > 0
+        grad[1] = 1.0          # c + p'(omega - e) is linear in c ...
+        grad[2:end] = p        # ... and linear in omega
+    end
+    return x[1] + sum(p .* (x[2:end] .- e))
+end
+
+constr(ones(4), zeros(4), d) # smoke test
+
+function max_NLopt(a=0.5)
+    d = data(a)
+    e = d["e"]
+    optimum = Opt(:LD_MMA, 4)
+    lower_bounds!(optimum, [0., -Inf, -Inf, -Inf])   # consumption cannot be negative
+    max_objective!(optimum, (x, g) -> obj(x, g, d))
+    inequality_constraint!(optimum, (x, g) -> constr(x, g, d), 1e-8)
+    ftol_rel!(optimum, 1e-8)
+    NLopt.optimize(optimum, vcat(0.0, e))            # start at zero consumption, endowment portfolio
+end
+
+function table_NLopt()
+    d = DataFrame(a=[0.5;1.0;5.0],c = zeros(3),omega1=zeros(3),omega2=zeros(3),omega3=zeros(3),fval=zeros(3))
+    for i in 1:nrow(d)
+        xx = max_NLopt(d[i,:a])
+        for j in 2:ncol(d)-1
+            d[i,j] = xx[2][j-1]    # optimizer: c, omega1, omega2, omega3
+        end
+        d[i,end] = xx[1]           # objective value
+    end
+    return d
+end
+
 end # module
diff --git a/test/runtests.jl b/test/runtests.jl
index 1793ffc..ce20cf9 100644
--- a/test/runtests.jl
+++ b/test/runtests.jl
@@ -2,52 +2,55 @@ using HWconstrained
 using Test
 
 @testset "HWconstrained.jl" begin
-	@testset "testing components" begin
-
-		@testset "tests gradient of objective function" begin
-
-		end
-
-		@testset "tests gradient of constraint function" begin
-
-		end
-	end
-
-	@testset "testing result of both maximization methods" begin
-
-		truth = HWconstrained.DataFrame(a=[0.5;1.0;5.0],
-			c = [1.008;1.004;1.0008],
-			omega1=[-1.41237;-0.20618;0.758763],
-			omega2=[0.801455;0.400728;0.0801455],
-			omega3=[1.60291;0.801455;0.160291],
-			fval=[-1.20821;-0.732819;-0.013422])
-
-		@testset "checking result of NLopt maximization" begin
-
-			t1 = table_NLopt()
-			for c in names(truth)
-				@test all(maximum.(abs.(t1[c].-truth[c])) .< tol2)
-			end
-		end
-
-		@testset "checking result of NLopt maximization" begin
-			t1 = table_JuMP()
-			for c in names(truth)
-				@test all(maximum.(abs.(t1[c].-truth[c])) .< tol2)
-			end
-		end
-	end
+@testset "testing components" begin
+    d = data()
+    A = d["a"]
+    u(x) = -exp(-A * x)
+    grad = ones(4)
+
+    @testset "tests gradient of objective function" begin
+        # (a finite-difference sketch of this check appears at the bottom of this file)
+    end
+
+    @testset "tests gradient of constraint function" begin
+    end
+end
+
+@testset "testing result of both maximization methods" begin
+    truth = HWconstrained.DataFrame(a=[0.5;1.0;5.0],
+                c = [1.008;1.004;1.0008],
+                omega1=[-1.41237;-0.20618;0.758763],
+                omega2=[0.801455;0.400728;0.0801455],
+                omega3=[1.60291;0.801455;0.160291],
+                fval=[-1.20821;-0.732819;-0.013422])
+
+    tol2 = 1e-2
+
+    @testset "checking result of JuMP maximization" begin
+        t1 = table_JuMP()
+        for c in names(truth)
+            @test all(maximum.(abs.(t1[c] .- truth[c])) .< tol2)
+        end
+    end
+
+    @testset "checking result of NLopt maximization" begin
+        t1 = table_NLopt()
+        for c in names(truth)
+            @test all(maximum.(abs.(t1[c] .- truth[c])) .< tol2)
+        end
+    end
+end
 end
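+
+# A sketch of the finite-difference check the two empty gradient testsets above
+# could contain. `obj` and `constr` are not exported, hence the module prefix;
+# the test point, step size, and tolerance are arbitrary choices of ours.
+@testset "finite differences vs. analytic gradients (sketch)" begin
+    d = HWconstrained.data()
+    x = [1.0, 0.1, 0.2, 0.3]      # arbitrary point: (c, omega1, omega2, omega3)
+    h = 1e-6
+    for f in (HWconstrained.obj, HWconstrained.constr)
+        g = zeros(4)
+        f(x, g, d)                # fills g with the analytic gradient
+        for i in 1:4
+            xp = copy(x); xp[i] += h
+            xm = copy(x); xm[i] -= h
+            # central difference; empty grad vector skips the gradient branch
+            fd = (f(xp, Float64[], d) - f(xm, Float64[], d)) / (2h)
+            @test isapprox(g[i], fd; atol=1e-4)
+        end
+    end
+end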