#########################################################################
### Optimisation algorithms in Statistics I                           ###
### PhD course 2020                                                   ###
### D-optimal design for quadratic regression with x in [0,1.2]       ###
### and three different x-values, see lecture notes of Lecture 4      ###
### Frank Miller, 2020-11-06                                          ###
#########################################################################

g <- function(y){
  # vector y consists of (x1, x2, x3, w1, w2)
  x <- y[1:3]
  # using transformation method for the equality constraint w1+w2+w3 = 1
  w <- c(y[4:5], 1-sum(y[4:5]))
  f1 <- c(1, x[1], x[1]^2)
  f2 <- c(1, x[2], x[2]^2)
  f3 <- c(1, x[3], x[3]^2)
  # determinant of the information matrix M = sum_i w_i * f(x_i) %*% t(f(x_i))
  det( w[1]*f1%*%t(f1) + w[2]*f2%*%t(f2) + w[3]*f3%*%t(f3) )
}

# Matrix U and vector d for the linear constraints U %*% y - d >= 0:
# rows 1-6 encode 0 <= x_i <= 1.2, rows 7-8 encode w1, w2 >= 0,
# row 9 encodes w1 + w2 <= 1
U <- matrix(0, nrow=9, ncol=5)
U[1,1] <- U[3,2] <- U[5,3] <- U[7,4] <- U[8,5] <- 1
U[2,1] <- U[4,2] <- U[6,3] <- U[9,4] <- U[9,5] <- -1
d <- c(rep(c(0, -1.2), 3), 0, 0, -1)

# Starting value needs to be in the interior of the feasible set
startv <- c(0.2, 0.3, 0.4, 0.2, 0.2)

# Nelder-Mead as inner optimisation method; fnscale=-1 turns the
# default minimisation into maximisation of g:
res <- constrOptim(startv, f=g, grad=NULL, ui=U, ci=d, control=list(fnscale=-1))
round(res$par, 3)

# Further information:
res
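
# Sanity check (added sketch, not part of the original script): constrOptim()
# requires the starting value to be strictly feasible, i.e. U %*% startv - d > 0
# in every component. This uses only the objects U, d and startv defined above.
all(U %*% startv - d > 0)  # should be TRUE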
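
# Recovering the full design (added sketch): the third weight was eliminated
# by the transformation w3 = 1 - w1 - w2, so it has to be reconstructed from
# the solution. The names xstar and wstar are illustrative, not from the
# original script.
xstar <- res$par[1:3]
wstar <- c(res$par[4:5], 1 - sum(res$par[4:5]))
rbind(x = round(xstar, 3), w = round(wstar, 3))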
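
# Comparison with theory (added sketch): for quadratic regression on [a,b],
# the D-optimal design is known to put weight 1/3 on each of a, (a+b)/2 and b,
# i.e. here x = (0, 0.6, 1.2) with equal weights. The name yopt is illustrative.
yopt <- c(0, 0.6, 1.2, 1/3, 1/3)   # (x1, x2, x3, w1, w2) at the known optimum
g(yopt)      # criterion value at the theoretical optimum
res$value    # should agree with g(yopt) up to Nelder-Mead tolerance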