Multivariate Covariance Estimate CIs

# -----------------------------------------------------------------------------------------------------------
# Script: 2ACE_Chol.R
# Fits a MV Cholesky ACE Decomposition
# Data File: Covariate Regressed Cross-trait Data.csv
# Phenotypes: Facial masculinity preference, rated facial masculinity, morphometric facial masculinity.
# Type of data: Raw continuous data
# ZYG: 1=MZF, 2=MZM, 3=DZF, 4=DZM
# Assumptions: means and variances can be equated across birth order & zygosity groups
# ------------------------------------------------------------------------------------------------------------
# Set working directory
setwd("~/Desktop/PhD/Twin Modelling/OpenMX/Cross-twin Masc Pref")
# Load Libraries
require(OpenMx)
require(psych)
source("~/Desktop/PhD/Twin Modelling/OpenMx/GenEpiHelperFunctions.R")
#data <- read.csv('Covariate Regressed Cross-trait Data.csv')
data <- read.csv('Covariate Regressed Non-Standardized Cross-trait Data.csv')
data$FaceP1 <- data$FacePref1
data$RMasc1 <- data$Ratedmasc1
data$MMasc1 <- data$Morphmasc1
data$FaceP2 <- data$FacePref2
data$RMasc2 <- data$Ratedmasc2
data$MMasc2 <- data$Morphmasc2
nv <- 3 # number of variables
nt <- 2 # number of twins/siblings
ntv <- nv*nt # total number of variables # 6
varnames <- c('FaceP','RMasc','MMasc')
selVars <- c('FaceP1', 'RMasc1', 'MMasc1',
             'FaceP2', 'RMasc2', 'MMasc2')
#FEMALES
mzdata <- subset(data, Zygosity==1, selVars)
dzdata <- subset(data, Zygosity==3, selVars)
describe(mzdata)
describe(dzdata)
# Create start values
Stmean <- colMeans(mzdata[,1:3], na.rm=TRUE)
# Create Labels for Lower Triangular Matrices
aLabs <- paste("a", do.call(c, sapply(seq(1, nv), function(x){ paste(x:nv, x,sep="") })), sep="")
cLabs <- paste("c", do.call(c, sapply(seq(1, nv), function(x){ paste(x:nv, x,sep="") })), sep="")
eLabs <- paste("e", do.call(c, sapply(seq(1, nv), function(x){ paste(x:nv, x,sep="") })), sep="")
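# e.g. aLabs comes out as "a11" "a21" "a31" "a22" "a32" "a33" (lower triangle, column-wise)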
# Create Labels for Column and Diagonal Matrices
mLabs <- paste("m",1:nv,sep="")
# Specify Cholesky ACE Decomposition
# -------------------------------------------------------------------------------------------------
# Matrices declared to store a, c, and e Path Coefficients
pathA <- mxMatrix( type="Lower", nrow=nv, ncol=nv, free=TRUE, values=.4, labels=aLabs, name="a" )
pathC <- mxMatrix( type="Lower", nrow=nv, ncol=nv, free=TRUE, values=.1, labels=cLabs, name="c" )
pathE <- mxMatrix( type="Lower", nrow=nv, ncol=nv, free=TRUE, values=.2, labels=eLabs, name="e" )
# Matrices generated to hold A, C, and E computed Variance Components
covA <- mxAlgebra( expression=a %*% t(a), name="A" )
covC <- mxAlgebra( expression=c %*% t(c), name="C" )
covE <- mxAlgebra( expression=e %*% t(e), name="E" )
# Algebra to compute total variances and standard deviations (diagonal only)
covP <- mxAlgebra( expression=A+C+E, name="V" )
matI <- mxMatrix( type="Iden", nrow=nv, ncol=nv, name="I")
invSD <- mxAlgebra( expression=solve(sqrt(I*V)), name="iSD")
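# iSD is a diagonal matrix holding 1/SD for each phenotype, used below to standardize covariances into correlations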
# Algebra to compute Correlations
invSDa<- mxAlgebra( expression=solve(sqrt(I*A)), name="iSDa")
invSDc<- mxAlgebra( expression=solve(sqrt(I*C)), name="iSDc")
invSDe<- mxAlgebra( expression=solve(sqrt(I*E)), name="iSDe")
Rph <- mxAlgebra( expression=iSD %&% V , name="Phcor")
Rg <- mxAlgebra( expression=iSDa %&% A , name="Acor")
Rc <- mxAlgebra( expression=iSDc %&% C , name="Ccor")
Re <- mxAlgebra( expression=iSDe %&% E , name="Ecor")
# Algebras to compute Standardized Variance Components
rowVars <- rep('v',nv)
colVars <- rep(c('A','C','E','h2','c2','e2'),each=nv)
estVars <- mxAlgebra( expression=cbind(A,C,E,A/V,C/V,E/V), name="est", dimnames=list(rowVars,colVars))
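# Column layout of est: 1-3 = A, 4-6 = C, 7-9 = E, 10-12 = h2 (A/V), 13-15 = c2 (C/V), 16-18 = e2 (E/V);
# the mxCI index strings below refer to these columns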
# Algebra for expected Mean and Variance/Covariance Matrices in MZ & DZ twins
MeanG <- mxMatrix( type="Full", nrow=1, ncol=ntv, free=TRUE, values=Stmean, labels=c(mLabs,mLabs), name="expMean" )
covMZ <- mxAlgebra( expression= rbind( cbind(A+C+E , A+C),
                                       cbind(A+C   , A+C+E)), name="expCovMZ" )
covDZ <- mxAlgebra( expression= rbind( cbind(A+C+E     , 0.5%x%A+C),
                                       cbind(0.5%x%A+C , A+C+E)), name="expCovDZ" )
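# DZ co-twins share on average half their segregating genes, hence the 0.5 %x% A term in the cross-twin blocks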
dataMZ <- mxData( observed=mzdata, type="raw" )
dataDZ <- mxData( observed=dzdata, type="raw" )
expfunctionMZ <- mxExpectationNormal("expCovMZ", "expMean", dimnames = selVars)
objMZ <- mxFitFunctionML()
expfunctionDZ <- mxExpectationNormal("expCovDZ", "expMean", dimnames = selVars)
objDZ <- mxFitFunctionML()
# Combine Groups
pars <- list( pathA, pathC, pathE, covA, covC, covE, covP, matI, invSD, invSDa, invSDc, invSDe, Rph, Rg, Rc, Re, estVars )
modelMZ <- mxModel( pars, covMZ, MeanG, dataMZ, expfunctionMZ, objMZ, name="MZ" )
modelDZ <- mxModel( pars, covDZ, MeanG, dataDZ, expfunctionDZ, objDZ, name="DZ" )
minus2ll <- mxAlgebra( MZ.objective + DZ.objective, name="minus2sumloglikelihood" )
obj <- mxFitFunctionAlgebra("minus2sumloglikelihood")
h2 <- mxCI (c ('MZ.est[1,10]', 'MZ.est[2,11]', 'MZ.est[3,12]') ) #h2
h2cov <- mxCI (c ('MZ.est[1,11]', 'MZ.est[1,12]', 'MZ.est[2,12]') ) #h2cov
c2 <- mxCI (c ('MZ.est[1,13]', 'MZ.est[2,14]', 'MZ.est[3,15]') ) #c2
c2cov <- mxCI (c ('MZ.est[1,14]', 'MZ.est[1,15]', 'MZ.est[2,15]') ) #c2cov
e2 <- mxCI (c ('MZ.est[1,16]', 'MZ.est[2,17]', 'MZ.est[3,18]') ) #e2
e2cov <- mxCI (c ('MZ.est[1,17]', 'MZ.est[1,18]', 'MZ.est[2,18]') ) #e2cov
pheno <- mxCI (c ('MZ.Phcor[1,2]','MZ.Phcor[1,3]', 'MZ.Phcor[2,3]') ) #rph
rg <- mxCI (c ('MZ.Acor[2,1]', 'MZ.Acor[3,1]', 'MZ.Acor[3,2]') ) # Rg
rc <- mxCI (c ('MZ.Ccor[2,1]', 'MZ.Ccor[3,1]', 'MZ.Ccor[3,2]') ) # Rc
re <- mxCI (c ('MZ.Ecor[2,1]', 'MZ.Ecor[3,1]', 'MZ.Ecor[3,2]') ) # Re
CIs <- list( h2, h2cov, c2, c2cov, e2, e2cov, pheno, rg, rc, re )
CholAceModel <- mxModel( "CholACE", pars, modelMZ, modelDZ, minus2ll, obj, CIs )
CholAceFit <- mxRun(CholAceModel, intervals = TRUE)
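summary(CholAceFit)  # prints parameter estimates and the requested confidence intervals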
Box constraints or NPSOL

Put lbounds and ubounds on all of your free parameters, such that you allow a wide range of values. For instance, if 0.4 is a reasonable start value for the elements of pathA, then you could set their lbounds and ubounds to, say, -10 and 10, respectively.

Alternatively, if you are running a build of OpenMx that includes NPSOL, stick
mxOption(NULL,"Default optimizer","NPSOL")
near the beginning of your script, and try re-running it.
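For instance, a minimal sketch of the bounds suggestion applied to the script above (the -10 and 10 bounds are just illustrative, not required values):

pathA <- mxMatrix( type="Lower", nrow=nv, ncol=nv, free=TRUE, values=.4,
                   labels=aLabs, lbound=-10, ubound=10, name="a" )
pathC <- mxMatrix( type="Lower", nrow=nv, ncol=nv, free=TRUE, values=.1,
                   labels=cLabs, lbound=-10, ubound=10, name="c" )
pathE <- mxMatrix( type="Lower", nrow=nv, ncol=nv, free=TRUE, values=.2,
                   labels=eLabs, lbound=-10, ubound=10, name="e" )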
In reply to Box constraints or NPSOL by AdminRobK
Thanks, Rob, that seems to
In reply to Thanks, Rob, that seems to by James Sherlock
In this case, I don't think
As a developer, I know that OpenMx internally represents the confidence-interval optimization problem differently when using NPSOL and SLSQP. From experience, I know that SLSQP can choke on optimization problems that NPSOL can successfully solve. Hence, my advice.
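If you are unsure which optimizer you are currently using, or whether your build includes NPSOL at all, a quick check (to the best of my knowledge, both calls are available in stock OpenMx):

mxOption(NULL, "Default optimizer")  # returns the name of the current default optimizer
imxHasNPSOL()                        # TRUE if this build was compiled with NPSOL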
In reply to Thanks, Rob, that seems to by James Sherlock
code -1
It would really be nice to debug this so we know the true cause.