======Simulation study ======

===== Algorithm =====

<code R>
#################################################################################################
#
# PAPER COMPANION: SIMULATION STUDY
# Authors: Ernesto Jardim <ernesto@ipimar.pt>
#          Paulo Ribeiro Jr. <paulojus@ufpr.br>
# Date: 20081013
# Objective: Test the paper methodology against design-based estimators regarding the
#            association between age compositions and the age aggregated abundance.
#            Three options are simulated: (i) no association, (ii) weak association,
#            (iii) strong association.
# Note: The association is induced by considering correlated multivariate gaussian
#       processes as the basis for building the age aggregated abundance and age
#       compositions.
#
#################################################################################################
  
# R libraries
library(geoR)
library(lattice)
library(MASS)
library(RandomFields)
library(compositions)
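# (geoR provides grf, likfit and as.geodata; MASS provides mvrnorm; compositions
# provides acomp and the compositional mean; lattice and RandomFields are loaded as
# supporting packages and are not called directly below)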
  
#==============================================================================================
# SIMULATING THE PROCESSES
#==============================================================================================
# grid
gs <- expand.grid((0:40)/40, (0:40)/40)
# size of processes
n <- nrow(gs)
# number of replicates
nsim <- 250

#----------------------------------------------------------------------------------------------
# STEP 1: gaussian processes to build abundance and compositions
#----------------------------------------------------------------------------------------------

# object
arr0 <- array(NA, dim=c(n,7,nsim), dimnames=list(loc=1:n, age=c("all","1i","2i","1w","2w","1s","2s"), nsim=1:nsim))
# variance-covariance matrix
s2 <- 0.5
Sig <- diag(c(1,1,1,1,1,1,1))
Sig[1,4] <- 0.45; Sig[4,1] <- 0.45; Sig[1,6] <- 0.9; Sig[6,1] <- 0.9; Sig[6,4] <- 0.5; Sig[4,6] <- 0.5
Sig <- Sig*s2
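# note on Sig: before scaling by s2 the diagonal is 1, so the off-diagonal entries are
# correlations: the "all" process is correlated 0.45 with "1w" and 0.90 with "1s"
# (and "1w" with "1s" at 0.50); these are the associations behind the scenario labels
# "dep045" and "dep090" used below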
# simulation
set.seed(111)
for(i in 1:nsim){
  arr0[,,i] <- mvrnorm(nrow(gs), c(1,0,0,0,0,0,0), Sig)
}

#----------------------------------------------------------------------------------------------
# STEP 2: generate 250 replicates of a log-gaussian spatial process (Diggle and Ribeiro Jr, 2007)
#         with mu=1, phi=0.2, sigma2=0.5, tau2=0.5
#----------------------------------------------------------------------------------------------

# generate a spatial gaussian process Z
phi <- 0.2
sigmasq <- 0.5
set.seed(222)
Z <- grf(grid=gs, cov.pars=c(sigmasq, phi), nsim=nsim)
# build a log gaussian process Y
Ysim <- Z
Ysim$data <- exp(Z$data+arr0[,1,])
# Y characteristics
Ysim.lmean <- apply(log(Ysim$data), 2, mean)
Ysim.lvar <- apply(log(Ysim$data), 2, var)
Ysim.lnhat <- exp(Ysim.lmean+Ysim.lvar/2)
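# note: exp(mean + var/2) back-transforms a log scale mean and variance into the mean
# of the corresponding lognormal distribution, so Ysim.lnhat is the lognormal mean of
# the age aggregated abundance for each replicate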
  
#----------------------------------------------------------------------------------------------
# STEP 3: build compositions
#----------------------------------------------------------------------------------------------

# objects
arr00 <- array(1, dim=c(n,3,nsim), dimnames=list(loc=1:n, age=1:3, nsim=1:nsim))
Psim <- array(NA, dim=c(n,3,nsim,3), dimnames=list(loc=1:n, age=1:3, nsim=1:nsim, mucor=c("indep", "dep045","dep090")))
# option 1: no association between compositions and age aggregated abundance
arr00[,1:2,] <- exp(arr0[,2:3,])
Psim[,,,1] <- aperm(apply(arr00, c(1,3), function(x) x/sum(x)), c(2,1,3))
# option 2: weak association between compositions and age aggregated abundance
arr00[,1:2,] <- exp(arr0[,4:5,])
Psim[,,,2] <- aperm(apply(arr00, c(1,3), function(x) x/sum(x)), c(2,1,3))
# option 3: strong association between compositions and age aggregated abundance
arr00[,1:2,] <- exp(arr0[,6:7,])
Psim[,,,3] <- aperm(apply(arr00, c(1,3), function(x) x/sum(x)), c(2,1,3))
# P characteristics
  
#----------------------------------------------------------------------------------------------
# STEP 4: build abundance at age
#----------------------------------------------------------------------------------------------

# objects
Isim.ln <- array(NA, dim=c(n,3,nsim,3), dimnames=list(loc=1:n, age=1:3, nsim=1:nsim, mucor=c("indep", "dep045","dep090")))
# sim
for(i in 1:3){
  for(j in 1:3){
    Isim.ln[,i,,j] <- Psim[,i,,j]*Ysim$data
  }
}
# I characteristics
Isim.lnmean <- apply(log(Isim.ln), c(2,3,4), mean)
Isim.lnvar <- apply(log(Isim.ln), c(2,3,4), var)
Isim.lnhat <- exp(Isim.lnmean+Isim.lnvar/2)

#==============================================================================================
# ESTIMATION
#==============================================================================================
# number of samples to be drawn from each of the 250 replicates
ns <- 2

#----------------------------------------------------------------------------------------------
# STEP 5: estimation with the methodology proposed by the paper
#----------------------------------------------------------------------------------------------
  
# objects
Yres <- array(NA, dim=c(ns,4,nsim), dimnames=list(samp=1:ns, stat=c("Ybar", "Ybarvar", "Yhat", "Yhatvar"), nsim=1:nsim))
Ihat <- array(NA, dim=c(ns,3,nsim,3), dimnames=list(samp=1:ns, age=1:3, nsim=1:nsim, mucor=c("indep", "dep045","dep090")))
xd <- expand.grid(dimnames(Ihat)[-c(1,2)])
# estimation
set.seed(222)
for(j in 1:ns){
  cat("\n", j, ":", date(), "-", sep="")
  samp <- sample(1:n, 100)
  for(i in 1:nrow(xd)){
    x <- xd[i,]
    cat(".", sep="")
    # building the sample
    Isamp <- Isim.ln[samp,,x[[1]],x[[2]]]
    locsamp <- gs[samp,]
    Ysamp <- apply(Isamp, 1, sum)
    # estimation of age aggregated abundance
    lf <- likfit(as.geodata(cbind(locsamp, Ysamp)), lambda=0, ini.cov.pars=c(1,0.2), messages=FALSE)
    Yhat <- exp(lf$beta+lf$tausq/2+lf$sigmasq/2)
    Yhatvar <- exp(2*lf$beta+lf$tausq+lf$sigmasq)*(exp(lf$tausq+lf$sigmasq)-1)
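    # back-transform from the log scale: if log(Y) ~ N(m, v) with m = lf$beta and
    # v = lf$tausq + lf$sigmasq, then E[Y] = exp(m + v/2) and
    # Var[Y] = exp(2m + v)*(exp(v) - 1), which are the Yhat and Yhatvar above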
    Yres[j,c("Yhat", "Yhatvar"),x[[1]]] <- c(Yhat, Yhatvar)
    # estimation of abundance-at-age
    prop <- acomp(Isamp)
    Ihat[j,,x[[1]],x[[2]]] <- Yhat*c(mean(prop))
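    # mean() of the acomp object is the compositional (closed geometric) mean of the
    # sampled compositions, so multiplying it by Yhat splits the estimated aggregated
    # abundance into abundance-at-age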
 +
 +
 + 
 +#---------------------------------------------------------------------------------------------- 
 +# STEP 6: estimation with design-based estimators 
 +#----------------------------------------------------------------------------------------------
  
# objects
Ibar <- array(NA, dim=c(ns,3,nsim,3), dimnames=list(samp=1:ns, age=1:3, nsim=1:nsim, mucor=c("indep", "dep045","dep090")))
# estimation
set.seed(222)
for(j in 1:ns){
  cat("\n", j, ":", date(), "-", sep="")
  samp <- sample(1:n, 100)
  for(i in 1:nrow(xd)){
    x <- xd[i,]
    cat(".", sep="")
    # building the sample
    Isamp <- Isim.ln[samp,,x[[1]],x[[2]]]
    Ysamp <- apply(Isamp, 1, sum)
    # store means
    Yres[j,c("Ybar", "Ybarvar"),x[[1]]] <- c(mean(Ysamp), var(Ysamp))
    # estimation of abundance-at-age
    Ibar[j,,x[[1]],x[[2]]] <- apply(Isamp, 2, mean)
  }
}
  
#==============================================================================================
# RESULTS
#==============================================================================================

#----------------------------------------------------------------------------------------------
# STEP 7: summary statistics
#----------------------------------------------------------------------------------------------

# objects
sim.res <- array(NA, dim=c(nsim,3,3,2,3), dimnames=list(iter=1:nsim, age=1:3, mucor=c("indep", "dep045","dep090"), meth=c("geo","samp"), stats=c("bias", "mse", "ICcov")))
# computation
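# for each replicate, the estimates (Ihat from the geostatistical method, Ibar from the
# design-based one) are compared with the reference values in Isim.lnhat: the mean and
# the variance of the deviations across the ns samples are stored as "bias" and "mse",
# and "ICcov" records whether the reference value lies between the 2.5% and 97.5%
# quantiles of the estimates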
for(i in 1:nsim){
  lmusim <- Isim.lnhat[,i,]
  # geo
  m <- Ihat[,,i,]
  q025 <- apply(m, c(2,3), quantile, probs=0.025, na.rm=TRUE)
  q975 <- apply(m, c(2,3), quantile, probs=0.975, na.rm=TRUE)
  sim.res[i,,,"geo","ICcov"] <- q025<=lmusim & q975>=lmusim
  for(j in 1:ns){
    m[j,,] <- m[j,,]-lmusim
  }
  sim.res[i,,,"geo","bias"] <- apply(m, c(2,3), mean)
  sim.res[i,,,"geo","mse"] <- apply(m, c(2,3), var)
  
  # samp
  m <- Ibar[,,i,]
  q025 <- apply(m, c(2,3), quantile, probs=0.025, na.rm=TRUE)
  q975 <- apply(m, c(2,3), quantile, probs=0.975, na.rm=TRUE)
  sim.res[i,,,"samp","ICcov"] <- q025<=lmusim & q975>=lmusim
  for(j in 1:ns){
    m[j,,] <- m[j,,]-lmusim
  }
  sim.res[i,,,"samp","bias"] <- apply(m, c(2,3), mean)
  sim.res[i,,,"samp","mse"] <- apply(m, c(2,3), var)
}
# table
tab.res <- apply(sim.res, c(2,3,4,5), mean)
  
#################################################################################################
# THE END (or the beginning ...)
#################################################################################################
</code>
  
===== Results =====
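
A minimal sketch for inspecting the summary table produced in STEP 7, assuming the script above has been run; tab.res averages the per-replicate statistics over the 250 replicates.

<code R>
# average bias, mse and interval coverage by age and association scenario,
# one slice per estimation method
round(tab.res[,,"geo",], 3)   # geostatistical (model-based) estimator
round(tab.res[,,"samp",], 3)  # design-based estimator
</code>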
  
  
