# This software has been approved for release by the U.S. Geological Survey (USGS).
# Although the software has been subjected to rigorous review, the USGS reserves
# the right to update the software as needed pursuant to further analysis and
# review. No warranty, expressed or implied, is made by the USGS or the U.S.
# Government as to the functionality of the software and related material nor
# shall the fact of release constitute any such warranty. Furthermore, the
# software is released on condition that neither the USGS nor the U.S. Government
# shall be held liable for any damages resulting from its authorized or
# unauthorized use.
#
# Supplemental material for:
# Cotterill et al., 2020
# Parsing the effects of demography, climate, and management on recurrent
# brucellosis outbreaks in elk
# gavin.cotterill@aggiemail.usu.edu

###################
### Muddy Creek ###
###################

rm(list = ls())
setwd("~/uncertain")

library(pomp2)
library(tidyverse)

set.seed(594709947L)

load("./muddy_m0_swe_04Aug.RData")

m0 <- pomp(dat,
           time = "dyear",
           t0 = 2004,
           rprocess = discrete_time(rproc, delta.t = 1),
           rinit = initlz,
           dmeasure = dmeas,
           rmeasure = rmeas,
           covar = covariate_table(covar, times = "cyear"),
           statenames = c("S", "I", "R1", "R2", "truesp", "N"),
           paramnames = c("beta", "mu", "sigma", "rho", "gamma",
                          "theta", "iota",
                          "S_0", "I_0", "R1_0", "R2_0"),
           partrans = parameter_trans(log = log_params, logit = logit_params))

results <- rbind(results_local, results_global)

##' Set up a collection of parameter vectors in a neighborhood of the maximum
##' likelihood estimate, covering the region of high likelihood.

##' Weighted quantile function:
wquant <- function(x, weights, probs = c(0.05, 0.5, 0.95)) { # median with 90% interval
  idx <- order(x)
  x <- x[idx]
  weights <- weights[idx]
  w <- cumsum(weights) / sum(weights)
  rval <- approx(w, x, probs, rule = 1)
  rval$y
}

results %>%
  select(-loglik.se) %>%
  filter(loglik > max(loglik) - 0.5 * qchisq(df = 1, p = 0.99)) %>%
  gather(parameter, value) %>%
  group_by(parameter) %>%
  dplyr::summarize(min = min(value), max = max(value)) %>%
  ungroup() %>%
  filter(parameter != "loglik") %>%
  column_to_rownames("parameter") %>%
  as.matrix() -> ranges

sobolDesign(lower = ranges[, "min"],
            upper = ranges[, "max"],
            nseq = 20) -> params

##' Using these parameters as-is does not account for the correlation between
##' beta and theta. Other parameters will also be correlated (e.g. S_0 and
##' R1_0), but not nearly as strongly. We therefore use the Sobol design for
##' all parameters except beta and theta; for those two, we again apply the
##' 99% likelihood ratio cutoff, plot the two parameters against one another,
##' and then characterize their joint distribution:
results %>%
  select(-loglik.se) %>%
  filter(loglik > max(loglik) - 0.5 * qchisq(df = 1, p = 0.99)) %>%
  select(beta, theta) -> bt

x <- bt$beta
y <- bt$theta
# plot(y ~ x)

fit <- lm(y ~ log(x))
# summary(fit)

x1 <- seq(from = min(x), to = max(x), length.out = 1000)
y1 <- predict(fit, newdata = list(x = x1), interval = "confidence")
# matlines(x1, y1, lwd = 2)

## now generate 20 beta-theta pairs to use
# plot(y ~ x)
x2 <- seq(from = min(x), to = max(x), length.out = 20)
y2 <- predict(fit, newdata = list(x = x2), interval = "confidence")
# matlines(x2, y2, lwd = 2)

params$beta <- x2
y2 <- as.data.frame(y2[, 1])
names(y2) <- "theta"
## constrain theta (a probability) to [0, 1]
y2[which(y2$theta < 0), ] <- 0
y2[which(y2$theta > 1), ] <- 1
params$theta <- y2[, 1]

library(foreach)
library(doParallel)

registerDoParallel(detectCores())
getDoParWorkers()

set.seed(998468235L, kind = "L'Ecuyer")

foreach(p = iter(params, by = 'row'),
        .inorder = FALSE,
        .combine = bind_rows,
        .packages = c("tidyverse")
) %dopar% {
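  ## Each worker: (1) runs a particle filter at this parameter vector,
  ## (2) pulls the latent states of every particle at the final timepoint,
  ## (3) uses those states as initial conditions for forward simulations, and
  ## (4) returns the simulated trajectories tagged with the particle-filter
  ## log likelihood, which is used below to weight each simulation.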
  library(pomp2)

  m0 %>%
    pfilter(params = p, Np = 2000, save.states = TRUE) -> pf

  pf@saved.states %>%  # latent state for each particle
    tail(1) %>%        # last timepoint only
    melt() %>%         # reshape and rename the state variables
    spread(variable, value) %>%
    group_by(rep) %>%
    dplyr::summarize(
      S_0 = S,
      I_0 = I,
      R1_0 = R1,
      R2_0 = R2
    ) %>%
    gather(variable, value, -rep) %>%
    spread(rep, value) %>%
    column_to_rownames("variable") %>%
    as.matrix() -> x

  ## the final states are now stored in 'x' as initial conditions

  ## set up a matrix of parameters
  pp <- parmat(unlist(p), ncol(x))

  ## set the initial conditions to the final states computed above
  ## (this must happen before simulating, or the saved states go unused)
  pp[rownames(x), ] <- x

  ## generate simulations
  m0 %>%
    simulate(params = pp, format = "data.frame", include.data = TRUE) %>%
    subset(select = c(dyear, .id, S, I, R1, R2)) %>%
    mutate(
      data = .id == "data",
      loglik = logLik(pf)
    ) -> testSIRR

  testSIRR
} %>%
  mutate(weight = exp(loglik - mean(loglik))) %>%
  arrange(dyear, .id) -> sims

## look at the effective sample size
ess <- with(subset(sims, dyear == max(dyear)), weight / sum(weight))
ess <- 1 / sum(ess^2)
ess

sims %>%
  group_by(dyear) %>%
  summarize(
    lower = wquant(S, weights = weight, probs = 0.05),
    median = wquant(S, weights = weight, probs = 0.5),
    upper = wquant(S, weights = weight, probs = 0.95)
  ) -> simq.S

sims %>%
  group_by(dyear) %>%
  summarize(
    lower = wquant(I, weights = weight, probs = 0.05),
    median = wquant(I, weights = weight, probs = 0.5),
    upper = wquant(I, weights = weight, probs = 0.95)
  ) -> simq.I

sims %>%
  group_by(dyear) %>%
  summarize(
    lower = wquant(R1, weights = weight, probs = 0.05),
    median = wquant(R1, weights = weight, probs = 0.5),
    upper = wquant(R1, weights = weight, probs = 0.95)
  ) -> simq.R1

sims %>%
  group_by(dyear) %>%
  summarize(
    lower = wquant(R2, weights = weight, probs = 0.05),
    median = wquant(R2, weights = weight, probs = 0.5),
    upper = wquant(R2, weights = weight, probs = 0.95)
  ) -> simq.R2

save.image("./out/muddy_m0_04Aug_unc_SIRR_04Aug19.RData")
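##' A minimal plotting sketch (not part of the original analysis) for checking
##' the weighted 90% bands computed above. It assumes the `simq.S`, `simq.I`,
##' `simq.R1`, and `simq.R2` data frames from this script; the long-format
##' stacking and ggplot2 layering are illustrative choices, not the authors'
##' published figures.
bind_rows(
  mutate(simq.S,  state = "S"),
  mutate(simq.I,  state = "I"),
  mutate(simq.R1, state = "R1"),
  mutate(simq.R2, state = "R2")
) %>%
  ggplot(aes(x = dyear)) +  # ggplot2 is attached via tidyverse
  geom_ribbon(aes(ymin = lower, ymax = upper), fill = "grey80") +  # weighted 5th-95th percentiles
  geom_line(aes(y = median)) +                                     # weighted median trajectory
  facet_wrap(~state, scales = "free_y") +
  labs(x = "Year", y = "Number in compartment")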