Jim — Oct 22, 2013, 1:24 PM
#################################################################
# Poisson/Gamma Exchangeable Model Using
# JAGS and LearnBayes
# Example from BCWR (Bayesian Computation with R)
#################################################################
# First use JAGS
library(rjags)
Loading required package: coda
Loading required package: lattice
Linked to JAGS 3.4.0
Loaded modules: basemod,bugs
modelString <- "
model {
  for (i in 1:N) {
    y[i] ~ dpois(lam[i] * e[i])
    lam[i] ~ dgamma(a, b)
  }
  a <- exp(log.a)
  b <- exp(log.a) / mu
  log.mu <- log(mu)
  mu ~ dgamma(0.0001, 0.0001)
  log.a ~ dlogis(-0.635, 1)
}"
# note that giving log.a a logistic(-0.635, 1) distribution is equivalent
# to giving 'a' the prior density 0.53 / (0.53 + a)^2, since exp(-0.635) is
# approximately 0.53
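# A quick numerical check of this equivalence (not part of the original
# session): transform the logistic density of log.a to the scale of a and
# compare it with z0 / (z0 + a)^2, where z0 = exp(-0.635)
a.grid <- seq(0.1, 20, by=0.1)
dens.logistic <- dlogis(log(a.grid), location=-0.635, scale=1) / a.grid
dens.claimed <- exp(-0.635) / (exp(-0.635) + a.grid)^2
max(abs(dens.logistic - dens.claimed))   # essentially zero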
writeLines(modelString, con="poissonexch.bug")
# Define a list containing the data and prior parameters
library(LearnBayes)
N <- length(hearttransplants$y)
y <- hearttransplants$y
e <- hearttransplants$e
b.data <- list(y = y, e = e, N = N)
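# For reference (not in the original session), a quick look at the data:
# hearttransplants has one row per hospital, with the exposure e and the
# observed death count y used above
head(hearttransplants)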
# Give initial values for the parameters (the pooled rate sum(y) / sum(e)
# is a natural starting value for mu)
my.inits <- list(mu=sum(y) / sum(e), log.a=-1)
# Run the function jags.model to initialize the MCMC sampler
jags <- jags.model('poissonexch.bug',
                   data = b.data,
                   inits = my.inits,
                   n.chains = 1,
                   n.adapt = 100)
Compiling model graph
Resolving undeclared variables
Allocating nodes
Graph Size: 386
Initializing model
# Run the MCMC 1000 iterations as a burn-in
update(jags, 1000)
# Collect the simulated draws of log.a and log.mu for 10,000 iterations
posterior <- coda.samples(jags, c("log.a", "log.mu"),
n.iter=10000)
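# The hospital-specific rates lam[i] could be monitored the same way if they
# are of interest (a sketch, not run in the original session):
# lam.draws <- coda.samples(jags, "lam", n.iter=10000)
# summary(lam.draws)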
# Construct trace and density plots of the simulated draws
# (uses the coda package, which rjags has already loaded)
library(coda)
plot(posterior)
# Summarizes the MCMC output
summary(posterior)
Iterations = 1101:11100
Thinning interval = 1
Number of chains = 1
Sample size per chain = 10000
1. Empirical mean and standard deviation for each variable,
   plus standard error of the mean:

         Mean     SD Naive SE Time-series SE
log.a    2.00 0.5163 0.005163        0.02941
log.mu  -6.95 0.0764 0.000764        0.00207

2. Quantiles for each variable:

        2.5%   25%   50%  75% 97.5%
log.a   1.14  1.63  1.93  2.3  3.15
log.mu -7.10 -7.00 -6.95 -6.9 -6.80
# look at autocorrelation plot
autocorr.plot(posterior)
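# Effective sample size gives a rough numerical summary of the mixing seen
# in the autocorrelation plot (not shown in the original session; values
# will vary from run to run)
effectiveSize(posterior)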
# contrast with LearnBayes
# function poissgamexch contains posterior of (log(a), log(mu))
poissgamexch
function (theta, datapar)
{
    y = datapar$data[, 2]
    e = datapar$data[, 1]
    z0 = datapar$z0
    alpha = exp(theta[1])
    mu = exp(theta[2])
    beta = alpha/mu
    logf = function(y, e, alpha, beta) lgamma(alpha + y) - (y +
        alpha) * log(e + beta) + alpha * log(beta) - lgamma(alpha)
    val = sum(logf(y, e, alpha, beta))
    val = val + log(alpha) - 2 * log(alpha + z0)
    return(val)
}
<environment: namespace:LearnBayes>
# Define the data and parameters needed by the function; poissgamexch expects
# the exposures e in column 1 and the counts y in column 2
data <- cbind(e, y)
theta <- c(-4, 0)   # starting value for (log(a), log(mu))
datapar <- list(data=data, z0=0.53)
# Fit a normal approximation to the posterior (Laplace method)
fit <- laplace(poissgamexch, theta, datapar)
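# laplace returns the posterior mode and a variance-covariance estimate,
# which are used to tune the Metropolis proposal below; worth a quick look
# (output not shown in the original session)
fit$mode
fit$var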
# random walk Metropolis using proposal informed by Laplace fit
post <- rwmetrop(poissgamexch,
                 list(var=fit$var, scale=2),
                 fit$mode, 10000, datapar)
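# rwmetrop also returns the acceptance rate of the random walk proposals;
# checking it is a useful diagnostic (value not shown in the original session)
post$accept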
dimnames(post$par)[[2]] <- c('log alpha', 'log mu')
plot(mcmc(post$par))
summary(mcmc(post$par))
Iterations = 1:10000
Thinning interval = 1
Number of chains = 1
Sample size per chain = 10000
1. Empirical mean and standard deviation for each variable,
   plus standard error of the mean:

            Mean     SD Naive SE Time-series SE
log alpha   2.05 0.5737 0.005737        0.01777
log mu     -6.96 0.0725 0.000725        0.00195

2. Quantiles for each variable:

            2.5%   25%   50%   75% 97.5%
log alpha   1.11  1.65  2.00  2.35  3.38
log mu     -7.10 -7.01 -6.96 -6.91 -6.81
autocorr.plot(mcmc(post$par))
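# For a numerical comparison with the JAGS chain, look at the effective
# sample sizes of the random walk draws as well (values vary run to run)
effectiveSize(mcmc(post$par))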
# We get similar posterior summaries of log(a) and log(mu) from the two
# approaches, so JAGS and LearnBayes appear to be giving the same answers.
# But the LearnBayes random walk sampler seems to mix better than the JAGS
# sampler here (compare the autocorrelation plots, time-series SEs, and
# effective sample sizes).