Chapter 8 Model Comparison
8.1 A One-Sided Test of a Normal Mean
Bayesian testing of the one-sided hypothesis \(H: \mu \le \mu_0\) against \(A: \mu > \mu_0\) for a normal mean with known sampling standard deviation.
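The code below follows the standard conjugate normal/normal calculation: with \(\bar y \sim N(\mu, \sigma^2/n)\) (the code's sigma2 is \(\sigma^2/n\) with \(\sigma = 3\)) and prior \(\mu \sim N(\mu_p, \tau^2)\), the posterior of \(\mu\) is normal with
\[
\frac{1}{\operatorname{Var}(\mu \mid y)} = \frac{n}{\sigma^2} + \frac{1}{\tau^2},
\qquad
E(\mu \mid y) = \frac{\bar y\, n/\sigma^2 + \mu_p/\tau^2}{n/\sigma^2 + 1/\tau^2},
\]
and the Bayes factor in support of \(H\) is the ratio of posterior odds to prior odds,
\[
BF = \frac{P(H \mid y)/P(A \mid y)}{P(H)/P(A)}.
\]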
library(LearnBayes)
pmean <- 170
pvar <- 25
probH <- pnorm(175, pmean, sqrt(pvar))
probA <- 1 - probH
prior.odds <- probH / probA
prior.odds
## [1] 5.302974
weights <- c(182, 172, 173, 176, 176, 180,
173, 174, 179, 175)
xbar <- mean(weights)
sigma2 <- 3 ^ 2 / length(weights)
post.precision <- 1 / sigma2 + 1 / pvar
post.var <- 1 / post.precision
post.mean <- (xbar / sigma2 + pmean / pvar) /
post.precision
c(post.mean, sqrt(post.var))
## [1] 175.7915058 0.9320546
post.odds <- pnorm(175, post.mean,
sqrt(post.var)) /
(1 - pnorm(175, post.mean,
sqrt(post.var)))
post.odds
## [1] 0.2467017
BF <- post.odds / prior.odds
BF
## [1] 0.04652139
postH <- probH * BF / (probH * BF + probA)
postH
## [1] 0.1978835
Contrast with a frequentist p-value calculation.
z <- sqrt(length(weights)) *
(mean(weights) - 175) / 3
1 - pnorm(z)
## [1] 0.1459203
weights <- c(182, 172, 173, 176, 176, 180,
173, 174, 179, 175)
data <- c(mean(weights), length(weights), 3)
prior.par <- c(170, 1000)
mnormt.onesided(175, prior.par, data)
## $BF
## [1] 0.1694947
##
## $prior.odds
## [1] 1.008011
##
## $post.odds
## [1] 0.1708525
##
## $postH
## [1] 0.1459215
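Note that with the very diffuse prior specified by prior.par, the posterior probability of \(H\) (0.1459215) essentially reproduces the frequentist p-value (0.1459203); for one-sided testing of a normal mean, a nearly flat prior makes the posterior probability of \(H\) and the p-value agree.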
8.2 A Two-Sided Test of a Normal Mean
Bayesian testing of the point hypothesis \(H: \mu = \mu_0\) against the two-sided alternative \(A: \mu \neq \mu_0\).
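To read the output below, note that the call appears to pass the tested value \(\mu_0 = 170\), a prior probability of 0.5 for \(H\), and, in t, a vector of prior standard deviations \(\tau\) for \(\mu\) under the alternative. Under \(A\) the sample mean is marginally \(N(\mu_0, \tau^2 + \sigma^2/n)\), so the Bayes factor in support of \(H\) is
\[
BF(\tau) = \frac{\phi(\bar y \mid \mu_0, \sigma^2/n)}{\phi(\bar y \mid \mu_0, \tau^2 + \sigma^2/n)},
\]
where \(\phi(\cdot \mid m, v)\) denotes the normal density with mean \(m\) and variance \(v\); with equal prior probabilities the posterior probability of \(H\) is \(BF/(1+BF)\), which is the $post component below.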
weights <- c(182, 172, 173, 176, 176, 180,
173, 174, 179, 175)
data <- c(mean(weights), length(weights), 3)
t <- c(.5, 1, 2, 4, 8)
mnormt.twosided(170, .5, t, data)
## $bf
## [1] 1.462146e-02 3.897038e-05 1.894326e-07 2.591162e-08 2.309739e-08
##
## $post
## [1] 1.441076e-02 3.896887e-05 1.894325e-07 2.591162e-08 2.309739e-08
8.3 Models for Soccer Goals
Illustrates the use of the marginal likelihood to compare several Bayesian models for soccer goals.
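The four fits below place, respectively, a gamma prior on the Poisson mean (logpoissgamma) and three different normal priors on its logarithm (logpoissnormal), with hyperparameters passed through par. laplace() returns the posterior mode, the posterior variance, and, in its int component, an estimate of the log marginal likelihood, apparently via the usual Laplace approximation: writing \(h(\theta) = \log\big(g(\theta)\,f(y \mid \theta)\big)\), with mode \(\hat\theta\) and \(V = \big(-h''(\hat\theta)\big)^{-1}\),
\[
m(y) \approx (2\pi)^{d/2}\,|V|^{1/2}\exp\big(h(\hat\theta)\big),
\]
where \(d\) is the dimension of \(\theta\) (here 1). Larger values of logmarg indicate better-supported priors.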
datapar <- list(data=soccergoals$goals,
par=c(4.57, 1.43))
fit1 <- laplace(logpoissgamma, .5, datapar)
datapar <- list(data=soccergoals$goals,
par=c(1, .5))
fit2 <- laplace(logpoissnormal, .5, datapar)
datapar <- list(data=soccergoals$goals,
par=c(2, .5))
fit3 <- laplace(logpoissnormal, .5, datapar)
datapar <- list(data=soccergoals$goals,
par=c(1, 2))
fit4 <- laplace(logpoissnormal, .5, datapar)
postmode <- c(fit1$mode, fit2$mode, fit3$mode,
fit4$mode)
postsd <- sqrt(c(fit1$var, fit2$var, fit3$var,
fit4$var))
logmarg <- c(fit1$int, fit2$int, fit3$int,
fit4$int)
cbind(postmode, postsd, logmarg)
## postmode postsd logmarg
## [1,] 0.5248047 0.1274414 -1.502977
## [2,] 0.5207825 0.1260712 -1.255171
## [3,] 0.5825195 0.1224723 -5.076316
## [4,] 0.4899414 0.1320165 -2.137216
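A convenient summary converts the log marginal likelihoods into Bayes factors against the best-supported model; a minimal sketch, reusing the logmarg vector computed above:
# Bayes factors of each model relative to the best-supported one
round(exp(logmarg - max(logmarg)), 3)
The second prior (normal with parameters (1, .5)) has the largest log marginal likelihood, and the third (normal centered at 2) is clearly the weakest of the four.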
8.4 Is a Baseball Hitter Really Streaky?
Defines a family of streaky models and uses a Bayes factor to measure the level of support for streakiness in Derek Jeter's 2004 hitting data.
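The streaky models here appear to work as follows: Jeter's hit/at-bat data are regrouped into periods of five games (regroup), and for a given precision parameter \(K\) the hitting probabilities are allowed to vary across periods, roughly \(p_i \mid \eta \sim \text{beta}\big(K\eta, K(1-\eta)\big)\), so that large \(K\) forces the probabilities toward a common value. For each \(K\), the int component of laplace(bfexch, ...) is the log Bayes factor in support of the streaky model against the constant-probability model.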
data <- cbind(jeter2004$H, jeter2004$AB)
data1 <- regroup(data, 5)
log.marg <- function(logK){
laplace(bfexch, 0,
list(data=data1, K=exp(logK)))$int
}
log.K <- seq(2, 6)
K <- exp(log.K)
log.BF <- sapply(log.K, log.marg)
BF <- exp(log.BF)
round(data.frame(log.K, K, log.BF, BF), 2)
## log.K K log.BF BF
## 1 2 7.39 -4.04 0.02
## 2 3 20.09 0.17 1.19
## 3 4 54.60 0.92 2.51
## 4 5 148.41 0.57 1.78
## 5 6 403.43 0.26 1.29
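The support for streakiness peaks in the interior of the grid; a one-line check, reusing the K and log.BF vectors computed above:
# value of K receiving the most support for streakiness
K[which.max(log.BF)]
The Bayes factor is largest near \(K = e^4 \approx 55\), where it is only about 2.5, so the evidence that Jeter was genuinely streaky in 2004 is modest.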
8.5 A Test of Independence in a Two-Way Contingency Table
Constructs several Bayes factor statistics for two-way contingency tables.
data <- matrix(c(11, 9, 68, 23, 3, 5),
               nrow = 2, ncol = 3)
data
## [,1] [,2] [,3]
## [1,] 11 68 3
## [2,] 9 23 5
Traditional chi-square test of independence.
chisq.test(data)
## Warning in chisq.test(data): Chi-squared approximation may be incorrect
##
## Pearson's Chi-squared test
##
## data: data
## X-squared = 6.9264, df = 2, p-value = 0.03133
Bayes factor against independence using uniform priors.
a <- matrix(rep(1, 6), nrow = 2, ncol = 3)
a
## [,1] [,2] [,3]
## [1,] 1 1 1
## [2,] 1 1 1
ctable(data, a)
## [1] 1.662173
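Although the chi-squared test gives a p-value of about 0.03, the Bayes factor against independence under uniform priors is only about 1.7, which is quite mild evidence; the two measures of evidence need not agree.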
Consider Bayes factors against independence for alternatives close to independence.
log.K <- seq(2,7)
compute.log.BF <- function(log.K){
log(bfindep(data, exp(log.K), 100000)$bf)
}
log.BF <- sapply(log.K, compute.log.BF)
BF <- exp(log.BF)
round(data.frame(log.K, log.BF, BF), 2)
## log.K log.BF BF
## 1 2 -1.51 0.22
## 2 3 0.13 1.14
## 3 4 0.78 2.17
## 4 5 0.73 2.09
## 5 6 0.44 1.55
## 6 7 0.20 1.22
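Since bfindep appears to be simulation based (the 100000 presumably being the number of simulated draws), these values carry a little Monte Carlo error. A quick plot of the sensitivity analysis, reusing log.K and log.BF:
# Bayes factor against independence as a function of the precision parameter K
plot(log.K, log.BF, type = "b",
     xlab = "log K", ylab = "log Bayes factor")
Even at its peak (about 2.2 near log K = 4), the Bayes factor against independence is far less dramatic than the p-value of 0.031 might suggest.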