Chapter 3 Single-Parameter Models
3.1 Normal Distribution with Known Mean but Unknown Variance
Suppose we have a sample \(y_1, \ldots, y_n\) from a normal distribution with mean 0 and unknown variance \(\sigma^2\). Under the noninformative prior \(g(\sigma^2) \propto 1/\sigma^2\), the posterior of the precision \(P = 1/\sigma^2\) is \(\chi^2_n / v\), where \(v = \sum y_j^2\), so we can simulate from the posterior of \(\sigma\) using chi-square draws. The example uses the margins of victory in the footballscores data.
library(LearnBayes)
data(footballscores)
# difference between the favorite's margin of victory and the point spread
d <- with(footballscores, favorite - underdog - spread)
n <- length(d)
v <- sum(d ^ 2)
# posterior of the precision P = 1/sigma^2 is chisq(n) / v
P <- rchisq(1000, n) / v
s <- sqrt(1 / P)
hist(s)
quantile(s, probs = c(0.025, 0.5, 0.975))
## 2.5% 50% 97.5%
## 13.14654 13.85587 14.62902
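As a check on the simulation, the posterior quantiles of \(\sigma\) are available in closed form: since \(P \sim \chi^2_n / v\), the \(q\)-quantile of \(\sigma = \sqrt{1/P}\) is \(\sqrt{v / \chi^2_{n, 1-q}}\). A minimal sketch, reusing n and v from above (note the reversed probabilities, since a small precision means a large \(\sigma\)):
# exact posterior quantiles of sigma
sqrt(v / qchisq(c(0.975, 0.5, 0.025), n))
These exact values should agree closely with the simulated quantiles.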
3.2 Estimating a Heart Transplant Mortality Rate
We observe a count \(y\) from a Poisson(\(e \lambda\)) distribution, where the exposure \(e\) is known, and assign \(\lambda\) a gamma(\(\alpha, \beta\)) prior.
The predictive density can be computed from the identity \(f(y) = f(y \mid \lambda)\, g(\lambda) / g(\lambda \mid y)\), which holds at any value of \(\lambda\):
alpha <- 16; beta <- 15174
yobs <- 1; ex <- 66
y <- 0:10
lam <- alpha / beta
py <- dpois(y, lam * ex) *
  dgamma(lam, shape = alpha, rate = beta) /
  dgamma(lam, shape = alpha + y, rate = beta + ex)
cbind(y, round(py, 3))
## y
## [1,] 0 0.933
## [2,] 1 0.065
## [3,] 2 0.002
## [4,] 3 0.000
## [5,] 4 0.000
## [6,] 5 0.000
## [7,] 6 0.000
## [8,] 7 0.000
## [9,] 8 0.000
## [10,] 9 0.000
## [11,] 10 0.000
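These predictive probabilities also have a closed form: a gamma(\(\alpha, \beta\)) mixture of Poisson(\(e\lambda\)) counts is negative binomial with size \(\alpha\) and probability \(\beta / (\beta + e)\). A quick check of the table above:
# gamma mixture of Poissons = negative binomial
round(dnbinom(y, size = alpha, prob = beta / (beta + ex)), 3)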
Posterior density:
lambdaA <- rgamma(1000, shape = alpha + yobs,
                  rate = beta + ex)
Data from a different hospital:
ex <- 1767; yobs <- 4
y <- 0:10
py <- dpois(y, lam * ex) *
  dgamma(lam, shape = alpha, rate = beta) /
  dgamma(lam, shape = alpha + y, rate = beta + ex)
cbind(y, round(py, 3))
## y
## [1,] 0 0.172
## [2,] 1 0.286
## [3,] 2 0.254
## [4,] 3 0.159
## [5,] 4 0.079
## [6,] 5 0.033
## [7,] 6 0.012
## [8,] 7 0.004
## [9,] 8 0.001
## [10,] 9 0.000
## [11,] 10 0.000
lambdaB <- rgamma(1000, shape = alpha + yobs,
                  rate = beta + ex)
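As a sanity check, the simulated draws can be compared with the exact gamma posterior quantiles (shown here for hospital B; the same check works for lambdaA):
quantile(lambdaB, c(0.025, 0.975))
qgamma(c(0.025, 0.975), shape = alpha + yobs, rate = beta + ex)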
Prior and posteriors for two hospitals:
par(mfrow = c(2, 1))
plot(density(lambdaA), main="HOSPITAL A",
xlab="lambdaA", lwd=3)
curve(dgamma(x, shape = alpha, rate = beta),
add=TRUE)
legend("topright",legend=c("prior","posterior"),
lwd=c(1,3))
plot(density(lambdaB), main="HOSPITAL B",
xlab="lambdaB", lwd=3)
curve(dgamma(x, shape = alpha, rate = beta),
add=TRUE)
legend("topright",legend=c("prior","posterior"),
lwd=c(1,3))
3.3 An Illustration of Bayesian Robustness
Assuming normal sampling with known standard deviation, we compare inference about the mean \(\mu\) under two priors. First, find the normal prior matching two specified quantiles (median 100, 95th percentile 120):
quantile1 <- list(p = .5, x = 100)
quantile2 <- list(p = .95, x = 120)
normal.select(quantile1, quantile2)
## $mu
## [1] 100
##
## $sigma
## [1] 12.15914
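A quick check that the fitted prior reproduces the requested quantiles:
# median and 95th percentile of the selected normal prior
qnorm(c(0.5, 0.95), mean = 100, sd = 12.15914)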
mu <- 100
tau <- 12.16
sigma <- 15
n <- 4
se <- sigma / sqrt(n)
ybar <- c(110, 125, 140)
# conjugate updating: posterior precision = data precision + prior precision
tau1 <- 1 / sqrt(1 / se ^ 2 + 1 / tau ^ 2)
# posterior mean is a precision-weighted average of ybar and the prior mean
mu1 <- (ybar / se ^ 2 + mu / tau ^ 2) * tau1 ^ 2
summ1 <- cbind(ybar, mu1, tau1)
summ1
## ybar mu1 tau1
## [1,] 110 107.2442 6.383469
## [2,] 125 118.1105 6.383469
## [3,] 140 128.9768 6.383469
Next consider a t prior with 2 degrees of freedom, with the scale chosen so that its 95th percentile also lies 20 points above the median:
tscale <- 20 / qt(0.95, 2)
tscale
## [1] 6.849349
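By construction, this t prior matches the normal prior's 95th percentile; a quick check:
# 95th percentile of the t(2) prior centered at mu
mu + tscale * qt(0.95, 2)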
par(mfrow=c(1, 1))
curve(1 / tscale * dt((x - mu) / tscale, 2),
from=60, to=140, xlab="theta",
ylab="Prior Density")
curve(dnorm(x, mean=mu, sd=tau), add=TRUE, lwd=3)
legend("topright", legend=c("t density",
"normal density"),
lwd=c(1,3))
norm.t.compute <- function(ybar){
  theta <- seq(60, 180, length = 500)
  like <- dnorm(theta, mean = ybar,
                sd = sigma / sqrt(n))
  prior <- dt((theta - mu) / tscale, 2)
  # posterior on a grid, normalized to sum to one
  post <- prior * like
  post <- post / sum(post)
  # posterior mean and standard deviation by grid summation
  m <- sum(theta * post)
  s <- sqrt(sum(theta ^ 2 * post) - m ^ 2)
  c(ybar, m, s)
}
summ2 <- t(sapply(c(110, 125, 140),
                  norm.t.compute))
dimnames(summ2)[[2]] <- c("ybar", "mu1 t",
                          "tau1 t")
summ2
## ybar mu1 t tau1 t
## [1,] 110 105.2921 5.841676
## [2,] 125 118.0841 7.885174
## [3,] 140 135.4134 7.973498
cbind(summ1, summ2)
## ybar mu1 tau1 ybar mu1 t tau1 t
## [1,] 110 107.2442 6.383469 110 105.2921 5.841676
## [2,] 125 118.1105 6.383469 125 118.0841 7.885174
## [3,] 140 128.9768 6.383469 140 135.4134 7.973498
Note that for the extreme observation \(\bar{y} = 140\), the flat-tailed t prior gives a posterior mean (135.4) much closer to the data than the normal prior does (129.0): the inference is more robust to the choice of prior. Compare the two posterior densities for this case:
theta <- seq(60, 180, length = 500)
normpost <- dnorm(theta, mu1[3], tau1)
normpost <- normpost / sum(normpost)
plot(theta, normpost, type = "l", lwd = 3,
     ylab = "Posterior Density")
like <- dnorm(theta, mean = 140, sd = sigma / sqrt(n))
prior <- dt((theta - mu) / tscale, 2)
tpost <- prior * like / sum(prior * like)
lines(theta, tpost)
legend("topright", legend=c("t prior",
"normal prior"),
lwd=c(1,3))
3.4 Mixtures of Conjugate Priors
Use a mixture of beta curves to reflect beliefs that a particular coin is biased.
curve(.5 * dbeta(x, 6, 14) + .5 * dbeta(x, 14, 6),
from=0, to=1, xlab="P", ylab="Density")
probs <- c(.5, .5)
beta.par1 <- c(6, 14)
beta.par2 <- c(14, 6)
betapar <- rbind(beta.par1, beta.par2)
data <- c(7, 3)
post <- binomial.beta.mix(probs, betapar, data)
post
## $probs
## beta.par1 beta.par2
## 0.09269663 0.90730337
##
## $betapar
## [,1] [,2]
## beta.par1 13 17
## beta.par2 21 9
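The updated weights come from each component's beta-binomial prior predictive: multiply each prior weight by the probability that component assigns to the observed data (7 heads, 3 tails), then renormalize. A minimal sketch reproducing them, where bbpred is a hypothetical helper (not part of LearnBayes):
# beta-binomial predictive probability of y heads in n flips
bbpred <- function(par, y = 7, n = 10)
  choose(n, y) * beta(par[1] + y, par[2] + n - y) / beta(par[1], par[2])
w <- probs * c(bbpred(beta.par1), bbpred(beta.par2))
w / sum(w)  # should match post$probs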
Compare the prior and posterior densities for the probability that the coin lands heads.
curve(post$probs[1] * dbeta(x, 13, 17) +
        post$probs[2] * dbeta(x, 21, 9),
      from = 0, to = 1, lwd = 3,
      xlab = "P", ylab = "DENSITY")
curve(.5 * dbeta(x, 6, 14) +
        .5 * dbeta(x, 14, 6), 0, 1, add = TRUE)
legend("topleft", legend = c("Prior", "Posterior"),
       lwd = c(1, 3))
3.5 A Bayesian Test of the Fairness of a Coin
We test whether a coin is fair, \(p = 0.5\), having observed 5 heads in 20 flips.
The frequentist p-value is the probability of 5 or fewer heads under fairness (doubling it gives the two-sided p-value, about 0.041):
pbinom(5, 20, 0.5)
## [1] 0.02069473
Bayesian test of fairness using a mixture prior.
n <- 20
y <- 5
a <- 10
p <- 0.5
# marginal density of y under the beta(a, a) alternative
m1 <- dbinom(y, n, p) * dbeta(p, a, a) /
  dbeta(p, a + y, a + n - y)
# posterior probability that the coin is fair (prior probability .5)
lambda <- dbinom(y, n, p) / (dbinom(y, n, p) + m1)
lambda
## [1] 0.2802215
pbetat(p, .5, c(a, a), c(y, n - y))
## $bf
## [1] 0.3893163
##
## $post
## [1] 0.2802215
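With prior probability .5 on fairness, the posterior probability and the Bayes factor are related by \(\lambda = BF / (1 + BF)\); a quick check against the pbetat output:
bf <- dbinom(y, n, p) / m1
bf / (1 + bf)  # reproduces post = 0.2802215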
prob.fair <- function(log.a){
  a <- exp(log.a)
  m2 <- dbinom(y, n, p) * dbeta(p, a, a) /
    dbeta(p, a + y, a + n - y)
  dbinom(y, n, p) / (dbinom(y, n, p) + m2)
}
n <- 20; y <- 5; p <- 0.5
curve(prob.fair(x), from = -4, to = 5,
xlab="log a",
ylab="Prob(coin is fair)", lwd=2)
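For instance, evaluating the curve at \(\log a = \log 10\) recovers the earlier posterior probability:
prob.fair(log(10))  # should return 0.2802215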
n <- 20; y <- 5
a <- 10; p <- .5
# marginal probability of the tail event "y or fewer heads" under the alternative
m2 <- 0
for (k in 0:y){
  m2 <- m2 + dbinom(k, n, p) * dbeta(p, a, a) /
    dbeta(p, a + k, a + n - k)
}
# posterior probability of fairness, now based on the tail event
lambda <- pbinom(y, n, p) / (pbinom(y, n, p) + m2)
lambda
## [1] 0.2184649