# sept.13.R

# albert — Sep 12, 2013, 12:46 PM

# September 13 work
# Learning about a proportion, part III

# Topics:
# - inferences about functions of parameters
# - noninformative priors for a proportion
# - brute force inference for a proportion

# Learning about a function of a proportion
# We'll work with same hurricane example -- posterior for p was beta(11.5, 28)

# Posterior parameters for p from the hurricane example: beta(11.5, 28)
a <- 3.5 + 8
b <- 16 + 12

# Simulate draws of p from the beta posterior; transforming each draw to the
# odds scale p/(1-p) yields a sample from the posterior of the odds
p <- rbeta(10000, a, b)
odds <- p / (1 - p)

# Density estimate of the simulated posterior of the odds
plot(density(odds))

# plot of chunk unnamed-chunk-1


# using emp.hpd function from TeachingDemos package, find a 90% HPD interval for odds

# 90% highest posterior density interval for the odds, computed from the
# simulated draws with emp.hpd() from the TeachingDemos package
library(TeachingDemos)
emp.hpd(odds, conf = 0.90)
# Console output from the original session:
#   [1] 0.1951 0.6638

# noninformative priors for a proportion
# does it seem reasonable to assign a uniform prior to p?

# the problem -- uniform for p does not imply uniform for the odds

# we illustrate

# Simulate p from a uniform(0, 1) prior -- the histogram is flat, as expected
p <- runif(10000, 0, 1)
hist(p)

# plot of chunk unnamed-chunk-1

# ... but the implied prior on the odds p/(1-p) is strongly skewed, not
# uniform -- "noninformative" is not invariant to reparameterization
odds <- p / (1 - p)
hist(odds)

# plot of chunk unnamed-chunk-1


# we'll talk about one general method of finding a prior and illustrate
# three noninformative priors for a proportion

# brute force method of computation

# suppose I think a coin is biased, but not sure it is biased towards H or T
# this prior reflects this belief

# 50/50 mixture of beta(20, 10) and beta(10, 20): a bimodal prior expressing
# "the coin is biased, but I don't know in which direction"
curve(0.5 * dbeta(x, 20, 10) + 0.5 * dbeta(x, 10, 20), 0, 1,
      xlab="Proportion", ylab="Density", main="Mixture Prior")

# plot of chunk unnamed-chunk-1


# okay -- suppose we flip the coin 20 times and get 14 heads 
# want to plot and summarize the posterior

# set up a grid of proportion values

# Brute-force (grid) posterior computation.

# Set up a grid of proportion values covering (0, 1)
p <- seq(0.01, 0.99, 0.01)

# Evaluate the mixture prior and the binomial likelihood (14 heads in 20
# flips) at every grid point
prior <- 0.5 * dbeta(p, 20, 10) + 0.5 * dbeta(p, 10, 20)
likelihood <- dbinom(14, prob=p, size=20)

# Posterior is proportional to prior x likelihood; normalize so the discrete
# probabilities sum to 1
product <- prior * likelihood
posterior <- product / sum(product)

# Graph the posterior over the grid
plot(p, posterior, type="l")

# plot of chunk unnamed-chunk-1


# summarize using discint function in LearnBayes
# input is a matrix with columns p and posterior probs

# Summarize the discrete posterior with discint() from LearnBayes; input is a
# matrix with columns (grid values, posterior probabilities)
library(LearnBayes)
# Console output on load (LearnBayes masks triplot from TeachingDemos):
#   Attaching package: 'LearnBayes'
#   The following object is masked from 'package:TeachingDemos':
#       triplot

# Shortest discrete set with posterior probability at least 0.90
discint(cbind(p, posterior), 0.90)
# Console output from the original session:
#   $prob
#   [1] 0.9082
#
#   $set
#    [1] 0.56 0.57 0.58 0.59 0.60 0.61 0.62 0.63 0.64 0.65 0.66 0.67 0.68 0.69
#   [15] 0.70 0.71 0.72 0.73 0.74 0.75 0.76 0.77 0.78 0.79

##########################################