bayesruleexample.R

Jim — Aug 27, 2013, 11:41 AM

########################################################
# Illustration of the function bayes, demonstrating
# sequential learning via Bayes' rule
# Jim Albert, August 2013
########################################################

bayes <- function(prior, likelihood, data){
  # probs stores the prior in row 1 and one updated posterior per datum
  probs <- matrix(0, length(data) + 1, length(prior))
  dimnames(probs)[[1]] <- c("prior", data)
  dimnames(probs)[[2]] <- names(prior)
  probs[1, ] <- prior
  # sequential updating: multiply the current probabilities by the
  # likelihood of the new datum under each model, then renormalize
  for(j in seq_along(data))
    probs[j + 1, ] <- probs[j, ] * likelihood[, data[j]] /
      sum(probs[j, ] * likelihood[, data[j]])
  dimnames(probs)[[1]] <-
    paste(0:length(data), dimnames(probs)[[1]])
  data.frame(probs)
}

# quality control example
# the machine is either working or broken, with prior probabilities .9 and .1

prior <- c(working = .9, broken = .1)

# each observed item is either good (g) or bad (b)
# the likelihood matrix gives the probability of each outcome under each model

like.working <- c(g=.95, b=.05)
like.broken <- c(g=.7, b=.3)
likelihood <- rbind(like.working, like.broken)
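
# As an added sanity check, each row of the likelihood matrix is a
# probability distribution over the outcomes, so the rows should sum to 1:

likelihood
rowSums(likelihood)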

# sequence of data outcomes

data <- c("g", "b", "g", "g", "g", "g", "g", "g", "g", "b", "g", "b")
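
# a quick tally of the outcome sequence: 9 good items and 3 bad ones

table(data)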

# the function bayes computes the posteriors, one datum at a time;
# inputs are the prior vector, the likelihood matrix, and the vector of data

posterior <- bayes(prior, likelihood, data)
posterior
        working  broken
0 prior  0.9000 0.10000
1 g      0.9243 0.07568
2 b      0.6706 0.32941
3 g      0.7342 0.26576
4 g      0.7894 0.21055
5 g      0.8358 0.16424
6 g      0.8735 0.12649
7 g      0.9036 0.09641
8 g      0.9271 0.07289
9 g      0.9452 0.05476
10 b     0.7421 0.25793
11 g     0.7961 0.20389
12 b     0.3942 0.60578
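
# As a check on the sequential calculation (a sketch added here, using
# only objects defined above), the final row should agree with a single
# batch update that multiplies all 12 likelihoods before renormalizing:

batch <- prior * apply(likelihood[, data], 1, prod)
batch / sum(batch)
# working ~ 0.394, broken ~ 0.606, matching row "12 b" above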

# graph the probs of working as a function of the data observation

plot(posterior$working, type="b")

[Figure: posterior probability of "working" after each observation]
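
# An optional, slightly more polished version of the plot (a sketch, not
# part of the original script): index the x-axis by observation number,
# label both axes, and fix the vertical range to the probability scale.

plot(0:length(data), posterior$working, type = "b",
     xlab = "observation number", ylab = "P(machine working | data)",
     ylim = c(0, 1))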