Jim Albert
October 1, 2013
# observed counts; all values are positive, so zeros were not recorded
y <- c(3, 4, 1, 2, 2, 1, 3, 3, 3, 4, 3, 2,
       1, 1, 3, 2, 1, 2, 2, 1, 4, 2)
# log posterior (up to an additive constant) of theta = log(lambda) for a
# zero-truncated Poisson sample y; the last term corrects for the fact that
# zero counts cannot be observed
zero.poisson <- function(theta, y){
  lambda <- exp(theta)
  n <- length(y)
  sum(y * log(lambda)) - n * lambda - n * log(1 - exp(-lambda))
}
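As a quick sanity check (an addition, not part of the original code), the log posterior above should agree with the zero-truncated Poisson log likelihood computed directly from dpois, up to an additive constant that does not involve theta. The helper check.loglike is introduced here only for this comparison.
# added sketch: compare zero.poisson with a dpois-based version of the
# zero-truncated log likelihood; the difference should equal sum(lfactorial(y))
# for any theta, since that is the constant omitted from zero.poisson
check.loglike <- function(theta, y){
  lambda <- exp(theta)
  sum(dpois(y, lambda, log=TRUE) - log(1 - exp(-lambda)))
}
zero.poisson(0.7, y) - check.loglike(0.7, y)
sum(lfactorial(y))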
# exact posterior of theta computed on a grid (despite its name, lam holds
# values of theta = log(lambda)); rescale so the values approximate a density
lam <- seq(0, 1.4, by=0.01)
g.lam <- exp(sapply(lam, zero.poisson, y))
g.lam <- g.lam / sum(g.lam) / 0.01
plot(lam, g.lam, type="l", xlab="theta = log(lambda)", ylab="posterior density")
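Since g.lam approximates the posterior density of theta on an equally spaced grid, grid-based summaries are available by simple numerical integration. The lines below are an added sketch (post.mean and post.sd are names introduced here); the results should be close to the Laplace values reported next.
# added sketch: posterior mean and standard deviation of theta from the grid
post.mean <- sum(lam * g.lam * 0.01)
post.sd <- sqrt(sum((lam - post.mean)^2 * g.lam * 0.01))
c(post.mean, post.sd)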
# normal (Laplace) approximation to the posterior of theta,
# using theta = 1 as the starting value for the optimization
library(LearnBayes)
fit <- laplace(zero.poisson, 1, y)
fit
$mode
[1] 0.6674

$var
        [,1]
[1,] 0.02957

$int
[1] -6.974

$converge
[1] TRUE
paste("log Lambda is distributed approximately normal with mean",
round(fit$mode[1], 2), "and standard deviation",
round(sqrt(fit$var), 2))
[1] "log Lambda is distributed approximately normal with mean 0.67 and standard deviation 0.17"