# install.packages("LearnBayes") library(LearnBayes) data("hearttransplants") e = hearttransplants$e y = hearttransplants$y ysum <- sum(y) esum <- sum(e) ############################################################### plot(e,y) print(ysum/esum) x <- seq(0, 0.002, length.out=1001) plot(x, dgamma(x, ysum, esum), type="l") plot(sort(ppois(y, e*ysum/esum))) # Conclusion: We need a hierarchical model. We use # y_i ~ Poisson(e_i*lambda_i) # lambda_i ~ Gamma(alpha, alpha/mu) # pi(alpha) = z0/(alpha + z0)^2 # pi(mu) = 1/mu # Simulation from model with parameters lambda_1,...,lambda_94,mu,alpha # is possible, but better to integrate out the lambda_i. # Then, we use the transformation theta1 = log(alpha), theta2 = log(mu) # The loglikelihood becomes z0 <- 1 #(In text: z0 = 0.53) loglik <- function(theta) { sum(lgamma(y+exp(theta[1])) - lgamma(exp(theta[1])) + (theta[1]-theta[2])*exp(theta[1]) - (exp(theta[1])+y)*log(e + exp(theta[1]-theta[2]))) - 2*log(exp(theta[1]) + z0) + theta[1] } N <- 10000 result <- matrix(c(0, 0), N, 2, byrow=TRUE) for (i in 2:N) { prop <- result[i-1,] + rnorm(2, 0, c(0.1, 0.1)) accept <- exp(loglik(prop) - loglik(result[i-1,])) if (runif(1)