# --> code for part 1 (a) of discussion section 6 starts here <--

# Monte Carlo approximation of a Beta posterior: with a
# Beta( alpha.prior, beta.prior ) prior and s successes in n trials,
# the posterior for theta is Beta( alpha.prior + s, beta.prior + n - s ).
# We draw M samples from the exact posterior and compare the simulated
# mean and sd against their closed-form values.

seed <- 1
set.seed( seed )

M <- 100     # number of Monte Carlo draws
n <- 112     # sample size
s <- 4       # number of successes

alpha.prior <- 0.5
beta.prior <- 0.5

alpha.posterior <- alpha.prior + s
beta.posterior <- beta.prior + n - s

theta.star <- rbeta( M, alpha.posterior, beta.posterior )

mean( theta.star )
# 0.04312408   # simulated posterior mean

# exact posterior mean of a Beta( a, b ) distribution is a / ( a + b )
alpha.posterior / ( alpha.posterior + beta.posterior )
# 0.03982301   # actual posterior mean

sd( theta.star )
# 0.01930338   # simulated posterior sd

# exact posterior sd of a Beta( a, b ): sqrt( a b / ( ( a + b )^2 ( a + b + 1 ) ) )
sqrt( alpha.posterior * beta.posterior /
  ( ( alpha.posterior + beta.posterior )^2 *
    ( alpha.posterior + beta.posterior + 1 ) ) )
# 0.0183143    # actual posterior sd

# histogram of the simulated draws, on the density scale so that the
# exact posterior density can be overlaid for visual comparison
hist( theta.star, breaks = 20, prob = TRUE, xlab = 'theta', main = '',
  xlim = c( 0, 0.12 ), ylim = c( 0, 35 ) )

theta.grid <- seq( 0, 0.12, length.out = 500 )

lines( theta.grid, dbeta( theta.grid, alpha.posterior, beta.posterior ),
  lty = 1, lwd = 2, col = 'red' )

# you can see that with only M = 100 simulated draws,
# the monte carlo method is not very accurate,
# but we can solve that problem easily, just by increasing M

# now try running this same code with two new random number seeds
# of your own choosing, and make a table like this:

# M = 100
#
#                ------------- posterior ------------
#
#     seed      mean:       true       sd:       true      density
#                          value                value
#                       0.03982301          0.01831430
#
#        1   0.04312408            0.01930338             fairly close
#   314159   0.03903875            0.01812561             fairly close
#    85819   0.03903054            0.01658482             not so close

# --> code for part 1 (a) of discussion section 6 ends here <--

# in part 1 (b), rerun the same code with the same seeds,
# but this time with M = 1 million

# M = 1000000
#
#                ---------------- posterior ---------------
#
#     seed      mean:       true       sd:       true          density
#                          value                value
#                       0.03982301          0.01831430
#
#        1   0.03986105            0.01835710             just about perfect
#   314159   0.03984258            0.01833042             just about perfect
#    85819   0.03982245            0.01829750             just about perfect

# --> code for part 1 (c) of discussion section 6 starts here <--

# part 1 (c): same model setup, but now we quantify the Monte Carlo
# noise in the simulated posterior mean via its Monte Carlo standard error

seed <- 1
set.seed( seed )

M <- 100
n <- 112
s <- 4
# part 1 (c) continued: Monte Carlo standard error (MCSE) of the
# simulated posterior mean, first with M = 100 draws, then with
# M = 1 million draws (setup values seed / M / n / s carry over
# from the statements just above)

alpha.prior <- 0.5
beta.prior <- 0.5

alpha.posterior <- alpha.prior + s
beta.posterior <- beta.prior + n - s

theta.star <- rbeta( M, alpha.posterior, beta.posterior )

# theta.bar.star is the simulated (Monte Carlo) posterior mean
print( theta.bar.star <- mean( theta.star ) )
# 0.04312408   # simulated posterior mean

# the MCSE of a Monte Carlo mean is the sd of the draws divided by sqrt( M )
print( mcse.theta.bar.star <- sd( theta.star ) / sqrt( M ) )
# 0.001930338

# approximate 95% Monte Carlo confidence interval for the posterior mean
c( theta.bar.star - 2 * mcse.theta.bar.star,
  theta.bar.star + 2 * mcse.theta.bar.star )
# 0.03926340 0.04698476

# so we would report a monte-carlo estimate of the
# posterior mean of 0.0431, with a monte-carlo standard error
# of 0.0019, meaning that we're 95% monte-carlo confident
# that the true posterior mean is somewhere between
# about 0.0393 and 0.0470

alpha.posterior / ( alpha.posterior + beta.posterior )
# 0.03982301   # actual posterior mean

# and you can see that the truth did indeed happen
# to fall within our monte carlo interval with this
# random number seed: we got a hit

# now repeat the whole calculation with M = 1 million draws

seed <- 1
set.seed( seed )

M <- 1000000
n <- 112
s <- 4

alpha.prior <- 0.5
beta.prior <- 0.5

alpha.posterior <- alpha.prior + s
beta.posterior <- beta.prior + n - s

theta.star <- rbeta( M, alpha.posterior, beta.posterior )

print( theta.bar.star <- mean( theta.star ) )
# 0.03986105   # simulated posterior mean

print( mcse.theta.bar.star <- sd( theta.star ) / sqrt( M ) )
# 1.83571e-05   # otherwise known as 0.00002

c( theta.bar.star - 2 * mcse.theta.bar.star,
  theta.bar.star + 2 * mcse.theta.bar.star )
# 0.03982433 0.03989776

alpha.posterior / ( alpha.posterior + beta.posterior )
# 0.03982301   # actual posterior mean

# this time we got a miss (barely), but you can see
# that the MCSE has done its job well, in giving us
# a decent idea of the level of monte carlo noise
# with M = 1 million

# --> code for part 1 (c) of discussion section 6 ends here <--