#ST407 Monte Carlo Methods 2012
#Practical 6: Gibbs Sampling and Blocking
#Interested in sampling from a trivariate Multivariate Normal
#2 of the components are correlated, the other is independent of them
#Question 1
SimpleGibbs <- function(N = 1000, rho = 0.01, X0 = c(0, 0, 0)) {
  # Gibbs sampler for a trivariate Normal target in which components 1 and 2
  # have correlation rho and component 3 is independent of both.
  # N   = number of iterations (columns of output)
  # rho = correlation between components 1 and 2 (requires |rho| < 1)
  # X0  = initial state of the chain (length-3 vector)
  # Returns a 3 x N matrix: rows = components, columns = iterations.
  x <- matrix(nrow = 3, ncol = N)  # storage for the values of the chain
  x[, 1] <- X0                     # initial state in column 1
  if (N > 1) {                     # guard: 2:N would run backwards for N = 1
    for (i in 2:N) {
      # Full conditional of x1 given the previous value of x2
      x[1, i] <- rnorm(1, mean = rho * x[2, i - 1], sd = sqrt(1 - rho^2))
      # Full conditional of x2, conditioning on the MOST RECENT value of x1
      x[2, i] <- rnorm(1, mean = rho * x[1, i], sd = sqrt(1 - rho^2))
      # Full conditional of x3, which is independent of the other components
      x[3, i] <- rnorm(1, mean = 0, sd = 1)
    }
  }
  x
}
#rho=0.01
qu1.r0.01 <- SimpleGibbs(N = 1000, rho = 0.01)
par(mfrow = c(3, 1), mar = c(4, 4, 2, 2))  # stack the three traceplots
plot(qu1.r0.01[1, ], type = "l", ylab = "x1")
plot(qu1.r0.01[2, ], type = "l", ylab = "x2")  # fixed label (was mislabelled "x3")
plot(qu1.r0.01[3, ], type = "l", ylab = "x3")
acf(t(qu1.r0.01))  # auto- and cross-correlations of the three chains
#Plots and acf indicate little correlation within and amongst components
#rho=-0.99
qu1.r0.99 <- SimpleGibbs(N = 1000, rho = -0.99)
par(mfrow = c(3, 1), mar = c(4, 4, 2, 2))  # stack the three traceplots
plot(qu1.r0.99[1, ], type = "l", ylab = "x1")
plot(qu1.r0.99[2, ], type = "l", ylab = "x2")  # fixed label (was mislabelled "x3")
plot(qu1.r0.99[3, ], type = "l", ylab = "x3")
acf(t(qu1.r0.99))  # auto- and cross-correlations of the three chains
#Traceplots display the negative correlation between components of x(1) and x(2)
#However, the movement within each component's chain shows only small steps, which suggests the chain is not mixing well (not fully exploring the state space) and is not producing near-independent samples from the target distribution of interest.
#The acf plots indicate that the within chain correlation is significant and not producing iid samples.
#Part b
ReparamGibbs <- function(N = 1000, rho = 0.01, X0 = c(0, 0, 0)) {
  # Sampler for the trivariate Normal target under the reparameterisation
  # y1 = (x1 + x2)/sqrt(2), y2 = (x1 - x2)/sqrt(2), y3 = x3, under which the
  # three y-components are independent with Var(y1) = 1 + rho,
  # Var(y2) = 1 - rho, Var(y3) = 1 -- so every full conditional is a fixed
  # Normal and the draws are iid.
  # N   = number of iterations
  # rho = correlation between components 1 and 2 of the target (|rho| < 1)
  # X0  = initial state of the chain (determines column 1 of the output)
  # Returns a 3 x N matrix of samples in the original (x) parameterisation.
  y <- matrix(nrow = 3, ncol = N)  # samples in the reparameterised space
  # Map the initial x-state into y-space so column 1 reproduces X0 exactly
  # after the inverse transform below.
  y[1, 1] <- (X0[1] + X0[2]) / sqrt(2)
  y[2, 1] <- (X0[1] - X0[2]) / sqrt(2)
  y[3, 1] <- X0[3]
  if (N > 1) {
    # The y full conditionals do not depend on the current state, so the
    # remaining N - 1 draws per component can be generated in a single call
    # instead of one scalar rnorm() per iteration.
    y[1, 2:N] <- rnorm(N - 1, mean = 0, sd = sqrt(1 + rho))
    y[2, 2:N] <- rnorm(N - 1, mean = 0, sd = sqrt(1 - rho))
    y[3, 2:N] <- rnorm(N - 1, mean = 0, sd = 1)
  }
  # Invert the transformation to recover the samples x from the target.
  x <- matrix(nrow = 3, ncol = N)
  x[1, ] <- (y[1, ] + y[2, ]) / sqrt(2)
  x[2, ] <- (y[1, ] - y[2, ]) / sqrt(2)
  x[3, ] <- y[3, ]
  x
}
qu1.reGibbs.r0.99 <- ReparamGibbs(N = 1000, rho = -0.99, X0 = c(0, 0, 0))
par(mfrow = c(3, 1), mar = c(4, 4, 2, 2))  # stack the three traceplots
# Traceplot of each component in turn, then the auto/cross-correlation plots
for (comp in 1:3) {
  plot(qu1.reGibbs.r0.99[comp, ], type = "l", ylab = paste0("x", comp))
}
acf(t(qu1.reGibbs.r0.99))
#Plots indicate the negative correlation is only present at the same lag i.e. within the same iteration
#However, the chains appear to be mixing a lot better and producing iid samples.
#Lack of correlation within the chain is displayed in the ACF plots.
#Question 2
library(MASS)
BlockGibbs <- function(N = 1000, rho = 0.01, X0 = c(0, 0, 0)) {
  # Blocked Gibbs sampler for the trivariate Normal target: components 1 and 2
  # are updated jointly from their bivariate full conditional (which, since
  # they are independent of component 3, is just their bivariate marginal),
  # and component 3 from its univariate full conditional.
  # N   = number of iterations
  # rho = correlation between components 1 and 2 (requires |rho| < 1)
  # X0  = initial state of the chain (length-3 vector)
  # Returns a 3 x N matrix: rows = components, columns = iterations.
  x <- matrix(nrow = 3, ncol = N)
  x[, 1] <- X0
  covar <- matrix(c(1, rho, rho, 1), nrow = 2, ncol = 2)  # cov of (x1, x2)
  if (N > 1) {                   # guard: 2:N would run backwards for N = 1
    for (i in 2:N) {
      # Joint (blocked) draw for the first two components
      x[c(1, 2), i] <- mvrnorm(1, mu = c(0, 0), Sigma = covar)
      # Univariate full conditional for component 3
      x[3, i] <- rnorm(1, 0, 1)
    }
  }
  x
}
qu2.BG.r0.99 <- BlockGibbs(N = 1000, rho = -0.99, X0 = c(0, 0, 0))
par(mfrow = c(3, 1), mar = c(4, 4, 2, 2))  # stack the three traceplots
plot(qu2.BG.r0.99[1, ], type = "l", ylab = "x1")
plot(qu2.BG.r0.99[2, ], type = "l", ylab = "x2")  # fixed label (was "x1")
plot(qu2.BG.r0.99[3, ], type = "l", ylab = "x3")  # fixed label (was "x1")
acf(t(qu2.BG.r0.99))  # auto- and cross-correlations of the three chains
#No autocorrelation is observed in the chains other than at lag zero, so the blocked sampler produces independent samples from the target distribution.
#An alternative way of producing independent samples from the target distribution.
#It is always desirable to reduce the dependency between successive states of the chain, bringing them as near to independence as possible.