
# file "Radapt" -- an adaptive Metropolis algorithm

# Specify the target distribution: a 5-dimensional Gaussian with a fixed
# (arbitrary but reproducible) covariance matrix and mean vector.
dim <- 5
tmpmat <- matrix(
  c( 0.458731768, -1.138985820,  0.784868032, -1.017293716, -0.723827196,
     0.564371814, -0.155166457,  0.902601504,  0.186805644,  0.952120668,
     1.486382516,  1.458366992,  0.709626067, -1.138458393,  0.897889709,
     0.431802958, -0.721054075, -0.656762999,  1.940817630, -1.015356510,
     0.014969681,  0.422744911,  0.073882617, -0.007217515,  0.664031927),
  nrow = dim
)
# crossprod(A) == t(A) %*% A, which is symmetric positive semi-definite,
# so it is a valid covariance matrix.
targSigma <- crossprod(tmpmat)
targMean <- c(0.2133594, 0.5641370, -0.8031936, -1.1818973, -0.7307054)
identity <- diag(dim)

library(mvtnorm)

# Log-density of the target N(targMean, targSigma), evaluated at X.
# Working on the log scale avoids underflow in the accept/reject ratio.
logpi <- function(X) {
  dmvnorm(X, mean = targMean, sigma = targSigma, log = TRUE)
}

# Functional of interest: the square of the first coordinate.  Its
# stationary mean is estimated from the post-burn-in chain below.
h <- function(X) X[1]^2

M <- 40000  # run length
B <- 20000  # amount of burn-in

# Overdispersed starting point, well away from the target mean.
X <- runif(dim, 5, 15)

# Preallocate storage for the whole run.
x1list <- numeric(M)    # first coordinate of the chain at each iteration
x2list <- numeric(M)    # second coordinate of the chain at each iteration
Xveclist <- matrix(0, nrow = M, ncol = dim)  # full chain values, one row per iteration
hlist <- numeric(M)     # functional values h(X)
slowdown <- numeric(M)  # slowdown factors (filled once adaptation starts)

numaccept <- 0
epsilon <- 0.05
mult <- 1.4  # (optimal scale ... close to 1 in this case ...)
# mult <- (2.38)^2 / dim  # (optimal scale ... close to 1 in this case ...)

# Run the adaptive Metropolis algorithm.
# For the first dim^2 - 1 iterations the proposal covariance is a fixed
# multiple of the identity (too few samples to estimate a dim x dim
# covariance); afterwards it adapts to the empirical covariance of the
# chain so far, regularized by epsilon * identity to stay nonsingular.
cat("Running the adaptive Metropolis algorithm for", M, "iterations ...\n");
for (i in 1:M) {
    if (i < dim^2)
      propSigma = 0.1 * identity
    else {
      # Empirical covariance of all previous chain values (rows 1..i-1).
      covsofar = cov( Xveclist[ (1:(i-1)) ,] )
      propSigma = mult * covsofar + epsilon * identity
      # Compute slow-down factor.
      # NOTE(review): this looks like the inhomogeneity factor
      # b = d * sum(1/lambda_i) / (sum(1/sqrt(lambda_i)))^2 over the
      # eigenvalues of covsofar %*% solve(targSigma), measuring how far the
      # adapted covariance is from the target's -- confirm against the
      # adaptive-MCMC reference this script follows.
      evals = eigen(covsofar %*% solve(targSigma))$values
      absevals = abs(evals)
      slowdown[i] = dim * sum(1/absevals) / (sum(1/sqrt(absevals)))^2
    }
    # Symmetric random-walk proposal, so the Metropolis ratio is pi(Y)/pi(X).
    Y = X + rmvnorm(1, sigma = propSigma)  # proposal value
    U = runif(1)  # for accept/reject
    # Use log scale to avoid zeroes.
    if (log(U) < logpi(Y) - logpi(X)) {
	X = Y  # accept proposal
        numaccept = numaccept + 1;
    }
    # Record the current state and functional value, whether or not we moved.
    x1list[i] = X[1];
    x2list[i] = X[2];
    Xveclist[i,] = X;
    hlist[i] = h(X);
    # Output progress report.
    if ((i %% 100) == 0)
      cat (" ...",i);
}

# Output results.
cat("\n\nRan the adaptive Metropolis algorithm for", M,
    "iterations, with burn-in", B, "\n")
cat("Acceptance rate =", numaccept / M, "\n")

# Post-burn-in functional values, used for all estimates below.
postburn <- hlist[(B + 1):M]
estmean <- mean(postburn)
cat("Estimated mean of h is ", estmean, "\n")
# For h(X) = X[1]^2 under N(targMean, targSigma):
# E[X1^2] = (E X1)^2 + Var(X1).
cat("True mean of h is ", targMean[1]^2 + targSigma[1, 1], "\n")

# Compute standard errors & confidence intervals.
# Integrated autocorrelation ("variance factor"): the factor by which the
# iid variance must be inflated to account for chain autocorrelation.
varfact <- function(xxx) {
  2 * sum(acf(xxx, plot = FALSE, lag.max = 250)$acf) - 1
}
se1 <- sd(postburn) / sqrt(M - B)
cat("iid standard error would be about", se1, "\n")
se2 <- se1 * sqrt(varfact(postburn))
cat("true standard error is about", se2, "\n")
cat("95% confidence interval is about (",
    estmean - 1.96 * se2, ",", estmean + 1.96 * se2, ")\n")
if (M > 4000) {
  cat("varfact of last 4000 iterations:", varfact(hlist[(M - 4000 + 1):M]), "\n")
}

# Compute the varfact piece-by-piece: one variance factor per consecutive
# batch of iterations, to see how mixing evolves over the run.
batchwidth <- 1000
numbatches <- floor(M / batchwidth)
varfactlist <- vapply(
  seq_len(numbatches),
  function(b) varfact(hlist[(batchwidth * (b - 1) + 1):(batchwidth * b)]),
  numeric(1)
)

# Plot the run: trace of the first coordinate over the full chain.
# Uncomment any of the lines below for further diagnostics.
# targSigma
# cov(Xveclist[(B+1):M,])
plot(x1list, type = "l")
# plot(x1list[(B+1):M], type='l')
# plot(slowdown[(dim^2+1):M], type='l')
# plot(varfactlist, type='b')
# acf(hlist[(B+1):M],lag.max=250)


