bayesImageS::swNoData(..)

This is the first in a series of posts describing the functions and algorithms that I have implemented in the R package bayesImageS.
Gibbs sampling was originally designed by Geman & Geman (1984) for drawing updates from the Gibbs distribution, hence the name. However, single-site Gibbs sampling exhibits poor mixing due to the posterior correlation between the pixel labels. Thus it is very slow to converge when the correlation (controlled by the inverse temperature β) is high.
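To make the mixing problem concrete, here is a minimal sketch of single-site Gibbs sampling for a k-state Potts prior with no external field. This is purely illustrative: the function name and arguments are my own, and it is not the implementation used in either package (those are written in compiled code).

# Illustrative sketch only (not the package implementation): single-site Gibbs
# sweeps for a k-state Potts prior on an n.row x n.col lattice with a
# first-order neighbourhood and no external field.
gibbsPottsSketch <- function(n.row, n.col, k, beta, sweeps) {
  z <- matrix(sample.int(k, n.row * n.col, replace = TRUE), n.row, n.col)
  for (s in seq_len(sweeps)) {
    for (i in seq_len(n.row)) {
      for (j in seq_len(n.col)) {
        # labels of the (up to four) first-order neighbours of pixel (i,j)
        nbr <- c(if (i > 1) z[i - 1, j], if (i < n.row) z[i + 1, j],
                 if (j > 1) z[i, j - 1], if (j < n.col) z[i, j + 1])
        # full conditional: Pr(z_ij = c | rest) is proportional to
        # exp(beta * number of neighbours with label c)
        w <- exp(beta * tabulate(nbr, nbins = k))
        z[i, j] <- sample.int(k, 1, prob = w)
      }
    }
  }
  z
}

Each pixel is updated conditional on its neighbours, so when β is large the labels change very little from one sweep to the next.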
The algorithm of Swendsen & Wang (1987) addresses this problem by forming clusters of neighbouring pixels, then updating all of the labels within a cluster to the same value. When simulating from the prior, such as a Potts model without an external field, this algorithm is very efficient.
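For comparison, here is a rough sketch of a single Swendsen-Wang update, assuming the igraph package is available for finding connected components. The function name is hypothetical and this is not how either SW or swNoData is implemented internally.

# Illustrative sketch only: one Swendsen-Wang update for the Potts prior.
# 'z' is a vector of pixel labels and 'edges' is a two-column matrix of
# neighbour pairs (e.g. as returned by PottsUtils::getEdges).
swUpdateSketch <- function(z, edges, k, beta) {
  # bond like-labelled neighbours with probability 1 - exp(-beta)
  same <- z[edges[, 1]] == z[edges[, 2]]
  bond <- same & (runif(nrow(edges)) < 1 - exp(-beta))
  # clusters are the connected components of the bond graph
  g <- igraph::graph_from_edgelist(edges[bond, , drop = FALSE], directed = FALSE)
  g <- igraph::add_vertices(g, max(0, length(z) - igraph::vcount(g)))
  memb <- igraph::components(g)$membership
  # assign every pixel in a cluster the same, uniformly chosen new label
  newLabels <- sample.int(k, max(memb), replace = TRUE)
  newLabels[memb]
}

Because whole clusters flip at once, the chain mixes well even for large β, which is what makes Swendsen-Wang such a useful building block when simulating from the prior.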
The SW function in the PottsUtils package is implemented in a combination of R and C. The swNoData function in bayesImageS is implemented using RcppArmadillo, which gives it a speed advantage. It is worth noting that the intention of bayesImageS is not to replace PottsUtils. Rather, an efficient Swendsen-Wang algorithm is used as a building block for implementations of ABC (Grelaud et al., 2009), path sampling (Gelman & Meng, 1998), and the exchange algorithm (Murray et al., 2006). These other algorithms will be covered in future posts.
There are two things that we want to keep track of in this simulation study: the speed of the algorithm and the distribution of the summary statistic. We will be using system.time(..) to measure both CPU and elapsed (wall clock) time taken for the same number of iterations, for a range of inverse temperatures:
beta <- seq(0, 2, by = 0.1)
tmMx.PU <- tmMx.bIS <- matrix(nrow = length(beta), ncol = 2)
rownames(tmMx.PU) <- rownames(tmMx.bIS) <- beta
colnames(tmMx.PU) <- colnames(tmMx.bIS) <- c("user", "elapsed")
We will discard the first 100 iterations as burn-in and keep the remaining 500.
iter <- 600
burn <- 100
samp.PU <- samp.bIS <- matrix(nrow = length(beta), ncol = iter - burn)
The distribution of pixel labels can be summarised by the sufficient statistic of the Potts model:

$$S(z) = \sum_{i \sim \ell \in \mathcal{E}} \delta(z_i, z_\ell)$$

where $\mathcal{E}$ is the set of all pairs of neighbours in the lattice (i.e. the cliques) and $\delta(z_i, z_\ell)$ is 1 if $z_i = z_\ell$ and 0 otherwise (the Kronecker delta function).
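As a quick illustration (separate from the simulation study below), the statistic can be computed directly from the edge list of a small lattice:

# Toy example: computing S(z) directly from the edge list of a 3x3 lattice
library(PottsUtils)
toy.mask <- matrix(1, 3, 3)
toy.edges <- getEdges(toy.mask, c(2, 2, 0, 0))
z <- sample.int(2, sum(toy.mask), replace = TRUE)  # random labels for k = 2
sum(z[toy.edges[, 1]] == z[toy.edges[, 2]])        # S(z): number of matching pairs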
swNoData returns this statistic automatically, but with SW we will need to use the function sufficientStat to calculate it from the labels.
library(bayesImageS)
library(PottsUtils)

mask <- matrix(1,50,50)
neigh <- getNeighbors(mask, c(2,2,0,0))
block <- getBlocks(mask, 2)
edges <- getEdges(mask, c(2,2,0,0))
n <- sum(mask)
k <- 2
bcrit <- log(1 + sqrt(k))
maxSS <- nrow(edges)

for (i in 1:length(beta)) {
  # PottsUtils
  tm <- system.time(result <- SW(iter, n, k, edges, beta = beta[i]))
  tmMx.PU[i, "user"] <- tm["user.self"]
  tmMx.PU[i, "elapsed"] <- tm["elapsed"]
  res <- sufficientStat(result, neigh, block, k)
  samp.PU[i, ] <- res$sum[(burn+1):iter]
  print(paste("PottsUtils::SW", beta[i], tm["elapsed"], median(samp.PU[i, ])))

  # bayesImageS
  tm <- system.time(result <- swNoData(beta[i], k, neigh, block, c(0,n), iter))
  tmMx.bIS[i, "user"] <- tm["user.self"]
  tmMx.bIS[i, "elapsed"] <- tm["elapsed"]
  samp.bIS[i, ] <- result$sum[(burn+1):iter]
  print(paste("bayesImageS::swNoData", beta[i], tm["elapsed"], median(samp.bIS[i, ])))
}
## [1] "PottsUtils::SW 0 9.07 2454"
## [1] "bayesImageS::swNoData 0 0.27 2455"
## [1] "PottsUtils::SW 0.1 8.48 2575.5"
## [1] "bayesImageS::swNoData 0.1 0.390000000000001 2572"
## [1] "PottsUtils::SW 0.2 8.01 2699"
## [1] "bayesImageS::swNoData 0.2 0.43 2700"
## [1] "PottsUtils::SW 0.3 7.39 2832"
## [1] "bayesImageS::swNoData 0.3 0.439999999999998 2832.5"
## [1] "PottsUtils::SW 0.4 6.88 2980"
## [1] "bayesImageS::swNoData 0.4 0.43 2976"
## [1] "PottsUtils::SW 0.5 6.34 3130"
## [1] "bayesImageS::swNoData 0.5 0.43 3129"
## [1] "PottsUtils::SW 0.6 5.85 3311"
## [1] "bayesImageS::swNoData 0.6 0.450000000000003 3310"
## [1] "PottsUtils::SW 0.7 5.14 3515.5"
## [1] "bayesImageS::swNoData 0.7 0.410000000000004 3516.5"
## [1] "PottsUtils::SW 0.8 4.57 3787"
## [1] "bayesImageS::swNoData 0.8 0.409999999999997 3784"
## [1] "PottsUtils::SW 0.9 3.94999999999999 4177.5"
## [1] "bayesImageS::swNoData 0.9 0.420000000000002 4178"
## [1] "PottsUtils::SW 1 3.41 4516.5"
## [1] "bayesImageS::swNoData 1 0.420000000000002 4524.5"
## [1] "PottsUtils::SW 1.1 3.22999999999999 4672.5"
## [1] "bayesImageS::swNoData 1.1 0.420000000000002 4683"
## [1] "PottsUtils::SW 1.2 3.08 4763"
## [1] "bayesImageS::swNoData 1.2 0.390000000000001 4768"
## [1] "PottsUtils::SW 1.3 3.16 4812"
## [1] "bayesImageS::swNoData 1.3 0.420000000000002 4810"
## [1] "PottsUtils::SW 1.4 3.00999999999999 4842"
## [1] "bayesImageS::swNoData 1.4 0.390000000000001 4841"
## [1] "PottsUtils::SW 1.5 2.98 4862.5"
## [1] "bayesImageS::swNoData 1.5 0.400000000000006 4863"
## [1] "PottsUtils::SW 1.6 3.01000000000001 4876"
## [1] "bayesImageS::swNoData 1.6 0.399999999999991 4876"
## [1] "PottsUtils::SW 1.7 2.98999999999999 4884"
## [1] "bayesImageS::swNoData 1.7 0.390000000000001 4884"
## [1] "PottsUtils::SW 1.8 2.97999999999999 4888"
## [1] "bayesImageS::swNoData 1.8 0.400000000000006 4889"
## [1] "PottsUtils::SW 1.9 2.92999999999999 4893"
## [1] "bayesImageS::swNoData 1.9 0.399999999999991 4893"
## [1] "PottsUtils::SW 2 2.91 4896"
## [1] "bayesImageS::swNoData 2 0.38000000000001 4896"
Here is the comparison of elapsed times between the two algorithms (in seconds):
summary(tmMx.PU)
## user elapsed
## Min. :2.92 Min. :2.91
## 1st Qu.:3.00 1st Qu.:3.01
## Median :3.37 Median :3.41
## Mean :4.72 Mean :4.73
## 3rd Qu.:6.34 3rd Qu.:6.34
## Max. :9.05 Max. :9.07
summary(tmMx.bIS)
## user elapsed
## Min. :0.270 Min. :0.270
## 1st Qu.:0.390 1st Qu.:0.390
## Median :0.400 Median :0.410
## Mean :0.404 Mean :0.404
## 3rd Qu.:0.430 3rd Qu.:0.420
## Max. :0.450 Max. :0.450
boxplot(tmMx.PU[,"elapsed"],tmMx.bIS[,"elapsed"],ylab="seconds elapsed",names=c("SW","swNoData"))
Based on the elapsed times above, swNoData using RcppArmadillo is at least seven times faster than SW at every value of β, and more than ten times faster on average.
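Should you want to check, the per-β speedup can be computed directly from the timing matrices above:

# elapsed-time speedup of swNoData relative to SW at each value of beta
speedup <- tmMx.PU[, "elapsed"] / tmMx.bIS[, "elapsed"]
summary(speedup)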
library(lattice)
s_z <- c(samp.PU, samp.bIS)
s_x <- rep(beta, times = iter - burn)
s_a <- rep(1:2, each = length(beta) * (iter - burn))
s.frame <- data.frame(s_z, c(s_x, s_x), s_a)
names(s.frame) <- c("stat", "beta", "alg")
s.frame$alg <- factor(s_a, labels = c("SW", "swNoData"))
xyplot(stat ~ beta | alg, data = s.frame)
plot(c(s_x, s_x), s_z, pch = s_a, xlab = expression(beta), ylab = expression(S(z)))
abline(v = bcrit, col = "red")
The overlap between the two distributions is almost complete, although it is a bit tricky to verify that statistically. The relationship between $\beta$ and $S(z)$ is nonlinear and heteroskedastic. As you can see above, it looks like a monotone, logistic-type curve.
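One rough way to check the overlap is a two-sample Kolmogorov-Smirnov test at each value of β. The draws within each chain are autocorrelated and the statistic is discrete (so there are ties), hence the p-values should be treated as indicative only:

# Rough check only: two-sample KS test comparing the two algorithms at each beta.
# Ties and autocorrelation mean the p-values are indicative rather than exact.
ks.p <- sapply(seq_along(beta), function(i)
  suppressWarnings(ks.test(samp.PU[i, ], samp.bIS[i, ])$p.value))
names(ks.p) <- beta
round(ks.p, 3)

A simpler comparison is to look at the difference in means at each β: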
rowMeans(samp.bIS) - rowMeans(samp.PU)
plot(beta, rowMeans(samp.bIS) - rowMeans(samp.PU), ylab = "residuals")
abline(h = 0, col = "blue")
## [1] 2.832 1.914 -2.938 -2.526 1.940 0.098 4.782 -9.586 -1.828 -3.262 -2.170 0.622 -5.652 -1.938 0.266 -0.160 -0.810 0.070
## [19] -2.222 0.612 -0.076
apply(samp.PU, 1, sd)
## [1] 35.926102 35.512173 35.059970 37.198158 38.421849 42.263056 44.581491 46.706928 56.695264 75.187138 45.770567 32.748571 25.916751
## [14] 19.199334 15.329521 12.366351 9.012580 8.343045 6.055943 4.882581 4.621799
apply(samp.bIS, 1, sd)
## [1] 34.297109 33.822684 36.627712 38.201621 38.336899 42.124580 43.446136 47.698151 60.281811 70.725220 49.405012 33.745021 26.981461
## [14] 22.734424 15.204848 13.488972 10.091464 7.708416 7.125225 5.722184 4.628092
References
Gelman, A. & Meng, X.-L. (1998) Simulating normalizing constants: from importance sampling to bridge sampling to path sampling. Statist. Sci. 13(2): 163–185.
Geman, S. & Geman, D. (1984) Stochastic relaxation, Gibbs distributions and the Bayesian restoration of images. IEEE Trans. PAMI 6: 721–741.
Grelaud, A., Robert, C. P., Marin, J.-M., Rodolphe, F. & Taly, J.-F. (2009) ABC likelihood-free methods for model choice in Gibbs random fields. Bayesian Analysis 4(2): 317–335.
Murray, I., Ghahramani, Z. & MacKay, D. J. C. (2006) MCMC for doubly-intractable distributions. In Proc. 22nd Conf. UAI, 359–366.
Swendsen, R. H. & Wang, J.-S. (1987) Nonuniversal critical dynamics in Monte Carlo simulations. Phys. Rev. Lett. 58(2): 86–88.