\name{rordprobitGibbs}
\alias{rordprobitGibbs}
\concept{bayes}
\concept{MCMC}
\concept{probit}
\concept{Gibbs Sampling}

\title{Gibbs Sampler for Ordered Probit}

\description{
\code{rordprobitGibbs} implements a Gibbs Sampler for the ordered probit model with a RW Metropolis step for the cut-offs.
}

\usage{rordprobitGibbs(Data, Prior, Mcmc)}

\arguments{
  \item{Data }{list(y, X, k)}
  \item{Prior}{list(betabar, A, dstarbar, Ad)}
  \item{Mcmc }{list(R, keep, nprint, s)}
}

\details{
\subsection{Model and Priors}{
  \eqn{z = X\beta + e} with \eqn{e} \eqn{\sim}{~} \eqn{N(0, I)}\cr
  \eqn{y = k} if c[k] \eqn{\le z <} c[k+1] with \eqn{k = 1,\ldots,K} \cr
  cutoffs = \{c[1], \eqn{\ldots}, c[k+1]\}

  \eqn{\beta} \eqn{\sim}{~} \eqn{N(betabar, A^{-1})} \cr
  \eqn{dstar} \eqn{\sim}{~} \eqn{N(dstarbar, Ad^{-1})}

  Be careful in assessing prior parameter \code{Ad}: 0.1 is too small for many applications.
}
\subsection{Argument Details}{
  \emph{\code{Data = list(y, X, k)}}
  \tabular{ll}{
    \code{y: } \tab \eqn{n x 1} vector of observations, (\eqn{1, \ldots, k}) \cr
    \code{X: } \tab \eqn{n x p} Design Matrix \cr
    \code{k: } \tab the largest possible value of y
    }
  \emph{\code{Prior = list(betabar, A, dstarbar, Ad)} [optional]}
  \tabular{ll}{
    \code{betabar: } \tab \eqn{p x 1} prior mean (def: 0) \cr
    \code{A: } \tab \eqn{p x p} prior precision matrix (def: 0.01*I) \cr
    \code{dstarbar: } \tab \eqn{ndstar x 1} prior mean, where \eqn{ndstar=k-2} (def: 0) \cr
    \code{Ad: } \tab \eqn{ndstar x ndstar} prior precision matrix (def: I)
    }
  \emph{\code{Mcmc = list(R, keep, nprint, s)} [only \code{R} required]}
  \tabular{ll}{
    \code{R: } \tab number of MCMC draws \cr
    \code{keep: } \tab MCMC thinning parameter -- keep every \code{keep}th draw (def: 1) \cr
    \code{nprint: } \tab print the estimated time remaining for every \code{nprint}'th draw (def: 100, set to 0 for no print) \cr
    \code{s: } \tab
scaling parameter for RW Metropolis (def: 2.93/\code{sqrt(p)})
    }
}
}

\value{
  A list containing:
  \item{betadraw }{ \eqn{R/keep x p} matrix of betadraws}
  \item{cutdraw }{ \eqn{R/keep x (k-1)} matrix of cutdraws}
  \item{dstardraw }{ \eqn{R/keep x (k-2)} matrix of dstardraws}
  \item{accept }{ acceptance rate of Metropolis draws for cut-offs}
}

\note{
  set c[1] = -100 and c[k+1] = 100. c[2] is set to 0 for identification. \cr

  The relationship between cut-offs and dstar is: \cr
  c[3] = exp(dstar[1]), \cr
  c[4] = c[3] + exp(dstar[2]), ..., \cr
  c[k] = c[k-1] + exp(dstar[k-2])
}

\references{\emph{Bayesian Statistics and Marketing} by Rossi, Allenby, and McCulloch \cr \url{http://www.perossi.org/home/bsm-1}}

\author{ Peter Rossi, Anderson School, UCLA, \email{perossichi@gmail.com}.}

\seealso{ \code{\link{rbprobitGibbs}} }

\examples{
if(nchar(Sys.getenv("LONG_TEST")) != 0) {R=2000} else {R=10}
set.seed(66)

## simulate data for ordered probit model

simordprobit=function(X, betas, cutoff){
  z = X\%*\%betas + rnorm(nobs)
  y = cut(z, br = cutoff, right=TRUE, include.lowest = TRUE, labels = FALSE)
  return(list(y = y, X = X, k=(length(cutoff)-1), betas= betas, cutoff=cutoff ))
}

nobs = 300
X = cbind(rep(1,nobs),runif(nobs, min=0, max=5),runif(nobs,min=0, max=5))
k = 5
betas = c(0.5, 1, -0.5)
cutoff = c(-100, 0, 1.0, 1.8, 3.2, 100)
simout = simordprobit(X, betas, cutoff)

Data=list(X=simout$X, y=simout$y, k=k)

## set Mcmc for ordered probit model

Mcmc = list(R=R)
out = rordprobitGibbs(Data=Data, Mcmc=Mcmc)

cat(" ", fill=TRUE)
cat("acceptance rate= ", accept=out$accept, fill=TRUE)

## outputs of betadraw and cut-off draws

cat(" Summary of betadraws", fill=TRUE)
summary(out$betadraw, tvalues=betas)
cat(" Summary of cut-off draws", fill=TRUE)
summary(out$cutdraw, tvalues=cutoff[2:k])

## plotting examples
if(0){plot(out$cutdraw)}
}

\keyword{models}