# Fit a general model of y on one predictor x by MCMC
# (Metropolis-within-Gibbs: one Metropolis update per parameter per step).
# data is a dataframe including x (predictor) and y (observations).
# Initial parameters are start; the last element of start is the residual SD,
# and the model parameters beta are all elements except the last.
modelfit.Bayes=function(start,data,xcol="dbh1",ycol="status2",model=asymp.model,
                        steps=20,showstep=1,burnin=500)
{
 # On exit (including on error), report which step and parameter were last visited
 on.exit(cat(i,j,"\n"))

 no.param=length(start)
 param=matrix(nrow=steps,ncol=no.param)
 param[1,]=start
 stepsize=rep(1,no.param)
 llike=numeric()

 x=data[,xcol]
 y=data[,ycol]

 ## Calculate likelihood of data at starting point
 llike[1]=llike.genmodel(testvalue=start[1],whichtest=1,allparam=start,
                         x=x,y=y,model=model)

 for(i in 2:steps)
  {
   for(j in 1:no.param)
    {
     # Current parameter vector: values already updated this step for 1..(j-1),
     # previous step's values for j..no.param
     if(j==1) allparam=param[i-1,]
     else allparam=c(param[i,1:(j-1)],param[i-1,j:no.param])
     # browser()

     # One Metropolis update of parameter j, with the proposal step size
     # adapted toward a 25% acceptance rate
     metropResult=metrop1step(func=llike.genmodel,start.param=param[i-1,j],
                              allparam=allparam,whichtest=j,
                              x=x,y=y,model=model,scale.param=stepsize[j],
                              adjust=1.01,target=0.25)

     param[i,j]=metropResult[1]
     stepsize[j]=metropResult[2]
     # browser()
    }

   ### Calculate and save likelihood after finishing each step
   llike[i]=metropResult[4]

   #### cat results if desired
   if(i%%showstep==0)
     cat("Step ", i, ": Param: ", round(param[i,],3), "llike: ", round(llike[i],4), "\n")
  }

 # Drop the burn-in steps (burnin must be smaller than steps)
 keep=(-1)*(1:burnin)
 best=colMeans(param[keep,])
 ci=apply(param[keep,],2,CI)

 return(list(best=best,CI=ci,full=param,llike=llike))
}


# Log-likelihood of the data given the full parameter vector, with the single
# parameter currently being tested (testvalue) substituted at position whichtest.
llike.genmodel=function(testvalue,whichtest,allparam,x,y,model)
{
 param=allparam
 param[whichtest]=testvalue

 no.param=length(param)
 beta=param[-no.param]
 SD=param[no.param]
 if(SD<=0) return(-Inf)

 yhat=model(beta,x)

 llike=dnorm(y,mean=yhat,sd=SD,log=TRUE)
 # if(length(which(is.na(llike)))>0)
 #  browser()

 return(sum(llike,na.rm=TRUE))
}
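

## metrop1step is called above but not defined in this file. Based on how its
## return value is indexed above, it is assumed to perform a single Metropolis
## update of one parameter and to return a vector whose elements are
## [1] the accepted parameter value, [2] the adjusted proposal step size,
## [3] (assumed) an acceptance flag, and [4] the log-likelihood at the returned
## value. The function below is a minimal stand-in sketch under those
## assumptions, not the original implementation; the step-size rule adapts the
## proposal SD so the acceptance rate drifts toward target.
metrop1step.sketch=function(func,start.param,scale.param,adjust,target,...)
{
 origlike=func(start.param,...)

 # Gaussian proposal centered on the current value
 newval=rnorm(1,mean=start.param,sd=scale.param)
 newlike=func(newval,...)

 ratio=newlike-origlike
 if(!is.na(ratio) && log(runif(1))<ratio)
  {
   # Accepted: grow the step size slightly
   return(c(newval,scale.param*adjust,1,newlike))
  }

 # Rejected: shrink the step size so equilibrium acceptance equals target
 return(c(start.param,scale.param*adjust^(-target/(1-target)),0,origlike))
}
# metrop1step=metrop1step.sketch   # stand-in if the original metrop1step is unavailable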
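

## asymp.model (the default model) and CI are referenced above but are also not
## defined in this file. The definitions below are stand-in sketches consistent
## with how they are called: the model takes a parameter vector beta and a
## predictor vector x, and CI summarizes one column of retained posterior draws.
## The saturating curve is only one possible asymptotic form and is not
## necessarily the original asymp.model.
asymp.model.example=function(beta,x) beta[1]*(1-exp(-beta[2]*x))

## 95% interval of the retained draws for one parameter
CI=function(v) quantile(v,probs=c(0.025,0.975))

## Hypothetical usage on simulated data (column names match the function defaults)
# testdata=data.frame(dbh1=runif(200,0,50))
# testdata$status2=asymp.model.example(c(10,0.2),testdata$dbh1)+rnorm(200,sd=1)
# fit=modelfit.Bayes(start=c(5,0.1,2),data=testdata,model=asymp.model.example,
#                    steps=2000,showstep=100,burnin=500)
# fit$best; fit$CI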