ovenCounts.MLE = function() {

  # logit and inverse-logit helpers (assumed available in the original source;
  # defined here so the function is self-contained)
  expit = function(x) 1/(1 + exp(-x))
  logit = function(x) log(x/(1 - x))

  # define log-likelihood function
  logLike = function(yMat, nrepls, lambda, p, NinftyMaximum=5000) {
    nsites = length(nrepls)
    logLike = rep(NA, nsites)
    for (i in 1:nsites) {
      y = yMat[i, 1:nrepls[i]]
      Nmin = max(y)

      # compute normalizing constant for Poisson: extend the upper bound of the
      # summation in steps of deltaNinfty until the Poisson CDF at Ninfty is
      # within 0.001 of 1 (or the maximum allowable bound is reached)
      Ninfty = Nmin
      deltaNinfty = 10
      denom = 0
      while (abs(1-denom) >= 0.001 & Ninfty < NinftyMaximum) {
        Ninfty = Ninfty + deltaNinfty
        denom = ppois(Ninfty, lambda=lambda[i])
      }
      if (Ninfty >= NinftyMaximum) {
        cat(Ninfty, " (= upper bound for support of Poisson) exceeds maximum allowable value \n")
      }

      # compute marginal likelihood for site by summing over N = Nmin, ..., Ninfty
      siteSum = 0
      if (Nmin == 0) {
        siteSum = siteSum + exp(-lambda[i])
        Nmin = 1
      }
      N = Nmin:Ninfty
      logSum = dpois(N, lambda=lambda[i], log=TRUE)
      for (j in 1:nrepls[i]) {
        logSum = logSum + dbinom(y[j], size=N, prob=p[i], log=TRUE)
      }
      siteSum = siteSum + sum(exp(logSum))
      logLike[i] = log(siteSum)
    }
    sum(logLike)
  }

  # define negative log-likelihood function
  negLL = function(param) {
    pVec = rep(expit(param[1]), nsites)
    lambdaVec = rep(exp(param[2]), nsites)
    (-1) * logLike(yMat, nrepls, lambdaVec, pVec)
  }

  # read data from file and pre-process for analysis
  yMat = as.matrix(read.table('oven.txt'))
  nsites = dim(yMat)[1]
  nrepls = rep(dim(yMat)[2], nsites)
  yMax = rep(NA, nsites)
  for (i in 1:nsites) {
    yMax[i] = max(yMat[i, 1:nrepls[i]])
  }
  pGuess = 0.5
  lambdaGuess = mean(yMax)/pGuess

  # fit model to data
  fit = optim(par=c(logit(pGuess), log(lambdaGuess)), fn=negLL, method='BFGS', hessian=TRUE)

  list(logLikelihood=-fit$value, mle=fit$par, covMat=chol2inv(chol(fit$hessian)))
}
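
# Usage sketch (added for illustration, not part of the original function): calls
# ovenCounts.MLE() and back-transforms the MLEs from the logit/log scale to the
# natural scale. Assumes 'oven.txt' is in the working directory, as required by
# read.table() above; the object names below (fit.oven, p.hat, lambda.hat) are
# illustrative only.
fit.oven = ovenCounts.MLE()
p.hat = 1/(1 + exp(-fit.oven$mle[1]))      # estimated detection probability
lambda.hat = exp(fit.oven$mle[2])          # estimated mean abundance per site
se.trans = sqrt(diag(fit.oven$covMat))     # std. errors on the logit/log scale
print(c(logLik=fit.oven$logLikelihood, p=p.hat, lambda=lambda.hat))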