diff --git a/R/add_binomial.R b/R/add_binomial.R
index 0663a866..9c3603be 100644
--- a/R/add_binomial.R
+++ b/R/add_binomial.R
@@ -75,6 +75,51 @@ add_binomial = function(formula,
     trials <- NULL
   }
 
+  # Update parameters block
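+  # For Beta-Binomial models this appends a vector of series-specific
+  # precision parameters (phi); the readLines(textConnection()) call re-reads
+  # the file so the embedded "\n" becomes a separate model file line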
+  if(family_char == 'beta_binomial'){
+    model_file[grep("vector[num_basis] b_raw;",
+                    model_file, fixed = TRUE)] <-
+      paste0("vector[num_basis] b_raw;\n",
+             "vector<lower=0>[n_series] phi;")
+    model_file <- readLines(textConnection(model_file), n = -1)
+  }
+
+  # Update functions block
+  if(family_char == 'beta_binomial'){
+    if(any(grepl('functions {', model_file, fixed = TRUE))){
+      model_file[grep('functions {', model_file, fixed = TRUE)] <-
+        paste0("functions {\n",
+               "vector rep_each(vector x, int K) {\n",
+               "int N = rows(x);\n",
+               "vector[N * K] y;\n",
+               "int pos = 1;\n",
+               "for (n in 1 : N) {\n",
+               "for (k in 1 : K) {\n",
+               "y[pos] = x[n];\n",
+               "pos += 1;\n",
+               "}\n",
+               "}\n",
+               "return y;\n",
+               "}")
+    } else {
+      model_file[grep('Stan model code', model_file)] <-
+        paste0('// Stan model code generated by package mvgam\n',
+               'functions {\n',
+               "vector rep_each(vector x, int K) {\n",
+               "int N = rows(x);\n",
+               "vector[N * K] y;\n",
+               "int pos = 1;\n",
+               "for (n in 1 : N) {\n",
+               "for (k in 1 : K) {\n",
+               "y[pos] = x[n];\n",
+               "pos += 1;\n",
+               "}\n",
+               "}\n",
+               "return y;\n",
+               "}\n}")
+    }
+  }
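+  # rep_each() mirrors R's rep(x, each = K): each element of phi is repeated
+  # K times, so the n_series precision values can be expanded to observation
+  # length before subsetting with obs_ind in the model block below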
+
   # Update model block
   if(family_char == 'binomial'){
     if(any(grepl("flat_ys ~ poisson_log_glm(flat_xs,",
@@ -92,6 +137,38 @@ add_binomial = function(formula,
     }
   }
 
+  if(family_char == 'beta_binomial'){
+    model_file[grep("model {", model_file, fixed = TRUE)] <-
+      paste0("model {\n",
+             "// priors for Beta dispersion parameters\n",
+             "phi ~ gamma(0.01, 0.01);")
+    if(any(grepl("flat_ys ~ poisson_log_glm(flat_xs,",
+                 model_file, fixed = TRUE))){
+      linenum <- grep("flat_ys ~ poisson_log_glm(flat_xs,",
+                      model_file, fixed = TRUE)
+      model_file[linenum] <-
+        paste0("vector[n_nonmissing] flat_phis;\n",
+               "flat_phis = rep_each(phi, n)[obs_ind];\n",
+               "flat_ys ~ beta_binomial(flat_trials_train, inv_logit(flat_xs * b) .* flat_phis, (1 - inv_logit(flat_xs * b)) .* flat_phis);")
+
+      model_file <- model_file[-(linenum + 1)]
+      model_file <- readLines(textConnection(model_file), n = -1)
+    }
+
+    if(any(grepl("flat_ys ~ poisson_log_glm(append_col(flat_xs, flat_trends),",
+                 model_file, fixed = TRUE))){
+      linenum <- grep("flat_ys ~ poisson_log_glm(append_col(flat_xs, flat_trends),",
+                      model_file, fixed = TRUE)
+      model_file[linenum] <-
+        paste0("vector[n_nonmissing] flat_phis;\n",
+               "flat_phis = rep_each(phi, n)[obs_ind];\n",
+               "flat_ys ~ beta_binomial(flat_trials_train, inv_logit(append_col(flat_xs, flat_trends) * b) .* flat_phis, (1 - inv_logit(append_col(flat_xs, flat_trends) * b)) .* flat_phis);")
+
+      model_file <- model_file[-(linenum + 1)]
+      model_file <- readLines(textConnection(model_file), n = -1)
+    }
+  }
+
   if(family_char == 'bernoulli'){
     if(any(grepl("flat_ys ~ poisson_log_glm(flat_xs,",
                  model_file, fixed = TRUE))){
@@ -116,6 +193,22 @@ add_binomial = function(formula,
       "ypred[1:n, s] = binomial_rng(flat_trials[ytimes[1:n, s]], inv_logit(mus[1:n, s]));"
   }
 
+  if(family_char == 'beta_binomial'){
+    model_file[grep("vector[total_obs] eta;", model_file, fixed = TRUE)] <-
+      paste0("vector[total_obs] eta;\n",
+             "matrix[n, n_series] phi_vec;")
+
+    model_file[grep("eta = X * b;", model_file, fixed = TRUE)] <-
+      paste0("eta = X * b;;\n",
+             "for (s in 1 : n_series) {\n",
+             "phi_vec[1 : n, s] = rep_vector(phi[s], n);\n",
+             "}")
+
+    model_file[grep("ypred[1:n, s] = poisson_log_rng(mus[1:n, s]);" ,
+                    model_file, fixed = TRUE)] <-
+      "ypred[1:n, s] = beta_binomial_rng(flat_trials[ytimes[1:n, s]], inv_logit(mus[1:n, s]) .* phi_vec[1:n, s], (1 - inv_logit(mus[1:n, s])) .* phi_vec[1:n, s]);"
+  }
+
   if(family_char == 'bernoulli'){
     model_file[grep("ypred[1:n, s] = poisson_log_rng(mus[1:n, s]);" ,
                     model_file, fixed = TRUE)] <-
diff --git a/R/conditional_effects.R b/R/conditional_effects.R
index 61bf8426..46a10ed4 100644
--- a/R/conditional_effects.R
+++ b/R/conditional_effects.R
@@ -86,6 +86,23 @@ conditional_effects.mvgam = function(x,
   type <- match.arg(type, c('response', 'link',
                             'detection', 'latent_N',
                             'expected'))
+
+  # Can't plot points or rugs with binomial models due to the
+  # cbind syntax
+  if(rug){
+    if(x$family %in% c('binomial', 'beta_binomial')){
+      rug <- FALSE
+      warning('Cannot show observation rug for binomial models')
+    }
+  }
+
+  if(points){
+    if(x$family %in% c('binomial', 'beta_binomial')){
+      points <- FALSE
+      warning('Cannot show observation points for binomial models')
+    }
+  }
+
   if(type == 'response'){
     if(points){
       points <- 0.5
diff --git a/R/families.R b/R/families.R
index d2a5ecdf..3037e463 100644
--- a/R/families.R
+++ b/R/families.R
@@ -2,7 +2,7 @@
 #' @importFrom stats make.link dgamma pgamma rgamma qnorm plnorm runif pbeta dlnorm dpois
 #' @importFrom stats pnorm ppois plogis gaussian poisson Gamma dnbinom rnbinom dnorm dbeta
 #' @importFrom stats binomial rbinom pbinom dbinom qbinom qlogis
-#' @importFrom brms lognormal student bernoulli rstudent_t qstudent_t dstudent_t pstudent_t
+#' @importFrom brms lognormal student bernoulli rstudent_t qstudent_t dstudent_t pstudent_t dbeta_binomial rbeta_binomial pbeta_binomial
 #' @param link a specification for the family link function. At present these cannot
 #' be changed
 #' @details \code{mvgam} currently supports the following standard observation families:
@@ -192,6 +192,33 @@ student_t = function(link = 'identity'){
 #'                 conf_level = 0.5) +
 #'  ylab('Observed abundance') +
 #'  theme_classic()
+#'
+#' # Example showcasing how cbind() is needed for Binomial observations
+#' # Simulate two time series of Binomial trials
+#' trials <- sample(c(20:25), 50, replace = TRUE)
+#' x <- rnorm(50)
+#' detprob1 <- plogis(-0.5 + 0.9*x)
+#' detprob2 <- plogis(-0.1 -0.7*x)
+#' dat <- rbind(data.frame(y = rbinom(n = 50, size = trials, prob = detprob1),
+#'                         time = 1:50,
+#'                         series = 'series1',
+#'                         x = x,
+#'                         ntrials = trials),
+#'              data.frame(y = rbinom(n = 50, size = trials, prob = detprob2),
+#'                         time = 1:50,
+#'                         series = 'series2',
+#'                         x = x,
+#'                         ntrials = trials))
+#' dat <- dplyr::mutate(dat, series = as.factor(series))
+#' dat <- dplyr::arrange(dat, time, series)
+#'
+#' # Fit a model using the binomial() family; must specify observations
+#' # and number of trials in the cbind() wrapper
+#' mod <- mvgam(cbind(y, ntrials) ~ series + s(x, by = series),
+#'              family = binomial(),
+#'              data = dat)
+#' summary(mod)
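+#'
+#' # Note: the trials variable ('ntrials' above) must also be included in any
+#' # newdata supplied to predict() or forecast()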
+#'
 #' }
 #' @export
 nmix = function(link = 'log'){
@@ -209,6 +236,7 @@ family_char_choices = function(){
   c('negative binomial',
     "poisson",
     "binomial",
+    'beta_binomial',
     "bernoulli",
     "nmix",
     "tweedie",
@@ -541,7 +569,7 @@ mvgam_predict = function(Xp,
     } else if(type == 'variance'){
       mu <- plogis(as.vector((matrix(Xp, ncol = NCOL(Xp)) %*%
                              betas) + attr(Xp, 'model.offset')))
-      out <- mu * (1 - mu)
+      out <- as.vector(family_pars$trials) * mu * (1 - mu)
     } else {
       out <- plogis(((matrix(Xp, ncol = NCOL(Xp)) %*%
                      betas)) +
@@ -550,6 +578,45 @@ mvgam_predict = function(Xp,
     }
   }
 
+  # Beta_Binomial observations (requires arguments 'trials' and 'phi')
+  if(family == 'beta_binomial'){
+    if(type ==  'link'){
+      out <- as.vector((matrix(Xp, ncol = NCOL(Xp)) %*%
+                          betas) + attr(Xp, 'model.offset'))
+
+      if(density){
+        out <- dbeta_binomial(truth,
+                              mu = plogis(out),
+                              phi = as.vector(family_pars$phi),
+                              size = as.vector(family_pars$trials),
+                              log = TRUE)
+      }
+
+    } else if(type == 'response'){
+      out <- rbeta_binomial(n = NROW(Xp),
+                            mu = plogis(((matrix(Xp, ncol = NCOL(Xp)) %*%
+                                            betas)) +
+                                          attr(Xp, 'model.offset')),
+                            phi = as.vector(family_pars$phi),
+                            size = as.vector(family_pars$trials))
+    } else if(type == 'variance'){
+      mu <- plogis(as.vector((matrix(Xp, ncol = NCOL(Xp)) %*%
+                                betas) + attr(Xp, 'model.offset')))
+      # https://en.wikipedia.org/wiki/Beta-binomial_distribution
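+      # Var(Y) = n * mu * (1 - mu) * (alpha + beta + n) / (alpha + beta + 1),
+      # with alpha = mu * phi and beta = (1 - mu) * phi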
+      alpha <- mu * as.vector(family_pars$phi)
+      beta <- (1 - mu) * as.vector(family_pars$phi)
+      p <- mu
+      n <- as.vector(family_pars$trials)
+      out <- ((n * p) * (1 - p)) * ((alpha + beta + n) / (alpha + beta + 1))
+
+    } else {
+      out <- plogis(((matrix(Xp, ncol = NCOL(Xp)) %*%
+                        betas)) +
+                      attr(Xp, 'model.offset')) *
+        as.vector(family_pars$trials)
+    }
+  }
+
   # Negative Binomial observations (requires argument 'phi')
   if(family == 'negative binomial'){
     if(type ==  'link'){
@@ -710,7 +777,7 @@ family_to_mgcvfam = function(family){
     mgcv::Tweedie(p = 1.5, link = 'log')
   } else if(family$family == 'nmix'){
     poisson()
-  } else if(family$family == 'bernoulli'){
+  } else if(family$family %in% c('bernoulli', 'beta_binomial')){
     binomial()
   } else {
     family
@@ -781,6 +848,7 @@ family_par_names = function(family){
   }
 
   if(family %in% c('beta',
+                   'beta_binomial',
                    'negative binomial',
                    'tweedie')){
     out <- c('phi')
@@ -921,6 +989,21 @@ family_prior_info = function(family, use_stan, data){
                              )))
   }
 
+  if(family == 'beta_binomial'){
+    prior_df <- data.frame(param_name = c('vector<lower=0>[n_series] phi;'),
+                           param_length = length(unique(data$series)),
+                           param_info = c('Beta Binomial precision parameter'),
+                           prior = c('phi ~ gamma(0.01, 0.01);'),
+                           example_change = c(
+                             paste0(
+                               'phi ~ normal(',
+                               round(runif(min = -1, max = 1, n = 1), 2),
+                               ', ',
+                               round(runif(min = 0.1, max = 1, n = 1), 2),
+                               ');'
+                             )))
+  }
+
   if(family == 'Gamma'){
     prior_df <- data.frame(param_name = c('vector<lower=0>[n_series] shape;'),
                            param_length = length(unique(data$series)),
@@ -1045,6 +1128,37 @@ ds_resids_binomial = function(truth, fitted, draw, N){
   dsres_out
 }
 
+#' @noRd
+ds_resids_beta_binomial = function(truth, fitted, draw, N, phi){
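+  # Randomised quantile (Dunn-Smyth) residuals: draw u ~ Uniform(F(y - 1e-6), F(y))
+  # from the Beta-Binomial CDF and map to the standard normal scale with qnorm();
+  # missing observations propagate as NaN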
+  na_obs <- is.na(truth)
+  a_obs <- pbeta_binomial(ifelse(as.vector(truth[!na_obs]) - 1.e-6 > 0,
+                                 as.vector(truth[!na_obs]) - 1.e-6,
+                                 0),
+                          size = N[!na_obs],
+                          mu = fitted[!na_obs],
+                          phi = phi[!na_obs])
+  b_obs <- pbeta_binomial(ifelse(as.vector(truth[!na_obs]),
+                                 as.vector(truth[!na_obs]),
+                                 0),
+                          size = N[!na_obs],
+                          mu = fitted[!na_obs],
+                          phi = phi[!na_obs])
+  u_obs <- runif(n = length(truth[!na_obs]),
+                 min = pmin(a_obs, b_obs),
+                 max = pmax(a_obs, b_obs))
+
+  if(any(is.na(truth))){
+    u <- vector(length = length(truth))
+    u[na_obs] <- NaN
+    u[!na_obs] <- u_obs
+  } else {
+    u <- u_obs
+  }
+  dsres_out <- qnorm(u)
+  dsres_out[is.infinite(dsres_out)] <- NaN
+  dsres_out
+}
+
 #' @noRd
 ds_resids_nb = function(truth, fitted, draw, size){
   na_obs <- is.na(truth)
@@ -1529,7 +1643,7 @@ dsresids_vec = function(object){
     N <- mcmc_chains(object$model_output, 'latent_ypred')[,1:last_train, drop = FALSE]
   }
 
-  if(family == 'binomial'){
+  if(family %in% c('binomial', 'beta_binomial')){
     p <- plogis(mcmc_chains(object$model_output, 'mus')[,1:last_train,
                                                         drop = FALSE])
     N <- as.vector(attr(object$mgcv_model, 'trials'))[1:length(truth)]
@@ -1573,6 +1687,19 @@ dsresids_vec = function(object){
                                         N = as.vector(N_mat)),
                      nrow = NROW(preds))
   }
+
+  if(family == 'beta_binomial'){
+    N_mat <- matrix(rep(N, NROW(preds)),
+                    nrow = NROW(preds),
+                    byrow = TRUE)
+    resids <- matrix(ds_resids_beta_binomial(truth = as.vector(truth_mat),
+                                             fitted = as.vector(p),
+                                             draw = 1,
+                                             N = as.vector(N_mat),
+                                             phi = family_extracts$phi),
+                     nrow = NROW(preds))
+  }
+
   if(family == 'bernoulli'){
     resids <- matrix(ds_resids_binomial(truth = as.vector(truth_mat),
                                         fitted = as.vector(preds),
diff --git a/R/forecast.mvgam.R b/R/forecast.mvgam.R
index 9d5eba75..61c67420 100644
--- a/R/forecast.mvgam.R
+++ b/R/forecast.mvgam.R
@@ -209,7 +209,7 @@ forecast.mvgam = function(object, newdata, data_test,
           names(par_extracts) <- names(family_pars)
 
           # Add trial information if this is a Binomial model
-          if(object$family == 'binomial'){
+          if(object$family %in% c('binomial', 'beta_binomial')){
             trials <- as.vector(matrix(rep(as.vector(attr(object$mgcv_model, 'trials')[,series]),
                                            NROW(mus)),
                                        nrow = NROW(mus),
@@ -438,7 +438,7 @@ forecast.mvgam = function(object, newdata, data_test,
           names(par_extracts) <- names(family_pars)
 
           # Add trial information if this is a Binomial model
-          if(object$family == 'binomial'){
+          if(object$family %in% c('binomial', 'beta_binomial')){
             trials <- as.vector(matrix(rep(as.vector(attr(object$mgcv_model, 'trials')[,series]),
                                            NROW(mus)),
                                        nrow = NROW(mus),
@@ -534,7 +534,7 @@ forecast.mvgam = function(object, newdata, data_test,
           names(par_extracts) <- names(family_pars)
 
           # Add trial information if this is a Binomial model
-          if(object$family == 'binomial'){
+          if(object$family %in% c('binomial', 'beta_binomial')){
             trials <- as.vector(matrix(rep(as.vector(attr(object$mgcv_model, 'trials')[1:last_train, series]),
                                            NROW(mus)),
                                        nrow = NROW(mus),
@@ -915,7 +915,7 @@ forecast_draws = function(object,
   family_pars <- extract_family_pars(object = object, newdata = data_test)
 
   # Add trial information if this is a Binomial model
-  if(object$family == 'binomial'){
+  if(object$family %in% c('binomial', 'beta_binomial')){
     resp_terms <- as.character(terms(formula(object$formula))[[2]])
     resp_terms <- resp_terms[-grepl('cbind', resp_terms)]
     trial_name <- resp_terms[2]
@@ -1108,7 +1108,7 @@ forecast_draws = function(object,
           names(family_extracts) <- names(family_pars)
 
           # Add trial information if this is a Binomial model
-          if(family == 'binomial'){
+          if(family %in% c('binomial', 'beta_binomial')){
             family_extracts$trials <- trials[,series]
           }
 
diff --git a/R/get_monitor_pars.R b/R/get_monitor_pars.R
index 4a4395a8..a5698f98 100644
--- a/R/get_monitor_pars.R
+++ b/R/get_monitor_pars.R
@@ -15,7 +15,8 @@ get_monitor_pars = function(family, smooths_included = TRUE,
                                                 "tweedie", "beta",
                                                 "gaussian", "lognormal",
                                                 "student", "Gamma",
-                                                "nmix", "binomial", "bernoulli"))
+                                                "nmix", "binomial", "bernoulli",
+                                                "beta_binomial"))
 
   if(smooths_included){
     param <- c('rho', 'b', 'ypred', 'mus', 'lp__', 'lambda')
diff --git a/R/hindcast.mvgam.R b/R/hindcast.mvgam.R
index 4ad401d4..7cf85cc9 100644
--- a/R/hindcast.mvgam.R
+++ b/R/hindcast.mvgam.R
@@ -138,7 +138,7 @@ if(series == 'all'){
       names(par_extracts) <- names(family_pars)
 
       # Add trial information if this is a Binomial model
-      if(object$family == 'binomial'){
+      if(object$family %in% c('binomial', 'beta_binomial')){
         trials <- as.vector(matrix(rep(as.vector(attr(object$mgcv_model, 'trials')[,series]),
                                        NROW(preds)),
                                    nrow = NROW(preds),
diff --git a/R/logLik.mvgam.R b/R/logLik.mvgam.R
index ea2fa87e..cf2a36d3 100644
--- a/R/logLik.mvgam.R
+++ b/R/logLik.mvgam.R
@@ -143,7 +143,7 @@ logLik.mvgam = function(object,
   names(family_extracts) <- names(family_pars)
 
   # Add trial information if this is a Binomial model
-  if(object$family == 'binomial'){
+  if(object$family %in% c('binomial', 'beta_binomial')){
     trials <- as.vector(matrix(rep(as.vector(attr(object$mgcv_model, 'trials')),
                                    NROW(mus)),
                                nrow = NROW(mus),
diff --git a/R/marginaleffects.mvgam.R b/R/marginaleffects.mvgam.R
index 4e18fe64..20587d40 100644
--- a/R/marginaleffects.mvgam.R
+++ b/R/marginaleffects.mvgam.R
@@ -95,7 +95,16 @@ get_predict.mvgam <- function(model, newdata,
 #' @export
 get_data.mvgam = function (x, source = "environment", verbose = TRUE, ...) {
 
-  resp <- as.character(rlang::f_lhs(x$call))
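+  # For binomial-type models the formula LHS is a cbind(observations, trials)
+  # call; drop the cbind term and take the first remaining term as the response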
+  resp_terms <- as.character(rlang::f_lhs(x$call))
+  if(length(resp_terms) == 1L){
+    resp <- resp_terms
+  } else {
+    if(any(grepl('cbind', resp_terms))){
+      resp_terms <- resp_terms[-grepl('cbind', resp_terms)]
+      resp <- resp_terms[1]
+    }
+  }
+
   mf <- tryCatch({
     # Drop response observations if a trend call was used because often
     # there won't be an easy way to match them up (for example if multiple
@@ -165,14 +174,32 @@ get_data.mvgam = function (x, source = "environment", verbose = TRUE, ...) {
   })
 
   prep_data <- utils::getFromNamespace(".prepare_get_data", "insight")
-  prep_data(x, mf, effects = "all", verbose = verbose)
+  out <- prep_data(x, mf, effects = "all", verbose = verbose)
+
+  # Remove any columns whose names contain 'cbind', as these can be artifacts
+  # of binomial model data preparation
+  if(any(grepl('cbind', colnames(out)))){
+    out %>%
+      dplyr::select(-matches('cbind')) -> out
+  }
+
+  return(out)
 }
 
 #' @rdname mvgam_marginaleffects
 #' @export
 get_data.mvgam_prefit = function (x, source = "environment", verbose = TRUE, ...) {
 
-  resp <- as.character(rlang::f_lhs(x$call))
+  resp_terms <- as.character(rlang::f_lhs(x$call))
+  if(length(resp_terms) == 1L){
+    resp <- resp_terms
+  } else {
+    if(any(grepl('cbind', resp_terms))){
+      resp_terms <- resp_terms[-grepl('cbind', resp_terms)]
+      resp <- resp_terms[1]
+    }
+  }
+
   mf <- tryCatch({
     # Drop response observations if a trend call was used because often
     # there won't be an easy way to match them up (for example if multiple
@@ -242,7 +269,16 @@ get_data.mvgam_prefit = function (x, source = "environment", verbose = TRUE, ...
   })
 
   prep_data <- utils::getFromNamespace(".prepare_get_data", "insight")
-  prep_data(x, mf, effects = "all", verbose = verbose)
+  out <- prep_data(x, mf, effects = "all", verbose = verbose)
+
+  # Remove any columns whose names contain 'cbind', as these can be artifacts
+  # of binomial model data preparation
+  if(any(grepl('cbind', colnames(out)))){
+    out %>%
+      dplyr::select(-matches('cbind')) -> out
+  }
+
+  return(out)
 }
 
 #' @rdname mvgam_marginaleffects
diff --git a/R/model.frame.mvgam.R b/R/model.frame.mvgam.R
index 4bb0b5cb..ffa8fa23 100644
--- a/R/model.frame.mvgam.R
+++ b/R/model.frame.mvgam.R
@@ -21,7 +21,15 @@ model.frame.mvgam = function(formula, trend_effects = FALSE, ...){
     out <- stats::model.frame(formula$mgcv_model)
 
     # Identify response variable
-    resp <- as.character(rlang::f_lhs(formula$call))
+    resp_terms <- as.character(rlang::f_lhs(formula$call))
+    if(length(resp_terms) == 1L){
+      resp <- resp_terms
+    } else {
+      if(any(grepl('cbind', resp_terms))){
+        resp_terms <- resp_terms[-grepl('cbind', resp_terms)]
+        resp <- resp_terms[1]
+      }
+    }
 
     # Now add the observed response, in case there are any
     # NAs there that need to be updated
@@ -48,7 +56,15 @@ model.frame.mvgam_prefit = function(formula, trend_effects = FALSE, ...){
     out <- stats::model.frame(formula$mgcv_model)
 
     # Identify response variable
-    resp <- as.character(rlang::f_lhs(formula$call))
+    resp_terms <- as.character(rlang::f_lhs(formula$call))
+    if(length(resp_terms) == 1L){
+      resp <- resp_terms
+    } else {
+      if(any(grepl('cbind', resp_terms))){
+        resp_terms <- resp_terms[-grepl('cbind', resp_terms)]
+        resp <- resp_terms[1]
+      }
+    }
 
     # Now add the observed response, in case there are any
     # NAs there that need to be updated
diff --git a/R/mvgam.R b/R/mvgam.R
index 52e4d0dc..f79516ea 100644
--- a/R/mvgam.R
+++ b/R/mvgam.R
@@ -1558,6 +1558,19 @@ mvgam = function(formula,
       orig_trend_model$cap <- pw_additions$model_data$cap
     }
 
+    # Updates for Binomial and Bernoulli families
+    if(family_char %in% c('binomial', 'bernoulli', 'beta_binomial')){
+      bin_additions <- add_binomial(formula,
+                                    vectorised$model_file,
+                                    vectorised$model_data,
+                                    data_train,
+                                    data_test,
+                                    family_char)
+      vectorised$model_file <- bin_additions$model_file
+      vectorised$model_data <- bin_additions$model_data
+      attr(ss_gam, 'trials') <- bin_additions$trials
+    }
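+    # NOTE: this block now runs before update_priors() below, presumably so
+    # that user-supplied priors (e.g. on the Beta-Binomial phi parameters)
+    # can be matched against the updated model file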
+
     # Add in any user-specified priors
     if(!missing(priors)){
       vectorised$model_file <- update_priors(vectorised$model_file, priors,
@@ -1596,19 +1609,6 @@ mvgam = function(formula,
                                    'lv_coefs', 'error')]
     }
 
-    # Updates for Binomial and Bernoulli families
-    if(family_char %in% c('binomial', 'bernoulli', 'beta_binomial')){
-      bin_additions <- add_binomial(formula,
-                                    vectorised$model_file,
-                                    vectorised$model_data,
-                                    data_train,
-                                    data_test,
-                                    family_char)
-      vectorised$model_file <- bin_additions$model_file
-      vectorised$model_data <- bin_additions$model_data
-      attr(ss_gam, 'trials') <- bin_additions$trials
-    }
-
     # Updates for sharing of observation params
     if(share_obs_params){
       vectorised$model_file <- shared_obs_params(vectorised$model_file,
diff --git a/R/mvgam_setup.R b/R/mvgam_setup.R
index cbfa64c3..7b2f53f7 100644
--- a/R/mvgam_setup.R
+++ b/R/mvgam_setup.R
@@ -42,7 +42,7 @@ jagam_setup <- function(ss_gam, formula, data_train, family,
 
   # Change the formula to a Poisson-like formula if this is a cbind Binomial,
   # as jagam will fail if it sees that
-  if(family$family == 'binomial'){
+  if(family$family %in% c('binomial', 'beta_binomial')){
     resp_terms <- as.character(terms(formula(formula))[[2]])
     if(any(grepl('cbind', resp_terms))){
       resp_terms <- resp_terms[-grepl('cbind', resp_terms)]
diff --git a/R/plot_mvgam_fc.R b/R/plot_mvgam_fc.R
index 3910bffe..c16059a8 100644
--- a/R/plot_mvgam_fc.R
+++ b/R/plot_mvgam_fc.R
@@ -249,7 +249,7 @@ plot_mvgam_fc = function(object, series = 1, newdata, data_test,
       dplyr::arrange(time) %>%
       dplyr::pull(y)
 
-    if(tolower(object$family) %in% c('beta')){
+    if(tolower(object$family) %in% c('beta', 'bernoulli')){
       ylim <- c(min(cred, min(ytrain, na.rm = TRUE)),
                 max(cred, max(ytrain, na.rm = TRUE)))
       ymin <- max(0, ylim[1])
@@ -571,7 +571,7 @@ plot.mvgam_forecast = function(x, series = 1,
       ylim <- c(min(cred, min(ytrain, na.rm = TRUE)),
                 max(cred, max(ytrain, na.rm = TRUE)) * 1.1)
 
-      if(object$family == 'beta'){
+      if(object$family %in% c('beta', 'bernoulli')){
         ymin <- max(0, ylim[1])
         ymax <- min(1, ylim[2])
         ylim <- c(ymin, ymax)
diff --git a/R/posterior_epred.mvgam.R b/R/posterior_epred.mvgam.R
index dc2ba7e0..8101258f 100644
--- a/R/posterior_epred.mvgam.R
+++ b/R/posterior_epred.mvgam.R
@@ -302,7 +302,7 @@ fitted.mvgam <- function(object, process_error = TRUE,
     names(family_extracts) <- names(family_pars)
 
     # Add trial information if this is a Binomial model
-    if(object$family == 'binomial'){
+    if(object$family %in% c('binomial', 'beta_binomial')){
       trials <- as.vector(matrix(rep(as.vector(attr(object$mgcv_model, 'trials')),
                                      NROW(mus)),
                                  nrow = NROW(mus),
diff --git a/R/ppc.mvgam.R b/R/ppc.mvgam.R
index 73172793..6543dbac 100644
--- a/R/ppc.mvgam.R
+++ b/R/ppc.mvgam.R
@@ -71,7 +71,8 @@ ppc.mvgam = function(object, newdata, data_test, series = 1, type = 'hist',
                                             "prop_zero"))
 
   if(type == 'rootogram'){
-    if(!object$family %in% c('poisson', 'negative binomial', 'tweedie', 'nmix')){
+    if(!object$family %in% c('poisson', 'negative binomial', 'tweedie', 'nmix',
+                             'binomial', 'beta_binomial')){
       stop('Rootograms not supported for checking non-count data',
            call. = FALSE)
     }
diff --git a/R/predict.mvgam.R b/R/predict.mvgam.R
index 643b7232..99fda380 100644
--- a/R/predict.mvgam.R
+++ b/R/predict.mvgam.R
@@ -273,7 +273,7 @@ predict.mvgam = function(object, newdata,
   names(family_extracts) <- names(family_pars)
 
   # Add trial information if this is a Binomial model
-  if(object$family == 'binomial'){
+  if(object$family %in% c('binomial', 'beta_binomial')){
     resp_terms <- as.character(terms(formula(object))[[2]])
     resp_terms <- resp_terms[-grepl('cbind', resp_terms)]
     trial_name <- resp_terms[2]
@@ -281,15 +281,17 @@ predict.mvgam = function(object, newdata,
       stop(paste0('variable ', trial_name, ' not found in newdata'),
            call. = FALSE)
     }
-    trial_df <- data.frame(series = newdata$series,
-                           time = newdata$time,
-                           trial = newdata[[trial_name]])
-    trials <- do.call(cbind, lapply(length(levels(newdata$series)), function(i){
-      trial_df %>%
-        dplyr::filter(series == levels(newdata$series)[i]) %>%
-        dplyr::arrange(time) %>%
-        dplyr::pull(trial)
-    }))
+
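+    # Take the trials directly from newdata in its supplied row order
+    # (the previous re-sorting by series and time is left commented out below)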
+    trials <- newdata[[trial_name]]
+    # trial_df <- data.frame(series = newdata$series,
+    #                        time = newdata$time,
+    #                        trial = newdata[[trial_name]])
+    # trials <- do.call(cbind, lapply(length(levels(newdata$series)), function(i){
+    #   trial_df %>%
+    #     dplyr::filter(series == levels(newdata$series)[i]) %>%
+    #     dplyr::arrange(time) %>%
+    #     dplyr::pull(trial)
+    # }))
 
     trials <- as.vector(matrix(rep(as.vector(trials),
                                    NROW(betas)),
diff --git a/R/sim_mvgam.R b/R/sim_mvgam.R
index 24e0b800..f74737eb 100644
--- a/R/sim_mvgam.R
+++ b/R/sim_mvgam.R
@@ -29,7 +29,7 @@
 #'@param trend_rel Deprecated. Use `prop_trend` instead
 #'@param freq \code{integer}. The seasonal frequency of the series
 #'@param family \code{family} specifying the exponential observation family for the series. Currently supported
-#'families are: `nb()`, `poisson()`, `tweedie()`, `gaussian()`,
+#'families are: `nb()`, `poisson()`, `bernoulli()`, `tweedie()`, `gaussian()`,
 #'`betar()`, `lognormal()`, `student()` and `Gamma()`
 #'@param phi \code{vector} of dispersion parameters for the series
 #'(i.e. `size` for `nb()` or
@@ -92,6 +92,7 @@ sim_mvgam = function(T = 100,
   family_char <- match.arg(arg = family$family,
                            choices = c('negative binomial',
                                        "poisson",
+                                       "bernoulli",
                                        "tweedie",
                                        "beta",
                                        "gaussian",
@@ -185,7 +186,7 @@ sim_mvgam = function(T = 100,
   }
 
   if(missing(mu)){
-    mu <- sample(seq(-0.5, 0.5), n_series, T)
+    mu <- sample(seq(-0.5, 0.5), n_series, TRUE)
   }
 
   if(length(phi) < n_series){
@@ -364,13 +365,13 @@ sim_mvgam = function(T = 100,
   obs_ys <- c(unlist(lapply(seq_len(n_series), function(x){
     if(seasonality == 'shared'){
 
-      dynamics <- (glob_season * (1-trend_rel)) +
+      dynamics <- (glob_season * (1 - trend_rel)) +
         (obs_trends[,x] * trend_rel)
 
     } else {
       yseason <- as.vector(scale(stl(ts(rnorm(T, glob_season, sd = 2),
                                         frequency = freq), 'periodic')$time.series[,1]))
-      dynamics <- (yseason * (1-trend_rel)) +
+      dynamics <- (yseason * (1 - trend_rel)) +
         (obs_trends[,x] * trend_rel)
     }
 
@@ -384,6 +385,12 @@ sim_mvgam = function(T = 100,
                    lambda = exp(mu[x] + dynamics))
     }
 
+    if(family_char == 'bernoulli'){
+      out <- rbinom(length(dynamics),
+                    size = 1,
+                    prob = plogis(mu[x] + dynamics))
+    }
+
     if(family_char == 'tweedie'){
       out <- rpois(n = length(dynamics),
                    lambda = tweedie::rtweedie(length(dynamics),
@@ -472,18 +479,18 @@ sim_mvgam = function(T = 100,
                           rho = trend_rhos)
     }
 
-    out <-   list(data_train = data.frame(data_train),
-                  data_test = data.frame(data_test),
-                  true_corrs = cov2cor(cov(obs_trends)),
-                  true_trends = obs_trends,
-                  global_seasonality = glob_season,
-                  trend_params = trend_params)
+    out <- list(data_train = data.frame(data_train),
+                data_test = data.frame(data_test),
+                true_corrs = cov2cor(cov(obs_trends)),
+                true_trends = obs_trends,
+                global_seasonality = glob_season,
+                trend_params = trend_params)
   } else {
-    out <-   list(data_train = data.frame(data_train),
-                  data_test = data.frame(data_test),
-                  true_corrs = cov2cor(cov(obs_trends)),
-                  true_trends = obs_trends,
-                  global_seasonality = glob_season)
+    out <- list(data_train = data.frame(data_train),
+                data_test = data.frame(data_test),
+                true_corrs = cov2cor(cov(obs_trends)),
+                true_trends = obs_trends,
+                global_seasonality = glob_season)
   }
 
 return(out)
diff --git a/R/summary.mvgam.R b/R/summary.mvgam.R
index cdbb66ca..750e42c0 100644
--- a/R/summary.mvgam.R
+++ b/R/summary.mvgam.R
@@ -101,6 +101,12 @@ if(object$family == 'negative binomial'){
                      variational = object$algorithm %in% c('fullrank', 'meanfield', 'laplace', 'pathfinder'))[,c(3:7)])
 }
 
+if(object$family == 'beta_binomial'){
+  cat("\nObservation dispersion parameter estimates:\n")
+  print(mcmc_summary(object$model_output, 'phi', digits = digits,
+                     variational = object$algorithm %in% c('fullrank', 'meanfield', 'laplace', 'pathfinder'))[,c(3:7)])
+}
+
 if(object$family == 'beta'){
   cat("\nObservation precision parameter estimates:\n")
   print(mcmc_summary(object$model_output, 'phi', digits = digits,
diff --git a/R/validations.R b/R/validations.R
index f43dd21b..35cbcbbc 100644
--- a/R/validations.R
+++ b/R/validations.R
@@ -585,12 +585,18 @@ check_obs_intercept = function(formula, orig_formula){
   if(length(attr(terms(formula), 'term.labels')) == 0 &
      !attr(terms(formula), 'intercept') == 1){
     formula_envir <- attr(formula, '.Environment')
+
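+    # Recover the response as a single string from the terms object; pasting
+    # rlang::f_lhs() directly would split a cbind() response into several
+    # character elements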
+    if(length(attr(terms(formula), 'factors')) == 0){
+      resp <- as.character(attr(terms(formula), 'variables'))[2]
+    } else {
+      resp <- dimnames(attr(terms(formula), 'factors'))[[1]][1]
+    }
+
     if(!is.null(attr(terms(formula(formula)), 'offset'))){
-      formula <- formula(paste0(rlang::f_lhs(formula),
-                                ' ~ ', paste(gsub(' - 1',' + 1',
+      formula <- formula(paste0(resp, ' ~ ', paste(gsub(' - 1',' + 1',
                                                   rlang::f_text(formula)))))
     } else {
-      formula <- formula(paste(rlang::f_lhs(formula), '~ 1'))
+      formula <- formula(paste(resp, '~ 1'))
     }
     attr(formula, '.Environment') <- formula_envir
     drop_obs_intercept <- TRUE
diff --git a/docs/articles/mvgam_overview.html b/docs/articles/mvgam_overview.html
index 07699b3c..3fc2914d 100644
--- a/docs/articles/mvgam_overview.html
+++ b/docs/articles/mvgam_overview.html
@@ -78,7 +78,7 @@
                         <h4 data-toc-skip class="author">Nicholas J
 Clark</h4>
             
-            <h4 data-toc-skip class="date">2024-03-19</h4>
+            <h4 data-toc-skip class="date">2024-03-20</h4>
       
       <small class="dont-index">Source: <a href="https://github.com/nicholasjclark/mvgam/blob/HEAD/vignettes/mvgam_overview.Rmd" class="external-link"><code>vignettes/mvgam_overview.Rmd</code></a></small>
       <div class="d-none name"><code>mvgam_overview.Rmd</code></div>
@@ -203,6 +203,13 @@ <h2 id="supported-observation-families">Supported observation families<a class="
 <td align="center">-</td>
 </tr>
 <tr class="even">
+<td align="center">Beta-Binomial (logit link)</td>
+<td align="center"><code>beta_binomial()</code></td>
+<td align="center">Non-negative integers in <span class="math inline">\((0,1,2,...)\)</span>
+</td>
+<td align="center"><span class="math inline">\(\phi\)</span></td>
+</tr>
+<tr class="odd">
 <td align="center">Poisson Binomial N-mixture (log link)</td>
 <td align="center"><code><a href="../reference/mvgam_families.html">nmix()</a></code></td>
 <td align="center">Non-negative integers in <span class="math inline">\((0,1,2,...)\)</span>
@@ -432,16 +439,7 @@ <h2 id="example-time-series-data">Example time series data<a class="anchor" aria
 <code class="sourceCode R"><span><span class="fu">dplyr</span><span class="fu">::</span><span class="fu"><a href="https://pillar.r-lib.org/reference/glimpse.html" class="external-link">glimpse</a></span><span class="op">(</span><span class="va">portal_data</span><span class="op">)</span></span></code></pre></div>
 <pre><code><span><span class="co">## Rows: 199</span></span>
 <span><span class="co">## Columns: 10</span></span>
-<span><span class="co">## $ moon          <span style="color: #949494; font-style: italic;">&lt;int&gt;</span> 329, 330, 331, 332, 333, 334, 335, 336, 337, 338, 339, 3…</span></span>
-<span><span class="co">## $ DM            <span style="color: #949494; font-style: italic;">&lt;int&gt;</span> 10, 14, 9, NA, 15, NA, NA, 9, 5, 8, NA, 14, 7, NA, NA, 9…</span></span>
-<span><span class="co">## $ DO            <span style="color: #949494; font-style: italic;">&lt;int&gt;</span> 6, 8, 1, NA, 8, NA, NA, 3, 3, 4, NA, 3, 8, NA, NA, 3, NA…</span></span>
-<span><span class="co">## $ PP            <span style="color: #949494; font-style: italic;">&lt;int&gt;</span> 0, 1, 2, NA, 10, NA, NA, 16, 18, 12, NA, 3, 2, NA, NA, 1…</span></span>
-<span><span class="co">## $ OT            <span style="color: #949494; font-style: italic;">&lt;int&gt;</span> 2, 0, 1, NA, 1, NA, NA, 1, 0, 0, NA, 2, 1, NA, NA, 1, NA…</span></span>
-<span><span class="co">## $ year          <span style="color: #949494; font-style: italic;">&lt;int&gt;</span> 2004, 2004, 2004, 2004, 2004, 2004, 2004, 2004, 2004, 20…</span></span>
-<span><span class="co">## $ month         <span style="color: #949494; font-style: italic;">&lt;int&gt;</span> 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 1, 2, 3, 4, 5, 6,…</span></span>
-<span><span class="co">## $ mintemp       <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> -9.710, -5.924, -0.220, 1.931, 6.568, 11.590, 14.370, 16…</span></span>
-<span><span class="co">## $ precipitation <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 37.8, 8.7, 43.5, 23.9, 0.9, 1.4, 20.3, 91.0, 60.5, 25.2,…</span></span>
-<span><span class="co">## $ ndvi          <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 1.4658889, 1.5585069, 1.3378172, 1.6589129, 1.8536561, 1…</span></span></code></pre>
+<span><span class="co">## $ moon          <span style="color: #949494; font-style: italic;">&lt;int&gt;</span> 329, 330, 331, 332, 333, 334, 335, 336, 337, 338, 339, 3…</span><span class="co">## $ DM            <span style="color: #949494; font-style: italic;">&lt;int&gt;</span> 10, 14, 9, NA, 15, NA, NA, 9, 5, 8, NA, 14, 7, NA, NA, 9…</span><span class="co">## $ DO            <span style="color: #949494; font-style: italic;">&lt;int&gt;</span> 6, 8, 1, NA, 8, NA, NA, 3, 3, 4, NA, 3, 8, NA, NA, 3, NA…</span><span class="co">## $ PP            <span style="color: #949494; font-style: italic;">&lt;int&gt;</span> 0, 1, 2, NA, 10, NA, NA, 16, 18, 12, NA, 3, 2, NA, NA, 1…</span><span class="co">## $ OT            <span style="color: #949494; font-style: italic;">&lt;int&gt;</span> 2, 0, 1, NA, 1, NA, NA, 1, 0, 0, NA, 2, 1, NA, NA, 1, NA…</span><span class="co">## $ year          <span style="color: #949494; font-style: italic;">&lt;int&gt;</span> 2004, 2004, 2004, 2004, 2004, 2004, 2004, 2004, 2004, 20…</span><span class="co">## $ month         <span style="color: #949494; font-style: italic;">&lt;int&gt;</span> 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 1, 2, 3, 4, 5, 6,…</span><span class="co">## $ mintemp       <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> -9.710, -5.924, -0.220, 1.931, 6.568, 11.590, 14.370, 16…</span><span class="co">## $ precipitation <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 37.8, 8.7, 43.5, 23.9, 0.9, 1.4, 20.3, 91.0, 60.5, 25.2,…</span><span class="co">## $ ndvi          <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 1.4658889, 1.5585069, 1.3378172, 1.6589129, 1.8536561, 1…</span></span></code></pre>
 <p>We will focus analyses on the time series of captures for one
 specific rodent species, the Desert Pocket Mouse <em>Chaetodipus
 penicillatus</em>. This species is interesting in that it goes into a
@@ -462,18 +460,18 @@ <h2 id="manipulating-data-for-modelling">Manipulating data for modelling<a class
 <code class="sourceCode R"><span><span class="va">data</span> <span class="op">&lt;-</span> <span class="fu"><a href="../reference/sim_mvgam.html">sim_mvgam</a></span><span class="op">(</span>n_series <span class="op">=</span> <span class="fl">4</span>, T <span class="op">=</span> <span class="fl">24</span><span class="op">)</span></span>
 <span><span class="fu"><a href="https://rdrr.io/r/utils/head.html" class="external-link">head</a></span><span class="op">(</span><span class="va">data</span><span class="op">$</span><span class="va">data_train</span>, <span class="fl">12</span><span class="op">)</span></span></code></pre></div>
 <pre><code><span><span class="co">##    y season year   series time</span></span>
-<span><span class="co">## 1  7      1    1 series_1    1</span></span>
-<span><span class="co">## 2  4      1    1 series_2    1</span></span>
-<span><span class="co">## 3  2      1    1 series_3    1</span></span>
-<span><span class="co">## 4  3      1    1 series_4    1</span></span>
-<span><span class="co">## 5  3      2    1 series_1    2</span></span>
-<span><span class="co">## 6  1      2    1 series_2    2</span></span>
-<span><span class="co">## 7  3      2    1 series_3    2</span></span>
+<span><span class="co">## 1  0      1    1 series_1    1</span></span>
+<span><span class="co">## 2  0      1    1 series_2    1</span></span>
+<span><span class="co">## 3  3      1    1 series_3    1</span></span>
+<span><span class="co">## 4  2      1    1 series_4    1</span></span>
+<span><span class="co">## 5  1      2    1 series_1    2</span></span>
+<span><span class="co">## 6  0      2    1 series_2    2</span></span>
+<span><span class="co">## 7  5      2    1 series_3    2</span></span>
 <span><span class="co">## 8  1      2    1 series_4    2</span></span>
-<span><span class="co">## 9  4      3    1 series_1    3</span></span>
-<span><span class="co">## 10 7      3    1 series_2    3</span></span>
-<span><span class="co">## 11 3      3    1 series_3    3</span></span>
-<span><span class="co">## 12 6      3    1 series_4    3</span></span></code></pre>
+<span><span class="co">## 9  1      3    1 series_1    3</span></span>
+<span><span class="co">## 10 0      3    1 series_2    3</span></span>
+<span><span class="co">## 11 2      3    1 series_3    3</span></span>
+<span><span class="co">## 12 3      3    1 series_4    3</span></span></code></pre>
 <p>Notice how we have four different time series in these simulated
 data, but we do not spread the outcome values into different columns.
 Rather, there is only a single column for the outcome variable, labelled
@@ -528,12 +526,7 @@ <h2 id="manipulating-data-for-modelling">Manipulating data for modelling<a class
 <code class="sourceCode R"><span><span class="fu">dplyr</span><span class="fu">::</span><span class="fu"><a href="https://pillar.r-lib.org/reference/glimpse.html" class="external-link">glimpse</a></span><span class="op">(</span><span class="va">model_data</span><span class="op">)</span></span></code></pre></div>
 <pre><code><span><span class="co">## Rows: 199</span></span>
 <span><span class="co">## Columns: 6</span></span>
-<span><span class="co">## $ series  <span style="color: #949494; font-style: italic;">&lt;fct&gt;</span> PP, PP, PP, PP, PP, PP, PP, PP, PP, PP, PP, PP, PP, PP, PP, PP…</span></span>
-<span><span class="co">## $ year    <span style="color: #949494; font-style: italic;">&lt;int&gt;</span> 2004, 2004, 2004, 2004, 2004, 2004, 2004, 2004, 2004, 2004, 20…</span></span>
-<span><span class="co">## $ time    <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18,…</span></span>
-<span><span class="co">## $ count   <span style="color: #949494; font-style: italic;">&lt;int&gt;</span> 0, 1, 2, NA, 10, NA, NA, 16, 18, 12, NA, 3, 2, NA, NA, 13, NA,…</span></span>
-<span><span class="co">## $ mintemp <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> -9.710, -5.924, -0.220, 1.931, 6.568, 11.590, 14.370, 16.520, …</span></span>
-<span><span class="co">## $ ndvi    <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 1.4658889, 1.5585069, 1.3378172, 1.6589129, 1.8536561, 1.76132…</span></span></code></pre>
+<span><span class="co">## $ series  <span style="color: #949494; font-style: italic;">&lt;fct&gt;</span> PP, PP, PP, PP, PP, PP, PP, PP, PP, PP, PP, PP, PP, PP, PP, PP…</span><span class="co">## $ year    <span style="color: #949494; font-style: italic;">&lt;int&gt;</span> 2004, 2004, 2004, 2004, 2004, 2004, 2004, 2004, 2004, 2004, 20…</span><span class="co">## $ time    <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18,…</span><span class="co">## $ count   <span style="color: #949494; font-style: italic;">&lt;int&gt;</span> 0, 1, 2, NA, 10, NA, NA, 16, 18, 12, NA, 3, 2, NA, NA, 13, NA,…</span><span class="co">## $ mintemp <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> -9.710, -5.924, -0.220, 1.931, 6.568, 11.590, 14.370, 16.520, …</span><span class="co">## $ ndvi    <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 1.4658889, 1.5585069, 1.3378172, 1.6589129, 1.8536561, 1.76132…</span></span></code></pre>
 <p>You can also summarize multiple variables, which is helpful to search
 for data ranges and identify missing values</p>
 <div class="sourceCode" id="cb13"><pre class="downlit sourceCode r">
@@ -609,13 +602,7 @@ <h2 id="glms-with-temporal-random-effects">GLMs with temporal random effects<a c
 <code class="sourceCode R"><span><span class="fu">dplyr</span><span class="fu">::</span><span class="fu"><a href="https://pillar.r-lib.org/reference/glimpse.html" class="external-link">glimpse</a></span><span class="op">(</span><span class="va">model_data</span><span class="op">)</span></span></code></pre></div>
 <pre><code><span><span class="co">## Rows: 199</span></span>
 <span><span class="co">## Columns: 7</span></span>
-<span><span class="co">## $ series   <span style="color: #949494; font-style: italic;">&lt;fct&gt;</span> PP, PP, PP, PP, PP, PP, PP, PP, PP, PP, PP, PP, PP, PP, PP, P…</span></span>
-<span><span class="co">## $ year     <span style="color: #949494; font-style: italic;">&lt;int&gt;</span> 2004, 2004, 2004, 2004, 2004, 2004, 2004, 2004, 2004, 2004, 2…</span></span>
-<span><span class="co">## $ time     <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18…</span></span>
-<span><span class="co">## $ count    <span style="color: #949494; font-style: italic;">&lt;int&gt;</span> 0, 1, 2, NA, 10, NA, NA, 16, 18, 12, NA, 3, 2, NA, NA, 13, NA…</span></span>
-<span><span class="co">## $ mintemp  <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> -9.710, -5.924, -0.220, 1.931, 6.568, 11.590, 14.370, 16.520,…</span></span>
-<span><span class="co">## $ ndvi     <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 1.4658889, 1.5585069, 1.3378172, 1.6589129, 1.8536561, 1.7613…</span></span>
-<span><span class="co">## $ year_fac <span style="color: #949494; font-style: italic;">&lt;fct&gt;</span> 2004, 2004, 2004, 2004, 2004, 2004, 2004, 2004, 2004, 2004, 2…</span></span></code></pre>
+<span><span class="co">## $ series   <span style="color: #949494; font-style: italic;">&lt;fct&gt;</span> PP, PP, PP, PP, PP, PP, PP, PP, PP, PP, PP, PP, PP, PP, PP, P…</span><span class="co">## $ year     <span style="color: #949494; font-style: italic;">&lt;int&gt;</span> 2004, 2004, 2004, 2004, 2004, 2004, 2004, 2004, 2004, 2004, 2…</span><span class="co">## $ time     <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18…</span><span class="co">## $ count    <span style="color: #949494; font-style: italic;">&lt;int&gt;</span> 0, 1, 2, NA, 10, NA, NA, 16, 18, 12, NA, 3, 2, NA, NA, 13, NA…</span><span class="co">## $ mintemp  <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> -9.710, -5.924, -0.220, 1.931, 6.568, 11.590, 14.370, 16.520,…</span><span class="co">## $ ndvi     <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 1.4658889, 1.5585069, 1.3378172, 1.6589129, 1.8536561, 1.7613…</span><span class="co">## $ year_fac <span style="color: #949494; font-style: italic;">&lt;fct&gt;</span> 2004, 2004, 2004, 2004, 2004, 2004, 2004, 2004, 2004, 2004, 2…</span></span></code></pre>
 <div class="sourceCode" id="cb20"><pre class="downlit sourceCode r">
 <code class="sourceCode R"><span><span class="fu"><a href="https://rdrr.io/r/base/levels.html" class="external-link">levels</a></span><span class="op">(</span><span class="va">model_data</span><span class="op">$</span><span class="va">year_fac</span><span class="op">)</span></span></code></pre></div>
 <pre><code><span><span class="co">##  [1] "2004" "2005" "2006" "2007" "2008" "2009" "2010" "2011" "2012" "2013"</span></span>
@@ -666,8 +653,8 @@ <h2 id="glms-with-temporal-random-effects">GLMs with temporal random effects<a c
 <span><span class="co">## 1             vector[1] mu_raw;            1 s(year_fac) pop mean</span></span>
 <span><span class="co">## 2 vector&lt;lower=0&gt;[1] sigma_raw;            1   s(year_fac) pop sd</span></span>
 <span><span class="co">##                               prior                 example_change</span></span>
-<span><span class="co">## 1            mu_raw ~ std_normal();   mu_raw ~ normal(0.83, 0.57);</span></span>
-<span><span class="co">## 2 sigma_raw ~ student_t(3, 0, 2.5); sigma_raw ~ exponential(0.98);</span></span>
+<span><span class="co">## 1            mu_raw ~ std_normal();   mu_raw ~ normal(0.22, 0.73);</span></span>
+<span><span class="co">## 2 sigma_raw ~ student_t(3, 0, 2.5); sigma_raw ~ exponential(0.61);</span></span>
 <span><span class="co">##   new_lowerbound new_upperbound</span></span>
 <span><span class="co">## 1             NA             NA</span></span>
 <span><span class="co">## 2             NA             NA</span></span></code></pre>
@@ -703,33 +690,33 @@ <h2 id="glms-with-temporal-random-effects">GLMs with temporal random effects<a c
 <span><span class="co">## </span></span>
 <span><span class="co">## </span></span>
 <span><span class="co">## GAM coefficient (beta) estimates:</span></span>
-<span><span class="co">##                 2.5% 50% 97.5% Rhat n_eff</span></span>
-<span><span class="co">## s(year_fac).1   1.80 2.1   2.3 1.00  2530</span></span>
-<span><span class="co">## s(year_fac).2   2.50 2.7   2.8 1.00  3312</span></span>
-<span><span class="co">## s(year_fac).3   3.00 3.1   3.2 1.00  3152</span></span>
-<span><span class="co">## s(year_fac).4   3.10 3.3   3.4 1.00  3083</span></span>
-<span><span class="co">## s(year_fac).5   1.90 2.1   2.3 1.00  3354</span></span>
-<span><span class="co">## s(year_fac).6   1.50 1.8   2.0 1.00  3435</span></span>
-<span><span class="co">## s(year_fac).7   1.80 2.0   2.3 1.00  2734</span></span>
-<span><span class="co">## s(year_fac).8   2.80 3.0   3.1 1.00  2978</span></span>
-<span><span class="co">## s(year_fac).9   3.10 3.2   3.4 1.00  2631</span></span>
-<span><span class="co">## s(year_fac).10  2.60 2.8   2.9 1.00  3071</span></span>
-<span><span class="co">## s(year_fac).11  3.00 3.1   3.2 1.00  2762</span></span>
-<span><span class="co">## s(year_fac).12  3.10 3.2   3.3 1.00  2633</span></span>
-<span><span class="co">## s(year_fac).13  2.00 2.2   2.5 1.00  2832</span></span>
-<span><span class="co">## s(year_fac).14  2.50 2.6   2.8 1.00  3039</span></span>
-<span><span class="co">## s(year_fac).15  1.90 2.2   2.4 1.00  2728</span></span>
-<span><span class="co">## s(year_fac).16  1.90 2.1   2.3 1.00  2777</span></span>
-<span><span class="co">## s(year_fac).17 -0.24 1.1   1.9 1.01   395</span></span>
+<span><span class="co">##                2.5% 50% 97.5% Rhat n_eff</span></span>
+<span><span class="co">## s(year_fac).1   1.8 2.1   2.3 1.00  2637</span></span>
+<span><span class="co">## s(year_fac).2   2.5 2.7   2.8 1.00  3594</span></span>
+<span><span class="co">## s(year_fac).3   3.0 3.1   3.2 1.00  2551</span></span>
+<span><span class="co">## s(year_fac).4   3.1 3.3   3.4 1.00  2290</span></span>
+<span><span class="co">## s(year_fac).5   2.0 2.1   2.3 1.00  3151</span></span>
+<span><span class="co">## s(year_fac).6   1.5 1.8   2.0 1.00  2932</span></span>
+<span><span class="co">## s(year_fac).7   1.8 2.0   2.3 1.00  2206</span></span>
+<span><span class="co">## s(year_fac).8   2.8 3.0   3.1 1.00  3013</span></span>
+<span><span class="co">## s(year_fac).9   3.1 3.2   3.4 1.00  3040</span></span>
+<span><span class="co">## s(year_fac).10  2.6 2.8   2.9 1.00  3084</span></span>
+<span><span class="co">## s(year_fac).11  3.0 3.1   3.2 1.00  2802</span></span>
+<span><span class="co">## s(year_fac).12  3.1 3.2   3.3 1.00  2458</span></span>
+<span><span class="co">## s(year_fac).13  2.0 2.2   2.4 1.00  2452</span></span>
+<span><span class="co">## s(year_fac).14  2.5 2.6   2.8 1.00  2853</span></span>
+<span><span class="co">## s(year_fac).15  1.9 2.2   2.4 1.00  2628</span></span>
+<span><span class="co">## s(year_fac).16  1.9 2.1   2.3 1.00  3396</span></span>
+<span><span class="co">## s(year_fac).17 -0.4 1.1   2.0 1.01   461</span></span>
 <span><span class="co">## </span></span>
 <span><span class="co">## GAM group-level estimates:</span></span>
 <span><span class="co">##                   2.5%  50% 97.5% Rhat n_eff</span></span>
-<span><span class="co">## mean(s(year_fac)) 2.00 2.40   2.8 1.04   177</span></span>
-<span><span class="co">## sd(s(year_fac))   0.46 0.67   1.1 1.02   187</span></span>
+<span><span class="co">## mean(s(year_fac)) 2.00 2.40   2.8 1.01   220</span></span>
+<span><span class="co">## sd(s(year_fac))   0.45 0.67   1.2 1.01   217</span></span>
 <span><span class="co">## </span></span>
 <span><span class="co">## Approximate significance of GAM observation smooths:</span></span>
 <span><span class="co">##              edf Chi.sq p-value    </span></span>
-<span><span class="co">## s(year_fac) 13.2  23780  &lt;2e-16 ***</span></span>
+<span><span class="co">## s(year_fac) 13.6  23971  &lt;2e-16 ***</span></span>
 <span><span class="co">## ---</span></span>
 <span><span class="co">## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1</span></span>
 <span><span class="co">## </span></span>
@@ -740,7 +727,7 @@ <h2 id="glms-with-temporal-random-effects">GLMs with temporal random effects<a c
 <span><span class="co">## 0 of 2000 iterations saturated the maximum tree depth of 12 (0%)</span></span>
 <span><span class="co">## E-FMI indicated no pathological behavior</span></span>
 <span><span class="co">## </span></span>
-<span><span class="co">## Samples were drawn using NUTS(diag_e) at Tue Mar 19 8:28:08 PM 2024.</span></span>
+<span><span class="co">## Samples were drawn using NUTS(diag_e) at Wed Mar 20 12:45:19 PM 2024.</span></span>
 <span><span class="co">## For each parameter, n_eff is a crude measure of effective sample size,</span></span>
 <span><span class="co">## and Rhat is the potential scale reduction factor on split MCMC chains</span></span>
 <span><span class="co">## (at convergence, Rhat = 1)</span></span></code></pre>
@@ -757,23 +744,7 @@ <h2 id="glms-with-temporal-random-effects">GLMs with temporal random effects<a c
 <span><span class="fu">dplyr</span><span class="fu">::</span><span class="fu"><a href="https://pillar.r-lib.org/reference/glimpse.html" class="external-link">glimpse</a></span><span class="op">(</span><span class="va">beta_post</span><span class="op">)</span></span></code></pre></div>
 <pre><code><span><span class="co">## Rows: 2,000</span></span>
 <span><span class="co">## Columns: 17</span></span>
-<span><span class="co">## $ `s(year_fac).1`  <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 2.10719, 1.84148, 2.09031, 1.99645, 1.89519, 2.32681,…</span></span>
-<span><span class="co">## $ `s(year_fac).2`  <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 2.64680, 2.60113, 2.60112, 2.67128, 2.72142, 2.77772,…</span></span>
-<span><span class="co">## $ `s(year_fac).3`  <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 3.06806, 3.09942, 3.10396, 3.05559, 3.19458, 3.04722,…</span></span>
-<span><span class="co">## $ `s(year_fac).4`  <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 3.24583, 3.27424, 3.28145, 3.28802, 3.34745, 3.16518,…</span></span>
-<span><span class="co">## $ `s(year_fac).5`  <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 2.13682, 2.05123, 2.24092, 2.04121, 2.04692, 2.15410,…</span></span>
-<span><span class="co">## $ `s(year_fac).6`  <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 1.69868, 1.62389, 1.82894, 1.61801, 1.45469, 1.89638,…</span></span>
-<span><span class="co">## $ `s(year_fac).7`  <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 2.04330, 1.99559, 1.85308, 2.06643, 1.83595, 2.05449,…</span></span>
-<span><span class="co">## $ `s(year_fac).8`  <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 3.04806, 2.96831, 3.01446, 2.87108, 2.83922, 3.07323,…</span></span>
-<span><span class="co">## $ `s(year_fac).9`  <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 3.25574, 3.21967, 3.18432, 3.28309, 3.26028, 3.20790,…</span></span>
-<span><span class="co">## $ `s(year_fac).10` <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 2.87515, 2.70384, 2.87162, 2.70043, 2.73320, 2.84259,…</span></span>
-<span><span class="co">## $ `s(year_fac).11` <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 3.16056, 2.96399, 3.02085, 3.10226, 3.04427, 3.01406,…</span></span>
-<span><span class="co">## $ `s(year_fac).12` <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 3.21950, 3.23579, 3.17361, 3.22893, 3.11542, 3.25504,…</span></span>
-<span><span class="co">## $ `s(year_fac).13` <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 2.17382, 2.33856, 2.23563, 2.30508, 2.00185, 2.37041,…</span></span>
-<span><span class="co">## $ `s(year_fac).14` <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 2.64984, 2.64589, 2.63238, 2.65891, 2.65566, 2.58927,…</span></span>
-<span><span class="co">## $ `s(year_fac).15` <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 2.21972, 2.28461, 2.01943, 2.22111, 1.88685, 2.01941,…</span></span>
-<span><span class="co">## $ `s(year_fac).16` <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 2.22980, 2.13542, 2.09359, 2.06566, 2.23790, 2.25561,…</span></span>
-<span><span class="co">## $ `s(year_fac).17` <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 0.308868, 0.815146, 0.503097, 1.198730, 1.326820, 1.8…</span></span></code></pre>
+<span><span class="co">## $ `s(year_fac).1`  <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 1.92642, 2.19665, 2.16313, 2.03566, 2.15104, 1.92071,…</span><span class="co">## $ `s(year_fac).2`  <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 2.67248, 2.70231, 2.60057, 2.78605, 2.77953, 2.73222,…</span><span class="co">## $ `s(year_fac).3`  <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 3.14045, 3.07568, 3.13833, 3.03790, 3.12393, 3.02771,…</span><span class="co">## $ `s(year_fac).4`  <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 3.24309, 3.22446, 3.35237, 3.13655, 3.23618, 3.24420,…</span><span class="co">## $ `s(year_fac).5`  <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 2.17217, 2.07900, 2.14755, 2.09468, 2.16868, 2.30804,…</span><span class="co">## $ `s(year_fac).6`  <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 1.85819, 1.63267, 1.72401, 1.87349, 1.83451, 1.75134,…</span><span class="co">## $ `s(year_fac).7`  <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 1.83016, 2.22052, 2.13839, 2.10892, 2.01269, 2.05681,…</span><span class="co">## $ `s(year_fac).8`  <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 2.97366, 2.98092, 3.02094, 2.93842, 2.95703, 3.13029,…</span><span class="co">## $ `s(year_fac).9`  <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 3.23032, 3.26561, 3.27845, 3.19429, 3.36769, 3.31548,…</span><span class="co">## $ `s(year_fac).10` <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 2.86251, 2.63846, 2.55524, 2.91906, 2.73098, 2.79875,…</span><span class="co">## $ `s(year_fac).11` <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 3.14743, 3.01673, 3.07020, 3.06300, 3.12565, 3.12485,…</span><span class="co">## $ `s(year_fac).12` <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 3.24225, 3.20436, 3.25989, 3.15187, 3.21118, 3.16704,…</span><span class="co">## $ `s(year_fac).13` <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 2.27496, 2.25070, 2.20717, 2.24621, 2.33820, 2.19569,…</span><span class="co">## $ `s(year_fac).14` <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 2.44327, 2.85021, 2.60063, 2.74303, 2.69409, 2.67816,…</span><span class="co">## $ `s(year_fac).15` <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 2.00560, 2.31469, 1.96629, 2.28595, 2.08255, 2.27321,…</span><span class="co">## $ `s(year_fac).16` <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 2.17940, 1.94046, 2.05165, 2.04293, 2.12983, 2.10764,…</span><span class="co">## $ `s(year_fac).17` <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 1.1953300, 1.2777200, 0.6089050, -0.1532210, 2.068590…</span></span></code></pre>
 <p>With any model fitted in <code>mvgam</code>, the underlying
 <code>Stan</code> code can be viewed using the <code>code</code>
 function:</p>
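A minimal sketch of that call, assuming model1 is the fitted object referenced above (code() exposes the generated Stan program for any mvgam model):

    # view the Stan program underlying the fitted model
    code(model1)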
@@ -894,7 +865,7 @@ <h3 id="bayesplot-support">
 <span><span class="co">##  $ test_observations : NULL</span></span>
 <span><span class="co">##  $ test_times        : NULL</span></span>
 <span><span class="co">##  $ hindcasts         :List of 1</span></span>
-<span><span class="co">##   ..$ PP: num [1:2000, 1:199] 8 8 10 12 8 20 3 7 10 7 ...</span></span>
+<span><span class="co">##   ..$ PP: num [1:2000, 1:199] 6 7 15 4 15 4 13 6 12 7 ...</span></span>
 <span><span class="co">##   .. ..- attr(*, "dimnames")=List of 2</span></span>
 <span><span class="co">##   .. .. ..$ : NULL</span></span>
 <span><span class="co">##   .. .. ..$ : chr [1:199] "ypred[1,1]" "ypred[2,1]" "ypred[3,1]" "ypred[4,1]" ...</span></span>
@@ -907,7 +878,7 @@ <h3 id="bayesplot-support">
 <div class="sourceCode" id="cb36"><pre class="downlit sourceCode r">
 <code class="sourceCode R"><span><span class="va">hc</span> <span class="op">&lt;-</span> <span class="fu"><a href="../reference/hindcast.mvgam.html">hindcast</a></span><span class="op">(</span><span class="va">model1</span>, type <span class="op">=</span> <span class="st">'link'</span><span class="op">)</span></span>
 <span><span class="fu"><a href="https://rdrr.io/r/base/range.html" class="external-link">range</a></span><span class="op">(</span><span class="va">hc</span><span class="op">$</span><span class="va">hindcasts</span><span class="op">$</span><span class="va">PP</span><span class="op">)</span></span></code></pre></div>
-<pre><code><span><span class="co">## [1] -2.17311  3.46006</span></span></code></pre>
+<pre><code><span><span class="co">## [1] -1.52007  3.46491</span></span></code></pre>
 <p>Objects of class <code>mvgam_forecast</code> have an associated plot
 function as well:</p>
 <div class="sourceCode" id="cb38"><pre class="downlit sourceCode r">
@@ -959,14 +930,14 @@ <h2 id="automatic-forecasting-for-new-data">Automatic forecasting for new data<a
 <code class="sourceCode R"><span><span class="fu"><a href="https://rdrr.io/r/graphics/plot.default.html" class="external-link">plot</a></span><span class="op">(</span><span class="va">model1b</span>, type <span class="op">=</span> <span class="st">'forecast'</span><span class="op">)</span></span></code></pre></div>
 <p><img src="mvgam_overview_files/figure-html/unnamed-chunk-18-1.png" width="60%" style="display: block; margin: auto;"></p>
 <pre><code><span><span class="co">## Out of sample DRPS:</span></span>
-<span><span class="co">## [1] 183.5703</span></span></code></pre>
+<span><span class="co">## [1] 183.3115</span></span></code></pre>
 <p>We can also view the test data in the forecast plot to see that the
 predictions do not capture the temporal variation in the test set</p>
 <div class="sourceCode" id="cb45"><pre class="downlit sourceCode r">
 <code class="sourceCode R"><span><span class="fu"><a href="https://rdrr.io/r/graphics/plot.default.html" class="external-link">plot</a></span><span class="op">(</span><span class="va">model1b</span>, type <span class="op">=</span> <span class="st">'forecast'</span>, newdata <span class="op">=</span> <span class="va">data_test</span><span class="op">)</span></span></code></pre></div>
 <p><img src="mvgam_overview_files/figure-html/Plotting%20predictions%20against%20test%20data-1.png" width="60%" style="display: block; margin: auto;"></p>
 <pre><code><span><span class="co">## Out of sample DRPS:</span></span>
-<span><span class="co">## [1] 183.5703</span></span></code></pre>
+<span><span class="co">## [1] 183.3115</span></span></code></pre>
 <p>As with the <code>hindcast</code> function, we can use the
 <code>forecast</code> function to automatically extract the posterior
 distributions for these predictions. This also returns an object of
@@ -994,12 +965,12 @@ <h2 id="automatic-forecasting-for-new-data">Automatic forecasting for new data<a
 <span><span class="co">##   ..$ PP: int [1:39] NA 0 0 10 3 14 18 NA 28 46 ...</span></span>
 <span><span class="co">##  $ test_times        : num [1:39] 161 162 163 164 165 166 167 168 169 170 ...</span></span>
 <span><span class="co">##  $ hindcasts         :List of 1</span></span>
-<span><span class="co">##   ..$ PP: num [1:2000, 1:160] 7 8 7 5 7 7 4 10 12 12 ...</span></span>
+<span><span class="co">##   ..$ PP: num [1:2000, 1:160] 8 10 10 5 5 8 12 9 3 2 ...</span></span>
 <span><span class="co">##   .. ..- attr(*, "dimnames")=List of 2</span></span>
 <span><span class="co">##   .. .. ..$ : NULL</span></span>
 <span><span class="co">##   .. .. ..$ : chr [1:160] "ypred[1,1]" "ypred[2,1]" "ypred[3,1]" "ypred[4,1]" ...</span></span>
 <span><span class="co">##  $ forecasts         :List of 1</span></span>
-<span><span class="co">##   ..$ PP: num [1:2000, 1:39] 9 13 9 9 12 12 6 10 5 6 ...</span></span>
+<span><span class="co">##   ..$ PP: num [1:2000, 1:39] 9 5 17 6 9 7 10 16 4 6 ...</span></span>
 <span><span class="co">##   .. ..- attr(*, "dimnames")=List of 2</span></span>
 <span><span class="co">##   .. .. ..$ : NULL</span></span>
 <span><span class="co">##   .. .. ..$ : chr [1:39] "ypred[161,1]" "ypred[162,1]" "ypred[163,1]" "ypred[164,1]" ...</span></span>
@@ -1056,33 +1027,33 @@ <h2 id="adding-predictors-as-fixed-effects">Adding predictors as “fixed” eff
 <span><span class="co">## </span></span>
 <span><span class="co">## GAM coefficient (beta) estimates:</span></span>
 <span><span class="co">##                2.5%  50% 97.5% Rhat n_eff</span></span>
-<span><span class="co">## ndvi           0.32 0.39  0.46    1  1523</span></span>
-<span><span class="co">## s(year_fac).1  1.10 1.40  1.70    1  2409</span></span>
-<span><span class="co">## s(year_fac).2  1.80 2.00  2.20    1  1936</span></span>
-<span><span class="co">## s(year_fac).3  2.20 2.40  2.60    1  1960</span></span>
-<span><span class="co">## s(year_fac).4  2.30 2.50  2.70    1  1847</span></span>
-<span><span class="co">## s(year_fac).5  1.20 1.40  1.60    1  2065</span></span>
-<span><span class="co">## s(year_fac).6  1.00 1.30  1.50    1  2404</span></span>
-<span><span class="co">## s(year_fac).7  1.20 1.40  1.70    1  2146</span></span>
-<span><span class="co">## s(year_fac).8  2.10 2.30  2.40    1  1908</span></span>
-<span><span class="co">## s(year_fac).9  2.70 2.90  3.00    1  2093</span></span>
-<span><span class="co">## s(year_fac).10 2.00 2.20  2.40    1  2219</span></span>
-<span><span class="co">## s(year_fac).11 2.30 2.40  2.60    1  1881</span></span>
-<span><span class="co">## s(year_fac).12 2.50 2.70  2.80    1  2071</span></span>
-<span><span class="co">## s(year_fac).13 1.40 1.60  1.80    1  1804</span></span>
-<span><span class="co">## s(year_fac).14 0.59 2.00  3.40    1  1265</span></span>
-<span><span class="co">## s(year_fac).15 0.61 2.00  3.40    1  1321</span></span>
-<span><span class="co">## s(year_fac).16 0.74 2.00  3.40    1  1491</span></span>
-<span><span class="co">## s(year_fac).17 0.66 2.00  3.20    1  1394</span></span>
+<span><span class="co">## ndvi           0.32 0.39  0.46    1  2023</span></span>
+<span><span class="co">## s(year_fac).1  1.10 1.40  1.70    1  2658</span></span>
+<span><span class="co">## s(year_fac).2  1.80 2.00  2.20    1  2391</span></span>
+<span><span class="co">## s(year_fac).3  2.20 2.40  2.60    1  2285</span></span>
+<span><span class="co">## s(year_fac).4  2.30 2.50  2.70    1  2159</span></span>
+<span><span class="co">## s(year_fac).5  1.20 1.40  1.60    1  2332</span></span>
+<span><span class="co">## s(year_fac).6  1.00 1.30  1.50    1  2104</span></span>
+<span><span class="co">## s(year_fac).7  1.10 1.40  1.70    1  2287</span></span>
+<span><span class="co">## s(year_fac).8  2.10 2.30  2.50    1  2520</span></span>
+<span><span class="co">## s(year_fac).9  2.70 2.90  3.00    1  1636</span></span>
+<span><span class="co">## s(year_fac).10 2.00 2.20  2.40    1  2789</span></span>
+<span><span class="co">## s(year_fac).11 2.30 2.40  2.60    1  2304</span></span>
+<span><span class="co">## s(year_fac).12 2.50 2.70  2.80    1  2298</span></span>
+<span><span class="co">## s(year_fac).13 1.40 1.60  1.90    1  2897</span></span>
+<span><span class="co">## s(year_fac).14 0.59 2.00  3.30    1  1585</span></span>
+<span><span class="co">## s(year_fac).15 0.44 2.00  3.40    1  1281</span></span>
+<span><span class="co">## s(year_fac).16 0.60 2.00  3.30    1  1766</span></span>
+<span><span class="co">## s(year_fac).17 0.55 2.00  3.40    1  1550</span></span>
 <span><span class="co">## </span></span>
 <span><span class="co">## GAM group-level estimates:</span></span>
-<span><span class="co">##                   2.5% 50% 97.5% Rhat n_eff</span></span>
-<span><span class="co">## mean(s(year_fac))  1.6 2.0   2.3 1.01   359</span></span>
-<span><span class="co">## sd(s(year_fac))    0.4 0.6   1.0 1.00   478</span></span>
+<span><span class="co">##                   2.5%  50% 97.5% Rhat n_eff</span></span>
+<span><span class="co">## mean(s(year_fac)) 1.50 2.00   2.3 1.01   342</span></span>
+<span><span class="co">## sd(s(year_fac))   0.41 0.61   1.0 1.01   487</span></span>
 <span><span class="co">## </span></span>
 <span><span class="co">## Approximate significance of GAM observation smooths:</span></span>
 <span><span class="co">##              edf Chi.sq p-value    </span></span>
-<span><span class="co">## s(year_fac) 10.8   2737  &lt;2e-16 ***</span></span>
+<span><span class="co">## s(year_fac) 10.7   2745  &lt;2e-16 ***</span></span>
 <span><span class="co">## ---</span></span>
 <span><span class="co">## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1</span></span>
 <span><span class="co">## </span></span>
@@ -1093,7 +1064,7 @@ <h2 id="adding-predictors-as-fixed-effects">Adding predictors as “fixed” eff
 <span><span class="co">## 0 of 2000 iterations saturated the maximum tree depth of 12 (0%)</span></span>
 <span><span class="co">## E-FMI indicated no pathological behavior</span></span>
 <span><span class="co">## </span></span>
-<span><span class="co">## Samples were drawn using NUTS(diag_e) at Tue Mar 19 8:29:50 PM 2024.</span></span>
+<span><span class="co">## Samples were drawn using NUTS(diag_e) at Wed Mar 20 12:46:22 PM 2024.</span></span>
 <span><span class="co">## For each parameter, n_eff is a crude measure of effective sample size,</span></span>
 <span><span class="co">## and Rhat is the potential scale reduction factor on split MCMC chains</span></span>
 <span><span class="co">## (at convergence, Rhat = 1)</span></span></code></pre>
@@ -1104,24 +1075,24 @@ <h2 id="adding-predictors-as-fixed-effects">Adding predictors as “fixed” eff
 <div class="sourceCode" id="cb52"><pre class="downlit sourceCode r">
 <code class="sourceCode R"><span><span class="fu"><a href="https://rdrr.io/r/stats/coef.html" class="external-link">coef</a></span><span class="op">(</span><span class="va">model2</span><span class="op">)</span></span></code></pre></div>
 <pre><code><span><span class="co">##                     2.5%      50%     97.5% Rhat n_eff</span></span>
-<span><span class="co">## ndvi           0.3232597 0.390940 0.4554195    1  1523</span></span>
-<span><span class="co">## s(year_fac).1  1.1243888 1.404795 1.6537325    1  2409</span></span>
-<span><span class="co">## s(year_fac).2  1.8070920 1.998580 2.1898733    1  1936</span></span>
-<span><span class="co">## s(year_fac).3  2.1759217 2.377300 2.5689775    1  1960</span></span>
-<span><span class="co">## s(year_fac).4  2.3105295 2.503245 2.6947637    1  1847</span></span>
-<span><span class="co">## s(year_fac).5  1.1937385 1.419495 1.6404250    1  2065</span></span>
-<span><span class="co">## s(year_fac).6  1.0069897 1.271995 1.5148825    1  2404</span></span>
-<span><span class="co">## s(year_fac).7  1.1534555 1.414145 1.6755780    1  2146</span></span>
-<span><span class="co">## s(year_fac).8  2.0805290 2.268940 2.4431120    1  1908</span></span>
-<span><span class="co">## s(year_fac).9  2.7100590 2.854620 2.9904657    1  2093</span></span>
-<span><span class="co">## s(year_fac).10 1.9802680 2.185950 2.3861060    1  2219</span></span>
-<span><span class="co">## s(year_fac).11 2.2623455 2.435425 2.6061592    1  1881</span></span>
-<span><span class="co">## s(year_fac).12 2.5348080 2.691670 2.8346482    1  2071</span></span>
-<span><span class="co">## s(year_fac).13 1.3660082 1.617220 1.8496322    1  1804</span></span>
-<span><span class="co">## s(year_fac).14 0.5911541 1.972980 3.3520772    1  1265</span></span>
-<span><span class="co">## s(year_fac).15 0.6130088 1.973655 3.3562612    1  1321</span></span>
-<span><span class="co">## s(year_fac).16 0.7371668 1.992295 3.4006835    1  1491</span></span>
-<span><span class="co">## s(year_fac).17 0.6633022 1.968965 3.2350522    1  1394</span></span></code></pre>
+<span><span class="co">## ndvi           0.3222796 0.389041 0.4600176    1  2023</span></span>
+<span><span class="co">## s(year_fac).1  1.1157430 1.407195 1.6551925    1  2658</span></span>
+<span><span class="co">## s(year_fac).2  1.8001732 2.000195 2.2038882    1  2391</span></span>
+<span><span class="co">## s(year_fac).3  2.1784455 2.380785 2.5669165    1  2285</span></span>
+<span><span class="co">## s(year_fac).4  2.3331905 2.504990 2.6800770    1  2159</span></span>
+<span><span class="co">## s(year_fac).5  1.1956660 1.420610 1.6487445    1  2332</span></span>
+<span><span class="co">## s(year_fac).6  1.0293495 1.276125 1.4956345    1  2104</span></span>
+<span><span class="co">## s(year_fac).7  1.1373437 1.409305 1.6728182    1  2287</span></span>
+<span><span class="co">## s(year_fac).8  2.0900953 2.272840 2.4625560    1  2520</span></span>
+<span><span class="co">## s(year_fac).9  2.7241293 2.854795 2.9820272    1  1636</span></span>
+<span><span class="co">## s(year_fac).10 1.9734870 2.187960 2.3829132    1  2789</span></span>
+<span><span class="co">## s(year_fac).11 2.2672130 2.438820 2.5939667    1  2304</span></span>
+<span><span class="co">## s(year_fac).12 2.5427050 2.696400 2.8361120    1  2298</span></span>
+<span><span class="co">## s(year_fac).13 1.3838170 1.618020 1.8513388    1  2897</span></span>
+<span><span class="co">## s(year_fac).14 0.5922547 1.986470 3.3460117    1  1585</span></span>
+<span><span class="co">## s(year_fac).15 0.4393822 1.999515 3.4099707    1  1281</span></span>
+<span><span class="co">## s(year_fac).16 0.6003343 1.963845 3.3314300    1  1766</span></span>
+<span><span class="co">## s(year_fac).17 0.5451825 1.973770 3.4078517    1  1550</span></span></code></pre>
 <p>Look at the estimated effect of <code>ndvi</code> using
 <code>plot.mvgam</code> with <code>type = 'pterms'</code></p>
 <div class="sourceCode" id="cb54"><pre class="downlit sourceCode r">
@@ -1137,24 +1108,7 @@ <h2 id="adding-predictors-as-fixed-effects">Adding predictors as “fixed” eff
 <span><span class="fu">dplyr</span><span class="fu">::</span><span class="fu"><a href="https://pillar.r-lib.org/reference/glimpse.html" class="external-link">glimpse</a></span><span class="op">(</span><span class="va">beta_post</span><span class="op">)</span></span></code></pre></div>
 <pre><code><span><span class="co">## Rows: 2,000</span></span>
 <span><span class="co">## Columns: 18</span></span>
-<span><span class="co">## $ ndvi             <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 0.369188, 0.358142, 0.380097, 0.388986, 0.411054, 0.3…</span></span>
-<span><span class="co">## $ `s(year_fac).1`  <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 1.46468, 1.33541, 1.53760, 1.23095, 1.39416, 1.43930,…</span></span>
-<span><span class="co">## $ `s(year_fac).2`  <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 1.98631, 2.01659, 2.04716, 1.98244, 1.94622, 2.06119,…</span></span>
-<span><span class="co">## $ `s(year_fac).3`  <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 2.37212, 2.47199, 2.44498, 2.39509, 2.29522, 2.39798,…</span></span>
-<span><span class="co">## $ `s(year_fac).4`  <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 2.66554, 2.38987, 2.63843, 2.53246, 2.44160, 2.61544,…</span></span>
-<span><span class="co">## $ `s(year_fac).5`  <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 1.44484, 1.46517, 1.39152, 1.55978, 1.53860, 1.54923,…</span></span>
-<span><span class="co">## $ `s(year_fac).6`  <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 1.269120, 1.381480, 1.182820, 1.339260, 1.357210, 1.3…</span></span>
-<span><span class="co">## $ `s(year_fac).7`  <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 1.37845, 1.56485, 1.45465, 1.52910, 1.55788, 1.62370,…</span></span>
-<span><span class="co">## $ `s(year_fac).8`  <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 2.18331, 2.43159, 2.24811, 2.22532, 2.18192, 2.33797,…</span></span>
-<span><span class="co">## $ `s(year_fac).9`  <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 2.91719, 2.85339, 2.93523, 2.82163, 2.88262, 2.89877,…</span></span>
-<span><span class="co">## $ `s(year_fac).10` <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 2.13800, 2.27441, 2.12414, 2.28971, 2.25759, 2.28692,…</span></span>
-<span><span class="co">## $ `s(year_fac).11` <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 2.35005, 2.57266, 2.41406, 2.28769, 2.36534, 2.51464,…</span></span>
-<span><span class="co">## $ `s(year_fac).12` <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 2.72567, 2.80190, 2.65140, 2.80932, 2.63651, 2.61162,…</span></span>
-<span><span class="co">## $ `s(year_fac).13` <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 1.69251, 1.61900, 1.51345, 1.68852, 1.43159, 1.59473,…</span></span>
-<span><span class="co">## $ `s(year_fac).14` <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 2.383070, 0.591868, 1.237180, 2.221190, 2.692930, 1.5…</span></span>
-<span><span class="co">## $ `s(year_fac).15` <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 2.426070, 1.543630, 1.986580, 2.547520, 1.977620, 2.2…</span></span>
-<span><span class="co">## $ `s(year_fac).16` <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 0.644001, 1.734400, 1.530610, 2.579430, 1.377440, 1.5…</span></span>
-<span><span class="co">## $ `s(year_fac).17` <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 1.641660, 2.281480, 2.213900, 1.755700, 0.862163, 1.8…</span></span></code></pre>
+<span><span class="co">## $ ndvi             <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 0.381120, 0.397895, 0.361063, 0.337924, 0.389229, 0.3…</span><span class="co">## $ `s(year_fac).1`  <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 1.53760, 1.35799, 1.43393, 1.57832, 1.42582, 1.50809,…</span><span class="co">## $ `s(year_fac).2`  <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 1.98443, 1.96584, 2.06308, 2.14220, 1.93708, 1.95439,…</span><span class="co">## $ `s(year_fac).3`  <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 2.38175, 2.33931, 2.48983, 2.43840, 2.36809, 2.43313,…</span><span class="co">## $ `s(year_fac).4`  <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 2.48232, 2.58219, 2.55500, 2.64511, 2.52126, 2.51045,…</span><span class="co">## $ `s(year_fac).5`  <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 1.49842, 1.45023, 1.39687, 1.39184, 1.55064, 1.57819,…</span><span class="co">## $ `s(year_fac).6`  <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 1.34528, 1.17654, 1.40190, 1.32767, 1.39663, 1.20023,…</span><span class="co">## $ `s(year_fac).7`  <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 1.14149, 1.10700, 1.74106, 1.67281, 1.38958, 1.33037,…</span><span class="co">## $ `s(year_fac).8`  <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 2.32211, 2.22621, 2.31916, 2.37388, 2.32681, 2.26146,…</span><span class="co">## $ `s(year_fac).9`  <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 2.84581, 2.80193, 2.93651, 2.92311, 2.87122, 2.80573,…</span><span class="co">## $ `s(year_fac).10` <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 2.13229, 2.29285, 2.06515, 2.30390, 2.11258, 2.08253,…</span><span class="co">## $ `s(year_fac).11` <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 2.51404, 2.39615, 2.47567, 2.60872, 2.47847, 2.41808,…</span><span class="co">## $ `s(year_fac).12` <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 2.70749, 2.64577, 2.73533, 2.78696, 2.64888, 2.75584,…</span><span class="co">## $ `s(year_fac).13` <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 1.68161, 1.60027, 1.66338, 1.75563, 1.53293, 1.66814,…</span><span class="co">## $ `s(year_fac).14` <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 1.506520, 1.608860, 1.536620, 1.224190, 2.750580, 2.5…</span><span class="co">## $ `s(year_fac).15` <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 1.880880, 2.025040, 2.065720, 1.918390, 3.061930, 3.5…</span><span class="co">## $ `s(year_fac).16` <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 0.973767, 0.834117, 1.072870, 1.100700, 1.892870, 2.2…</span><span class="co">## $ `s(year_fac).17` <span style="color: #949494; font-style: italic;">&lt;dbl&gt;</span> 2.22516, 2.34161, 1.82350, 2.11547, 1.47345, 1.88079,…</span></span></code></pre>
 <p>The posterior distribution for the effect of <code>ndvi</code> is
 stored in the <code>ndvi</code> column. A quick histogram confirms our
 inference that <code>log(counts)</code> respond positively to increases
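A minimal sketch of that quick histogram, assuming beta_post is the data frame of posterior draws extracted just above:

    # histogram of the posterior draws for the ndvi effect
    hist(beta_post$ndvi,
         breaks = 30,
         xlab = 'Posterior ndvi effect',
         main = '')
    abline(v = 0, lwd = 2)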
@@ -1290,27 +1244,27 @@ <h2 id="adding-predictors-as-smooths">Adding predictors as smooths<a class="anch
 <span><span class="co">## </span></span>
 <span><span class="co">## </span></span>
 <span><span class="co">## GAM coefficient (beta) estimates:</span></span>
-<span><span class="co">##              2.5%   50%  97.5% Rhat n_eff</span></span>
-<span><span class="co">## (Intercept)  2.00  2.10  2.200 1.00  1123</span></span>
-<span><span class="co">## ndvi         0.26  0.33  0.390 1.00  1157</span></span>
-<span><span class="co">## s(time).1   -2.20 -1.10 -0.074 1.01   515</span></span>
-<span><span class="co">## s(time).2    0.37  1.30  2.300 1.01   397</span></span>
-<span><span class="co">## s(time).3   -0.54  0.46  1.500 1.01   368</span></span>
-<span><span class="co">## s(time).4    1.50  2.50  3.500 1.01   356</span></span>
-<span><span class="co">## s(time).5   -1.20 -0.21  0.830 1.01   368</span></span>
-<span><span class="co">## s(time).6   -0.66  0.38  1.500 1.01   384</span></span>
-<span><span class="co">## s(time).7   -1.70 -0.52  0.480 1.01   408</span></span>
-<span><span class="co">## s(time).8    0.52  1.50  2.500 1.01   355</span></span>
-<span><span class="co">## s(time).9    1.10  2.10  3.100 1.01   357</span></span>
-<span><span class="co">## s(time).10  -0.42  0.56  1.600 1.01   365</span></span>
-<span><span class="co">## s(time).11   0.75  1.70  2.800 1.01   348</span></span>
-<span><span class="co">## s(time).12   0.62  1.50  2.500 1.01   377</span></span>
-<span><span class="co">## s(time).13  -1.20 -0.30  0.600 1.01   433</span></span>
-<span><span class="co">## s(time).14  -7.70 -4.20 -1.000 1.01   420</span></span>
+<span><span class="co">##              2.5%   50%   97.5% Rhat n_eff</span></span>
+<span><span class="co">## (Intercept)  2.00  2.10  2.2000 1.00   913</span></span>
+<span><span class="co">## ndvi         0.26  0.33  0.3900 1.00   918</span></span>
+<span><span class="co">## s(time).1   -2.20 -1.10 -0.0073 1.00   418</span></span>
+<span><span class="co">## s(time).2    0.42  1.30  2.3000 1.00   335</span></span>
+<span><span class="co">## s(time).3   -0.60  0.42  1.5000 1.01   309</span></span>
+<span><span class="co">## s(time).4    1.50  2.40  3.6000 1.00   303</span></span>
+<span><span class="co">## s(time).5   -1.20 -0.25  0.8400 1.00   308</span></span>
+<span><span class="co">## s(time).6   -0.62  0.34  1.5000 1.00   311</span></span>
+<span><span class="co">## s(time).7   -1.50 -0.57  0.5600 1.00   348</span></span>
+<span><span class="co">## s(time).8    0.54  1.40  2.6000 1.00   296</span></span>
+<span><span class="co">## s(time).9    1.10  2.00  3.1000 1.01   288</span></span>
+<span><span class="co">## s(time).10  -0.39  0.50  1.6000 1.00   299</span></span>
+<span><span class="co">## s(time).11   0.77  1.70  2.8000 1.01   288</span></span>
+<span><span class="co">## s(time).12   0.59  1.40  2.5000 1.00   313</span></span>
+<span><span class="co">## s(time).13  -1.20 -0.33  0.6500 1.01   375</span></span>
+<span><span class="co">## s(time).14  -7.70 -4.10 -1.2000 1.00   381</span></span>
 <span><span class="co">## </span></span>
 <span><span class="co">## Approximate significance of GAM observation smooths:</span></span>
 <span><span class="co">##          edf Chi.sq p-value    </span></span>
-<span><span class="co">## s(time) 9.36    637  &lt;2e-16 ***</span></span>
+<span><span class="co">## s(time) 10.5    781  &lt;2e-16 ***</span></span>
 <span><span class="co">## ---</span></span>
 <span><span class="co">## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1</span></span>
 <span><span class="co">## </span></span>
@@ -1321,7 +1275,7 @@ <h2 id="adding-predictors-as-smooths">Adding predictors as smooths<a class="anch
 <span><span class="co">## 0 of 2000 iterations saturated the maximum tree depth of 12 (0%)</span></span>
 <span><span class="co">## E-FMI indicated no pathological behavior</span></span>
 <span><span class="co">## </span></span>
-<span><span class="co">## Samples were drawn using NUTS(diag_e) at Tue Mar 19 8:31:14 PM 2024.</span></span>
+<span><span class="co">## Samples were drawn using NUTS(diag_e) at Wed Mar 20 12:47:10 PM 2024.</span></span>
 <span><span class="co">## For each parameter, n_eff is a crude measure of effective sample size,</span></span>
 <span><span class="co">## and Rhat is the potential scale reduction factor on split MCMC chains</span></span>
 <span><span class="co">## (at convergence, Rhat = 1)</span></span></code></pre>
@@ -1448,7 +1402,7 @@ <h2 id="latent-dynamics-in-mvgam">Latent dynamics in <code>mvgam</code><a class=
 <code class="sourceCode R"><span><span class="fu"><a href="https://rdrr.io/r/graphics/plot.default.html" class="external-link">plot</a></span><span class="op">(</span><span class="va">model3</span>, type <span class="op">=</span> <span class="st">'forecast'</span>, newdata <span class="op">=</span> <span class="va">data_test</span><span class="op">)</span></span></code></pre></div>
 <p><img src="mvgam_overview_files/figure-html/unnamed-chunk-31-1.png" width="60%" style="display: block; margin: auto;"></p>
 <pre><code><span><span class="co">## Out of sample DRPS:</span></span>
-<span><span class="co">## [1] 287.0294</span></span></code></pre>
+<span><span class="co">## [1] 287.1895</span></span></code></pre>
 <p>Why is this happening? The forecasts are driven almost entirely by
 variation in the temporal spline, which is extrapolating linearly
 <em>forever</em> beyond the edge of the training data. Any slight
@@ -1530,34 +1484,34 @@ <h2 id="latent-dynamics-in-mvgam">Latent dynamics in <code>mvgam</code><a class=
 <span><span class="co">## </span></span>
 <span><span class="co">## </span></span>
 <span><span class="co">## GAM coefficient (beta) estimates:</span></span>
-<span><span class="co">##               2.5%      50% 97.5% Rhat n_eff</span></span>
-<span><span class="co">## (Intercept)  0.800  1.90000 2.600 1.08    73</span></span>
-<span><span class="co">## s(ndvi).1   -0.075  0.00870 0.150 1.00   740</span></span>
-<span><span class="co">## s(ndvi).2   -0.130  0.01100 0.220 1.01   525</span></span>
-<span><span class="co">## s(ndvi).3   -0.046 -0.00099 0.045 1.01   660</span></span>
-<span><span class="co">## s(ndvi).4   -0.250  0.11000 1.000 1.01   325</span></span>
-<span><span class="co">## s(ndvi).5   -0.067  0.15000 0.360 1.00   698</span></span>
+<span><span class="co">##               2.5%     50% 97.5% Rhat n_eff</span></span>
+<span><span class="co">## (Intercept)  1.200  1.9000 2.500 1.06    54</span></span>
+<span><span class="co">## s(ndvi).1   -0.082  0.0110 0.180 1.00   611</span></span>
+<span><span class="co">## s(ndvi).2   -0.210  0.0200 0.320 1.01   630</span></span>
+<span><span class="co">## s(ndvi).3   -0.058 -0.0022 0.063 1.01   501</span></span>
+<span><span class="co">## s(ndvi).4   -0.290  0.1500 1.300 1.02   315</span></span>
+<span><span class="co">## s(ndvi).5   -0.080  0.1500 0.350 1.00   612</span></span>
 <span><span class="co">## </span></span>
 <span><span class="co">## Approximate significance of GAM observation smooths:</span></span>
-<span><span class="co">##           edf Chi.sq p-value  </span></span>
-<span><span class="co">## s(ndvi) 0.922     88   0.067 .</span></span>
+<span><span class="co">##          edf Chi.sq p-value  </span></span>
+<span><span class="co">## s(ndvi) 1.75   82.3    0.07 .</span></span>
 <span><span class="co">## ---</span></span>
 <span><span class="co">## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1</span></span>
 <span><span class="co">## </span></span>
 <span><span class="co">## Latent trend parameter AR estimates:</span></span>
 <span><span class="co">##          2.5%  50% 97.5% Rhat n_eff</span></span>
-<span><span class="co">## ar1[1]   0.70 0.81  0.93 1.01   315</span></span>
-<span><span class="co">## sigma[1] 0.68 0.81  0.97 1.00   410</span></span>
+<span><span class="co">## ar1[1]   0.70 0.81  0.91 1.02   270</span></span>
+<span><span class="co">## sigma[1] 0.67 0.80  0.95 1.01   460</span></span>
 <span><span class="co">## </span></span>
 <span><span class="co">## Stan MCMC diagnostics:</span></span>
 <span><span class="co">## n_eff / iter looks reasonable for all parameters</span></span>
-<span><span class="co">## Rhats above 1.05 found for 76 parameters</span></span>
+<span><span class="co">## Rhats above 1.05 found for 11 parameters</span></span>
 <span><span class="co">##  *Diagnose further to investigate why the chains have not mixed</span></span>
 <span><span class="co">## 0 of 2000 iterations ended with a divergence (0%)</span></span>
 <span><span class="co">## 0 of 2000 iterations saturated the maximum tree depth of 12 (0%)</span></span>
 <span><span class="co">## E-FMI indicated no pathological behavior</span></span>
 <span><span class="co">## </span></span>
-<span><span class="co">## Samples were drawn using NUTS(diag_e) at Tue Mar 19 8:33:11 PM 2024.</span></span>
+<span><span class="co">## Samples were drawn using NUTS(diag_e) at Wed Mar 20 12:48:13 PM 2024.</span></span>
 <span><span class="co">## For each parameter, n_eff is a crude measure of effective sample size,</span></span>
 <span><span class="co">## and Rhat is the potential scale reduction factor on split MCMC chains</span></span>
 <span><span class="co">## (at convergence, Rhat = 1)</span></span></code></pre>
@@ -1573,7 +1527,7 @@ <h2 id="latent-dynamics-in-mvgam">Latent dynamics in <code>mvgam</code><a class=
 <code class="sourceCode R"><span><span class="fu"><a href="https://rdrr.io/r/graphics/plot.default.html" class="external-link">plot</a></span><span class="op">(</span><span class="va">model4</span>, type <span class="op">=</span> <span class="st">'forecast'</span>, newdata <span class="op">=</span> <span class="va">data_test</span><span class="op">)</span></span></code></pre></div>
 <p><img src="mvgam_overview_files/figure-html/unnamed-chunk-34-1.png" width="60%" style="display: block; margin: auto;"></p>
 <pre><code><span><span class="co">## Out of sample DRPS:</span></span>
-<span><span class="co">## [1] 154.869</span></span></code></pre>
+<span><span class="co">## [1] 152.284</span></span></code></pre>
 <p>The trend is evolving as an AR1 process, which we can also view:</p>
 <div class="sourceCode" id="cb79"><pre class="downlit sourceCode r">
 <code class="sourceCode R"><span><span class="fu"><a href="https://rdrr.io/r/graphics/plot.default.html" class="external-link">plot</a></span><span class="op">(</span><span class="va">model4</span>, type <span class="op">=</span> <span class="st">'trend'</span>, newdata <span class="op">=</span> <span class="va">data_test</span><span class="op">)</span></span></code></pre></div>
@@ -1588,7 +1542,7 @@ <h2 id="latent-dynamics-in-mvgam">Latent dynamics in <code>mvgam</code><a class=
 <span><span class="co">## Warning: Some Pareto k diagnostic values are too high. See help('pareto-k-diagnostic') for details.</span></span></code></pre>
 <pre><code><span><span class="co">##        elpd_diff se_diff</span></span>
 <span><span class="co">## model4    0.0       0.0 </span></span>
-<span><span class="co">## model3 -556.9      66.0</span></span></code></pre>
+<span><span class="co">## model3 -564.6      66.6</span></span></code></pre>
 <p>The higher estimated log predictive density (ELPD) value for the
 dynamic model suggests it provides a better fit to the in-sample
 data.</p>
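A hedged sketch of how a comparison like the ELPD table above can be produced, assuming model3 (smooth of time) and model4 (latent AR1 trend) are the fitted objects and are compared through the loo interface:

    # approximate leave-one-out comparison of in-sample predictive fit
    loo_compare(model3, model4)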
@@ -1603,7 +1557,7 @@ <h2 id="latent-dynamics-in-mvgam">Latent dynamics in <code>mvgam</code><a class=
 <span><span class="va">score_mod3</span> <span class="op">&lt;-</span> <span class="fu"><a href="../reference/score.mvgam_forecast.html">score</a></span><span class="op">(</span><span class="va">fc_mod3</span>, score <span class="op">=</span> <span class="st">'drps'</span><span class="op">)</span></span>
 <span><span class="va">score_mod4</span> <span class="op">&lt;-</span> <span class="fu"><a href="../reference/score.mvgam_forecast.html">score</a></span><span class="op">(</span><span class="va">fc_mod4</span>, score <span class="op">=</span> <span class="st">'drps'</span><span class="op">)</span></span>
 <span><span class="fu"><a href="https://rdrr.io/r/base/sum.html" class="external-link">sum</a></span><span class="op">(</span><span class="va">score_mod4</span><span class="op">$</span><span class="va">PP</span><span class="op">$</span><span class="va">score</span>, na.rm <span class="op">=</span> <span class="cn">TRUE</span><span class="op">)</span> <span class="op">-</span> <span class="fu"><a href="https://rdrr.io/r/base/sum.html" class="external-link">sum</a></span><span class="op">(</span><span class="va">score_mod3</span><span class="op">$</span><span class="va">PP</span><span class="op">$</span><span class="va">score</span>, na.rm <span class="op">=</span> <span class="cn">TRUE</span><span class="op">)</span></span></code></pre></div>
-<pre><code><span><span class="co">## [1] -132.1604</span></span></code></pre>
+<pre><code><span><span class="co">## [1] -134.9055</span></span></code></pre>
 <p>A strongly negative value here suggests the score for the dynamic
 model (model 4) is much smaller than the score for the model with a
 smooth function of time (model 3)</p>
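Because DRPS is the scoring rule used for these forecast comparisons, a minimal sketch of how a discrete rank probability score can be computed for one observation from posterior predictive draws may help; the helper name and truncation bound here are illustrative, not the package's internal implementation:

    # DRPS for a single observed count, from posterior predictive draws
    drps_one <- function(draws, y, upper = 1000) {
      ks <- 0:upper
      Fk <- vapply(ks, function(k) mean(draws <= k), numeric(1)) # predictive CDF
      Hk <- as.numeric(ks >= y)                                  # step CDF at the observation
      sum((Fk - Hk)^2)
    }

    # e.g. score simulated Poisson draws against an observed count of 8
    set.seed(1)
    drps_one(rpois(2000, lambda = 10), y = 8)

Lower values indicate a predictive distribution that is more concentrated around the observed value, which is why the strongly negative difference above favours the dynamic model.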
diff --git a/docs/articles/mvgam_overview_files/figure-html/Histogram of NDVI effects-1.png b/docs/articles/mvgam_overview_files/figure-html/Histogram of NDVI effects-1.png
index 2cd2505e..588e6641 100644
Binary files a/docs/articles/mvgam_overview_files/figure-html/Histogram of NDVI effects-1.png and b/docs/articles/mvgam_overview_files/figure-html/Histogram of NDVI effects-1.png differ
diff --git a/docs/articles/mvgam_overview_files/figure-html/Plot NDVI effect-1.png b/docs/articles/mvgam_overview_files/figure-html/Plot NDVI effect-1.png
index e93e38c5..90442ecd 100644
Binary files a/docs/articles/mvgam_overview_files/figure-html/Plot NDVI effect-1.png and b/docs/articles/mvgam_overview_files/figure-html/Plot NDVI effect-1.png differ
diff --git a/docs/articles/mvgam_overview_files/figure-html/Plot extrapolated temporal functions using newdata-1.png b/docs/articles/mvgam_overview_files/figure-html/Plot extrapolated temporal functions using newdata-1.png
index 02900155..210c5195 100644
Binary files a/docs/articles/mvgam_overview_files/figure-html/Plot extrapolated temporal functions using newdata-1.png and b/docs/articles/mvgam_overview_files/figure-html/Plot extrapolated temporal functions using newdata-1.png differ
diff --git a/docs/articles/mvgam_overview_files/figure-html/Plot hindcasts on the linear predictor scale-1.png b/docs/articles/mvgam_overview_files/figure-html/Plot hindcasts on the linear predictor scale-1.png
index 10abd20a..be72dca4 100644
Binary files a/docs/articles/mvgam_overview_files/figure-html/Plot hindcasts on the linear predictor scale-1.png and b/docs/articles/mvgam_overview_files/figure-html/Plot hindcasts on the linear predictor scale-1.png differ
diff --git a/docs/articles/mvgam_overview_files/figure-html/Plot posterior hindcasts-1.png b/docs/articles/mvgam_overview_files/figure-html/Plot posterior hindcasts-1.png
index 76261c78..00d4a600 100644
Binary files a/docs/articles/mvgam_overview_files/figure-html/Plot posterior hindcasts-1.png and b/docs/articles/mvgam_overview_files/figure-html/Plot posterior hindcasts-1.png differ
diff --git a/docs/articles/mvgam_overview_files/figure-html/Plot posterior residuals-1.png b/docs/articles/mvgam_overview_files/figure-html/Plot posterior residuals-1.png
index 61a5cf41..5e3669a4 100644
Binary files a/docs/articles/mvgam_overview_files/figure-html/Plot posterior residuals-1.png and b/docs/articles/mvgam_overview_files/figure-html/Plot posterior residuals-1.png differ
diff --git a/docs/articles/mvgam_overview_files/figure-html/Plot random effect estimates-1.png b/docs/articles/mvgam_overview_files/figure-html/Plot random effect estimates-1.png
index 5ba6def3..5212065c 100644
Binary files a/docs/articles/mvgam_overview_files/figure-html/Plot random effect estimates-1.png and b/docs/articles/mvgam_overview_files/figure-html/Plot random effect estimates-1.png differ
diff --git a/docs/articles/mvgam_overview_files/figure-html/Plot smooth term derivatives-1.png b/docs/articles/mvgam_overview_files/figure-html/Plot smooth term derivatives-1.png
index d3e28695..1839851b 100644
Binary files a/docs/articles/mvgam_overview_files/figure-html/Plot smooth term derivatives-1.png and b/docs/articles/mvgam_overview_files/figure-html/Plot smooth term derivatives-1.png differ
diff --git a/docs/articles/mvgam_overview_files/figure-html/Plotting predictions against test data-1.png b/docs/articles/mvgam_overview_files/figure-html/Plotting predictions against test data-1.png
index 1f1001c1..cdb0078d 100644
Binary files a/docs/articles/mvgam_overview_files/figure-html/Plotting predictions against test data-1.png and b/docs/articles/mvgam_overview_files/figure-html/Plotting predictions against test data-1.png differ
diff --git a/docs/articles/mvgam_overview_files/figure-html/unnamed-chunk-13-1.png b/docs/articles/mvgam_overview_files/figure-html/unnamed-chunk-13-1.png
index 1d59b76e..1bde6d69 100644
Binary files a/docs/articles/mvgam_overview_files/figure-html/unnamed-chunk-13-1.png and b/docs/articles/mvgam_overview_files/figure-html/unnamed-chunk-13-1.png differ
diff --git a/docs/articles/mvgam_overview_files/figure-html/unnamed-chunk-17-1.png b/docs/articles/mvgam_overview_files/figure-html/unnamed-chunk-17-1.png
index 47f6c8da..c7110908 100644
Binary files a/docs/articles/mvgam_overview_files/figure-html/unnamed-chunk-17-1.png and b/docs/articles/mvgam_overview_files/figure-html/unnamed-chunk-17-1.png differ
diff --git a/docs/articles/mvgam_overview_files/figure-html/unnamed-chunk-18-1.png b/docs/articles/mvgam_overview_files/figure-html/unnamed-chunk-18-1.png
index 1f1001c1..cdb0078d 100644
Binary files a/docs/articles/mvgam_overview_files/figure-html/unnamed-chunk-18-1.png and b/docs/articles/mvgam_overview_files/figure-html/unnamed-chunk-18-1.png differ
diff --git a/docs/articles/mvgam_overview_files/figure-html/unnamed-chunk-22-1.png b/docs/articles/mvgam_overview_files/figure-html/unnamed-chunk-22-1.png
index 04503a1c..7a9dd704 100644
Binary files a/docs/articles/mvgam_overview_files/figure-html/unnamed-chunk-22-1.png and b/docs/articles/mvgam_overview_files/figure-html/unnamed-chunk-22-1.png differ
diff --git a/docs/articles/mvgam_overview_files/figure-html/unnamed-chunk-23-1.png b/docs/articles/mvgam_overview_files/figure-html/unnamed-chunk-23-1.png
index 5949f7c4..25335a9f 100644
Binary files a/docs/articles/mvgam_overview_files/figure-html/unnamed-chunk-23-1.png and b/docs/articles/mvgam_overview_files/figure-html/unnamed-chunk-23-1.png differ
diff --git a/docs/articles/mvgam_overview_files/figure-html/unnamed-chunk-23-2.png b/docs/articles/mvgam_overview_files/figure-html/unnamed-chunk-23-2.png
index 1bc2a98b..2790a5c2 100644
Binary files a/docs/articles/mvgam_overview_files/figure-html/unnamed-chunk-23-2.png and b/docs/articles/mvgam_overview_files/figure-html/unnamed-chunk-23-2.png differ
diff --git a/docs/articles/mvgam_overview_files/figure-html/unnamed-chunk-26-1.png b/docs/articles/mvgam_overview_files/figure-html/unnamed-chunk-26-1.png
index f658558c..295b8b1a 100644
Binary files a/docs/articles/mvgam_overview_files/figure-html/unnamed-chunk-26-1.png and b/docs/articles/mvgam_overview_files/figure-html/unnamed-chunk-26-1.png differ
diff --git a/docs/articles/mvgam_overview_files/figure-html/unnamed-chunk-27-1.png b/docs/articles/mvgam_overview_files/figure-html/unnamed-chunk-27-1.png
index 63b0fd08..fed35a12 100644
Binary files a/docs/articles/mvgam_overview_files/figure-html/unnamed-chunk-27-1.png and b/docs/articles/mvgam_overview_files/figure-html/unnamed-chunk-27-1.png differ
diff --git a/docs/articles/mvgam_overview_files/figure-html/unnamed-chunk-28-1.png b/docs/articles/mvgam_overview_files/figure-html/unnamed-chunk-28-1.png
index 3454de45..fee209c2 100644
Binary files a/docs/articles/mvgam_overview_files/figure-html/unnamed-chunk-28-1.png and b/docs/articles/mvgam_overview_files/figure-html/unnamed-chunk-28-1.png differ
diff --git a/docs/articles/mvgam_overview_files/figure-html/unnamed-chunk-28-2.png b/docs/articles/mvgam_overview_files/figure-html/unnamed-chunk-28-2.png
index c576f1bc..88ccfd86 100644
Binary files a/docs/articles/mvgam_overview_files/figure-html/unnamed-chunk-28-2.png and b/docs/articles/mvgam_overview_files/figure-html/unnamed-chunk-28-2.png differ
diff --git a/docs/articles/mvgam_overview_files/figure-html/unnamed-chunk-29-1.png b/docs/articles/mvgam_overview_files/figure-html/unnamed-chunk-29-1.png
index 04c6c1a9..2bde938a 100644
Binary files a/docs/articles/mvgam_overview_files/figure-html/unnamed-chunk-29-1.png and b/docs/articles/mvgam_overview_files/figure-html/unnamed-chunk-29-1.png differ
diff --git a/docs/articles/mvgam_overview_files/figure-html/unnamed-chunk-29-2.png b/docs/articles/mvgam_overview_files/figure-html/unnamed-chunk-29-2.png
index 5a463aea..72ff0eb1 100644
Binary files a/docs/articles/mvgam_overview_files/figure-html/unnamed-chunk-29-2.png and b/docs/articles/mvgam_overview_files/figure-html/unnamed-chunk-29-2.png differ
diff --git a/docs/articles/mvgam_overview_files/figure-html/unnamed-chunk-31-1.png b/docs/articles/mvgam_overview_files/figure-html/unnamed-chunk-31-1.png
index 2139015b..1a4778db 100644
Binary files a/docs/articles/mvgam_overview_files/figure-html/unnamed-chunk-31-1.png and b/docs/articles/mvgam_overview_files/figure-html/unnamed-chunk-31-1.png differ
diff --git a/docs/articles/mvgam_overview_files/figure-html/unnamed-chunk-33-1.png b/docs/articles/mvgam_overview_files/figure-html/unnamed-chunk-33-1.png
index c8b9a114..57ed382c 100644
Binary files a/docs/articles/mvgam_overview_files/figure-html/unnamed-chunk-33-1.png and b/docs/articles/mvgam_overview_files/figure-html/unnamed-chunk-33-1.png differ
diff --git a/docs/articles/mvgam_overview_files/figure-html/unnamed-chunk-34-1.png b/docs/articles/mvgam_overview_files/figure-html/unnamed-chunk-34-1.png
index efa67b9f..992ea3b3 100644
Binary files a/docs/articles/mvgam_overview_files/figure-html/unnamed-chunk-34-1.png and b/docs/articles/mvgam_overview_files/figure-html/unnamed-chunk-34-1.png differ
diff --git a/docs/articles/mvgam_overview_files/figure-html/unnamed-chunk-35-1.png b/docs/articles/mvgam_overview_files/figure-html/unnamed-chunk-35-1.png
index 63983198..b4db0daa 100644
Binary files a/docs/articles/mvgam_overview_files/figure-html/unnamed-chunk-35-1.png and b/docs/articles/mvgam_overview_files/figure-html/unnamed-chunk-35-1.png differ
diff --git a/docs/index.html b/docs/index.html
index d653a76f..60719b32 100644
--- a/docs/index.html
+++ b/docs/index.html
@@ -136,6 +136,8 @@ <h2 id="getting-started">Getting started<a class="anchor" aria-label="anchor" hr
 <li>
 <code><a href="https://rdrr.io/r/stats/family.html" class="external-link">binomial()</a></code> for count data with known number of trials</li>
 <li>
+<code>beta_binomial()</code> for overdispersed count data with known number of trials</li>
+<li>
 <code><a href="reference/mvgam_families.html">nmix()</a></code> for count data with imperfect detection (unknown number of trials)</li>
 <li>
 <code><a href="reference/mvgam_families.html">tweedie()</a></code> for overdispersed count data</li>
diff --git a/docs/reference/index.html b/docs/reference/index.html
index 7490b409..0ad03cd1 100644
--- a/docs/reference/index.html
+++ b/docs/reference/index.html
@@ -358,7 +358,7 @@ <h2 id="all-functions">All functions<a class="anchor" aria-label="anchor" href="
           <code><a href="series_to_mvgam.html">series_to_mvgam()</a></code> 
         </dt>
         <dd>This function converts univariate or multivariate time series (<code>xts</code> or <code>ts</code> objects)
-to the format necessary for <code><a href="https://nicholasjclark.github.io/mvgam/reference/mvgam.html">mvgam</a></code></dd>
+to the format necessary for <code>mvgam</code></dd>
       </dl><dl><dt>
           
           <code><a href="sim_mvgam.html">sim_mvgam()</a></code> 
diff --git a/docs/reference/mvgam_families.html b/docs/reference/mvgam_families.html
index 30d5fc4f..b7605ebd 100644
--- a/docs/reference/mvgam_families.html
+++ b/docs/reference/mvgam_families.html
@@ -233,6 +233,33 @@ <h2 id="ref-examples">Examples<a class="anchor" aria-label="anchor" href="#ref-e
 <span class="r-in"><span>                conf_level <span class="op">=</span> <span class="fl">0.5</span><span class="op">)</span> <span class="op">+</span></span></span>
 <span class="r-in"><span> <span class="fu"><a href="https://ggplot2.tidyverse.org/reference/labs.html" class="external-link">ylab</a></span><span class="op">(</span><span class="st">'Observed abundance'</span><span class="op">)</span> <span class="op">+</span></span></span>
 <span class="r-in"><span> <span class="fu"><a href="https://ggplot2.tidyverse.org/reference/ggtheme.html" class="external-link">theme_classic</a></span><span class="op">(</span><span class="op">)</span></span></span>
+<span class="r-in"><span></span></span>
+<span class="r-in"><span><span class="co"># Example showcasing how cbind() is needed for Binomial observations</span></span></span>
+<span class="r-in"><span><span class="co"># Simulate two time series of Binomial trials</span></span></span>
+<span class="r-in"><span><span class="va">trials</span> <span class="op">&lt;-</span> <span class="fu"><a href="https://rdrr.io/r/base/sample.html" class="external-link">sample</a></span><span class="op">(</span><span class="fu"><a href="https://rdrr.io/r/base/c.html" class="external-link">c</a></span><span class="op">(</span><span class="fl">20</span><span class="op">:</span><span class="fl">25</span><span class="op">)</span>, <span class="fl">50</span>, replace <span class="op">=</span> <span class="cn">TRUE</span><span class="op">)</span></span></span>
+<span class="r-in"><span><span class="va">x</span> <span class="op">&lt;-</span> <span class="fu"><a href="https://rdrr.io/r/stats/Normal.html" class="external-link">rnorm</a></span><span class="op">(</span><span class="fl">50</span><span class="op">)</span></span></span>
+<span class="r-in"><span><span class="va">detprob1</span> <span class="op">&lt;-</span> <span class="fu"><a href="https://rdrr.io/r/stats/Logistic.html" class="external-link">plogis</a></span><span class="op">(</span><span class="op">-</span><span class="fl">0.5</span> <span class="op">+</span> <span class="fl">0.9</span><span class="op">*</span><span class="va">x</span><span class="op">)</span></span></span>
+<span class="r-in"><span><span class="va">detprob2</span> <span class="op">&lt;-</span> <span class="fu"><a href="https://rdrr.io/r/stats/Logistic.html" class="external-link">plogis</a></span><span class="op">(</span><span class="op">-</span><span class="fl">0.1</span> <span class="op">-</span><span class="fl">0.7</span><span class="op">*</span><span class="va">x</span><span class="op">)</span></span></span>
+<span class="r-in"><span><span class="va">dat</span> <span class="op">&lt;-</span> <span class="fu"><a href="https://rdrr.io/r/base/cbind.html" class="external-link">rbind</a></span><span class="op">(</span><span class="fu"><a href="https://rdrr.io/r/base/data.frame.html" class="external-link">data.frame</a></span><span class="op">(</span>y <span class="op">=</span> <span class="fu"><a href="https://rdrr.io/r/stats/Binomial.html" class="external-link">rbinom</a></span><span class="op">(</span>n <span class="op">=</span> <span class="fl">50</span>, size <span class="op">=</span> <span class="va">trials</span>, prob <span class="op">=</span> <span class="va">detprob1</span><span class="op">)</span>,</span></span>
+<span class="r-in"><span>                        time <span class="op">=</span> <span class="fl">1</span><span class="op">:</span><span class="fl">50</span>,</span></span>
+<span class="r-in"><span>                        series <span class="op">=</span> <span class="st">'series1'</span>,</span></span>
+<span class="r-in"><span>                        x <span class="op">=</span> <span class="va">x</span>,</span></span>
+<span class="r-in"><span>                        ntrials <span class="op">=</span> <span class="va">trials</span><span class="op">)</span>,</span></span>
+<span class="r-in"><span>             <span class="fu"><a href="https://rdrr.io/r/base/data.frame.html" class="external-link">data.frame</a></span><span class="op">(</span>y <span class="op">=</span> <span class="fu"><a href="https://rdrr.io/r/stats/Binomial.html" class="external-link">rbinom</a></span><span class="op">(</span>n <span class="op">=</span> <span class="fl">50</span>, size <span class="op">=</span> <span class="va">trials</span>, prob <span class="op">=</span> <span class="va">detprob2</span><span class="op">)</span>,</span></span>
+<span class="r-in"><span>                        time <span class="op">=</span> <span class="fl">1</span><span class="op">:</span><span class="fl">50</span>,</span></span>
+<span class="r-in"><span>                        series <span class="op">=</span> <span class="st">'series2'</span>,</span></span>
+<span class="r-in"><span>                        x <span class="op">=</span> <span class="va">x</span>,</span></span>
+<span class="r-in"><span>                        ntrials <span class="op">=</span> <span class="va">trials</span><span class="op">)</span><span class="op">)</span></span></span>
+<span class="r-in"><span><span class="va">dat</span> <span class="op">&lt;-</span> <span class="fu">dplyr</span><span class="fu">::</span><span class="fu"><a href="https://dplyr.tidyverse.org/reference/mutate.html" class="external-link">mutate</a></span><span class="op">(</span><span class="va">dat</span>, series <span class="op">=</span> <span class="fu"><a href="https://rdrr.io/r/base/factor.html" class="external-link">as.factor</a></span><span class="op">(</span><span class="va">series</span><span class="op">)</span><span class="op">)</span></span></span>
+<span class="r-in"><span><span class="va">dat</span> <span class="op">&lt;-</span> <span class="fu">dplyr</span><span class="fu">::</span><span class="fu"><a href="https://dplyr.tidyverse.org/reference/arrange.html" class="external-link">arrange</a></span><span class="op">(</span><span class="va">dat</span>, <span class="va">time</span>, <span class="va">series</span><span class="op">)</span></span></span>
+<span class="r-in"><span></span></span>
+<span class="r-in"><span><span class="co"># Fit a model using the binomial() family; must specify observations</span></span></span>
+<span class="r-in"><span><span class="co"># and number of trials in the cbind() wrapper</span></span></span>
+<span class="r-in"><span><span class="va">mod</span> <span class="op">&lt;-</span> <span class="fu"><a href="mvgam.html">mvgam</a></span><span class="op">(</span><span class="fu"><a href="https://rdrr.io/r/base/cbind.html" class="external-link">cbind</a></span><span class="op">(</span><span class="va">y</span>, <span class="va">ntrials</span><span class="op">)</span> <span class="op">~</span> <span class="va">series</span> <span class="op">+</span> <span class="fu">s</span><span class="op">(</span><span class="va">x</span>, by <span class="op">=</span> <span class="va">series</span><span class="op">)</span>,</span></span>
+<span class="r-in"><span>             family <span class="op">=</span> <span class="fu"><a href="https://rdrr.io/r/stats/family.html" class="external-link">binomial</a></span><span class="op">(</span><span class="op">)</span>,</span></span>
+<span class="r-in"><span>             data <span class="op">=</span> <span class="va">dat</span><span class="op">)</span></span></span>
+<span class="r-in"><span><span class="fu"><a href="https://rdrr.io/r/base/summary.html" class="external-link">summary</a></span><span class="op">(</span><span class="va">mod</span><span class="op">)</span></span></span>
+<span class="r-in"><span></span></span>
 <span class="r-in"><span><span class="op">}</span></span></span>
diff --git a/index.Rmd b/index.Rmd
index c73f1402..684d5c56 100644
--- a/index.Rmd
+++ b/index.Rmd
@@ -36,6 +36,7 @@ vembedr::embed_url("https://www.youtube.com/watch?v=0zZopLlomsQ")
 * `poisson()` for count data
 * `nb()` for overdispersed count data
 * `binomial()` for count data with known number of trials
+* `beta_binomial()` for overdispersed count data with known number of trials
 * `nmix()` for count data with imperfect detection (unknown number of trials)
 * `tweedie()` for overdispersed count data
   
diff --git a/index.md b/index.md
index 4bf1d367..8d3ec9b8 100644
--- a/index.md
+++ b/index.md
@@ -76,6 +76,8 @@ package can handle data for the following families:
 - `poisson()` for count data
 - `nb()` for overdispersed count data
 - `binomial()` for count data with known number of trials
+- `beta_binomial()` for overdispersed count data with known number of
+  trials
 - `nmix()` for count data with imperfect detection (unknown number of
   trials)
 - `tweedie()` for overdispersed count data
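As a quick orientation for the new family listed above, here is a minimal sketch of how `beta_binomial()` can be called. It mirrors the `cbind()` interface required for `binomial()`; the simulated data, single-series setup, and object names are illustrative only.

library(mvgam)
set.seed(1)

# Simulate one series of Binomial counts with known, varying trial sizes
trials <- sample(20:25, 50, replace = TRUE)
x <- rnorm(50)
dat <- data.frame(y = rbinom(50, size = trials, prob = plogis(-0.5 + 0.9 * x)),
                  time = 1:50,
                  series = factor('series1'),
                  x = x,
                  ntrials = trials)

# Successes and the known number of trials go in the cbind() wrapper;
# the extra dispersion parameter phi distinguishes this from binomial()
mod <- mvgam(cbind(y, ntrials) ~ s(x),
             family = beta_binomial(),
             data = dat)
summary(mod)
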
diff --git a/man/mvgam_families.Rd b/man/mvgam_families.Rd
index 8f17a88d..35e8091b 100644
--- a/man/mvgam_families.Rd
+++ b/man/mvgam_families.Rd
@@ -184,6 +184,33 @@ plot_predictions(mod, condition = list('temperature',
                 conf_level = 0.5) +
  ylab('Observed abundance') +
  theme_classic()
+
+# Example showcasing how cbind() is needed for Binomial observations
+# Simulate two time series of Binomial trials
+trials <- sample(c(20:25), 50, replace = TRUE)
+x <- rnorm(50)
+detprob1 <- plogis(-0.5 + 0.9*x)
+detprob2 <- plogis(-0.1 -0.7*x)
+dat <- rbind(data.frame(y = rbinom(n = 50, size = trials, prob = detprob1),
+                        time = 1:50,
+                        series = 'series1',
+                        x = x,
+                        ntrials = trials),
+             data.frame(y = rbinom(n = 50, size = trials, prob = detprob2),
+                        time = 1:50,
+                        series = 'series2',
+                        x = x,
+                        ntrials = trials))
+dat <- dplyr::mutate(dat, series = as.factor(series))
+dat <- dplyr::arrange(dat, time, series)
+
+# Fit a model using the binomial() family; must specify observations
+# and number of trials in the cbind() wrapper
+mod <- mvgam(cbind(y, ntrials) ~ series + s(x, by = series),
+             family = binomial(),
+             data = dat)
+summary(mod)
+
 }
 }
 \author{
diff --git a/src/mvgam.dll b/src/mvgam.dll
index 998d1fc3..5cea10f4 100644
Binary files a/src/mvgam.dll and b/src/mvgam.dll differ
diff --git a/tests/testthat/Rplots.pdf b/tests/testthat/Rplots.pdf
index a64551d6..4f1a6d83 100644
Binary files a/tests/testthat/Rplots.pdf and b/tests/testthat/Rplots.pdf differ
diff --git a/tests/testthat/test-binomial.R b/tests/testthat/test-binomial.R
index 6962ef64..42c23f9b 100644
--- a/tests/testthat/test-binomial.R
+++ b/tests/testthat/test-binomial.R
@@ -35,6 +35,7 @@ test_that("cbind() syntax required for binomial()", {
                'Binomial family requires cbind() syntax in the formula left-hand side',
                fixed = TRUE)
 
+  # Should work if correctly specified
   mod <- mvgam(cbind(y, ntrials) ~ s(series, bs = 're') +
                  gp(x, by = series, c = 5/4, k = 5),
                family = binomial(),
@@ -54,6 +55,64 @@ test_that("cbind() syntax required for binomial()", {
   expect_true(inherits(mod, 'mvgam_prefit'))
   expect_true(any(grepl('flat_ys ~ binomial_logit_glm(',
                         mod$model_file, fixed = TRUE)))
+
+  # Also with no predictors
+  mod <- mvgam(cbind(y, ntrials) ~ 0,
+               family = binomial(),
+               trend_model = AR(),
+               data = dat_train,
+               run_model = FALSE)
+  expect_true(inherits(mod, 'mvgam_prefit'))
+  expect_true(any(grepl('flat_ys ~ binomial_logit_glm(',
+                        mod$model_file, fixed = TRUE)))
+  expect_true(any(grepl("b[1] = 0;", mod$model_file,
+                        fixed = TRUE)))
+})
+
+# All tests should apply to beta_binomial as well
+test_that("cbind() syntax required for beta_binomial()", {
+  expect_error(mvgam(y ~ series + s(x, by = series),
+                     family = beta_binomial(),
+                     data = dat_train,
+                     run_model = FALSE),
+               'Binomial family requires cbind() syntax in the formula left-hand side',
+               fixed = TRUE)
+
+  # Should work if correctly specified
+  mod <- mvgam(cbind(y, ntrials) ~ s(series, bs = 're') +
+                 gp(x, by = series, c = 5/4, k = 5),
+               family = beta_binomial(),
+               data = dat_train,
+               run_model = FALSE)
+  expect_true(inherits(mod, 'mvgam_prefit'))
+  expect_true(any(grepl('flat_ys ~ beta_binomial(',
+                        mod$model_file, fixed = TRUE)))
+
+  # Also with a trend_formula
+  mod <- mvgam(cbind(y, ntrials) ~ series,
+               trend_formula = ~ s(x, by = trend),
+               family = beta_binomial(),
+               trend_model = AR(),
+               data = dat_train,
+               run_model = FALSE)
+  expect_true(inherits(mod, 'mvgam_prefit'))
+  expect_true(any(grepl('flat_ys ~ beta_binomial(',
+                        mod$model_file, fixed = TRUE)))
+
+  # Also with no predictors and with a prior on phi
+  mod <- mvgam(cbind(y, ntrials) ~ 0,
+               family = beta_binomial(),
+               priors = prior(normal(0, 3), class = phi),
+               trend_model = AR(),
+               data = dat_train,
+               run_model = FALSE)
+  expect_true(inherits(mod, 'mvgam_prefit'))
+  expect_true(any(grepl('beta_binomial(',
+                        mod$model_file, fixed = TRUE)))
+  expect_true(any(grepl("b[1] = 0;", mod$model_file,
+                        fixed = TRUE)))
+  expect_true(any(grepl("phi ~ normal(0, 3);", mod$model_file,
+                        fixed = TRUE)))
 })
 
 test_that("trials variable must be in data for binomial()", {
diff --git a/vignettes/mvgam_overview.Rmd b/vignettes/mvgam_overview.Rmd
index f1c3a55b..182b9e74 100644
--- a/vignettes/mvgam_overview.Rmd
+++ b/vignettes/mvgam_overview.Rmd
@@ -48,6 +48,7 @@ Here $\alpha$ are the unknown intercepts, the $\boldsymbol{s}$'s are unknown smo
 |Poisson (log link)           | `poisson()`     | Non-negative integers in $(0,1,2,...)$            | -                    |
 |Negative Binomial2 (log link)| `nb()`          | Non-negative integers in $(0,1,2,...)$            | $\phi$               |
 |Binomial (logit link)           | `binomial()` | Non-negative integers in $(0,1,2,...)$            | -                    |
+|Beta-Binomial (logit link)      | `beta_binomial()` | Non-negative integers in $(0,1,2,...)$       | $\phi$                   |
 |Poisson Binomial N-mixture (log link)| `nmix()`  | Non-negative integers in $(0,1,2,...)$            | -               |
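For the new Beta-Binomial entry it may help to spell out what $\phi$ controls. The following is a sketch assuming the common mean-precision shape-parameter convention; the exact generated Stan code may differ, so treat this as illustrative:

$$
y_t \sim \text{Beta-Binomial}\left(N_t,\; \mu_t \phi,\; (1 - \mu_t)\phi\right),
\qquad \text{logit}(\mu_t) = \text{linear predictor},
$$

where $N_t$ is the known number of trials. As $\phi \rightarrow \infty$ the distribution approaches $\text{Binomial}(N_t, \mu_t)$, while smaller values of $\phi$ allow extra-Binomial (overdispersed) variation.
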
 
 For all supported observation families, any extra parameters that need to be estimated (e.g. the $\sigma$ in a Gaussian model or the $\phi$ in a Negative Binomial or Beta-Binomial model) are by default estimated independently for each series. However, users can opt to force all series to share extra observation parameters using `share_obs_params = TRUE` in `mvgam()`, as sketched below. Note that default link functions cannot currently be changed.
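
A minimal sketch of the shared-parameter option, reusing the two-series `dat` built in the `mvgam_families` example above (object names are illustrative):

# Estimate a single phi shared by both series rather than one per series
mod_shared <- mvgam(cbind(y, ntrials) ~ series + s(x, by = series),
                    family = beta_binomial(),
                    share_obs_params = TRUE,
                    data = dat)
summary(mod_shared)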