Error reproducing Parsnip example #90

Open

GeneGeek opened this issue Feb 3, 2025 · 0 comments

GeneGeek commented Feb 3, 2025

I'm following the "How to build a custom model" example and get the error below when I reach the tuning step:

mda_tune_res <- mda_spec %>% 
  tune_grid(Class ~ ., cv, grid = 4)
Error in `mutate()`:
ℹ In argument: `object = purrr::map(call_info, eval_call_info)`.
Caused by error in `purrr::map()`:
ℹ In index: 1.
Caused by error in `.f()`:
! Error when calling bar(): Error in loadNamespace(x) : there is no package called ‘foo’
Run `rlang::last_trace()` to see where the error occurred.
> rlang::last_trace()
<error/dplyr:::mutate_error>
Error in `mutate()`:
ℹ In argument: `object = purrr::map(call_info, eval_call_info)`.
Caused by error in `purrr::map()`:
ℹ In index: 1.
Caused by error in `.f()`:
! Error when calling bar(): Error in loadNamespace(x) : there is no package called ‘foo’
Backtrace:
     ▆
  1. ├─mda_spec %>% tune_grid(Class ~ ., cv, grid = 4)
  2. ├─tune::tune_grid(., Class ~ ., cv, grid = 4)
  3. ├─tune:::tune_grid.model_spec(., Class ~ ., cv, grid = 4)
  4. │ ├─tune::tune_grid(...)
  5. │ └─tune:::tune_grid.workflow(...)
  6. │   └─tune:::tune_grid_workflow(...)
  7. │     └─tune::check_parameters(...)
  8. │       ├─hardhat::extract_parameter_set_dials(wflow)
  9. │       └─workflows:::extract_parameter_set_dials.workflow(wflow)
 10. │         ├─hardhat::extract_parameter_set_dials(model)
 11. │         └─parsnip:::extract_parameter_set_dials.model_spec(model)
 12. │           └─... %>% ...
 13. ├─dplyr::mutate(., object = purrr::map(call_info, eval_call_info))
 14. ├─dplyr:::mutate.data.frame(., object = purrr::map(call_info, eval_call_info))
 15. │ └─dplyr:::mutate_cols(.data, dplyr_quosures(...), by)
 16. │   ├─base::withCallingHandlers(...)
 17. │   └─dplyr:::mutate_col(dots[[i]], data, mask, new_columns)
 18. │     └─mask$eval_all_mutate(quo)
 19. │       └─dplyr (local) eval()
 20. └─purrr::map(call_info, eval_call_info)
 21.   └─purrr:::map_("list", .x, .f, ..., .progress = .progress)
 22.     ├─purrr:::with_indexed_errors(...)
 23.     │ └─base::withCallingHandlers(...)
 24.     ├─purrr:::call_with_cleanup(...)
 25.     └─parsnip (local) .f(.x[[i]], ...)
 26.       └─base::stop(paste0("Error when calling ", x$fun, "(): ", as.character(res)))

Here's the full code from the example:

library(tidymodels)  
library(modeldata)  
library(mda)  

set_new_model("discrim_mixture")
set_model_mode(model = "discrim_mixture", mode = "classification")
set_model_engine(
  "discrim_mixture", 
  mode = "classification", 
  eng = "mda"
)

set_dependency("discrim_mixture", eng = "mda", pkg = "mda")

set_model_arg(
  model = "discrim_mixture",
  eng = "mda",
  parsnip = "sub_classes",
  original = "subclasses",
  func = list(pkg = "foo", fun = "bar"),
  has_submodel = FALSE
)

discrim_mixture <-
  function(mode = "classification",  sub_classes = NULL) { 
    if (mode  != "classification") { 
      rlang::abort("`mode` should be 'classification'.")
    }
    
    args <- list(sub_classes = rlang::enquo(sub_classes))
    
    new_model_spec(
      "discrim_mixture",
      args = args, # Argument names (`sub_classes`) should be defaulted to NULL  
      eng_args = NULL, 
      mode = mode,
      method = NULL,
      engine = NULL
    )
  }

set_fit(
  model = "discrim_mixture",
  eng = "mda",
  mode = "classification",
  value = list(
    interface = "formula",
    protect = c("formula", "data"),
    func = c(pkg = "mda", fun = "mda"),
    defaults = list()
  )
)

set_encoding(
  model = "discrim_mixture",
  eng = "mda",
  mode = "classification",
  options = list(
    predictor_indicators = "traditional",
    compute_intercept = TRUE,
    remove_intercept = TRUE,
    allow_sparse_x = FALSE
  )
)

class_info <- 
  list(
    pre = NULL, 
    post = NULL, 
    func = c(fun = "predict"), 
    args =
      list(
        object = rlang::expr(object$fit),
        newdata = rlang::expr(new_data),
        type = "class"
      )
  )

set_pred(
  model = "discrim_mixture",
  eng = "mda",
  mode = "classification",
  type = "class",
  value = class_info
)

prob_info <-
  pred_value_template(
    post = function(x, object) {
      tibble::as_tibble(x)
    },
    func = c(fun = "predict"),
    object = rlang::expr(object$fit),
    newdata = rlang::expr(new_data),
    type = "posterior"
  )

set_pred(
  model = "discrim_mixture",
  eng = "mda",
  mode = "classification",
  type = "prob",
  value = prob_info
)

data("two_class_dat", package = "modeldata")
set.seed(4622)

example_split <- initial_split(two_class_dat, prop = 0.99)
example_train <- training(example_split)
example_test  <-  testing(example_split)

mda_spec <- discrim_mixture(sub_classes = 2) %>% 
  set_engine("mda")

mda_fit <- mda_spec %>%
  fit(Class ~ ., data = example_train)

# Predict Class without Probabilities 
predict(mda_fit, new_data = example_test) %>% 
  bind_cols(example_test %>% select(Class))

# Predict Class with Probabilities 
predict(mda_fit, new_data = example_test, type = "prob") %>%
  bind_cols(example_test %>% select(Class))

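# dials-style parameter function for the sub_classes argument, as in the article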
sub_classes <- function(range = c(1L, 10L),
                        trans = NULL) {
  new_quant_param(
    type = "integer",
    range = range,
    inclusive = c(TRUE, TRUE),
    trans = trans,
    label = c(sub_classes = "# Sub-Classes"),
    finalize = NULL
  )
}

mda_spec <- 
  discrim_mixture(sub_classes = tune()) %>% 
  set_engine("mda")

set.seed(452)

cv <- vfold_cv(example_train)

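# this is the call that produces the error shown above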
mda_tune_res <- mda_spec %>% 
  tune_grid(Class ~ ., cv, grid = 4)  

rlang::last_trace()
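My guess (not verified) is that the failure comes from the placeholder `func = list(pkg = "foo", fun = "bar")` in the `set_model_arg()` call above: the backtrace shows `extract_parameter_set_dials()` evaluating that registration, and `loadNamespace()` fails because there is no package called "foo". If that's right, pointing the registration at the `sub_classes()` parameter function defined in the example seems like the intended setup, e.g.:

# possible fix (my guess, not verified): register the actual parameter function so
# that extract_parameter_set_dials() can evaluate it; assumes a pkg-less `fun`
# entry is allowed and that sub_classes() is defined before tune_grid() is called
set_model_arg(
  model = "discrim_mixture",
  eng = "mda",
  parsnip = "sub_classes",
  original = "subclasses",
  func = list(fun = "sub_classes"),
  has_submodel = FALSE
)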