From 608535475d5316253ce57871068269791c07305f Mon Sep 17 00:00:00 2001
From: ReeceGoding <67124261+ReeceGoding@users.noreply.github.com>
Date: Wed, 14 Apr 2021 00:16:55 +0100
Subject: [PATCH] Removed magrittr dependency in callbacks.R

Removed the magrittr dependency in the documentation. I took the simple path
of replacing each call like `xgb.gblinear.history(bst) %>% matplot(type = 'l')`
with its corresponding `matplot(xgb.gblinear.history(bst), type = 'l')`.

For some calls, I get different results when I run the code before and after
this change. However, there is enough randomness in that code that I get
different results on every run, regardless of any changes.

One thing that definitely has changed is that the plot y-axes now have titles
matching the expression that matplot() is called with. For example, where the
magrittr-piped calls previously produced no y-axis titles, we now get something
like "xgb.gblinear.history(bst)". If this is undesirable (and I don't see why
it would be), the most consistent solution is to follow the existing
`coef_path <- xgb.gblinear.history(bst)` example, which titles its y-axis
"coef_path" as a side effect of the assignment just shown (a short sketch of
this behaviour follows the diff).

Also corrected a suspected typo: "forld".
---
 R-package/R/callbacks.R | 15 +++++++--------
 1 file changed, 7 insertions(+), 8 deletions(-)

diff --git a/R-package/R/callbacks.R b/R-package/R/callbacks.R
index 37669988f2fa..ac3c576c81b3 100644
--- a/R-package/R/callbacks.R
+++ b/R-package/R/callbacks.R
@@ -560,7 +560,6 @@ cb.cv.predict <- function(save_models = FALSE) {
 #' #
 #' # In the iris dataset, it is hard to linearly separate Versicolor class from the rest
 #' # without considering the 2nd order interactions:
-#' require(magrittr)
 #' x <- model.matrix(Species ~ .^2, iris)[,-1]
 #' colnames(x)
 #' dtrain <- xgb.DMatrix(scale(x), label = 1*(iris$Species == "versicolor"))
@@ -581,7 +580,7 @@ cb.cv.predict <- function(save_models = FALSE) {
 #' bst <- xgb.train(param, dtrain, list(tr=dtrain), nrounds = 200, eta = 0.8,
 #'                  updater = 'coord_descent', feature_selector = 'thrifty', top_k = 1,
 #'                  callbacks = list(cb.gblinear.history()))
-#' xgb.gblinear.history(bst) %>% matplot(type = 'l')
+#' matplot(xgb.gblinear.history(bst), type = 'l')
 #' # Componentwise boosting is known to have similar effect to Lasso regularization.
 #' # Try experimenting with various values of top_k, eta, nrounds,
 #' # as well as different feature_selectors.
@@ -590,7 +589,7 @@ cb.cv.predict <- function(save_models = FALSE) {
 #' bst <- xgb.cv(param, dtrain, nfold = 5, nrounds = 100, eta = 0.8,
 #'               callbacks = list(cb.gblinear.history()))
 #' # coefficients in the CV fold #3
-#' xgb.gblinear.history(bst)[[3]] %>% matplot(type = 'l')
+#' matplot(xgb.gblinear.history(bst)[[3]], type = 'l')
 #'
 #'
 #' #### Multiclass classification:
@@ -603,15 +602,15 @@ cb.cv.predict <- function(save_models = FALSE) {
 #' bst <- xgb.train(param, dtrain, list(tr=dtrain), nrounds = 70, eta = 0.5,
 #'                  callbacks = list(cb.gblinear.history()))
 #' # Will plot the coefficient paths separately for each class:
-#' xgb.gblinear.history(bst, class_index = 0) %>% matplot(type = 'l')
-#' xgb.gblinear.history(bst, class_index = 1) %>% matplot(type = 'l')
-#' xgb.gblinear.history(bst, class_index = 2) %>% matplot(type = 'l')
+#' matplot(xgb.gblinear.history(bst, class_index = 0), type = 'l')
+#' matplot(xgb.gblinear.history(bst, class_index = 1), type = 'l')
+#' matplot(xgb.gblinear.history(bst, class_index = 2), type = 'l')
 #'
 #' # CV:
 #' bst <- xgb.cv(param, dtrain, nfold = 5, nrounds = 70, eta = 0.5,
 #'               callbacks = list(cb.gblinear.history(FALSE)))
-#' # 1st forld of 1st class
-#' xgb.gblinear.history(bst, class_index = 0)[[1]] %>% matplot(type = 'l')
+#' # 1st fold of 1st class
+#' matplot(xgb.gblinear.history(bst, class_index = 0)[[1]], type = 'l')
 #'
 #' @export
 cb.gblinear.history <- function(sparse=FALSE) {
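
For reference, here is a minimal, self-contained sketch of the y-axis titling
behaviour described in the commit message. It is not part of the patch, and
`coefs` is only a hypothetical stand-in for the matrix returned by
`xgb.gblinear.history()`:

```r
## Sketch only: 'coefs' stands in for the matrix returned by xgb.gblinear.history().
coefs <- matrix(rnorm(200), nrow = 50)

## matplot() builds its default ylab by deparsing its first argument, so this
## plot's y-axis is titled "coefs"; calling
## matplot(xgb.gblinear.history(bst), type = 'l') directly would title it
## "xgb.gblinear.history(bst)" instead.
matplot(coefs, type = 'l')

## An explicit ylab overrides the deparsed title if that is ever unwanted.
matplot(coefs, type = 'l', ylab = "Coefficient value")
```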