diff --git a/models/files/glmnet.R b/models/files/glmnet.R index 780e1f4e..db17b085 100644 --- a/models/files/glmnet.R +++ b/models/files/glmnet.R @@ -116,7 +116,7 @@ modelInfo <- list(label = "glmnet", if(length(lambda) > 1) stop("Only one value of lambda is allowed right now") if(!is.null(x$lambdaOpt)) { lambda <- x$lambdaOpt - } else stop("must supply a vaue of lambda") + } else stop("must supply a value of lambda") } allVar <- if(is.list(x$beta)) rownames(x$beta[[1]]) else rownames(x$beta) out <- unlist(predict(x, s = lambda, type = "nonzero")) @@ -132,14 +132,14 @@ modelInfo <- list(label = "glmnet", if(length(lambda) > 1) stop("Only one value of lambda is allowed right now") if(!is.null(object$lambdaOpt)) { lambda <- object$lambdaOpt - } else stop("must supply a vaue of lambda") + } else stop("must supply a value of lambda") } beta <- predict(object, s = lambda, type = "coef") if(is.list(beta)) { out <- do.call("cbind", lapply(beta, function(x) x[,1])) out <- as.data.frame(out) } else out <- data.frame(Overall = beta[,1]) - out <- out[rownames(out) != "(Intercept)",,drop = FALSE] + out <- abs(out[rownames(out) != "(Intercept)",,drop = FALSE]) out }, levels = function(x) if(any(names(x) == "obsLevels")) x$obsLevels else NULL, diff --git a/pkg/caret/DESCRIPTION b/pkg/caret/DESCRIPTION index b2073939..bff1a08f 100644 --- a/pkg/caret/DESCRIPTION +++ b/pkg/caret/DESCRIPTION @@ -1,6 +1,6 @@ Package: caret -Version: 6.0-51 -Date: 2015-07-12 +Version: 6.0-51 +Date: 2015-07-12 Title: Classification and Regression Training Author: Max Kuhn. Contributions from Jed Wing, Steve Weston, Andre Williams, Chris Keefer, Allan Engelhardt, Tony Cooper, Zachary Mayer, diff --git a/pkg/caret/inst/NEWS.Rd b/pkg/caret/inst/NEWS.Rd index dcbbbf33..9cf9d19b 100644 --- a/pkg/caret/inst/NEWS.Rd +++ b/pkg/caret/inst/NEWS.Rd @@ -6,7 +6,7 @@ \itemize{ \item A new model using the \cpkg{randomForest} and \cpkg{inTrees} packages called \code{rfRules} was added. 
A basic random forest model is used and then is decomposed into rules (of user-specified complexity). The \cpkg{inTrees} package is used to prune and optimize the rules. Thanks to Mirjam Jenny who suggested the workflow. \item Other new models (and their packages): \code{bartMachine} (\cpkg{bartMachine}), \code{rotationForest} (\cpkg{rotationForest}), \code{sdwd} (\cpkg{sdwd}), \code{loclda} (\cpkg{klaR}), \code{nnls} (\cpkg{nnls}), \code{svmLinear2} (\cpkg{e1071}), \code{rqnc} (\cpkg{rqPen}), and \code{rqlasso} (\cpkg{rqPen}) - \item When specifying your own resampling indices, a value of \code{method = "custom"} can be used with \code{trainControl} for better printing. + \item When specifying your own resampling indices, a value of \code{method = "custom"} can be used with \code{trainControl} for better printing. \item Tim Lucas fixed a bug in \code{avNNet} when \code{bag = TRUE} \item Fixed a bug found by ruggerorossi in \code{method = "dnn"} with classification. \item A new option called \code{sampling} was added to \code{trainControl} that allows users to subsample their data in the case of a class imbalance. Another \href{http://topepo.github.io/caret/sampling.html}{help page} was added to explain the features. @@ -16,7 +16,7 @@ \item An internal function (\code{class2ind}) that can be used to make dummy variables for a single factor vector is now documented and exported. \item A bug was fixed in the \code{xyplot.lift} where the reference line was incorrectly computed. Thanks to Einat Sitbon for finding this. \item A bug related to calculating the Box-Cox transformation found by John Johnson was fixed. 
- } + } } \section{Changes in version 6.0-47}{ diff --git a/pkg/caret/inst/models/models.RData b/pkg/caret/inst/models/models.RData index 38d07c10..05a843aa 100644 Binary files a/pkg/caret/inst/models/models.RData and b/pkg/caret/inst/models/models.RData differ diff --git a/pkg/caret/tests/testthat/test_varImp.R b/pkg/caret/tests/testthat/test_varImp.R new file mode 100644 index 00000000..322a3fbe --- /dev/null +++ b/pkg/caret/tests/testthat/test_varImp.R @@ -0,0 +1,26 @@ +library(caret) + +context('Testing varImp') + +test_that('glmnet varImp returns non-negative values', { + skip_on_cran() +# library(glmnet, verbose=TRUE) + skip_if_not_installed('glmnet') + set.seed(1) + dat <- SLC14_1(200) + + reg <- train(y ~ ., data = dat, + method = "glmnet", + tuneGrid = data.frame(lambda = .1, alpha = .5), + trControl = trainControl(method = "none")) + + # this checks that some coefficients are negative + coefs <- predict(reg$finalModel, s=0.1, type="coef") + expect_less_than(0, sum(0 > coefs)) + # now check that all elements of varImp are nonnegative, + # in spite of negative coefficients + vis <- varImp(reg, s=0.1, scale = FALSE)$importance + expect_equal(0, sum(0 > vis)) +}) + +