# install.packages("ordinalForest")
library(ordinalForest)
#
# start by naming my method to pass to train
#
ordinalForest <- list(type = "Classification",
                      library = "ordinalForest",
                      loop = NULL)
#
# define the tuning parameters
#
prm <- data.frame(parameter = c("nsets", "ntreeperdiv", "ntreefinal", "npermtrial", "nbest"),
                  class = rep("numeric", 5),
                  label = c("Score sets", "Number of Trees (small)", "Number of Trees (final)",
                            "Tried score sets", "Best score sets"))
#
# append them to the list
#
ordinalForest$parameters <- prm
#
# define the default training grid. some models can do
# a random search, but I won't implement that.
# caret passes search as a character string, so it has to be
# compared against "grid", not a bare name
#
ordinalForestGrid <- function(x, y, len = NULL, search = "grid") {
  if (search == "grid") {
    out <- expand.grid(nsets = 1000,
                       ntreeperdiv = 100,
                       ntreefinal = 5000,
                       npermtrial = 500,
                       nbest = 10)
  } else {
    stop('random search not yet implemented')
  }
  out
}
#
# append to list
#
ordinalForest$grid <- ordinalForestGrid

#
# define the fitting function. Here, it is the
# ordfor constructor function ordfor().
# I notice that the function doesn't allow for ordfor(x, y, ...),
# so I get around that by binding x and y into one data frame
# and pointing depvar at the class column.
# the function also needs a name, since it is appended to the
# list as ordinalForestFit below
#
ordinalForestFit <- function(x, y, param, lev, last, classProbs, ...) {
  library(ordinalForest)

  data <- data.frame(x, Class = y)

  # classimp and classweights only matter for other performance
  # functions, so they are not passed here
  ordfor(depvar = "Class", data = data, nsets = param$nsets,
         ntreeperdiv = param$ntreeperdiv, ntreefinal = param$ntreefinal,
         perffunction = "equal",
         nbest = param$nbest, naive = FALSE, num.threads = NULL,
         npermtrial = param$npermtrial, permperdefault = FALSE)
}
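
#
# quick standalone check of the wrapper (a minimal sketch: it assumes the
# hearth data shipped with ordinalForest and uses deliberately small
# settings so it finishes quickly; uncomment to run)
#
# data("hearth", package = "ordinalForest")
# chk <- ordinalForestFit(x = hearth[, -11], y = hearth[, 11],
#                         param = data.frame(nsets = 50, ntreeperdiv = 20,
#                                            ntreefinal = 100, npermtrial = 50,
#                                            nbest = 5))
# chk$classes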

#
# append to list
#
ordinalForest$fit <- ordinalForestFit

#
# define the levels of the outcome.
# they are held in the classes component of objects of
# class ordfor (an S3 list, so $ rather than @)
#

ordinalForest$levels <- function(x) x$classes

#
# define the classification prediction with the
# predict generic. predict.ordfor() returns an object with
# components ypred and classprobs, so extract ypred here
#
ordinalForestPred <- function(modelFit, newdata, preProc = NULL, submodels = NULL) {
  predict(modelFit, newdata)$ypred
}
#
# append to list
#
ordinalForest$predict <- ordinalForestPred
#
# define the class probability function. predict.ordfor() has no
# type argument; the probabilities sit in the classprobs component
# (depending on the perffunction, ordfor may not compute these).
# caret expects the class levels as column names
#
ordinalForestProb <- function(modelFit, newdata, preProc = NULL, submodels = NULL) {
  probs <- predict(modelFit, newdata)$classprobs
  colnames(probs) <- modelFit$classes
  probs
}
#
# append to list
#
ordinalForest$prob <- ordinalForestProb
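#
# sketch of what predict.ordfor() hands back, assuming a fitted ordfor
# object named fit (hypothetical): the two wrappers above simply pick
# out the relevant component
#
# p <- predict(fit, newdata = hearth[1:5, -11])
# p$ypred       # factor of predicted classes
# p$classprobs  # per-class probability matrix (when the model provides one)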
#
# define the sort function, i.e. how the tuning parameters
# are ordered in case similar performance is obtained
#
ordinalForestSort <- function(x) x[order(x$nsets, x$ntreeperdiv, x$ntreefinal,
                                         x$npermtrial, x$nbest), ]
#
# append to list
#
ordinalForest$sort <- ordinalForestSort

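#
# sanity check: the assembled list should now expose the components
# caret's custom-model interface expects (parameters, grid, fit,
# predict, prob, sort, ...)
#
str(ordinalForest, max.level = 1)
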
# TEST TEST TEST ----------------------------------------------------------

# load caret and doParallel
library(caret)
library(doParallel)
data("hearth")
#
# register cores for parallel processing (two workers assumed here;
# adjust to the machine)
#
cl <- makePSOCKcluster(2)
registerDoParallel(cl)
#
# train control options. want repeated 10-fold CV
# (the number of repeats was left open; 3 is assumed)
#
fitControl <- trainControl(method = "repeatedcv", number = 10, repeats = 3)
#
# define grid of parameter values
#
tuneGrid <- expand.grid(nsets = 1000,
                        ntreeperdiv = 100,
                        ntreefinal = 5000,
                        npermtrial = 500,
                        nbest = 10)

set.seed(825)
OFTune <- train(x = hearth[, -11],
                y = hearth[, 11],
                method = ordinalForest,
                trControl = fitControl,
                tuneGrid = tuneGrid)
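
#
# release the parallel workers, then inspect the resampling results and
# sanity-check predictions from the tuned model (a minimal sketch)
#
stopCluster(cl)
print(OFTune)
head(predict(OFTune, newdata = hearth[, -11]))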

devtools::session_info()