# Logistic regression of transmission type (am) on horsepower and weight
am.glm <- glm(am ~ hp + wt, data = mtcars, family = binomial)
newdata <- data.frame(hp = 120, wt = 2.8)
p1 <- predict(am.glm, newdata, type = "response")
# Manual check: apply the logistic function to the linear predictor
p2 <- 1 / (1 + exp(-(am.glm$coefficients[1] +
                     am.glm$coefficients[2] * newdata[1, 1] +
                     am.glm$coefficients[3] * newdata[1, 2])))
p1 - p2
##            1
## 1.110223e-16
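
# The same probability can also be cross-checked (a minimal sketch, not part
# of the original paste) with plogis(), base R's logistic CDF:
# plogis(x) == 1 / (1 + exp(-x))
plogis(sum(coef(am.glm) * c(1, newdata$hp, newdata$wt)))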

library(OneR)  # for the bin() and eval_model() functions
mtcars_bin <- bin(mtcars)  # discretize the numeric variables into ranges
m <- glm(am ~ hp + wt, data = mtcars_bin, family = binomial)
## Warning: glm.fit: fitted probabilities numerically 0 or 1 occurred
coefficients(m)  # points for ranges
##   (Intercept)   hp(109,165]   hp(165,222]   hp(222,278]   hp(278,335]
##      21.30781      19.62427      19.91155      40.03050      62.28971
##  wt(2.3,3.08] wt(3.08,3.86] wt(3.86,4.64] wt(4.64,5.43]
##     -20.61467     -62.03146     -62.78544     -81.47841
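
# Illustration (an addition, not in the original paste): scoring one binned
# car by hand, assuming its hp falls in (109,165] and its wt in (2.3,3.08].
# The "points" of the matching ranges are added to the intercept, and
# plogis() turns the total into a probability.
score <- coefficients(m)["(Intercept)"] +
  coefficients(m)["hp(109,165]"] +
  coefficients(m)["wt(2.3,3.08]"]
plogis(unname(score))  # practically 1, consistent with the glm.fit warning above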
# Classify as 1 when the predicted probability exceeds 0.5
prediction <- round(predict(m, mtcars_bin, type = "response"))
eval_model(prediction, mtcars_bin$am)
##
## Confusion matrix (absolute):
##           Actual
## Prediction  0  1 Sum
##        0   18  1  19
##        1    1 12  13
##        Sum 19 13  32
##
## Confusion matrix (relative):
##           Actual
## Prediction    0    1  Sum
##        0   0.56 0.03 0.59
##        1   0.03 0.38 0.41
##        Sum 0.59 0.41 1.00
##
## Accuracy:
## 0.9375 (30/32)
##
## Error rate:
## 0.0625 (2/32)
##
## Error rate reduction (vs. base rate):
## 0.8462 (p-value = 1.452e-05)
##
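# Quick sanity check (an addition, not in the original output): the absolute
# confusion matrix can also be reproduced with base R's table().
table(Prediction = prediction, Actual = mtcars_bin$am)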

# Different scaling: multiplying by 20/log(2) means 20 points double the odds
coefficients(m) * 20 / log(2)
##   (Intercept)   hp(109,165]   hp(165,222]   hp(222,278]   hp(278,335]
##      614.8136      566.2367      574.5260     1155.0360     1797.3012
##  wt(2.3,3.08] wt(3.08,3.86] wt(3.86,4.64] wt(4.64,5.43]
##     -594.8136    -1789.8496    -1811.6048    -2350.9701
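
# As a possible extension (not in the original paste), the rescaled
# coefficients could be rounded to whole numbers to get integer points
# per range:
round(coefficients(m) * 20 / log(2))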