Not a member of Pastebin yet?
Sign up —
it unlocks many cool features!
# Fit a logistic regression of transmission type (am: 0 = automatic,
# 1 = manual) on horsepower and weight, then verify that
# predict(type = "response") equals the inverse-logit of the linear
# predictor computed by hand.
am.glm <- glm(am ~ hp + wt, data = mtcars, family = binomial)
newdata <- data.frame(hp = 120, wt = 2.8)
p1 <- predict(am.glm, newdata = newdata, type = "response")

# Manual prediction: plogis() is the logistic CDF, 1 / (1 + exp(-x)),
# applied to intercept + hp-coefficient * hp + wt-coefficient * wt.
p2 <- plogis(sum(coef(am.glm) * c(1, newdata$hp, newdata$wt)))

# The two agree up to floating-point rounding.
p1 - p2
## 1
## 1.110223e-16
library(OneR) # for bin() and eval_model()

# Discretize all numeric columns of mtcars into ranges; the glm
# coefficients of the binned predictors then act as the "points" of a
# simple credit-scorecard-style model.
mtcars_bin <- bin(mtcars)
m <- glm(am ~ hp + wt, data = mtcars_bin, family = binomial)
## Warning: glm.fit: fitted probabilities numerically 0 or 1 occurred
## (expected here: the binned predictors separate the classes almost perfectly)
coef(m) # points for ranges
## (Intercept) hp(109,165] hp(165,222] hp(222,278] hp(278,335]
## 21.30781 19.62427 19.91155 40.03050 62.28971
## wt(2.3,3.08] wt(3.08,3.86] wt(3.86,4.64] wt(4.64,5.43]
## -20.61467 -62.03146 -62.78544 -81.47841

# In-sample predictions, rounded to hard class labels 0/1, evaluated
# against the observed transmission type.
prediction <- round(predict(m, newdata = mtcars_bin, type = "response"))
eval_model(prediction, mtcars_bin$am)
##
## Confusion matrix (absolute):
## Actual
## Prediction 0 1 Sum
## 0 18 1 19
## 1 1 12 13
## Sum 19 13 32
##
## Confusion matrix (relative):
## Actual
## Prediction 0 1 Sum
## 0 0.56 0.03 0.59
## 1 0.03 0.38 0.41
## Sum 0.59 0.41 1.00
##
## Accuracy:
## 0.9375 (30/32)
##
## Error rate:
## 0.0625 (2/32)
##
## Error rate reduction (vs. base rate):
## 0.8462 (p-value = 1.452e-05)
##
## Different scaling: convert the log-odds coefficients to scorecard
## points on a "20 points to double the odds" scale (multiplying by
## 20 / log(2) means +20 points corresponds to doubled odds of am = 1).
coef(m) * 20 / log(2)
## (Intercept) hp(109,165] hp(165,222] hp(222,278] hp(278,335]
## 614.8136 566.2367 574.5260 1155.0360 1797.3012
## wt(2.3,3.08] wt(3.08,3.86] wt(3.86,4.64] wt(4.64,5.43]
## -594.8136 -1789.8496 -1811.6048 -2350.9701
Add Comment
Please sign in to add a comment.