Required packages

library(aod)
library(MASS)
library(car)

Load data

dt <- read.table("./data/CH14PR13.txt",
                 col.names = c("label", "income", "autoyear"))
dt$label <- as.factor(dt$label)

Problem 14.13 a

fit.logr <- glm(label ~ income + autoyear, family = binomial("logit"), data = dt)
summary(fit.logr)
## 
## Call:
## glm(formula = label ~ income + autoyear, family = binomial("logit"), 
##     data = dt)
## 
## Deviance Residuals: 
##     Min       1Q   Median       3Q      Max  
## -1.6189  -0.8949  -0.5880   0.9653   2.0846  
## 
## Coefficients:
##             Estimate Std. Error z value Pr(>|z|)  
## (Intercept) -4.73931    2.10195  -2.255   0.0242 *
## income       0.06773    0.02806   2.414   0.0158 *
## autoyear     0.59863    0.39007   1.535   0.1249  
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## (Dispersion parameter for binomial family taken to be 1)
## 
##     Null deviance: 44.987  on 32  degrees of freedom
## Residual deviance: 36.690  on 30  degrees of freedom
## AIC: 42.69
## 
## Number of Fisher Scoring iterations: 4

Problem 14.13 b
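
Assuming part (b) asks for the exponentiated coefficients exp(b1) and exp(b2), i.e. the estimated odds ratios for a one-unit increase in income and in age of the oldest car, a minimal sketch using the model fitted above:

# Hedged sketch: estimated odds ratios exp(b1) and exp(b2)
exp(coef(fit.logr))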

Problem 14.13 c

\[\hat{\pi}= \frac{\exp(X'b)}{1+\exp(X'b)}=[1+\exp(-X'b)]^{-1}= [1+\exp(4.73931-0.06773 X_1-0.59863X_2)]^{-1} = 0.6089927.\]
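
The same value can be reproduced with predict(); the point income = 50, autoyear = 3 used below is an assumption (it is the combination that yields the 0.609 shown above):

# Hedged sketch: predicted purchase probability at the assumed point
# income = 50, autoyear = 3; type = "response" returns pi-hat directly
predict(fit.logr, newdata = data.frame(income = 50, autoyear = 3), type = "response")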

Problem 14.19 b

coef(fit.logr)
## (Intercept)      income    autoyear 
## -4.73930950  0.06773256  0.59863166
wald.test(b = coef(fit.logr), Sigma = vcov(fit.logr), Terms = 3)
## Wald test:
## ----------
## 
## Chi-squared test:
## X2 = 2.4, df = 1, P(> X2) = 0.12
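
Here Terms = 3 refers to the third coefficient, autoyear. The statistic can be checked by hand: z = 0.59863/0.39007 ≈ 1.53, so z² ≈ 2.36, matching the rounded X2 = 2.4 on 1 df above. A short check:

# Squared z-statistic for the autoyear coefficient, equal to the Wald chi-square
(coef(fit.logr)[3] / sqrt(vcov(fit.logr)[3, 3]))^2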

Problem 14.21 a

fit.logr2 <- glm(label ~ polym(income, degree = 2) + polym(autoyear, degree = 2),
                 family = binomial("logit"), data = dt)
summary(fit.logr2)
## 
## Call:
## glm(formula = label ~ polym(income, degree = 2) + polym(autoyear, 
##     degree = 2), family = binomial("logit"), data = dt)
## 
## Deviance Residuals: 
##     Min       1Q   Median       3Q      Max  
## -1.7571  -0.8346  -0.5888   0.9257   2.2011  
## 
## Coefficients:
##                              Estimate Std. Error z value Pr(>|z|)  
## (Intercept)                   -0.3171     0.4456  -0.712   0.4767  
## polym(income, degree = 2)1    10.0558     4.5620   2.204   0.0275 *
## polym(income, degree = 2)2     1.0828     3.4545   0.313   0.7539  
## polym(autoyear, degree = 2)1   5.5885     3.5022   1.596   0.1106  
## polym(autoyear, degree = 2)2  -2.8765     2.7050  -1.063   0.2876  
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## (Dispersion parameter for binomial family taken to be 1)
## 
##     Null deviance: 44.987  on 32  degrees of freedom
## Residual deviance: 35.290  on 28  degrees of freedom
## AIC: 45.29
## 
## Number of Fisher Scoring iterations: 5
forwards <- step(fit.logr2,
                 scope = label ~ polym(income, degree = 2) + polym(autoyear, degree = 2),
                 direction = "forward")
## Start:  AIC=45.29
## label ~ polym(income, degree = 2) + polym(autoyear, degree = 2)
summary(forwards)
## 
## Call:
## glm(formula = label ~ polym(income, degree = 2) + polym(autoyear, 
##     degree = 2), family = binomial("logit"), data = dt)
## 
## Deviance Residuals: 
##     Min       1Q   Median       3Q      Max  
## -1.7571  -0.8346  -0.5888   0.9257   2.2011  
## 
## Coefficients:
##                              Estimate Std. Error z value Pr(>|z|)  
## (Intercept)                   -0.3171     0.4456  -0.712   0.4767  
## polym(income, degree = 2)1    10.0558     4.5620   2.204   0.0275 *
## polym(income, degree = 2)2     1.0828     3.4545   0.313   0.7539  
## polym(autoyear, degree = 2)1   5.5885     3.5022   1.596   0.1106  
## polym(autoyear, degree = 2)2  -2.8765     2.7050  -1.063   0.2876  
## ---
## Signif. codes:  0 '***' 0.001 '**' 0.01 '*' 0.05 '.' 0.1 ' ' 1
## 
## (Dispersion parameter for binomial family taken to be 1)
## 
##     Null deviance: 44.987  on 32  degrees of freedom
## Residual deviance: 35.290  on 28  degrees of freedom
## AIC: 45.29
## 
## Number of Fisher Scoring iterations: 5
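
The forward search above starts from the full second-order model and its scope contains no further terms to add, so step() stops immediately and summary(forwards) is identical to summary(fit.logr2). A forward search is more commonly started from the intercept-only model; a sketch under that assumption, with the same data and scope:

# Hedged sketch: forward selection starting from the null (intercept-only) model
fit.null <- glm(label ~ 1, family = binomial("logit"), data = dt)
forwards2 <- step(fit.null,
                  scope = label ~ polym(income, degree = 2) + polym(autoyear, degree = 2),
                  direction = "forward")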

Problem 14.21 b

# Use a name other than "step" so that stats::step() is not masked
backwards <- stepAIC(fit.logr2, direction = "backward")
## Start:  AIC=45.29
## label ~ polym(income, degree = 2) + polym(autoyear, degree = 2)
## 
##                               Df Deviance    AIC
## - polym(autoyear, degree = 2)  2   39.091 45.091
## <none>                             35.290 45.290
## - polym(income, degree = 2)    2   44.700 50.700
## 
## Step:  AIC=45.09
## label ~ polym(income, degree = 2)
## 
##                             Df Deviance    AIC
## <none>                           39.091 45.091
## - polym(income, degree = 2)  2   44.987 46.987
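
The backward search drops the second-order autoyear term and keeps label ~ polym(income, degree = 2). Its AIC can be verified from the reported deviance: for Bernoulli data the saturated log-likelihood is zero, so AIC = deviance + 2p, and 39.091 + 2 × 3 = 45.091 for the three remaining coefficients. A short check on the object returned by stepAIC():

# Inspect the selected model and reproduce its AIC from the deviance
formula(backwards)
backwards$deviance + 2 * length(coef(backwards))
AIC(backwards)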

Problem 14.31 b

# Squared Pearson residuals, squared deviance residuals, and Cook's distances
delta_X2 <- residuals(fit.logr, type = "pearson")^2
delta_dev2 <- residuals(fit.logr, type = "deviance")^2
cooked <- cooks.distance(fit.logr)
# Use a name other than "table" so that base::table() is not masked
diagnostics <- cbind(delta_X2, delta_dev2, cooked)
diagnostics
##      delta_X2 delta_dev2      cooked
## 1  0.46025598 0.75722350 0.007287618
## 2  0.61013149 0.95263170 0.015487481
## 3  0.59338747 0.93172447 0.022007635
## 4  0.57645850 0.91036176 0.034632273
## 5  0.52127958 0.83910362 0.011141676
## 6  0.62804934 0.97476515 0.047271817
## 7  0.13371774 0.25100454 0.007232107
## 8  0.43705937 0.72519784 0.029744556
## 9  2.70752691 2.62073011 0.127322468
## 10 0.06792509 0.13143520 0.003071869
## 11 0.14466564 0.27022516 0.006890961
## 12 0.72341852 1.08861966 0.036467012
## 13 0.30655201 0.53478323 0.006056811
## 14 0.69453015 1.05481101 0.020081317
## 15 0.49251030 0.80091894 0.007562268
## 16 0.33530576 0.57832060 0.018701632
## 17 0.55452674 0.88234230 0.021356151
## 18 0.15572035 0.28944766 0.004033318
## 19 3.08205539 2.81320127 0.075808690
## 20 1.40860079 1.75809199 0.194451860
## 21 0.32803490 0.56740066 0.006242799
## 22 1.77318040 2.03998964 0.025949339
## 23 1.26378596 1.63407723 0.018434634
## 24 0.18872634 0.34576487 0.005419367
## 25 0.26771482 0.47443185 0.005680174
## 26 3.77648772 3.12741098 0.099379479
## 27 0.25018227 0.44657872 0.005486655
## 28 0.63176835 0.97932861 0.031166825
## 29 7.78290579 4.34561462 0.247931162
## 30 1.80333955 2.06162281 0.069450919
## 31 0.21848658 0.39521917 0.005086679
## 32 0.65619922 1.00905070 0.040135710
## 33 0.05032646 0.09820207 0.001642341
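
Case 29 has by far the largest squared Pearson residual (7.78) and the largest Cook's distance (0.248), followed by case 20 (0.194). A short sketch for flagging potentially influential cases; the 4/n cut-off below is only a common rule of thumb, not part of the textbook problem:

# Cases with Cook's distance above the rough 4/n threshold
which(cooked > 4 / nrow(dt))
# The three largest Cook's distances
sort(cooked, decreasing = TRUE)[1:3]
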
Copyright © 2017 Ming Chen & Wenqiang Feng. All rights reserved.