Advertisement
Guest User

Untitled

a guest
Mar 2nd, 2017
145
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
  1. from numpy import *
  2. import numpy as np
  3. from numpy.linalg import inv
  4. from math import *
  5. import matplotlib.pyplot as plt
  6.  
  7. # logistic function
  8. def logist(x):
  9.     return 1.0/(1 + np.exp(-x))
  10.    
  11. # logistic with regularization
  12. def lgr(X, y, lamb):
  13.     return dot(dot(inv(dot(X.transpose(), X) + lamb*eye(len(X[0]))), X.transpose()), y.reshape((len(y), 1)))
  14.  
  15. # 0/1 error
  16. def Err(w, x, y):
  17.     s = dot(w, x.transpose())
  18.     p = where(s>=0, 1.0, -1.0)
  19.     Eout = sum(p!=y)
  20.     return Eout / len(x)
  21.    
# read data: each row of the .dat files is [x_1 ... x_{d-1}, label]
# (labels presumably +-1 — matches Err()'s where(s >= 0, 1, -1); TODO confirm)
train, test = loadtxt("hw4_train.dat"), loadtxt("hw4_test.dat")
# number of columns = (dim - 1) features + 1 label column
dim = len(train[0])
# x / xt: drop the label column and prepend a bias column of ones
x = c_[ones(len(train)), delete(train, s_[dim-1], axis = 1 )]
# y / yt: keep only the last column (the label), flattened to 1-D
y = delete(train, s_[0:dim-1 ], axis = 1 ).reshape((1,len(train)))[0]
xt = c_[ones(len(test)), delete(test, s_[dim-1], axis = 1 )]
yt = delete(test, s_[0:dim-1 ], axis = 1 ).reshape((1,len(test)))[0]
  29.  
# No. 13: ridge regression on the full training set with one fixed lambda
lam = 1.126
# lgr returns a (3, 1) column; flatten to shape (3,) for Err
wreg = lgr(x, y, lam).reshape((1,3))[0]
print("13."), print("Ein = ", Err(wreg, x, y),", Eout = ", Err(wreg, xt, yt)), print()
  34.  
# No. 14 & 15: sweep lambda over 10^2, 10^1, ..., 10^-10
A = range(2, -11, -1)
# lamset[i] = 10 ** A[i]
lamset = power(10*ones(len(A)), array(A))
Ein, Eout = zeros(len(A)), zeros(len(A))
# running minima; error rates lie in [0, 1], so 1 is a safe initial bound
# (mEin/mEout: best errors, Eom/Eim: the paired error, lmi/lmo: argmin index)
mEin, mEout, Eom, Eim, lmi, lmo  = 1, 1, 1, 1, 1,1

for i in range(len(A)):
    wreg = lgr(x, y, lamset[i]).reshape((1,3))[0]
    Ein[i] = Err(wreg, x, y)
    Eout[i] = Err(wreg, xt, yt)
    # strict '<' keeps the earlier (larger) lambda on ties
    if Ein[i] < mEin:
        mEin = Ein[i]
        Eom = Eout[i]
        lmi = i
    if Eout[i] < mEout:
        mEout = Eout[i]
        Eim = Ein[i]
        lmo = i

print("14."), print("Minimum Ein occurs when lambda =",lamset[lmi] ,", Ein =", mEin, ", Eout =",Eom), print()
print("15."), print("Minimum Eout occurs when lambda =",lamset[lmo],", Eout =", mEout, ", Ein =",Eim), print()
  56.  
  57. fig = plt.figure()
  58. fig.suptitle("Relation between " + r"$\\lambda$" + " and "+r"$E_{in}$"+", "+r"$E_{out}$", fontsize=14, fontweight=\'bold\')
  59. ax = fig.add_subplot(111)
  60. plt.plot(lamset, Ein, \'k--\', label=r"$E_{in}$", color=\'#8F4586\')
  61. plt.plot(lamset, Eout, \'k\', label=r"$E_{out}$", color=\'#64A600\')
  62. legend = plt.legend(loc=\'upper left\', shadow=True)
  63. ax.annotate(\'Minimum \'+r"$E_{in}$"+\' = \' + str(mEin), xy=(lamset[lmi], mEin), xytext=(lamset[lmi]*10, mEin+0.07),
  64. arrowprops=dict(facecolor=\'black\', shrink=0.05))
  65. ax.annotate(\'Minimum \'+r"$E_{out}$"+\' = \' + str(mEout), xy=(lamset[lmo], mEout), xytext=(lamset[lmo]*10, mEin+0.05),
  66. arrowprops=dict(facecolor=\'black\', shrink=0.05))
  67. plt.ylabel(\'Expected Value of 0/1 Error\'), plt.xlabel(r"$\\lambda$")
  68. ax.set_xscale(\'log\')
  69. legend.get_frame().set_facecolor(\'#CAFFFF\')
  70.  
  71.  
  72. #No. 16 ~ 17
  73. xtr, xv = x[0:120], x[120:200]
  74. ytr, yv = y[0:120], y[120:200]
  75. Etr, Ev, Eout = zeros(len(A)), zeros(len(A)), zeros(len(A))
  76. mEtr, Etro, mEv, Etrv, lmtr, lmv  = 1, 1, 1, 1, 1, 1
  77.  
  78. for i in range(len(A)):
  79.    wreg = lgr(xtr, ytr, lamset[i]).reshape((1,3))[0]
  80.    Etr[i] = Err(wreg, xtr, ytr)
  81.    Ev[i] = Err(wreg, xv, yv)
  82.    Eout[i] = Err(wreg, xt, yt)
  83.    if Etr[i] < mEtr:
  84.        mEtr = Etr[i]
  85.        Etro = Eout[i]
  86.        lmtr = i
  87.    if Ev[i] < mEv:
  88.        mEv = Ev[i]
  89.        Evo = Eout[i]
  90.        lmv = i
  91.      
  92. print("16."), print("Minimum Etrain occurs when lambda =",lamset[lmtr] ,", Etrain =", mEtr, ", Eout =",Etro), print()
  93. print("17."), print("Minimum Eval occurs when lambda =",lamset[lmv],", Eval =", mEv, ", Eout =",Evo), print()
  94.  
  95. fig2 = plt.figure()
  96. fig2.suptitle("Relation between " + r"$\\lambda$" + " and "+ r"$E_{train}$"+", "+ r"$E_{val}$", fontsize=14, fontweight=\'bold\')
  97. ax = fig2.add_subplot(111)
  98. plt.plot(lamset, Etr, \'k--\', label=r"$E_{train}$", color=\'#0000E3\')
  99. plt.plot(lamset, Ev, \'k\', label=r"$E_{val}$", color=\'#64A600\')
  100. legend = plt.legend(loc=\'upper left\', shadow=True)
  101. ax.annotate(\'Minimum \' +r"$E_{train}$"+\' = \' + str(mEin), xy=(lamset[lmtr], mEtr), xytext=(lamset[lmtr]*10, mEtr+0.09),
  102. arrowprops=dict(facecolor=\'black\', shrink=0.05))
  103. ax.annotate(\'Minimum \' +r"$E_{val}$"+\' = \' + str(mEout), xy=(lamset[lmv], mEv), xytext=(lamset[lmv]/100, mEv+0.09),
  104. arrowprops=dict(facecolor=\'black\', shrink=0.05))
  105. plt.ylabel(\'Expected Value of 0/1 Error\'), plt.xlabel(r"$\\lambda$")
  106. ax.set_xscale(\'log\')
  107. legend.get_frame().set_facecolor(\'#CAFFFF\')
  108.  
  109. #No. 18
  110. wreg = lgr(x, y, lamset[lmv]).reshape((1,3))[0]
  111. print("18."), print("For the wreg with optimal lambda achievd in 17. and trained in whole data:")
  112. print("Ein = ", Err(wreg, x, y),", Eout = ", Err(wreg, xt, yt)), print()
  113.  
# No. 19: 5-fold cross-validation over lamset (folds of 40 consecutive rows;
# assumes the training set has exactly 200 rows — TODO confirm)
Ecv, mEcv, lcv = zeros(len(A)), 1, 1
for i in range(len(A)):
   for j in range(5):
       # fold j = rows [40j, 40(j+1)): train on the rest, validate on the fold
       ftrx, ftry = delete(x, s_[40*j:40*(j+1)], axis = 0 ), delete(y, s_[40*j:40*(j+1)], axis = 0 )
       ftex, ftey = x[40*j:40*(j+1)], y[40*j:40*(j+1)]
       wreg = lgr(ftrx, ftry, lamset[i]).reshape((1,3))[0]
       Ecv[i] += Err(wreg, ftex, ftey)
   # average validation error over the 5 folds
   Ecv[i] /= 5
   if Ecv[i] < mEcv:
       mEcv = Ecv[i]
       lcv = i

print("19."), print("Minimum Ecv occurs when lambda =",lamset[lcv] ,", Ecv =", mEcv), print()
  128.    
  129. fig3 = plt.figure()
  130. fig3.suptitle("Relation between " + r"$\\lambda$" + " and " + r"$E_{cv}$", fontsize=14, fontweight=\'bold\')
  131. ax = fig3.add_subplot(111)
  132. plt.plot(lamset, Ecv, \'k\', label=\'Etrain\', color=\'#00A600\')
  133. ax.annotate(\'Minimum \'+ r"$E_{cv}$"+\' = \' + str(mEcv), xy=(lamset[lcv], mEcv), xytext=(lamset[lcv]*10, mEcv+0.09),
  134. arrowprops=dict(facecolor=\'black\', shrink=0.05))
  135. plt.ylabel(r\'$E_{cv}$\'+\' (Expected Value of 0/1 Error)\'), plt.xlabel(r"$\\lambda$")
  136. ax.set_xscale(\'log\')
  137.  
  138. #No. 20
  139. wreg = lgr(x, y, lamset[lcv]).reshape((1,3))[0]
  140. print("20."), print("For the wreg with optimal lambda achievd in 19. and trained in whole data:")
  141. print("Ein = ", Err(wreg, x, y),", Eout = ", Err(wreg, xt, yt)), print()
  142.  
  143. plt.show()
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement