roninkoi

Fisher's linear discriminant

Mar 1st, 2021
import numpy as np
from scipy.stats import norm

def fdisc(a, b, fa, x1, x2):
    """Fisher's linear discriminant for two classes of 2D points.

    Input: classes a and b (arrays of shape (n, 2)), the fraction fa
    of class a to keep, and a data point x = (x1, x2).
    Output: rejection criterion a0, selection efficiency be for
    class b, and probabilities pa and pb for the two classes.
    """
    acm = np.cov(a[:, 0], a[:, 1])  # covariance matrix of each class
    bcm = np.cov(b[:, 0], b[:, 1])

    apb = acm + bcm  # sum of covariances
    apbi = np.linalg.inv(apb)  # invert

    # means of classes a and b
    am = np.array((np.mean(a[:, 0]), np.mean(a[:, 1])))
    bm = np.array((np.mean(b[:, 0]), np.mean(b[:, 1])))

    # Fisher projection vector: c = (S_a + S_b)^-1 (mu_a - mu_b)
    c = apbi @ (am - bm)

    anew = np.sort(c @ a.T)  # project both classes onto c
    bnew = np.sort(c @ b.T)

    # cut value a0 that keeps the fraction fa of class a
    # (index clamped so fa = 0 does not run past the end)
    a0 = anew[min(int((1. - fa) * len(anew)), len(anew) - 1)]
    be = np.sum(bnew < a0) / len(bnew)  # fraction of class b below the cut

    xnew = c @ np.array((x1, x2))  # project the point (x1, x2)
    # tail probabilities from Gaussian fits to the projected classes
    pa = norm.cdf(xnew, c @ am, np.std(anew))
    pb = 1. - norm.cdf(xnew, c @ bm, np.std(bnew))

    return a0, be, pa, pb
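
A minimal usage sketch, continuing the paste above (numpy is already imported there). The two synthetic Gaussian classes, the fraction fa = 0.8, and the test point are illustrative values, not from the original:

rng = np.random.default_rng(0)
# hypothetical data: class a centered above class b (illustrative parameters)
a = rng.multivariate_normal([2., 2.], [[1., 0.3], [0.3, 1.]], size=1000)
b = rng.multivariate_normal([0., 0.], [[1., -0.2], [-0.2, 1.]], size=1000)

a0, be, pa, pb = fdisc(a, b, 0.8, 1.5, 1.5)  # keep 80% of class a
print(f"cut a0 = {a0:.3f}, b efficiency = {be:.3f}")
print(f"pa = {pa:.3f}, pb = {pb:.3f}")

Since the projection vector c points from the mean of b towards the mean of a, class a projects to larger values on average, so keeping projected values above a0 selects mostly class a.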