Label Prop
import numpy as np
from scipy.sparse import csr_matrix
import scipy.sparse
from Kaczmarz_heap import runKaczmarz_heap
import Kaczmarz as KZ
import sample_descent as sd
import plot_experiment as plt
import legal_selection as LE
import heap_selection as HE
from pretty_plot_ import pretty_plot
from scipy.io import loadmat
#import FastKaczmark as FKZ
import scipy.sparse as sp
import scipy.io as sio
from numba import jit  # autojit is no longer available in numba; jit is used instead
from numba import float64, int64


def save_sparse_csr(filename, array):
    np.savez(filename, data=array.data, indices=array.indices,
             indptr=array.indptr, shape=array.shape)

def load_sparse_csr(filename):
    loader = np.load(filename)
    return csr_matrix((loader['data'], loader['indices'], loader['indptr']),
                      shape=loader['shape'])

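# Minimal round-trip sketch for the two helpers above. The file name
# "matrix_csr" is illustrative only; np.savez appends ".npz", so the load
# uses that suffix.
def _demo_csr_roundtrip():
    M = csr_matrix(sp.rand(10, 20, density=0.1))
    save_sparse_csr("matrix_csr", M)
    M2 = load_sparse_csr("matrix_csr.npz")
    assert (M - M2).nnz == 0  # the reconstructed matrix matches exactly
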
def get_leastsq_formulation(X, y):
    # Augmented formulation of least squares: with residual r = Xw - y, the
    # optimality condition X.T r = 0 gives the square system
    #     [ X  -I  ] [w]   [y]
    #     [ 0  X.T ] [r] = [0]
    # so A is (n+m) x (m+n) and b stacks y with m zeros.
    n, m = X.shape
    I = np.eye(n)
    Z = np.zeros((m, m))
    A = np.vstack([np.hstack([X, -I]), np.hstack([Z, X.T])])
    z = np.zeros(m)  # was np.zeros((1,3)).flatten(), a hard-coded leftover
    b = np.hstack([y, z])
    return A, b

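# Sanity-check sketch for the augmented formulation: solving the square system
# should recover the same w as an ordinary least-squares solve. The helper name
# and problem sizes are illustrative, not part of the experiment.
def _check_leastsq_formulation():
    rng = np.random.RandomState(0)
    X = rng.randn(30, 5)
    y = rng.randn(30)
    A_aug, b_aug = get_leastsq_formulation(X, y)
    w_aug = np.linalg.solve(A_aug, b_aug)[:X.shape[1]]  # first m entries are w
    w_ls = np.linalg.lstsq(X, y)[0]
    return np.allclose(w_aug, w_ls)
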
# expected dtypes (from the original signature): W uint8[:,:], A float64[:,:],
# S int64[:], s and n int64
@jit(nopython=True)
def get_label_prop_A(W, A, S, s, n):
    # System matrix over the s unlabeled nodes S: off-diagonals are negated
    # symmetrized edge weights between unlabeled nodes, and each diagonal
    # accumulates that node's total symmetrized degree.
    for i in range(s):
        for j in range(s):
            A[i, j] = -(W[S[j], S[i]] + W[S[i], S[j]])
        for j in range(n):
            A[i, i] += W[j, S[i]] + W[S[i], j]
    return


# expected dtypes: W uint8[:,:], b float64[:], S and S_k int64[:],
# s, s_k and n int64; y is passed as a flattened 1-D label vector (see main)
@jit(nopython=True)
def get_label_prop_B(W, y, b, S, s, S_k, s_k, n):
    # Right-hand side: each unlabeled node S[i] collects the labels of the
    # labeled nodes S_k, weighted by the symmetrized edge weights.
    for i in range(s):
        for j in range(s_k):
            b[i] += (W[S[i], S_k[j]] + W[S_k[j], S[i]]) * y[S_k[j]]
    return

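# Vectorized sketch equivalent to the two kernels above, useful as a
# cross-check on small inputs; the helper name is hypothetical. With
# Wsym = W + W.T, A is the Laplacian of Wsym restricted to the unlabeled
# indices S, and b weights the known labels y[S_k] by Wsym[S, S_k].
def _label_prop_system_dense(W, y, S, S_k):
    Wsym = W.astype(np.float64) + W.T.astype(np.float64)
    A = np.diag(Wsym[S, :].sum(axis=1)) - Wsym[np.ix_(S, S)]
    b = Wsym[np.ix_(S, S_k)].dot(y[S_k].astype(np.float64))
    return A, b
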
def main():
    M = sio.loadmat("/home/alimv/Downloads/Sara/labelprop.mat")
    y = M["y"].ravel()  # loadmat returns 2-D arrays; flatten the label vector
    W = M["W"]
    rows, column = W.shape
    # unlabeled nodes are marked with label 0
    s = np.sum(y == 0)
    S = np.nonzero(y == 0)[0]
    assert S.shape[0] == s
    A = np.zeros((s, s))
    get_label_prop_A(W, A, S, s, rows)
    # labeled nodes carry a nonzero label
    s_k = np.sum(y != 0)
    S_k = np.nonzero(y != 0)[0]
    assert S_k.shape[0] == s_k
    b = np.zeros(s)
    get_label_prop_B(W, y, b, S, s, S_k, s_k, rows)
    print(b.shape)
    print(A.shape)
    # early exit: the synthetic Kaczmarz experiment below is currently skipped
    return

    n_samples = 5000
    n_features = 1000
    #A = csr_matrix(scipy.sparse.rand(m, n,(np.log(m)*np.log(n))/(n*m), random_state = np.random.RandomState(seed = 6666)))

    # O(log n) each column, stacking columns
    sparse_1 = sp.rand(n_samples, 1, density=np.log(n_samples)/(n_features*n_samples))
    for i in range(n_features-1):
        sparse_1 = sp.hstack([sparse_1, sp.rand(n_samples, 1, density=np.log(n_samples)/(2*n_samples))])

    # O(log m) each row, stacking rows
    sparse_2 = sp.rand(1, n_features, density=np.log(n_features)/(n_features*n_samples))
    for j in range(n_samples-1):
        sparse_2 = sp.vstack([sparse_2, sp.rand(1, n_features, density=np.log(n_features)/(2*n_features))])

    A = csr_matrix(sparse_1 + sparse_2)

    np.random.seed(6666)
    x_true = np.random.randn(n_features)
    b = A.dot(x_true.T)
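    # b is generated from a known x_true, so A x = b is a consistent system and
    # the Kaczmarz iterates can be measured both by distance to x_true and by
    # squared residual error in the plots below.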

    #import scipy.io as sio
    #M = sio.loadmat("/home/alimv/Downloads/Sara/sara.mat")
    #A = M["A"].tocsr()

    #for algorithm in ["uniform","non_uniform","greedy_res","greedy_dis","legal_rule", "legal_rule_uni"]:
    # ["cycle", "uniform", "heap_rule_distance", "heap_rule_residual", "non_uniform"]:
    results = []

    for algorithm in ["non_uniform"]:
        print(algorithm)
        model = sd.Kaczmarz(n_epochs=10000, algorithm=algorithm)
        model.fit(A, b)

        results.append((model.results, algorithm))

    #for (y_axis, x_axis) in [("Distance", "Iteration"), ("Distance", "Time"),
    #                         ("Squared Error", "Iteration"), ("Squared Error", "Time")]:
    for (y_axis, x_axis) in [("Squared Error", "Time")]:
        pp = pretty_plot(title="samples="+str(n_samples)+", features="+str(n_features),
                         xlabel=x_axis, ylabel=y_axis,
                         n_plots=len(results))
        plt.plotkaczmarz(results, x_axis, y_axis, pp)
    #print "getting true x"
    #x_true = KZ.get_true_x(A,b)
    #print "have true x"
    # older experiment, left disabled inside a string literal
    """
    results = []
    #FKZ.runKaczmarz(A,b,x_true,"SADSRFdghjkgfdsadfgh")

    results.append((KZ.runKaczmarz(A,b,x_true, HE.getNextExample),"Heap"))

    #results.append((KZ.runKaczmarz(A,b,x_true, KZ.getNextExample_greedy),"Naive Greedy"))
    results.append((runKaczmarz_heap(A,b,x_true),"Heap"))
    #results.append((KZ.runKaczmarz(A,b,x_true, KZ.getNextExample_uni),"Uni"))
    #results.append((KZ.runKaczmarz(A,b,x_true, KZ.getNextExample_non_uni),"Non_Uni"))
    #results.append((KZ.runKaczmarz(A,b,x_true, LE.getNextExample),"Adapt"))
    #results.append((KZ.runKaczmarz(A,b,x_true, LE.getNextExample_aux),"Adapt-Uni"))
    for (y_axis, x_axis) in [("Distance", "Iteration"), ("Distance", "Time"),
                             ("Squared Error", "Iteration"), ("Squared Error", "Time")]:
        pp = pretty_plot(title="(less sparse) samples="+str(m)+", features="+str(n), xlabel=x_axis, ylabel=y_axis, n_plots=len(results))
        plt.plotkaczmarz(results, x_axis, y_axis, pp)
    """


if __name__ == "__main__":
    main()