import tensorflow as tf
import numpy as np
import data
import matplotlib.pyplot as plt
from sklearn.linear_model import LogisticRegression
from sklearn.linear_model import SGDClassifier
from functools import reduce


class TFDeep:
    def __init__(self, arch, param_delta=0.5, param_lambda=0.0):
        if len(arch) < 2:
            raise ValueError('arch must have at least 2 entries: input dimension (first), number of classes (last)')

        D, C = arch[0], arch[-1]

        self.X = tf.placeholder(tf.float32, shape=(None, D), name="input_X")
        self.Yoh_ = tf.placeholder(tf.float32, shape=(None, C), name="labels_Y")
        self.l2_loss = 0.0
        self.weight_list = []

        # Build the fully connected layers: every hidden layer applies a sigmoid
        # non-linearity; the last pre-activation h is fed into the softmax below.
        layer_input = self.X
        for i in range(1, len(arch)):
            weights = tf.Variable(tf.random_uniform([arch[i - 1], arch[i]]), name="weights_" + str(i))
            self.weight_list.append(weights)
            self.l2_loss = tf.add(tf.nn.l2_loss(weights), self.l2_loss)
            b = tf.Variable(tf.zeros(arch[i]), name="bias_" + str(i))
            h = tf.matmul(layer_input, weights) + b
            layer_input = tf.nn.sigmoid(h)

        self.probs = tf.nn.softmax(h)
        self.l2_loss = param_lambda * self.l2_loss
        self.loss = self.l2_loss + tf.losses.log_loss(self.Yoh_, self.probs)

        # Logging op: running self.print_loss together with the train step prints the current loss.
        self.print_loss = tf.print("loss ", self.loss)
        self.trainer = tf.train.GradientDescentOptimizer(param_delta)
        self.train_step = self.trainer.minimize(self.loss)
        self.session = tf.Session()

    def train(self, X, Yoh_, param_niter):
        # (Re)initialize all variables before optimisation.
        self.session.run(tf.global_variables_initializer())

        for i in range(param_niter):
            self.session.run([self.train_step, self.print_loss], feed_dict={self.X: X, self.Yoh_: Yoh_})

    def count_params(self):
        # Sum the number of elements over all trainable variables.
        parameter_counter = 0
        for parameter in tf.trainable_variables():
            shape = parameter.get_shape()
            tmp_cntr = 1
            for dim in shape:
                tmp_cntr *= int(dim)
            parameter_counter += tmp_cntr
            print(parameter.name)
        return parameter_counter

    def eval(self, X):
        # Returns the predicted class index (argmax of the softmax) for every sample.
        return np.argmax(self.session.run(self.probs, feed_dict={self.X: X}), axis=1)

    def eval_weights(self, X_in):
        # The weight values do not depend on the input, so no feed_dict is needed;
        # X_in is kept only so the original call signature stays the same.
        return self.session.run(self.weight_list)


if __name__ == "__main__":
    # initialize the random number generators
    np.random.seed(100)
    tf.set_random_seed(100)

    reg_factor = 1e-4
    # instantiate the data X and the labels Yoh_
    K, C, N = 6, 3, 10
    X, Y_ = data.sample_gmm(K, C, N)

    Yoh_ = np.zeros((N * K, C))
    Yoh_[np.arange(N*K), Y_] = 1
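    # The two lines above build a one-hot label matrix: row i has a 1 in column Y_[i]
    # and zeros elsewhere, which is the format the log-loss in TFDeep expects.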

    # scikit-learn logistic regression baseline on the same data
    clf = LogisticRegression(solver='sag', C=1.0/reg_factor)
    clf.fit(X, Y_)
    Y_pred = clf.predict(X)
    rect = (np.min(X, axis=0), np.max(X, axis=0))
    data.graph_surface(clf.predict, rect, offset=0.5)
    data.graph_data(X, Y_, Y_pred, special=[])
    plt.show()

    # build the graph (choose one of the three options below, as in the subtask)
    # if using ReLU, lower the learning rate to 0.005
    tflr = TFDeep([2, 10, 10, C], 0.1, reg_factor)
    # tflr = TFDeep([2, 10, C], 0.1, reg_factor)
    # tflr = TFDeep([2, C], 0.1, reg_factor)
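    # For the [2, 10, 10, 3] architecture chosen above, count_params() should report
    # (2*10 + 10) + (10*10 + 10) + (10*3 + 3) = 30 + 110 + 33 = 173 trainable parameters.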

    # learn the parameters:
    tflr.train(X, Yoh_, 10000)

    acc, prec_rec, _ = data.eval_perf_multi(Y_, tflr.eval(X))
    print("Accuracy = ", acc)
    print("Precision, Recall ", prec_rec)
    print("Number of parameters: ", str(tflr.count_params()))

    # get the predictions on the training set
    # (note: eval() returns argmax class indices, not the softmax probabilities)
    probs = tflr.eval(X)
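    # If the actual softmax probabilities are wanted instead, they can be fetched
    # directly from the graph using only attributes defined in TFDeep above, e.g.:
    # probs_soft = tflr.session.run(tflr.probs, feed_dict={tflr.X: X})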

    # print the performance (precision and recall per class);
    # eval_perf_multi above already reports these, and a scikit-learn sketch follows below

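    # Illustrative alternative for the per-class report, using scikit-learn's
    # classification_report (not part of the original lab skeleton):
    from sklearn.metrics import classification_report
    print(classification_report(Y_, tflr.eval(X)))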
    # plot the results and the decision surface
    rect = (np.min(X, axis=0), np.max(X, axis=0))
    data.graph_surface(tflr.eval, rect, offset=0.5)
    data.graph_data(X, Y_, probs, special=[])
    plt.show()