Advertisement
Not a member of Pastebin yet?
Sign Up,
it unlocks many cool features!
# --- TF1 graph-mode snippets pasted from a larger script. Names `tf`, `self`,
# `confusion_matrix`, `last_layer`, `d`, `h_fc1_drop`, `x`, `X_train`,
# `y_train`, and `keep_prob` are defined elsewhere and not visible here. ---

# Accuracy
with tf.name_scope("accuracy"):
    # Element-wise agreement between predicted class indices and the argmax
    # of the labels along axis 1 (labels are presumably one-hot — confirm).
    correct_predictions = tf.equal(self.predictions, tf.argmax(self.input_y, 1))
    # Cast booleans to float and average -> fraction of correct predictions.
    self.accuracy = tf.reduce_mean(tf.cast(correct_predictions, "float"), name="accuracy")

# NOTE(review): stray increment of a single hard-coded cell of a separate
# (apparently plain-Python) `confusion_matrix` object — looks like debug or
# paste residue; confirm whether this line is intentional.
confusion_matrix[16][7]+=1

# Placeholder for two-class one-hot labels (TF1 graph mode).
y_ = tf.placeholder(tf.float32, shape=[None, 2])
# Final 2-unit output layer on top of the dropout activations; `last_layer`
# is defined elsewhere — assumed signature (input_dim, output_dim, inputs),
# TODO confirm.
y = last_layer(d/2,2,h_fc1_drop)
# NOTE(review): tf.confusion_matrix's signature is (labels, predictions),
# but the model output `y` is passed first and the label placeholder `y_`
# second — the resulting matrix is the transpose of the usual convention;
# confirm this is intended.
confusion_matrix_tf = tf.confusion_matrix(tf.argmax(y, 1), tf.argmax(y_, 1))
# Evaluate the confusion matrix on the TRAINING set with dropout disabled
# (keep_prob=1.0); requires an active default session.
cm = confusion_matrix_tf.eval(feed_dict={x: X_train, y_: y_train, keep_prob: 1.0})
def beautyCM(cm, ind=('True pos', 'True neg'), cols=('Pred pos', 'Pred neg')):
    """Wrap a raw 2x2 confusion matrix in a labeled pandas DataFrame.

    Parameters:
        cm: array-like confusion matrix (e.g. the 2x2 ndarray produced by
            ``tf.confusion_matrix(...).eval(...)``).
        ind: row labels (true classes). Defaults to pos/neg.
        cols: column labels (predicted classes). Defaults to pos/neg.

    Returns:
        pd.DataFrame with the given index and column labels.
    """
    # Tuple defaults replace the original mutable-list defaults (shared
    # mutable default-argument pitfall); DataFrame accepts any sequence.
    return pd.DataFrame(cm, index=list(ind), columns=list(cols))
def precision(cm):
    """Precision = TP / (TP + FP), rounded to 4 decimals.

    Parameters:
        cm: labeled confusion-matrix DataFrame as produced by ``beautyCM``
            (rows 'True pos'/'True neg', columns 'Pred pos'/'Pred neg').

    Returns:
        float precision; 1.0 when there are no positive predictions.
    """
    tp = float(cm.loc['True pos', 'Pred pos'])
    fp = float(cm.loc['True neg', 'Pred pos'])
    denom = tp + fp
    # Explicit zero check: the original relied on catching ZeroDivisionError,
    # but cm.loc yields numpy scalars, whose division by zero returns
    # inf/nan with a RuntimeWarning instead of raising — the except clause
    # was dead code and nan leaked out of the function.
    if denom == 0:
        return 1.0
    return round(tp / denom, 4)
def recall(cm):
    """Recall = TP / (TP + FN), rounded to 4 decimals.

    Parameters:
        cm: labeled confusion-matrix DataFrame as produced by ``beautyCM``
            (rows 'True pos'/'True neg', columns 'Pred pos'/'Pred neg').

    Returns:
        float recall; 1.0 when there are no actual positives.
    """
    tp = float(cm.loc['True pos', 'Pred pos'])
    fn = float(cm.loc['True pos', 'Pred neg'])
    denom = tp + fn
    # Explicit zero check: the original relied on catching ZeroDivisionError,
    # but cm.loc yields numpy scalars, whose division by zero returns
    # inf/nan with a RuntimeWarning instead of raising — the except clause
    # was dead code and nan leaked out of the function. (Also fixes the
    # copy-pasted "prec =" comment that mislabeled this as precision.)
    if denom == 0:
        return 1.0
    return round(tp / denom, 4)
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement