import numpy as np
import keras.backend as K
from keras.layers import Input, Dense, Dropout
from keras.models import Model
from keras.optimizers import Adam


def compute_specificity(y_true, y_pred):
    """Compute the specificity for a set of predictions.

    Parameters
    ----------
    y_true : correct values for the batch of samples (must be binary: 0 or 1)
    y_pred : predicted values for the batch of samples (must be binary: 0 or 1)

    Returns
    -------
    out : the specificity
    """
    num_classes = 2
    # Build the confusion matrix: rows are true classes, columns are predictions
    conf_mat = np.zeros((num_classes, num_classes))
    for i in range(K.flatten(y_true).shape[0].value):
        conf_mat[y_true[i]][y_pred[i]] += 1
    # Convert the conf_mat to a Keras tensor
    conf_mat = K.variable(conf_mat, dtype=np.int32)
    # Compute the actual specificity: true negatives / (true negatives + false positives)
    specificity = conf_mat[0, 0] / (conf_mat[0, 0] + conf_mat[0, 1])
    # Could also compute other metrics from the conf_mat, such as:
    # sensitivity, false discovery rate, F1 score, Jaccard index etc.
    return specificity


def specificity_loss_wrapper():
    """A wrapper that creates and returns a function computing the
    specificity loss, defined as (1 - specificity).
    """
    # Define the function for your loss
    def specificity_loss(y_true, y_pred):
        return 1.0 - compute_specificity(y_true, y_pred)

    return specificity_loss  # return the function object


# Build a small fully connected network and compile it with the custom specificity loss
inp = Input(shape=(x_train_show.shape[1], 1000))
nnet = Dense(64, activation='relu')(inp)
nnet = Dropout(0.1)(nnet)
nnet = Dense(128, activation='relu')(nnet)
output = Dense(1, activation='sigmoid')(nnet)

model = Model(inputs=inp, outputs=output)
spec_loss = specificity_loss_wrapper()
model.compile(loss=spec_loss, optimizer=Adam(lr=0.00001))
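
# Quick sanity check of the specificity formula on plain numpy arrays,
# independent of the Keras tensors above. This is only an illustrative sketch;
# y_true_demo / y_pred_demo are made-up labels, not part of the original paste.
y_true_demo = np.array([0, 0, 0, 1, 1])
y_pred_demo = np.array([0, 1, 0, 1, 0])
conf = np.zeros((2, 2))
for t, p in zip(y_true_demo, y_pred_demo):
    conf[t][p] += 1  # rows are true classes, columns are predictions
# 2 true negatives, 1 false positive -> specificity = 2 / (2 + 1) ~= 0.667
print(conf[0, 0] / (conf[0, 0] + conf[0, 1]))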