Not a member of Pastebin yet? Sign up — it unlocks many cool features!
# See also: scipy.stats.entropy(pk, qk=None, base=None) — computes the KL
# divergence when qk is given.
import numpy as np
def kl(p, q):
    """Kullback-Leibler divergence D(P || Q) for discrete distributions.

    Parameters
    ----------
    p, q : array-like, dtype=float, shape=n
        Discrete probability distributions.

    Returns
    -------
    float
        D(P || Q) in nats. Terms where p == 0 contribute 0 (by the
        convention 0 * log 0 = 0).
    """
    # np.float was removed in NumPy 1.24; the builtin float is the
    # equivalent (float64) dtype.
    p = np.asarray(p, dtype=float)
    q = np.asarray(q, dtype=float)
    # NOTE: np.where evaluates both branches, so p == 0 entries may still
    # emit a RuntimeWarning from log(0/q); the result is correct regardless.
    return np.sum(np.where(p != 0, p * np.log(p / q), 0))
from itertools import product

# All pairwise KL divergences between columns of hist_mat, flattened
# row-major: entry i*K + j holds kl(column i, column j).
KL = [kl(hist_mat[:, col_a], hist_mat[:, col_b])
      for col_a, col_b in product(range(K), repeat=2)]
def kullback_leibler_divergence(X):
    """
    Finds the pairwise Kullback-Leibler divergence
    matrix between all rows in X.

    Parameters
    ----------
    X : array_like, shape (n_samples, n_features)
        Array of probability data. Each row must sum to 1.

    Returns
    -------
    D : ndarray, shape (n_samples, n_samples)
        The Kullback-Leibler divergence matrix, in bits. A pairwise matrix D
        such that D_{i, j} is the divergence between the ith and jth vectors
        of the given matrix X.

    Notes
    -----
    Based on code from Gordon J. Berman et al.
    (https://github.com/gordonberman/MotionMapper)

    Zero entries of X are treated as 0 * log 0 = 0 by zeroing the log.
    NOTE(review): this also zeroes log X_jk cross terms where X_jk == 0,
    so KL against a distribution with zeros is underestimated rather than
    infinite — presumably intentional for clustering use; confirm.

    References:
    -----------
    Berman, G. J., Choi, D. M., Bialek, W., & Shaevitz, J. W. (2014).
    Mapping the stereotyped behaviour of freely moving fruit flies.
    Journal of The Royal Society Interface, 11(99), 20140672.
    """
    X = np.asarray(X, dtype=float)
    X_log = np.log(X)
    # Replace -inf/nan produced by log(0) so those terms contribute 0.
    X_log[np.isinf(X_log) | np.isnan(X_log)] = 0
    # Row entropies: H_i = -sum_k X[i, k] * log X[i, k]  (in nats).
    entropies = -np.sum(X * X_log, axis=1)
    # Cross terms: D[i, j] = -sum_k X[i, k] * log X[j, k]  (cross-entropy).
    D = np.matmul(-X, X_log.T)
    # KL(i || j) = cross_entropy(i, j) - H_i, so subtract each ROW's own
    # entropy. BUGFIX: the original `D - entropies` broadcast over the last
    # axis, subtracting entropies[j] instead of entropies[i].
    D = D - entropies[:, np.newaxis]
    D = D / np.log(2)  # convert nats -> bits
    D *= (1 - np.eye(D.shape[0]))  # force exact zeros on the diagonal
    return D
Add a comment — please sign in to comment.