% mutual_information.m — histogram estimate of the mutual information
% between two discrete variables.
  1. function mi = mutual_information(X,Y,binwidth)
  2. % mi = mutual_information(x,y)
  3. % X and Y are two discrete variables of the same size.
  4. % if variables are cateogircal or cell, turn into numeric.
  5. if iscell(X)
  6. X = categorical(X);
  7. end
  8. if iscategorical(X)
  9. X = double(X);
  10. end
  11. if iscell(Y)
  12. Y = categorical(Y);
  13. end
  14. if iscategorical(Y)
  15. Y = double(Y);
  16. end
  17. % the joint entropy of X and Y
  18. hxy = histcounts2(X,Y,'BinWidth',binwidth);
  19. pxy = hxy(:)./sum(hxy(:));
  20. pxy(pxy==0) = 1e-100;
  21. entropy_XY = sum(pxy.*log(pxy));
  22. % the entropy of X
  23. hx = histcounts(X,'BinWidth',binwidth);
  24. px = hx./sum(hx); %probability distribution
  25. px(px==0) = 1e-100; % to avoid error in log calculation
  26. entropy_X = sum(px .* log(px));
  27. % the entropy of Y
  28. hy = histcounts(Y,'BinWidth',binwidth);
  29. py = hy./sum(hy); %probability distribution
  30. py(py==0) = 1e-100; % to avoid error in log calculation
  31. entropy_Y = sum(py .* log(py));
  32. % mutual information
  33. mi = entropy_X + entropy_Y - entropy_XY;
  34. end