1234567891011121314151617181920212223242526272829303132333435363738 |
function mi = mutual_information(X,Y,binwidth)
% mi = mutual_information(X,Y,binwidth)
% Mutual information I(X;Y) = H(X) + H(Y) - H(X,Y), in nats (natural log).
%
% Inputs:
%   X, Y     - two discrete variables of the same size. Cell arrays and
%              categorical arrays are converted to numeric codes first.
%   binwidth - bin width passed to histcounts/histcounts2 when estimating
%              the marginal and joint distributions.
%
% Output:
%   mi - estimated mutual information (>= 0 up to binning effects).
%
% NOTE: the original implementation summed p.*log(p) without negating,
% which is -H rather than H, so it returned -I(X;Y). Fixed here.

% If variables are categorical or cell, turn into numeric.
if iscell(X)
    X = categorical(X);
end
if iscategorical(X)
    X = double(X);
end
if iscell(Y)
    Y = categorical(Y);
end
if iscategorical(Y)
    Y = double(Y);
end

% Joint entropy H(X,Y). Zero-probability cells are dropped, applying the
% standard convention 0*log(0) = 0 (exact, unlike the former 1e-100 hack).
hxy = histcounts2(X,Y,'BinWidth',binwidth);
pxy = hxy(:)./sum(hxy(:));
pxy = pxy(pxy > 0);
entropy_XY = -sum(pxy.*log(pxy));

% Marginal entropy H(X).
hx = histcounts(X,'BinWidth',binwidth);
px = hx./sum(hx);           % probability distribution
px = px(px > 0);            % 0*log(0) = 0 convention
entropy_X = -sum(px .* log(px));

% Marginal entropy H(Y).
hy = histcounts(Y,'BinWidth',binwidth);
py = hy./sum(hy);           % probability distribution
py = py(py > 0);            % 0*log(0) = 0 convention
entropy_Y = -sum(py .* log(py));

% Mutual information.
mi = entropy_X + entropy_Y - entropy_XY;
end
|