% norm_entropy.m (1.0 KB)
  1. function [H, bits] = norm_entropy(data,varargin)
  2. % This returns the normalized entropy for discrete data
  3. % Works on 1D data.
  4. if min(size(data))>1
  5. error('operated on 1-D input')
  6. end
  7. ent_type = utils.inputordefault('ent_type','renyi',varargin);
  8. alpha = utils.inputordefault('alpha',5,varargin);
  9. ud = unique(data);
  10. P = 0;
  11. n_samp = numel(data);
  12. if iscell(data)
  13. datacat = categorical(data, ud);
  14. data = datacat;
  15. end
  16. switch ent_type
  17. case 'shannon'
  18. for ux = 1:numel(ud)
  19. tmpP = sum(data == ud(ux))/n_samp;
  20. P = P + tmpP .* log2(tmpP);
  21. end
  22. H = -P / log2(numel(ud));
  23. bits = -P;
  24. case 'renyi'
  25. Palpha = 0;
  26. for ux = 1:numel(ud)
  27. tmpP = sum(data == ud(ux))/n_samp;
  28. P = P + tmpP;
  29. Palpha = Palpha + tmpP.^alpha;
  30. end
  31. H = 1/(1-alpha) * log2(Palpha/P) / log2(numel(ud));
  32. bits = 1/(1-alpha) * log2(Palpha/P);
  33. otherwise
  34. error('norm_entropy','do not know how to calculate %s',ent_type)
  35. end