% fig8_2.m
  1. % Local Regression and Likelihood, Figure 8.2.
  2. %
  3. % Discrimination/Classification, simple example using
  4. % density estimation.
  5. %
  6. % First, compute density estimates fit0, fit1 ('family','rate'
  7. % - output is in events per unit area) for each class in the
  8. % training sample. The ratio fit1/(fit1+fit0) estimates the
  9. % posterior probability that an observation comes from population 1.
  10. %
  11. % plotting the classification boundary is slightly tricky - it depends
  12. % on both fits, so lfplot() can't be used. Instead, both fits must be
  13. % evaluated on the same grid of values, which is then used to make a
  14. % contour plot.
  15. %
  16. % Author: Catherine Loader
  17. load cltrain;
  18. u0 = find(y==0);
  19. u1 = find(y==1);
  20. fit0 = locfit([x1(u0) x2(u0)],y(u0),'family','rate','scale',0);
  21. fit1 = locfit([x1(u1) x2(u1)],y(u1),'family','rate','scale',0);
  22. v0 = -3+6*(0:50)'/50;
  23. v1 = -2.2+4.2*(0:49)'/49;
  24. % predict returns log(rate)
  25. z = predict(fit0,{v0 v1})-predict(fit1,{v0 v1});
  26. z = reshape(z,51,50);
  27. figure('Name','fig8_2: classification');
  28. contour(v0,v1,z',[0 0]);
  29. hold on;
  30. plot(x1(u0),x2(u0),'.');
  31. plot(x1(u1),x2(u1),'.','color','red');
  32. hold off;
  33. p0 = predict(fit0,[x1 x2]);
  34. p1 = predict(fit1,[x1 x2]);
  35. py = (p1 > p0);
  36. disp('Classification table for training data');
  37. tabulate(10*y+py);
  38. load cltest;
  39. p0 = predict(fit0,[x1 x2]);
  40. p1 = predict(fit1,[x1 x2]);
  41. py = (p1 > p0);
  42. disp('Classification table for test data');
  43. tabulate(10*y+py);