% spm_dcm_J.m
  1. function [J] = spm_dcm_J(Y,U,X0,dt,R)
  2. % VOI extraction of adjusted data and Markov Blanket decomposition
  3. % FORMAT [J] = spm_dcm_J(Y,U,X0,dt,R)
  4. %
  5. % Y - response variable
  6. % U - exogenous inputs
  7. % XO - confounds
  8. % dt - time bin
  9. % R - element wise retriction matrix
  10. %
  11. %__________________________________________________________________________
  12. % This routine evaluates the effective connectivity of a dynamic causal
  13. % model based upon the Jacobian (i.e., state matrix) of a stochastic
  14. % differential equation. In other words, it approximates the coupling among
  15. % hidden states to first order, under some simplifying assumptions.
  16. % Starting from a linear state space model, in which the response variable
  17. % (y) is a linear convolution (K) of some hidden states (x) subject to
  18. % observation and system noise (r and e) respectively, we have:
  19. %
  20. % D*x = x*J' + e => K*D*x = K*x*J' + K*e = D*y = y*J' + K*e + D*r - r*J'
  21. % y = K*x + r => D*y = K*D*x + D*r
  22. %
  23. % This means we can approximate the system with a general linear model:
  24. %
  25. % D*y = y*J' + w: cov(w) = h(1)*K*K' + h(2)*D*D' + h(3)*I
  26. %
  27. % Where, h(3)*I = h(2)*J*J', approximately; noting that the leading
  28. % diagonal of J will dominate (and be negative). If K is specified in terms
  29. % of convolution kernels, then the covariance components of the linearised
  30. % system can be expressed as:
  31. %
  32. % K = k(1)*K{1} + k(2)*K{2} + ...
  33. % => K*K' = k(1)*k(1)*K{1}*K{1}' + k(1)*k(2)*K{1}*K{2}' ...
  34. %
  35. % Where k(i)*k(j) replaces the [hyper]parameter h(1) above. This linearized
  36. % system can be solved using parametric empirical Bayes (PEB) for each
  37. % response variable, under the simplifying assumption that there are the
  38. % same number of observations and hidden states.
  39. %
  40. % This allows large graphs to be inverted by considering the afferents
  41. % (i.e., influences on) to each node sequentially. Redundant elements of
  42. % the Jacobian (i.e., connections) are subsequently removed using Bayesian
  43. % model reduction (BMR). The result is a sparse Jacobian that corresponds
  44. % to the coupling among hidden states that generate observed double
  45. % responses, to first-order.
  46. %
  47. % See: Frässle S, Lomakina EI, Kasper L, Manjaly ZM, Leff A, Pruessmann KP,
  48. % Buhmann JM, Stephan KE. A generative model of whole-brain effective
  49. % connectivity.Neuroimage. 2018 Oct 1;179:505-529.
  50. %__________________________________________________________________________
  51. % Copyright (C) 2008-2014 Wellcome Trust Centre for Neuroimaging
  52. % Karl Friston
  53. % $Id: spm_dcm_J.m 7679 2019-10-24 15:54:07Z spm $
  54. % first order Jacobian
  55. %==========================================================================
  56. % convolution operators
  57. %--------------------------------------------------------------------------
  58. [nt,nu] = size(U);
  59. [nt,nv] = size(Y);
  60. xBF.dt = dt;
  61. xBF.name = 'hrf';
  62. xBF.length = 32;
  63. xBF = spm_get_bf(xBF);
  64. K{1} = spm_convmtx(xBF.bf,nt,'square');
  65. K{2} = spm_convmtx([1; 0],nt,'square');
  66. K{3} = spm_convmtx([1;-1],nt,'square');
  67. if nargin < 5
  68. R = ones(nv,nv);
  69. end
  70. % covariance components
  71. %--------------------------------------------------------------------------
  72. for k = 1:numel(K)
  73. C{k} = K{k}*K{k}';
  74. C{k} = nt*C{k}/trace(C{k})/64;
  75. end
  76. % remove confounds
  77. %--------------------------------------------------------------------------
  78. Y = Y - X0*pinv(X0)*Y;
  79. Y = Y/std(Y(:));
  80. U = U/std(U(:));
  81. J = zeros(nv + nu,nv + nu);
  82. for v = 1:nv
  83. % Parametric (empirical) Bayes
  84. %----------------------------------------------------------------------
  85. dY = gradient(Y(:,v),dt);
  86. pE{v} = zeros(nv + nu,1);
  87. pC{v} = speye(nv + nu,nv + nu);
  88. pE{v}(v) = -1;
  89. pC{v}(v,v) = 1/512;
  90. r = find( R(:,v));
  91. s = find(~R(:,v));
  92. u = [r; (1:nu)' + nv];
  93. pE{v}(s) = 0;
  94. pC{v}(s,s) = 0;
  95. P{1}.X = [Y(:,r) U];
  96. P{1}.C = C;
  97. P{2}.X = pE{v}(u);
  98. P{2}.C = pC{v}(u,u);
  99. PEB = spm_PEB(dY,P,512);
  100. Ep{v} = pE{v};
  101. Cp{v} = pC{v};
  102. Ep{v}(u) = PEB{2}.E;
  103. Cp{v}(u,u) = PEB{2}.C;
  104. end
  105. % Bayesian Model Reduction
  106. %==========================================================================
  107. OPT = 'symmetric';
  108. switch OPT
  109. case 'none'
  110. % no reduction
  111. %------------------------------------------------------------------
  112. for v = 1:nv
  113. % Jacobian for this voxel
  114. %--------------------------------------------------------------
  115. J(:,v) = Ep{v};
  116. end
  117. case 'symmetric'
  118. % reciprocal coupling shrinkage priors
  119. %------------------------------------------------------------------
  120. for i = 1:nv
  121. for j = (i + 1):nv
  122. if R(i,j)
  123. % afferent connection
  124. %----------------------------------------------------------
  125. rCi = pC{i};
  126. rCi(j,j) = 0;
  127. [Fi,sEi,sCi] = spm_log_evidence(Ep{i},Cp{i},pE{i},pC{i},pE{i},rCi);
  128. % efferent connection
  129. %----------------------------------------------------------
  130. rCj = pC{j};
  131. rCj(i,i) = 0;
  132. [Fj,sEj,sCj] = spm_log_evidence(Ep{j},Cp{j},pE{j},pC{j},pE{j},rCj);
  133. % accept new priors if F has increased
  134. %----------------------------------------------------------
  135. if (Fi + Fj) > 3
  136. Ep{i} = sEi;
  137. Ep{j} = sEj;
  138. Cp{i} = sCi;
  139. Cp{j} = sCj;
  140. pC{i} = rCi;
  141. pC{j} = rCj;
  142. end
  143. end
  144. end
  145. fprintf('evaluating (%i) of %i states\n',i,nv)
  146. end
  147. % assemble Jacobian
  148. %------------------------------------------------------------------
  149. for v = 1:nv
  150. J(:,v) = Ep{v};
  151. end
  152. case 'all'
  153. % Jacobian for this voxel
  154. %------------------------------------------------------------------
  155. for v = 1:nv
  156. % Bayesian Model Reduction
  157. %--------------------------------------------------------------
  158. BMR.M.pE = pE;
  159. BMR.M.pC = pC;
  160. BMR.Ep = Ep;
  161. BMR.Cp = Cp;
  162. BMR = spm_dcm_bmr_all(BMR,'all','BMS');
  163. % Jacobian for this voxel
  164. %--------------------------------------------------------------
  165. J(:,v) = BMR.Ep;
  166. end
  167. otherwise
  168. end
  169. % retain coupling between states
  170. %--------------------------------------------------------------------------
  171. J = J(1:nv,1:nv);