#ifndef __dwi_tensor_h__
#define __dwi_tensor_h__
29 template <
typename T,
class MatrixType>
30 inline Eigen::Matrix<T,Eigen::Dynamic,Eigen::Dynamic>
grad2bmatrix (
const MatrixType& grad,
bool dki =
false)
32 std::unique_ptr<DWI::Shells> shells;
36 WARN (
"Unable to separate diffusion gradient table into shells; tensor estimation success uncertain");
40 if (shells->count() < 3)
41 throw Exception (
"Kurtosis tensor estimation requires at least 3 b-value shells");
43 if (shells->count() < 2)
44 throw Exception (
"Tensor estimation requires at least 2 b-values");
48 Eigen::Matrix<T,Eigen::Dynamic,Eigen::Dynamic> bmat (grad.rows(), (dki ? 22 : 7));
49 for (ssize_t i = 0; i < grad.rows(); ++i) {
50 bmat (i,0) = grad(i,3) * grad(i,0) * grad(i,0);
51 bmat (i,1) = grad(i,3) * grad(i,1) * grad(i,1);
52 bmat (i,2) = grad(i,3) * grad(i,2) * grad(i,2);
53 bmat (i,3) = grad(i,3) * grad(i,0) * grad(i,1) * T(2.0);
54 bmat (i,4) = grad(i,3) * grad(i,0) * grad(i,2) * T(2.0);
55 bmat (i,5) = grad(i,3) * grad(i,1) * grad(i,2) * T(2.0);
58 bmat (i,7) = -grad(i,3) * grad(i,3) * grad(i,0) * grad(i,0) * grad(i,0) * grad(i,0) * T(1.0)/T(6.0);
59 bmat (i,8) = -grad(i,3) * grad(i,3) * grad(i,1) * grad(i,1) * grad(i,1) * grad(i,1) * T(1.0)/T(6.0);
60 bmat (i,9) = -grad(i,3) * grad(i,3) * grad(i,2) * grad(i,2) * grad(i,2) * grad(i,2) * T(1.0)/T(6.0);
61 bmat (i,10) = -grad(i,3) * grad(i,3) * grad(i,0) * grad(i,0) * grad(i,0) * grad(i,1) * T(2.0)/T(3.0);
62 bmat (i,11) = -grad(i,3) * grad(i,3) * grad(i,0) * grad(i,0) * grad(i,0) * grad(i,2) * T(2.0)/T(3.0);
63 bmat (i,12) = -grad(i,3) * grad(i,3) * grad(i,0) * grad(i,1) * grad(i,1) * grad(i,1) * T(2.0)/T(3.0);
64 bmat (i,13) = -grad(i,3) * grad(i,3) * grad(i,0) * grad(i,2) * grad(i,2) * grad(i,2) * T(2.0)/T(3.0);
65 bmat (i,14) = -grad(i,3) * grad(i,3) * grad(i,1) * grad(i,1) * grad(i,1) * grad(i,2) * T(2.0)/T(3.0);
66 bmat (i,15) = -grad(i,3) * grad(i,3) * grad(i,1) * grad(i,2) * grad(i,2) * grad(i,2) * T(2.0)/T(3.0);
67 bmat (i,16) = -grad(i,3) * grad(i,3) * grad(i,0) * grad(i,0) * grad(i,1) * grad(i,1);
68 bmat (i,17) = -grad(i,3) * grad(i,3) * grad(i,0) * grad(i,0) * grad(i,2) * grad(i,2);
69 bmat (i,18) = -grad(i,3) * grad(i,3) * grad(i,1) * grad(i,1) * grad(i,2) * grad(i,2);
70 bmat (i,19) = -grad(i,3) * grad(i,3) * grad(i,0) * grad(i,0) * grad(i,1) * grad(i,2) * T(2.0);
71 bmat (i,20) = -grad(i,3) * grad(i,3) * grad(i,0) * grad(i,1) * grad(i,1) * grad(i,2) * T(2.0);
72 bmat (i,21) = -grad(i,3) * grad(i,3) * grad(i,0) * grad(i,1) * grad(i,2) * grad(i,2) * T(2.0);
//! Fit tensor coefficients from DWI signals by log-linear least squares.
/*! \param dt   output parameter vector (tensor coefficients etc.)
 *  \param binv pseudo-inverse of the b-matrix from grad2bmatrix()
 *  \param dwi  input signal vector; MODIFIED IN PLACE: each sample is replaced
 *    by -log(sample), with non-positive samples clamped to 0 to avoid NaN/inf.
 *
 *  NOTE(review): braces and the final solve line were reconstructed from a
 *  garbled source — confirm against the upstream header.
 */
template <class MatrixType, class VectorTypeOut, class VectorTypeIn>
inline void dwi2tensor (VectorTypeOut& dt, const MatrixType& binv, VectorTypeIn& dwi)
{
  using T = typename VectorTypeIn::Scalar;
  // Linearise the exponential signal model: S = S0 * exp(-b:D)  =>  -log(S) is
  // linear in the parameters. Guard against log of non-positive samples.
  for (ssize_t i = 0; i < dwi.size(); ++i)
    dwi[i] = dwi[i] > T(0.0) ? -std::log (dwi[i]) : T(0.0);
  // Solve via the precomputed pseudo-inverse; noalias() skips Eigen's
  // temporary since dt cannot alias binv*dwi here.
  dt.noalias() = binv * dwi;
}
//! Compute the mean apparent diffusion coefficient (mean diffusivity).
/*! \param dt packed symmetric tensor [ Dxx Dyy Dzz Dxy Dxz Dyz ]
 *  \return trace/3, i.e. the mean of the three diagonal elements.
 */
template <class VectorType>
inline typename VectorType::Scalar tensor2ADC (const VectorType& dt)
{
  using T = typename VectorType::Scalar;
  return (dt[0]+dt[1]+dt[2]) / T (3.0);
}
//! Compute the fractional anisotropy (FA) of a diffusion tensor.
/*! \param dt packed symmetric tensor [ Dxx Dyy Dzz Dxy Dxz Dyz ]
 *  \return FA in [0,1]; returns 0 when the tensor norm is zero (avoids 0/0).
 *
 *  NOTE(review): the mean-diffusivity initialisation and the return/zero-norm
 *  branch were reconstructed from a garbled source; the mean diffusivity is
 *  computed inline (equivalent to tensor2ADC()) so the function is self-contained.
 */
template <class VectorType>
inline typename VectorType::Scalar tensor2FA (const VectorType& dt)
{
  using T = typename VectorType::Scalar;
  // mean diffusivity (trace/3):
  T trace = (dt[0] + dt[1] + dt[2]) / T (3.0);
  // deviatoric (anisotropic) part of the diagonal:
  T a[] = { dt[0]-trace, dt[1]-trace, dt[2]-trace };
  // reuse 'trace' as the squared Frobenius norm of the full tensor
  // (off-diagonals counted twice by symmetry):
  trace = dt[0]*dt[0] + dt[1]*dt[1] + dt[2]*dt[2] + T (2.0) * (dt[3]*dt[3] + dt[4]*dt[4] + dt[5]*dt[5]);
  return trace ?
    std::sqrt (T (1.5) * (a[0]*a[0]+a[1]*a[1]+a[2]*a[2] + T (2.0) * (dt[3]*dt[3]+dt[4]*dt[4]+dt[5]*dt[5])) / trace) :
    T (0.0);
}
//! Compute the relative anisotropy (RA) of a diffusion tensor.
/*! \param dt packed symmetric tensor [ Dxx Dyy Dzz Dxy Dxz Dyz ]
 *  \return RA (std-dev of eigenvalues over their mean); 0 when the mean
 *    diffusivity is zero (avoids division by zero).
 *
 *  NOTE(review): the mean-diffusivity initialisation and the return/zero-mean
 *  branch were reconstructed from a garbled source; the mean diffusivity is
 *  computed inline (equivalent to tensor2ADC()) so the function is self-contained.
 */
template <class VectorType>
inline typename VectorType::Scalar tensor2RA (const VectorType& dt)
{
  using T = typename VectorType::Scalar;
  // mean diffusivity (trace/3):
  T trace = (dt[0] + dt[1] + dt[2]) / T (3.0);
  // deviatoric (anisotropic) part of the diagonal:
  T a[] = { dt[0]-trace, dt[1]-trace, dt[2]-trace };
  return trace ?
    std::sqrt ((a[0]*a[0]+a[1]*a[1]+a[2]*a[2] + T (2.0) * (dt[3]*dt[3]+dt[4]*dt[4]+dt[5]*dt[5])) / T (3.0)) / trace :
    T (0.0);
}
/* Extraction artifact — auto-generated signature index, not part of the header
   (kept here as a comment so it cannot break compilation):
   VectorType::Scalar tensor2RA(const VectorType &dt)
   Eigen::Matrix< T, Eigen::Dynamic, Eigen::Dynamic > grad2bmatrix(const MatrixType &grad, bool dki=false)
   void dwi2tensor(VectorTypeOut &dt, const MatrixType &binv, VectorTypeIn &dwi)
   VectorType::Scalar tensor2FA(const VectorType &dt)
   VectorType::Scalar tensor2ADC(const VectorType &dt)
*/