00001
00002
00003
00004
00005
00006
00007
00008
00009
00010
00011 #ifndef TSV_META_DOT_DOT_H
00012 #define TSV_META_DOT_DOT_H
00013
00015
00016
00017
00019
// Primary template: intentionally empty.  Only the Tenzor / SymTenzor /
// AntiSymTenzor combinations specialized below provide a double-dot
// (full contraction) implementation; any other pairing fails to compile.
template<class T1, class T2>
struct TSV_MetaDotDot
{
};
00021
00023
00024
00025
00027
00028 template<class T1, class T2, unsigned D>
00029 struct TSV_MetaDotDot< Tenzor<T1,D> , Tenzor<T2,D> >
00030 {
00031 typedef typename PETEBinaryReturn<T1,T2,OpMultipply>::type T0;
00032 inline static T0
00033 apply(const Tenzor<T1,D>& lhs, const Tenzor<T2,D>& rhs) {
00034 T0 sum = 0.0;
00035 for (unsigned int i=0; i<D; ++i)
00036 for (unsigned int j=0; j<D; ++j)
00037 sum += lhs(i,j) * rhs(i,j);
00038
00039 return sum;
00040 }
00041 };
00042
00043 template<class T1, class T2>
00044 struct TSV_MetaDotDot< Tenzor<T1,1> , Tenzor<T2,1> >
00045 {
00046 typedef typename PETEBinaryReturn<T1,T2,OpMultipply>::type T0;
00047 inline static T0
00048 apply(const Tenzor<T1,1>& lhs, const Tenzor<T2,1>& rhs) {
00049 return lhs[0]*rhs[0];
00050 }
00051 };
00052
00053 template<class T1, class T2>
00054 struct TSV_MetaDotDot< Tenzor<T1,2> , Tenzor<T2,2> >
00055 {
00056 typedef typename PETEBinaryReturn<T1,T2,OpMultipply>::type T0;
00057 inline static T0
00058 apply(const Tenzor<T1,2>& lhs, const Tenzor<T2,2>& rhs) {
00059 return lhs[0] * rhs[0] + lhs[1] * rhs[1] +
00060 lhs[2] * rhs[2] + lhs[3] * rhs[3];
00061 }
00062 };
00063
00064 template<class T1, class T2>
00065 struct TSV_MetaDotDot< Tenzor<T1,3> , Tenzor<T2,3> >
00066 {
00067 typedef typename PETEBinaryReturn<T1,T2,OpMultipply>::type T0;
00068 inline static T0
00069 apply(const Tenzor<T1,3>& lhs, const Tenzor<T2,3>& rhs) {
00070 return lhs[0] * rhs[0] + lhs[1] * rhs[1] +
00071 lhs[2] * rhs[2] + lhs[3] * rhs[3] + lhs[4] * rhs[4] +
00072 lhs[5] * rhs[5] + lhs[6] * rhs[6] + lhs[7] * rhs[7] +
00073 lhs[8] * rhs[8];
00074 }
00075 };
00076
00078
00079
00080
00082
00083 template<class T1, class T2, unsigned D>
00084 struct TSV_MetaDotDot< SymTenzor<T1,D> , SymTenzor<T2,D> >
00085 {
00086 typedef typename PETEBinaryReturn<T1,T2,OpMultipply>::type T0;
00087 inline static T0
00088 apply(const SymTenzor<T1,D>& lhs, const SymTenzor<T2,D>& rhs) {
00089 T0 sum = 0.0;
00090 for (unsigned int i=0; i<D; ++i)
00091 sum += lhs.HL(i, i) * rhs.HL(i, i);
00092
00093 for (unsigned int i=0; i<D; ++i)
00094 for (unsigned int j=i+1; j<D; ++j)
00095 sum += 2.0 * lhs.HL(j, i) * rhs.HL(j, i);
00096
00097 return sum;
00098 }
00099 };
00100
00101 template<class T1, class T2>
00102 struct TSV_MetaDotDot< SymTenzor<T1,1> , SymTenzor<T2,1> >
00103 {
00104 typedef typename PETEBinaryReturn<T1,T2,OpMultipply>::type T0;
00105 inline static T0
00106 apply(const SymTenzor<T1,1>& lhs, const SymTenzor<T2,1>& rhs) {
00107 return lhs[0] * rhs[0];
00108 }
00109 };
00110
00111 template<class T1, class T2>
00112 struct TSV_MetaDotDot< SymTenzor<T1,2> , SymTenzor<T2,2> >
00113 {
00114 typedef typename PETEBinaryReturn<T1,T2,OpMultipply>::type T0;
00115 inline static T0
00116 apply(const SymTenzor<T1,2>& lhs, const SymTenzor<T2,2>& rhs) {
00117 return lhs(0,0) * rhs(0,0) + lhs(1,1) * rhs(1,1) +
00118 2.0 * lhs(0,1) * rhs(0,1);
00119 }
00120 };
00121
00122 template<class T1, class T2>
00123 struct TSV_MetaDotDot< SymTenzor<T1,3> , SymTenzor<T2,3> >
00124 {
00125 typedef typename PETEBinaryReturn<T1,T2,OpMultipply>::type T0;
00126 inline static T0
00127 apply(const SymTenzor<T1,3>& lhs, const SymTenzor<T2,3>& rhs) {
00128 return lhs(0,0) * rhs(0,0) + lhs(1,1) * rhs(1,1) + lhs(2,2) * rhs(2,2) +
00129 2.0 * (lhs(0,1) * rhs(0,1) + lhs(0,2) * rhs(0,2) +
00130 lhs(1,2) * rhs(1,2));
00131 }
00132 };
00133
00135
00136
00137
00139
00140 template<class T1, class T2, unsigned D>
00141 struct TSV_MetaDotDot< SymTenzor<T1,D> , Tenzor<T2,D> >
00142 {
00143 typedef typename PETEBinaryReturn<T1,T2,OpMultipply>::type T0;
00144 inline static T0
00145 apply(const SymTenzor<T1,D>& lhs, const Tenzor<T2,D>& rhs) {
00146 T0 sum = 0.0;
00147 for (unsigned int i=0; i<D; ++i)
00148 for (unsigned int j=0; j<D; ++j)
00149 sum += lhs(i,j) * rhs(i,j);
00150
00151 return sum;
00152 }
00153 };
00154
00155 template<class T1, class T2>
00156 struct TSV_MetaDotDot< SymTenzor<T1,1> , Tenzor<T2,1> >
00157 {
00158 typedef typename PETEBinaryReturn<T1,T2,OpMultipply>::type T0;
00159 inline static T0
00160 apply(const SymTenzor<T1,1>& lhs, const Tenzor<T2,1>& rhs) {
00161 return lhs[0]*rhs[0];
00162 }
00163 };
00164
00165 template<class T1, class T2>
00166 struct TSV_MetaDotDot< SymTenzor<T1,2> , Tenzor<T2,2> >
00167 {
00168 typedef typename PETEBinaryReturn<T1,T2,OpMultipply>::type T0;
00169 inline static T0
00170 apply(const SymTenzor<T1,2>& lhs, const Tenzor<T2,2>& rhs) {
00171 return lhs(0,0) * rhs(0,0) + lhs(0,1) * (rhs(0,1) + rhs(1,0)) +
00172 lhs(1,1) * rhs(1,1);
00173 }
00174 };
00175
00176 template<class T1, class T2>
00177 struct TSV_MetaDotDot< SymTenzor<T1,3> , Tenzor<T2,3> >
00178 {
00179 typedef typename PETEBinaryReturn<T1,T2,OpMultipply>::type T0;
00180 inline static T0
00181 apply(const SymTenzor<T1,3>& lhs, const Tenzor<T2,3>& rhs) {
00182 return lhs(0,0) * rhs(0,0) + lhs(0,1) * (rhs(0,1) + rhs(1,0)) +
00183 + lhs(0,2) * (rhs(0,2) + rhs(2,0)) + lhs(1,1) * rhs(1,1) +
00184 lhs(1,2) * (rhs(1,2) + rhs(2,1)) + lhs(2,2) * rhs(2,2);
00185 }
00186 };
00187
00189
00190
00191
00193
00194 template<class T1, class T2, unsigned D>
00195 struct TSV_MetaDotDot< Tenzor<T1,D> , SymTenzor<T2,D> >
00196 {
00197 typedef typename PETEBinaryReturn<T1,T2,OpMultipply>::type T0;
00198 inline static T0
00199 apply(const Tenzor<T1,D>& lhs, const SymTenzor<T2,D>& rhs) {
00200 T0 sum = 0.0;
00201 for (unsigned int i=0; i<D; ++i)
00202 for (unsigned int j=0; j<D; ++j)
00203 sum += lhs(i,j) * rhs(j,j);
00204
00205 return sum;
00206 }
00207 };
00208
00209 template<class T1, class T2>
00210 struct TSV_MetaDotDot< Tenzor<T1,1> , SymTenzor<T2,1> >
00211 {
00212 typedef typename PETEBinaryReturn<T1,T2,OpMultipply>::type T0;
00213 inline static T0
00214 apply(const Tenzor<T1,1>& lhs, const SymTenzor<T2,1>& rhs) {
00215 return lhs[0]*rhs[0];
00216 }
00217 };
00218
00219 template<class T1, class T2>
00220 struct TSV_MetaDotDot< Tenzor<T1,2> , SymTenzor<T2,2> >
00221 {
00222 typedef typename PETEBinaryReturn<T1,T2,OpMultipply>::type T0;
00223 inline static T0
00224 apply(const Tenzor<T1,2>& lhs, const SymTenzor<T2,2>& rhs) {
00225 return lhs(0,0) * rhs(0,0) + (lhs(0,1) + lhs(1,0)) * rhs(0,1) +
00226 lhs(1,1) * rhs(1,1);
00227 }
00228 };
00229
00230 template<class T1, class T2>
00231 struct TSV_MetaDotDot< Tenzor<T1,3> , SymTenzor<T2,3> >
00232 {
00233 typedef typename PETEBinaryReturn<T1,T2,OpMultipply>::type T0;
00234 inline static T0
00235 apply(const Tenzor<T1,3>& lhs, const SymTenzor<T2,3>& rhs) {
00236 return lhs(0,0) * rhs(0,0) + (lhs(0,1) + lhs(1,0)) * rhs(0,1) +
00237 lhs(1,1) * rhs(1,1) + (lhs(0,2) + lhs(2,0)) * rhs(0,2) +
00238 (lhs(1,2) + lhs(2,1)) * rhs(1,2) + lhs(2,2) * rhs(2,2);
00239 }
00240 };
00241
00243
00244
00245
00247
00248 template<class T1, class T2, unsigned D>
00249 struct TSV_MetaDotDot< AntiSymTenzor<T1,D> , AntiSymTenzor<T2,D> >
00250 {
00251 typedef typename PETEBinaryReturn<T1,T2,OpMultipply>::type T0;
00252 inline static T0
00253 apply(const AntiSymTenzor<T1,D>& lhs, const AntiSymTenzor<T2,D>& rhs) {
00254 T0 sum = lhs[0]*rhs[0];
00255 for ( int i=1; i<D*(D-1)/2; ++i)
00256 sum += lhs[i]*rhs[i];
00257 return sum+sum;
00258 }
00259 };
00260
00261 template<class T1, class T2>
00262 struct TSV_MetaDotDot< AntiSymTenzor<T1,2> , AntiSymTenzor<T2,2> >
00263 {
00264 typedef typename PETEBinaryReturn<T1,T2,OpMultipply>::type T0;
00265 inline static T0
00266 apply(const AntiSymTenzor<T1,2>& lhs, const AntiSymTenzor<T2,2>& rhs) {
00267 T0 sum = lhs[0]*rhs[0];
00268 return sum+sum;
00269 }
00270 };
00271
00272 template<class T1, class T2>
00273 struct TSV_MetaDotDot< AntiSymTenzor<T1,3> , AntiSymTenzor<T2,3> >
00274 {
00275 typedef typename PETEBinaryReturn<T1,T2,OpMultipply>::type T0;
00276 inline static T0
00277 apply(const AntiSymTenzor<T1,3>& lhs, const AntiSymTenzor<T2,3>& rhs) {
00278 T0 sum = lhs[0]*rhs[0]+lhs[1]*rhs[1]+lhs[2]*rhs[2];
00279 return sum+sum;
00280 }
00281 };
00282
00284
00285 #endif // TSV_META_DOT_DOT_H
00286
00287
00288
00289
00290
00291
00292