xref: /petsc/src/mat/impls/baij/seq/dgefa4.c (revision 2e92ee13a8395f820cc1e3fd74a7607ed52efa2a)
1be1d678aSKris Buschelman 
24224c193SBarry Smith /*
38a36c062SBarry Smith        Inverts 4 by 4 matrix using partial pivoting.
471c5468dSBarry Smith 
571c5468dSBarry Smith        Used by the sparse factorization routines in
6dd882469SBarry Smith      src/mat/impls/baij/seq
771c5468dSBarry Smith 
871c5468dSBarry Smith        This is a combination of the Linpack routines
971c5468dSBarry Smith     dgefa() and dgedi() specialized for a size of 4.
1071c5468dSBarry Smith 
114224c193SBarry Smith */
12c6db04a5SJed Brown #include <petscsys.h>
134224c193SBarry Smith 
144a2ae208SSatish Balay #undef __FUNCT__
1596b95a6bSBarry Smith #define __FUNCT__ "PetscKernel_A_gets_inverse_A_4"
16*2e92ee13SHong Zhang PETSC_EXTERN PetscErrorCode PetscKernel_A_gets_inverse_A_4(MatScalar *a,PetscReal shift,PetscBool allowzeropivot,PetscBool *zeropivotdetected)
174224c193SBarry Smith {
18690b6cddSBarry Smith   PetscInt  i__2,i__3,kp1,j,k,l,ll,i,ipvt[4],kb,k3;
19690b6cddSBarry Smith   PetscInt  k4,j3;
20b48ee343SBarry Smith   MatScalar *aa,*ax,*ay,work[16],stmp;
21329f5518SBarry Smith   MatReal   tmp,max;
224224c193SBarry Smith 
234224c193SBarry Smith   /* gaussian elimination with partial pivoting */
244224c193SBarry Smith 
253a40ed3dSBarry Smith   PetscFunctionBegin;
260daa89e9SHong Zhang   shift = .25*shift*(1.e-12 + PetscAbsScalar(a[0]) + PetscAbsScalar(a[5]) + PetscAbsScalar(a[10]) + PetscAbsScalar(a[15]));
274224c193SBarry Smith   /* Parameter adjustments */
288a36c062SBarry Smith   a -= 5;
294224c193SBarry Smith 
308a36c062SBarry Smith   for (k = 1; k <= 3; ++k) {
314224c193SBarry Smith     kp1 = k + 1;
328a36c062SBarry Smith     k3  = 4*k;
334224c193SBarry Smith     k4  = k3 + k;
344224c193SBarry Smith     /* find l = pivot index */
354224c193SBarry Smith 
36ed33f8a5SSatish Balay     i__2 = 5 - k;
374224c193SBarry Smith     aa   = &a[k4];
384224c193SBarry Smith     max  = PetscAbsScalar(aa[0]);
394224c193SBarry Smith     l    = 1;
404224c193SBarry Smith     for (ll=1; ll<i__2; ll++) {
414224c193SBarry Smith       tmp = PetscAbsScalar(aa[ll]);
424224c193SBarry Smith       if (tmp > max) { max = tmp; l = ll+1;}
434224c193SBarry Smith     }
444224c193SBarry Smith     l        += k - 1;
45da10e913SBarry Smith     ipvt[k-1] = l;
464224c193SBarry Smith 
475b8514ebSBarry Smith     if (a[l + k3] == 0.0) {
4826fbe8dcSKarl Rupp       if (shift == 0.0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_MAT_LU_ZRPVT,"Zero pivot, row %D",k-1);
4926fbe8dcSKarl Rupp       else {
501bfe5b3dSBarry Smith         /* SHIFT is applied to SINGLE diagonal entry; does this make any sense? */
511bfe5b3dSBarry Smith         a[l + k3] = shift;
521bfe5b3dSBarry Smith       }
534224c193SBarry Smith     }
544224c193SBarry Smith 
554224c193SBarry Smith     /* interchange if necessary */
564224c193SBarry Smith 
574224c193SBarry Smith     if (l != k) {
584224c193SBarry Smith       stmp      = a[l + k3];
594224c193SBarry Smith       a[l + k3] = a[k4];
604224c193SBarry Smith       a[k4]     = stmp;
614224c193SBarry Smith     }
624224c193SBarry Smith 
634224c193SBarry Smith     /* compute multipliers */
644224c193SBarry Smith 
654224c193SBarry Smith     stmp = -1. / a[k4];
668a36c062SBarry Smith     i__2 = 4 - k;
674224c193SBarry Smith     aa   = &a[1 + k4];
6826fbe8dcSKarl Rupp     for (ll=0; ll<i__2; ll++) aa[ll] *= stmp;
694224c193SBarry Smith 
704224c193SBarry Smith     /* row elimination with column indexing */
714224c193SBarry Smith 
724224c193SBarry Smith     ax = &a[k4+1];
738a36c062SBarry Smith     for (j = kp1; j <= 4; ++j) {
748a36c062SBarry Smith       j3   = 4*j;
754224c193SBarry Smith       stmp = a[l + j3];
764224c193SBarry Smith       if (l != k) {
774224c193SBarry Smith         a[l + j3] = a[k + j3];
784224c193SBarry Smith         a[k + j3] = stmp;
794224c193SBarry Smith       }
804224c193SBarry Smith 
818a36c062SBarry Smith       i__3 = 4 - k;
824224c193SBarry Smith       ay   = &a[1+k+j3];
8326fbe8dcSKarl Rupp       for (ll=0; ll<i__3; ll++) ay[ll] += stmp*ax[ll];
844224c193SBarry Smith     }
854224c193SBarry Smith   }
86da10e913SBarry Smith   ipvt[3] = 4;
87*2e92ee13SHong Zhang   if (a[20] == 0.0) {
88*2e92ee13SHong Zhang     PetscErrorCode ierr;
89*2e92ee13SHong Zhang     if (allowzeropivot) {
90*2e92ee13SHong Zhang       ierr = PetscInfo1(NULL,"Zero pivot, row %D\n",3);CHKERRQ(ierr);
91*2e92ee13SHong Zhang       *zeropivotdetected = PETSC_TRUE;
92*2e92ee13SHong Zhang     } else SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_MAT_LU_ZRPVT,"Zero pivot, row %D",3);
93*2e92ee13SHong Zhang   }
944224c193SBarry Smith 
954224c193SBarry Smith   /*
964224c193SBarry Smith    Now form the inverse
974224c193SBarry Smith   */
984224c193SBarry Smith 
994224c193SBarry Smith   /* compute inverse(u) */
1004224c193SBarry Smith 
1018a36c062SBarry Smith   for (k = 1; k <= 4; ++k) {
1028a36c062SBarry Smith     k3    = 4*k;
1034224c193SBarry Smith     k4    = k3 + k;
1044224c193SBarry Smith     a[k4] = 1.0 / a[k4];
1054224c193SBarry Smith     stmp  = -a[k4];
1064224c193SBarry Smith     i__2  = k - 1;
1074224c193SBarry Smith     aa    = &a[k3 + 1];
1084224c193SBarry Smith     for (ll=0; ll<i__2; ll++) aa[ll] *= stmp;
1094224c193SBarry Smith     kp1 = k + 1;
1108a36c062SBarry Smith     if (4 < kp1) continue;
1114224c193SBarry Smith     ax = aa;
1128a36c062SBarry Smith     for (j = kp1; j <= 4; ++j) {
1138a36c062SBarry Smith       j3        = 4*j;
1144224c193SBarry Smith       stmp      = a[k + j3];
1154224c193SBarry Smith       a[k + j3] = 0.0;
1164224c193SBarry Smith       ay        = &a[j3 + 1];
11726fbe8dcSKarl Rupp       for (ll=0; ll<k; ll++) ay[ll] += stmp*ax[ll];
1184224c193SBarry Smith     }
1194224c193SBarry Smith   }
1204224c193SBarry Smith 
1214224c193SBarry Smith   /* form inverse(u)*inverse(l) */
1224224c193SBarry Smith 
1238a36c062SBarry Smith   for (kb = 1; kb <= 3; ++kb) {
1248a36c062SBarry Smith     k   = 4 - kb;
1258a36c062SBarry Smith     k3  = 4*k;
1264224c193SBarry Smith     kp1 = k + 1;
1274224c193SBarry Smith     aa  = a + k3;
1288a36c062SBarry Smith     for (i = kp1; i <= 4; ++i) {
129b48ee343SBarry Smith       work[i-1] = aa[i];
1304224c193SBarry Smith       aa[i]     = 0.0;
1314224c193SBarry Smith     }
1328a36c062SBarry Smith     for (j = kp1; j <= 4; ++j) {
133b48ee343SBarry Smith       stmp   = work[j-1];
1348a36c062SBarry Smith       ax     = &a[4*j + 1];
1354224c193SBarry Smith       ay     = &a[k3 + 1];
1364224c193SBarry Smith       ay[0] += stmp*ax[0];
1374224c193SBarry Smith       ay[1] += stmp*ax[1];
1384224c193SBarry Smith       ay[2] += stmp*ax[2];
1398a36c062SBarry Smith       ay[3] += stmp*ax[3];
1404224c193SBarry Smith     }
141da10e913SBarry Smith     l = ipvt[k-1];
1424224c193SBarry Smith     if (l != k) {
1434224c193SBarry Smith       ax   = &a[k3 + 1];
1448a36c062SBarry Smith       ay   = &a[4*l + 1];
1454224c193SBarry Smith       stmp = ax[0]; ax[0] = ay[0]; ay[0] = stmp;
1464224c193SBarry Smith       stmp = ax[1]; ax[1] = ay[1]; ay[1] = stmp;
1474224c193SBarry Smith       stmp = ax[2]; ax[2] = ay[2]; ay[2] = stmp;
1488a36c062SBarry Smith       stmp = ax[3]; ax[3] = ay[3]; ay[3] = stmp;
1494224c193SBarry Smith     }
1504224c193SBarry Smith   }
1513a40ed3dSBarry Smith   PetscFunctionReturn(0);
1524224c193SBarry Smith }
1534224c193SBarry Smith 
1549b445006SKris Buschelman #if defined(PETSC_HAVE_SSE)
1559b445006SKris Buschelman #include PETSC_HAVE_SSE
15699148eceSKris Buschelman 
15799148eceSKris Buschelman #undef __FUNCT__
15896b95a6bSBarry Smith #define __FUNCT__ "PetscKernel_A_gets_inverse_A_4_SSE"
PETSC_EXTERN PetscErrorCode PetscKernel_A_gets_inverse_A_4_SSE(float *a)
{
  /*
     Inverts, in place, the 4x4 single-precision matrix stored in the 16
     contiguous floats at a, using the SSE macro layer (PETSC_HAVE_SSE header).

     This routine is converted from Intel's Small Matrix Library.
     See: Streaming SIMD Extensions -- Inverse of 4x4 Matrix
     Order Number: 245043-001
     March 1999
     http://www.intel.com

     Inverse of a 4x4 matrix via Cramer's Rule:
     bool Invert4x4(SMLXMatrix &);

     NOTE(review): there is no singular-matrix check; a (near-)zero determinant
     produces inf/nan entries.  The determinant reciprocal is computed with an
     approximate reciprocal (SSE_RECIP_SS) refined by one Newton-Raphson step,
     so the result is close to, but not exactly, IEEE single precision.
  */
  PetscFunctionBegin;
  SSE_SCOPE_BEGIN;
  SSE_INLINE_BEGIN_1(a)

/* ----------------------------------------------- */
/* Load the 16 entries and shuffle them into XMM3/XMM4/XMM5/XMM6
   (a transposed arrangement of the input, per the Intel reference). */

  SSE_LOADL_PS(SSE_ARG_1,FLOAT_0,XMM0)
  SSE_LOADH_PS(SSE_ARG_1,FLOAT_4,XMM0)

  SSE_LOADL_PS(SSE_ARG_1,FLOAT_8,XMM5)
  SSE_LOADH_PS(SSE_ARG_1,FLOAT_12,XMM5)

  SSE_COPY_PS(XMM3,XMM0)
  SSE_SHUFFLE(XMM3,XMM5,0x88)

  SSE_SHUFFLE(XMM5,XMM0,0xDD)

  SSE_LOADL_PS(SSE_ARG_1,FLOAT_2,XMM0)
  SSE_LOADH_PS(SSE_ARG_1,FLOAT_6,XMM0)

  SSE_LOADL_PS(SSE_ARG_1,FLOAT_10,XMM6)
  SSE_LOADH_PS(SSE_ARG_1,FLOAT_14,XMM6)

  SSE_COPY_PS(XMM4,XMM0)
  SSE_SHUFFLE(XMM4,XMM6,0x88)

  SSE_SHUFFLE(XMM6,XMM0,0xDD)

/* ----------------------------------------------- */
/* Accumulate cofactor (adjoint) terms: each group below forms products of
   row pairs, swizzles them (0xB1 swaps within pairs, 0x4E swaps halves),
   and adds/subtracts into running totals.  Partial rows are parked in
   memory at FLOAT_4/FLOAT_12 and reloaded later.  See the Intel note
   cited above for the element-by-element derivation. */

  SSE_COPY_PS(XMM7,XMM4)
  SSE_MULT_PS(XMM7,XMM6)

  SSE_SHUFFLE(XMM7,XMM7,0xB1)

  SSE_COPY_PS(XMM0,XMM5)
  SSE_MULT_PS(XMM0,XMM7)

  SSE_COPY_PS(XMM2,XMM3)
  SSE_MULT_PS(XMM2,XMM7)

  SSE_SHUFFLE(XMM7,XMM7,0x4E)

  SSE_COPY_PS(XMM1,XMM5)
  SSE_MULT_PS(XMM1,XMM7)
  SSE_SUB_PS(XMM1,XMM0)

  SSE_MULT_PS(XMM7,XMM3)
  SSE_SUB_PS(XMM7,XMM2)

  SSE_SHUFFLE(XMM7,XMM7,0x4E)
  SSE_STORE_PS(SSE_ARG_1,FLOAT_4,XMM7)

/* ----------------------------------------------- */

  SSE_COPY_PS(XMM0,XMM5)
  SSE_MULT_PS(XMM0,XMM4)

  SSE_SHUFFLE(XMM0,XMM0,0xB1)

  SSE_COPY_PS(XMM2,XMM6)
  SSE_MULT_PS(XMM2,XMM0)
  SSE_ADD_PS(XMM2,XMM1)

  SSE_COPY_PS(XMM7,XMM3)
  SSE_MULT_PS(XMM7,XMM0)

  SSE_SHUFFLE(XMM0,XMM0,0x4E)

  SSE_COPY_PS(XMM1,XMM6)
  SSE_MULT_PS(XMM1,XMM0)
  SSE_SUB_PS(XMM2,XMM1)

  SSE_MULT_PS(XMM0,XMM3)
  SSE_SUB_PS(XMM0,XMM7)

  SSE_SHUFFLE(XMM0,XMM0,0x4E)
  SSE_STORE_PS(SSE_ARG_1,FLOAT_12,XMM0)

  /* ----------------------------------------------- */

  SSE_COPY_PS(XMM7,XMM5)
  SSE_SHUFFLE(XMM7,XMM5,0x4E)
  SSE_MULT_PS(XMM7,XMM6)

  SSE_SHUFFLE(XMM7,XMM7,0xB1)

  SSE_SHUFFLE(XMM4,XMM4,0x4E)

  SSE_COPY_PS(XMM0,XMM4)
  SSE_MULT_PS(XMM0,XMM7)
  SSE_ADD_PS(XMM0,XMM2)

  SSE_COPY_PS(XMM2,XMM3)
  SSE_MULT_PS(XMM2,XMM7)

  SSE_SHUFFLE(XMM7,XMM7,0x4E)

  SSE_COPY_PS(XMM1,XMM4)
  SSE_MULT_PS(XMM1,XMM7)
  SSE_SUB_PS(XMM0,XMM1)
  /* first row of the adjoint is complete */
  SSE_STORE_PS(SSE_ARG_1,FLOAT_0,XMM0)

  SSE_MULT_PS(XMM7,XMM3)
  SSE_SUB_PS(XMM7,XMM2)

  SSE_SHUFFLE(XMM7,XMM7,0x4E)

  /* ----------------------------------------------- */

  SSE_COPY_PS(XMM1,XMM3)
  SSE_MULT_PS(XMM1,XMM5)

  SSE_SHUFFLE(XMM1,XMM1,0xB1)

  SSE_COPY_PS(XMM0,XMM6)
  SSE_MULT_PS(XMM0,XMM1)
  SSE_ADD_PS(XMM0,XMM7)

  SSE_COPY_PS(XMM2,XMM4)
  SSE_MULT_PS(XMM2,XMM1)
  SSE_SUB_PS_M(XMM2,SSE_ARG_1,FLOAT_12)

  SSE_SHUFFLE(XMM1,XMM1,0x4E)

  SSE_COPY_PS(XMM7,XMM6)
  SSE_MULT_PS(XMM7,XMM1)
  SSE_SUB_PS(XMM7,XMM0)

  SSE_MULT_PS(XMM1,XMM4)
  SSE_SUB_PS(XMM2,XMM1)
  SSE_STORE_PS(SSE_ARG_1,FLOAT_12,XMM2)

  /* ----------------------------------------------- */

  SSE_COPY_PS(XMM1,XMM3)
  SSE_MULT_PS(XMM1,XMM6)

  SSE_SHUFFLE(XMM1,XMM1,0xB1)

  SSE_COPY_PS(XMM2,XMM4)
  SSE_MULT_PS(XMM2,XMM1)
  SSE_LOAD_PS(SSE_ARG_1,FLOAT_4,XMM0)
  SSE_SUB_PS(XMM0,XMM2)

  SSE_COPY_PS(XMM2,XMM5)
  SSE_MULT_PS(XMM2,XMM1)
  SSE_ADD_PS(XMM2,XMM7)

  SSE_SHUFFLE(XMM1,XMM1,0x4E)

  SSE_COPY_PS(XMM7,XMM4)
  SSE_MULT_PS(XMM7,XMM1)
  SSE_ADD_PS(XMM7,XMM0)

  SSE_MULT_PS(XMM1,XMM5)
  SSE_SUB_PS(XMM2,XMM1)

  /* ----------------------------------------------- */

  SSE_MULT_PS(XMM4,XMM3)

  SSE_SHUFFLE(XMM4,XMM4,0xB1)

  SSE_COPY_PS(XMM1,XMM6)
  SSE_MULT_PS(XMM1,XMM4)
  SSE_ADD_PS(XMM1,XMM7)

  SSE_COPY_PS(XMM0,XMM5)
  SSE_MULT_PS(XMM0,XMM4)
  SSE_LOAD_PS(SSE_ARG_1,FLOAT_12,XMM7)
  SSE_SUB_PS(XMM7,XMM0)

  SSE_SHUFFLE(XMM4,XMM4,0x4E)

  SSE_MULT_PS(XMM6,XMM4)
  SSE_SUB_PS(XMM1,XMM6)

  SSE_MULT_PS(XMM5,XMM4)
  SSE_ADD_PS(XMM5,XMM7)

  /* ----------------------------------------------- */
  /* Determinant: multiply the stored first adjoint row by the matching
     matrix entries and reduce horizontally (two shuffle+add passes). */

  SSE_LOAD_PS(SSE_ARG_1,FLOAT_0,XMM0)
  SSE_MULT_PS(XMM3,XMM0)

  SSE_COPY_PS(XMM4,XMM3)
  SSE_SHUFFLE(XMM4,XMM3,0x4E)
  SSE_ADD_PS(XMM4,XMM3)

  SSE_COPY_PS(XMM6,XMM4)
  SSE_SHUFFLE(XMM6,XMM4,0xB1)
  SSE_ADD_SS(XMM6,XMM4)

  /* 1/det: approximate reciprocal r, then one Newton-Raphson step
     r' = 2r - det*r*r, broadcast to all four lanes */
  SSE_COPY_PS(XMM3,XMM6)
  SSE_RECIP_SS(XMM3,XMM6)
  SSE_COPY_SS(XMM4,XMM3)
  SSE_ADD_SS(XMM4,XMM3)
  SSE_MULT_SS(XMM3,XMM3)
  SSE_MULT_SS(XMM6,XMM3)
  SSE_SUB_SS(XMM4,XMM6)

  SSE_SHUFFLE(XMM4,XMM4,0x00)

  /* Scale the four adjoint rows by 1/det and store the inverse back into a */
  SSE_MULT_PS(XMM0,XMM4)
  SSE_STOREL_PS(SSE_ARG_1,FLOAT_0,XMM0)
  SSE_STOREH_PS(SSE_ARG_1,FLOAT_2,XMM0)

  SSE_MULT_PS(XMM1,XMM4)
  SSE_STOREL_PS(SSE_ARG_1,FLOAT_4,XMM1)
  SSE_STOREH_PS(SSE_ARG_1,FLOAT_6,XMM1)

  SSE_MULT_PS(XMM2,XMM4)
  SSE_STOREL_PS(SSE_ARG_1,FLOAT_8,XMM2)
  SSE_STOREH_PS(SSE_ARG_1,FLOAT_10,XMM2)

  SSE_MULT_PS(XMM4,XMM5)
  SSE_STOREL_PS(SSE_ARG_1,FLOAT_12,XMM4)
  SSE_STOREH_PS(SSE_ARG_1,FLOAT_14,XMM4)

  /* ----------------------------------------------- */

  SSE_INLINE_END_1;
  SSE_SCOPE_END;
  PetscFunctionReturn(0);
}
39799148eceSKris Buschelman 
39899148eceSKris Buschelman #endif
39999148eceSKris Buschelman 
40099148eceSKris Buschelman 
401