xref: /petsc/src/mat/impls/baij/seq/dgefa4.c (revision ec1892c82279d5cc06e9d5039e7affb2a767a275)
1be1d678aSKris Buschelman 
24224c193SBarry Smith /*
3*ec1892c8SHong Zhang        Inverts 4 by 4 matrix using Gaussian elimination with partial pivoting.
471c5468dSBarry Smith 
571c5468dSBarry Smith        Used by the sparse factorization routines in
6dd882469SBarry Smith      src/mat/impls/baij/seq
771c5468dSBarry Smith 
871c5468dSBarry Smith        This is a combination of the Linpack routines
971c5468dSBarry Smith     dgefa() and dgedi() specialized for a size of 4.
1071c5468dSBarry Smith 
114224c193SBarry Smith */
12c6db04a5SJed Brown #include <petscsys.h>
134224c193SBarry Smith 
144a2ae208SSatish Balay #undef __FUNCT__
1596b95a6bSBarry Smith #define __FUNCT__ "PetscKernel_A_gets_inverse_A_4"
162e92ee13SHong Zhang PETSC_EXTERN PetscErrorCode PetscKernel_A_gets_inverse_A_4(MatScalar *a,PetscReal shift,PetscBool allowzeropivot,PetscBool *zeropivotdetected)
174224c193SBarry Smith {
18690b6cddSBarry Smith   PetscInt  i__2,i__3,kp1,j,k,l,ll,i,ipvt[4],kb,k3;
19690b6cddSBarry Smith   PetscInt  k4,j3;
20b48ee343SBarry Smith   MatScalar *aa,*ax,*ay,work[16],stmp;
21329f5518SBarry Smith   MatReal   tmp,max;
224224c193SBarry Smith 
233a40ed3dSBarry Smith   PetscFunctionBegin;
24c80103daSHong Zhang   if (zeropivotdetected) *zeropivotdetected = PETSC_FALSE;
250daa89e9SHong Zhang   shift = .25*shift*(1.e-12 + PetscAbsScalar(a[0]) + PetscAbsScalar(a[5]) + PetscAbsScalar(a[10]) + PetscAbsScalar(a[15]));
26*ec1892c8SHong Zhang 
274224c193SBarry Smith   /* Parameter adjustments */
288a36c062SBarry Smith   a -= 5;
294224c193SBarry Smith 
308a36c062SBarry Smith   for (k = 1; k <= 3; ++k) {
314224c193SBarry Smith     kp1 = k + 1;
328a36c062SBarry Smith     k3  = 4*k;
334224c193SBarry Smith     k4  = k3 + k;
344224c193SBarry Smith 
35*ec1892c8SHong Zhang     /* find l = pivot index */
36ed33f8a5SSatish Balay     i__2 = 5 - k;
374224c193SBarry Smith     aa   = &a[k4];
384224c193SBarry Smith     max  = PetscAbsScalar(aa[0]);
394224c193SBarry Smith     l    = 1;
404224c193SBarry Smith     for (ll=1; ll<i__2; ll++) {
414224c193SBarry Smith       tmp = PetscAbsScalar(aa[ll]);
424224c193SBarry Smith       if (tmp > max) { max = tmp; l = ll+1;}
434224c193SBarry Smith     }
444224c193SBarry Smith     l        += k - 1;
45da10e913SBarry Smith     ipvt[k-1] = l;
464224c193SBarry Smith 
475b8514ebSBarry Smith     if (a[l + k3] == 0.0) {
48*ec1892c8SHong Zhang       if (shift == 0.0) {
49*ec1892c8SHong Zhang         if (allowzeropivot) {
50*ec1892c8SHong Zhang           PetscErrorCode ierr;
51*ec1892c8SHong Zhang           ierr = PetscInfo1(NULL,"Zero pivot, row %D\n",k-1);CHKERRQ(ierr);
52*ec1892c8SHong Zhang           *zeropivotdetected = PETSC_TRUE;
53*ec1892c8SHong Zhang         } else SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_MAT_LU_ZRPVT,"Zero pivot, row %D",k-1);
54*ec1892c8SHong Zhang       } else {
551bfe5b3dSBarry Smith         /* SHIFT is applied to SINGLE diagonal entry; does this make any sense? */
561bfe5b3dSBarry Smith         a[l + k3] = shift;
571bfe5b3dSBarry Smith       }
584224c193SBarry Smith     }
594224c193SBarry Smith 
604224c193SBarry Smith     /* interchange if necessary */
614224c193SBarry Smith     if (l != k) {
624224c193SBarry Smith       stmp      = a[l + k3];
634224c193SBarry Smith       a[l + k3] = a[k4];
644224c193SBarry Smith       a[k4]     = stmp;
654224c193SBarry Smith     }
664224c193SBarry Smith 
674224c193SBarry Smith     /* compute multipliers */
684224c193SBarry Smith     stmp = -1. / a[k4];
698a36c062SBarry Smith     i__2 = 4 - k;
704224c193SBarry Smith     aa   = &a[1 + k4];
7126fbe8dcSKarl Rupp     for (ll=0; ll<i__2; ll++) aa[ll] *= stmp;
724224c193SBarry Smith 
734224c193SBarry Smith     /* row elimination with column indexing */
744224c193SBarry Smith     ax = &a[k4+1];
758a36c062SBarry Smith     for (j = kp1; j <= 4; ++j) {
768a36c062SBarry Smith       j3   = 4*j;
774224c193SBarry Smith       stmp = a[l + j3];
784224c193SBarry Smith       if (l != k) {
794224c193SBarry Smith         a[l + j3] = a[k + j3];
804224c193SBarry Smith         a[k + j3] = stmp;
814224c193SBarry Smith       }
824224c193SBarry Smith 
838a36c062SBarry Smith       i__3 = 4 - k;
844224c193SBarry Smith       ay   = &a[1+k+j3];
8526fbe8dcSKarl Rupp       for (ll=0; ll<i__3; ll++) ay[ll] += stmp*ax[ll];
864224c193SBarry Smith     }
874224c193SBarry Smith   }
88da10e913SBarry Smith   ipvt[3] = 4;
892e92ee13SHong Zhang   if (a[20] == 0.0) {
902e92ee13SHong Zhang     if (allowzeropivot) {
91*ec1892c8SHong Zhang       PetscErrorCode ierr;
922e92ee13SHong Zhang       ierr = PetscInfo1(NULL,"Zero pivot, row %D\n",3);CHKERRQ(ierr);
932e92ee13SHong Zhang       *zeropivotdetected = PETSC_TRUE;
942e92ee13SHong Zhang     } else SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_MAT_LU_ZRPVT,"Zero pivot, row %D",3);
952e92ee13SHong Zhang   }
964224c193SBarry Smith 
97*ec1892c8SHong Zhang   /* Now form the inverse */
984224c193SBarry Smith   /* compute inverse(u) */
998a36c062SBarry Smith   for (k = 1; k <= 4; ++k) {
1008a36c062SBarry Smith     k3    = 4*k;
1014224c193SBarry Smith     k4    = k3 + k;
1024224c193SBarry Smith     a[k4] = 1.0 / a[k4];
1034224c193SBarry Smith     stmp  = -a[k4];
1044224c193SBarry Smith     i__2  = k - 1;
1054224c193SBarry Smith     aa    = &a[k3 + 1];
1064224c193SBarry Smith     for (ll=0; ll<i__2; ll++) aa[ll] *= stmp;
1074224c193SBarry Smith     kp1 = k + 1;
1088a36c062SBarry Smith     if (4 < kp1) continue;
1094224c193SBarry Smith     ax = aa;
1108a36c062SBarry Smith     for (j = kp1; j <= 4; ++j) {
1118a36c062SBarry Smith       j3        = 4*j;
1124224c193SBarry Smith       stmp      = a[k + j3];
1134224c193SBarry Smith       a[k + j3] = 0.0;
1144224c193SBarry Smith       ay        = &a[j3 + 1];
11526fbe8dcSKarl Rupp       for (ll=0; ll<k; ll++) ay[ll] += stmp*ax[ll];
1164224c193SBarry Smith     }
1174224c193SBarry Smith   }
1184224c193SBarry Smith 
1194224c193SBarry Smith   /* form inverse(u)*inverse(l) */
1208a36c062SBarry Smith   for (kb = 1; kb <= 3; ++kb) {
1218a36c062SBarry Smith     k   = 4 - kb;
1228a36c062SBarry Smith     k3  = 4*k;
1234224c193SBarry Smith     kp1 = k + 1;
1244224c193SBarry Smith     aa  = a + k3;
1258a36c062SBarry Smith     for (i = kp1; i <= 4; ++i) {
126b48ee343SBarry Smith       work[i-1] = aa[i];
1274224c193SBarry Smith       aa[i]     = 0.0;
1284224c193SBarry Smith     }
1298a36c062SBarry Smith     for (j = kp1; j <= 4; ++j) {
130b48ee343SBarry Smith       stmp   = work[j-1];
1318a36c062SBarry Smith       ax     = &a[4*j + 1];
1324224c193SBarry Smith       ay     = &a[k3 + 1];
1334224c193SBarry Smith       ay[0] += stmp*ax[0];
1344224c193SBarry Smith       ay[1] += stmp*ax[1];
1354224c193SBarry Smith       ay[2] += stmp*ax[2];
1368a36c062SBarry Smith       ay[3] += stmp*ax[3];
1374224c193SBarry Smith     }
138da10e913SBarry Smith     l = ipvt[k-1];
1394224c193SBarry Smith     if (l != k) {
1404224c193SBarry Smith       ax   = &a[k3 + 1];
1418a36c062SBarry Smith       ay   = &a[4*l + 1];
1424224c193SBarry Smith       stmp = ax[0]; ax[0] = ay[0]; ay[0] = stmp;
1434224c193SBarry Smith       stmp = ax[1]; ax[1] = ay[1]; ay[1] = stmp;
1444224c193SBarry Smith       stmp = ax[2]; ax[2] = ay[2]; ay[2] = stmp;
1458a36c062SBarry Smith       stmp = ax[3]; ax[3] = ay[3]; ay[3] = stmp;
1464224c193SBarry Smith     }
1474224c193SBarry Smith   }
1483a40ed3dSBarry Smith   PetscFunctionReturn(0);
1494224c193SBarry Smith }
1504224c193SBarry Smith 
1519b445006SKris Buschelman #if defined(PETSC_HAVE_SSE)
1529b445006SKris Buschelman #include PETSC_HAVE_SSE
15399148eceSKris Buschelman 
15499148eceSKris Buschelman #undef __FUNCT__
15596b95a6bSBarry Smith #define __FUNCT__ "PetscKernel_A_gets_inverse_A_4_SSE"
156d4b5b1e2SBarry Smith PETSC_EXTERN PetscErrorCode PetscKernel_A_gets_inverse_A_4_SSE(float *a)
15799148eceSKris Buschelman {
15899148eceSKris Buschelman   /*
1599b445006SKris Buschelman      This routine is converted from Intel's Small Matrix Library.
16099148eceSKris Buschelman      See: Streaming SIMD Extensions -- Inverse of 4x4 Matrix
16199148eceSKris Buschelman      Order Number: 245043-001
16299148eceSKris Buschelman      March 1999
16399148eceSKris Buschelman      http://www.intel.com
16499148eceSKris Buschelman 
16599148eceSKris Buschelman      Inverse of a 4x4 matrix via Cramer's Rule:
16699148eceSKris Buschelman      bool Invert4x4(SMLXMatrix &);
16799148eceSKris Buschelman   */
  /* NOTE(review): a points to 16 contiguous floats (the 4x4 matrix) and is
     overwritten in place with its inverse.  There is no pivoting and no
     singular-matrix check -- if the determinant is zero the reciprocal below
     produces garbage; the caller must ensure the matrix is invertible. */
16899148eceSKris Buschelman   PetscFunctionBegin;
1699b445006SKris Buschelman   SSE_SCOPE_BEGIN;
1709b445006SKris Buschelman   SSE_INLINE_BEGIN_1(a)
1719b445006SKris Buschelman 
17299148eceSKris Buschelman /* ----------------------------------------------- */
  /* Load the 16 entries two quadwords at a time and transpose the 4x4 matrix:
     after these shuffles XMM3, XMM5, XMM4, XMM6 hold the four rows of the
     transpose (the 0x88/0xDD shuffle pair is the standard 4x4 transpose
     idiom -- presumably matching _MM_TRANSPOSE4_PS) */
1739b445006SKris Buschelman 
1749b445006SKris Buschelman   SSE_LOADL_PS(SSE_ARG_1,FLOAT_0,XMM0)
1759b445006SKris Buschelman   SSE_LOADH_PS(SSE_ARG_1,FLOAT_4,XMM0)
1769b445006SKris Buschelman 
1779b445006SKris Buschelman   SSE_LOADL_PS(SSE_ARG_1,FLOAT_8,XMM5)
1789b445006SKris Buschelman   SSE_LOADH_PS(SSE_ARG_1,FLOAT_12,XMM5)
1799b445006SKris Buschelman 
1809b445006SKris Buschelman   SSE_COPY_PS(XMM3,XMM0)
1819b445006SKris Buschelman   SSE_SHUFFLE(XMM3,XMM5,0x88)
1829b445006SKris Buschelman 
1839b445006SKris Buschelman   SSE_SHUFFLE(XMM5,XMM0,0xDD)
1849b445006SKris Buschelman 
1859b445006SKris Buschelman   SSE_LOADL_PS(SSE_ARG_1,FLOAT_2,XMM0)
1869b445006SKris Buschelman   SSE_LOADH_PS(SSE_ARG_1,FLOAT_6,XMM0)
1879b445006SKris Buschelman 
1889b445006SKris Buschelman   SSE_LOADL_PS(SSE_ARG_1,FLOAT_10,XMM6)
1899b445006SKris Buschelman   SSE_LOADH_PS(SSE_ARG_1,FLOAT_14,XMM6)
1909b445006SKris Buschelman 
1919b445006SKris Buschelman   SSE_COPY_PS(XMM4,XMM0)
1929b445006SKris Buschelman   SSE_SHUFFLE(XMM4,XMM6,0x88)
1939b445006SKris Buschelman 
1949b445006SKris Buschelman   SSE_SHUFFLE(XMM6,XMM0,0xDD)
1959b445006SKris Buschelman 
19699148eceSKris Buschelman /* ----------------------------------------------- */
  /* Cofactor (adjugate) computation: each section below builds products of
     2x2 sub-determinants via multiply + lane-swap shuffles (0xB1 swaps within
     pairs, 0x4E swaps halves).  Since only eight XMM registers are available,
     partial adjugate rows are spilled to a[4..7] and a[12..15] and reloaded
     later (SSE_STORE_PS / SSE_LOAD_PS / SSE_SUB_PS_M below). */
1979b445006SKris Buschelman 
1989b445006SKris Buschelman   SSE_COPY_PS(XMM7,XMM4)
1999b445006SKris Buschelman   SSE_MULT_PS(XMM7,XMM6)
2009b445006SKris Buschelman 
2019b445006SKris Buschelman   SSE_SHUFFLE(XMM7,XMM7,0xB1)
2029b445006SKris Buschelman 
2039b445006SKris Buschelman   SSE_COPY_PS(XMM0,XMM5)
2049b445006SKris Buschelman   SSE_MULT_PS(XMM0,XMM7)
2059b445006SKris Buschelman 
2069b445006SKris Buschelman   SSE_COPY_PS(XMM2,XMM3)
2079b445006SKris Buschelman   SSE_MULT_PS(XMM2,XMM7)
2089b445006SKris Buschelman 
2099b445006SKris Buschelman   SSE_SHUFFLE(XMM7,XMM7,0x4E)
2109b445006SKris Buschelman 
2119b445006SKris Buschelman   SSE_COPY_PS(XMM1,XMM5)
2129b445006SKris Buschelman   SSE_MULT_PS(XMM1,XMM7)
2139b445006SKris Buschelman   SSE_SUB_PS(XMM1,XMM0)
2149b445006SKris Buschelman 
2159b445006SKris Buschelman   SSE_MULT_PS(XMM7,XMM3)
2169b445006SKris Buschelman   SSE_SUB_PS(XMM7,XMM2)
2179b445006SKris Buschelman 
2189b445006SKris Buschelman   SSE_SHUFFLE(XMM7,XMM7,0x4E)
2199b445006SKris Buschelman   SSE_STORE_PS(SSE_ARG_1,FLOAT_4,XMM7)
2209b445006SKris Buschelman 
22199148eceSKris Buschelman /* ----------------------------------------------- */
2229b445006SKris Buschelman 
2239b445006SKris Buschelman   SSE_COPY_PS(XMM0,XMM5)
2249b445006SKris Buschelman   SSE_MULT_PS(XMM0,XMM4)
2259b445006SKris Buschelman 
2269b445006SKris Buschelman   SSE_SHUFFLE(XMM0,XMM0,0xB1)
2279b445006SKris Buschelman 
2289b445006SKris Buschelman   SSE_COPY_PS(XMM2,XMM6)
2299b445006SKris Buschelman   SSE_MULT_PS(XMM2,XMM0)
2309b445006SKris Buschelman   SSE_ADD_PS(XMM2,XMM1)
2319b445006SKris Buschelman 
2329b445006SKris Buschelman   SSE_COPY_PS(XMM7,XMM3)
2339b445006SKris Buschelman   SSE_MULT_PS(XMM7,XMM0)
2349b445006SKris Buschelman 
2359b445006SKris Buschelman   SSE_SHUFFLE(XMM0,XMM0,0x4E)
2369b445006SKris Buschelman 
2379b445006SKris Buschelman   SSE_COPY_PS(XMM1,XMM6)
2389b445006SKris Buschelman   SSE_MULT_PS(XMM1,XMM0)
2399b445006SKris Buschelman   SSE_SUB_PS(XMM2,XMM1)
2409b445006SKris Buschelman 
2419b445006SKris Buschelman   SSE_MULT_PS(XMM0,XMM3)
2429b445006SKris Buschelman   SSE_SUB_PS(XMM0,XMM7)
2439b445006SKris Buschelman 
2449b445006SKris Buschelman   SSE_SHUFFLE(XMM0,XMM0,0x4E)
2459b445006SKris Buschelman   SSE_STORE_PS(SSE_ARG_1,FLOAT_12,XMM0)
2469b445006SKris Buschelman 
24799148eceSKris Buschelman   /* ----------------------------------------------- */
2489b445006SKris Buschelman 
2499b445006SKris Buschelman   SSE_COPY_PS(XMM7,XMM5)
2509b445006SKris Buschelman   SSE_SHUFFLE(XMM7,XMM5,0x4E)
2519b445006SKris Buschelman   SSE_MULT_PS(XMM7,XMM6)
2529b445006SKris Buschelman 
2539b445006SKris Buschelman   SSE_SHUFFLE(XMM7,XMM7,0xB1)
2549b445006SKris Buschelman 
2559b445006SKris Buschelman   SSE_SHUFFLE(XMM4,XMM4,0x4E)
2569b445006SKris Buschelman 
2579b445006SKris Buschelman   SSE_COPY_PS(XMM0,XMM4)
2589b445006SKris Buschelman   SSE_MULT_PS(XMM0,XMM7)
2599b445006SKris Buschelman   SSE_ADD_PS(XMM0,XMM2)
2609b445006SKris Buschelman 
2619b445006SKris Buschelman   SSE_COPY_PS(XMM2,XMM3)
2629b445006SKris Buschelman   SSE_MULT_PS(XMM2,XMM7)
2639b445006SKris Buschelman 
2649b445006SKris Buschelman   SSE_SHUFFLE(XMM7,XMM7,0x4E)
2659b445006SKris Buschelman 
2669b445006SKris Buschelman   SSE_COPY_PS(XMM1,XMM4)
2679b445006SKris Buschelman   SSE_MULT_PS(XMM1,XMM7)
2689b445006SKris Buschelman   SSE_SUB_PS(XMM0,XMM1)
2699b445006SKris Buschelman   SSE_STORE_PS(SSE_ARG_1,FLOAT_0,XMM0)
2709b445006SKris Buschelman 
2719b445006SKris Buschelman   SSE_MULT_PS(XMM7,XMM3)
2729b445006SKris Buschelman   SSE_SUB_PS(XMM7,XMM2)
2739b445006SKris Buschelman 
2749b445006SKris Buschelman   SSE_SHUFFLE(XMM7,XMM7,0x4E)
2759b445006SKris Buschelman 
27699148eceSKris Buschelman   /* ----------------------------------------------- */
2779b445006SKris Buschelman 
2789b445006SKris Buschelman   SSE_COPY_PS(XMM1,XMM3)
2799b445006SKris Buschelman   SSE_MULT_PS(XMM1,XMM5)
2809b445006SKris Buschelman 
2819b445006SKris Buschelman   SSE_SHUFFLE(XMM1,XMM1,0xB1)
2829b445006SKris Buschelman 
2839b445006SKris Buschelman   SSE_COPY_PS(XMM0,XMM6)
2849b445006SKris Buschelman   SSE_MULT_PS(XMM0,XMM1)
2859b445006SKris Buschelman   SSE_ADD_PS(XMM0,XMM7)
2869b445006SKris Buschelman 
2879b445006SKris Buschelman   SSE_COPY_PS(XMM2,XMM4)
2889b445006SKris Buschelman   SSE_MULT_PS(XMM2,XMM1)
2899b445006SKris Buschelman   SSE_SUB_PS_M(XMM2,SSE_ARG_1,FLOAT_12)
2909b445006SKris Buschelman 
2919b445006SKris Buschelman   SSE_SHUFFLE(XMM1,XMM1,0x4E)
2929b445006SKris Buschelman 
2939b445006SKris Buschelman   SSE_COPY_PS(XMM7,XMM6)
2949b445006SKris Buschelman   SSE_MULT_PS(XMM7,XMM1)
2959b445006SKris Buschelman   SSE_SUB_PS(XMM7,XMM0)
2969b445006SKris Buschelman 
2979b445006SKris Buschelman   SSE_MULT_PS(XMM1,XMM4)
2989b445006SKris Buschelman   SSE_SUB_PS(XMM2,XMM1)
2999b445006SKris Buschelman   SSE_STORE_PS(SSE_ARG_1,FLOAT_12,XMM2)
3009b445006SKris Buschelman 
30199148eceSKris Buschelman   /* ----------------------------------------------- */
3029b445006SKris Buschelman 
3039b445006SKris Buschelman   SSE_COPY_PS(XMM1,XMM3)
3049b445006SKris Buschelman   SSE_MULT_PS(XMM1,XMM6)
3059b445006SKris Buschelman 
3069b445006SKris Buschelman   SSE_SHUFFLE(XMM1,XMM1,0xB1)
3079b445006SKris Buschelman 
3089b445006SKris Buschelman   SSE_COPY_PS(XMM2,XMM4)
3099b445006SKris Buschelman   SSE_MULT_PS(XMM2,XMM1)
3109b445006SKris Buschelman   SSE_LOAD_PS(SSE_ARG_1,FLOAT_4,XMM0)
3119b445006SKris Buschelman   SSE_SUB_PS(XMM0,XMM2)
3129b445006SKris Buschelman 
3139b445006SKris Buschelman   SSE_COPY_PS(XMM2,XMM5)
3149b445006SKris Buschelman   SSE_MULT_PS(XMM2,XMM1)
3159b445006SKris Buschelman   SSE_ADD_PS(XMM2,XMM7)
3169b445006SKris Buschelman 
3179b445006SKris Buschelman   SSE_SHUFFLE(XMM1,XMM1,0x4E)
3189b445006SKris Buschelman 
3199b445006SKris Buschelman   SSE_COPY_PS(XMM7,XMM4)
3209b445006SKris Buschelman   SSE_MULT_PS(XMM7,XMM1)
3219b445006SKris Buschelman   SSE_ADD_PS(XMM7,XMM0)
3229b445006SKris Buschelman 
3239b445006SKris Buschelman   SSE_MULT_PS(XMM1,XMM5)
3249b445006SKris Buschelman   SSE_SUB_PS(XMM2,XMM1)
3259b445006SKris Buschelman 
32699148eceSKris Buschelman   /* ----------------------------------------------- */
3279b445006SKris Buschelman 
3289b445006SKris Buschelman   SSE_MULT_PS(XMM4,XMM3)
3299b445006SKris Buschelman 
3309b445006SKris Buschelman   SSE_SHUFFLE(XMM4,XMM4,0xB1)
3319b445006SKris Buschelman 
3329b445006SKris Buschelman   SSE_COPY_PS(XMM1,XMM6)
3339b445006SKris Buschelman   SSE_MULT_PS(XMM1,XMM4)
3349b445006SKris Buschelman   SSE_ADD_PS(XMM1,XMM7)
3359b445006SKris Buschelman 
3369b445006SKris Buschelman   SSE_COPY_PS(XMM0,XMM5)
3379b445006SKris Buschelman   SSE_MULT_PS(XMM0,XMM4)
3389b445006SKris Buschelman   SSE_LOAD_PS(SSE_ARG_1,FLOAT_12,XMM7)
3399b445006SKris Buschelman   SSE_SUB_PS(XMM7,XMM0)
3409b445006SKris Buschelman 
3419b445006SKris Buschelman   SSE_SHUFFLE(XMM4,XMM4,0x4E)
3429b445006SKris Buschelman 
3439b445006SKris Buschelman   SSE_MULT_PS(XMM6,XMM4)
3449b445006SKris Buschelman   SSE_SUB_PS(XMM1,XMM6)
3459b445006SKris Buschelman 
3469b445006SKris Buschelman   SSE_MULT_PS(XMM5,XMM4)
3479b445006SKris Buschelman   SSE_ADD_PS(XMM5,XMM7)
3489b445006SKris Buschelman 
3499b445006SKris Buschelman   /* ----------------------------------------------- */
  /* Determinant: elementwise product of the first transposed row (XMM3) with
     its cofactor column, horizontally summed by the two shuffle/add steps.
     The scalar reciprocal uses the approximate SSE_RECIP_SS refined with one
     Newton-Raphson iteration, r1 = 2*r0 - d*r0*r0, to recover nearly full
     single precision. */
3509b445006SKris Buschelman 
3519b445006SKris Buschelman   SSE_LOAD_PS(SSE_ARG_1,FLOAT_0,XMM0)
3529b445006SKris Buschelman   SSE_MULT_PS(XMM3,XMM0)
3539b445006SKris Buschelman 
3549b445006SKris Buschelman   SSE_COPY_PS(XMM4,XMM3)
3559b445006SKris Buschelman   SSE_SHUFFLE(XMM4,XMM3,0x4E)
3569b445006SKris Buschelman   SSE_ADD_PS(XMM4,XMM3)
3579b445006SKris Buschelman 
3589b445006SKris Buschelman   SSE_COPY_PS(XMM6,XMM4)
3599b445006SKris Buschelman   SSE_SHUFFLE(XMM6,XMM4,0xB1)
3609b445006SKris Buschelman   SSE_ADD_SS(XMM6,XMM4)
3619b445006SKris Buschelman 
3629b445006SKris Buschelman   SSE_COPY_PS(XMM3,XMM6)
3639b445006SKris Buschelman   SSE_RECIP_SS(XMM3,XMM6)
3649b445006SKris Buschelman   SSE_COPY_SS(XMM4,XMM3)
3659b445006SKris Buschelman   SSE_ADD_SS(XMM4,XMM3)
3669b445006SKris Buschelman   SSE_MULT_SS(XMM3,XMM3)
3679b445006SKris Buschelman   SSE_MULT_SS(XMM6,XMM3)
3689b445006SKris Buschelman   SSE_SUB_SS(XMM4,XMM6)
3699b445006SKris Buschelman 
3709b445006SKris Buschelman   SSE_SHUFFLE(XMM4,XMM4,0x00)
3719b445006SKris Buschelman 
  /* Broadcast 1/det to all four lanes and scale each adjugate row, storing
     the finished inverse back over a[0..15] */
3729b445006SKris Buschelman   SSE_MULT_PS(XMM0,XMM4)
3739b445006SKris Buschelman   SSE_STOREL_PS(SSE_ARG_1,FLOAT_0,XMM0)
3749b445006SKris Buschelman   SSE_STOREH_PS(SSE_ARG_1,FLOAT_2,XMM0)
3759b445006SKris Buschelman 
3769b445006SKris Buschelman   SSE_MULT_PS(XMM1,XMM4)
3779b445006SKris Buschelman   SSE_STOREL_PS(SSE_ARG_1,FLOAT_4,XMM1)
3789b445006SKris Buschelman   SSE_STOREH_PS(SSE_ARG_1,FLOAT_6,XMM1)
3799b445006SKris Buschelman 
3809b445006SKris Buschelman   SSE_MULT_PS(XMM2,XMM4)
3819b445006SKris Buschelman   SSE_STOREL_PS(SSE_ARG_1,FLOAT_8,XMM2)
3829b445006SKris Buschelman   SSE_STOREH_PS(SSE_ARG_1,FLOAT_10,XMM2)
3839b445006SKris Buschelman 
3849b445006SKris Buschelman   SSE_MULT_PS(XMM4,XMM5)
3859b445006SKris Buschelman   SSE_STOREL_PS(SSE_ARG_1,FLOAT_12,XMM4)
3869b445006SKris Buschelman   SSE_STOREH_PS(SSE_ARG_1,FLOAT_14,XMM4)
3879b445006SKris Buschelman 
3889b445006SKris Buschelman   /* ----------------------------------------------- */
3899b445006SKris Buschelman 
3909b445006SKris Buschelman   SSE_INLINE_END_1;
3919b445006SKris Buschelman   SSE_SCOPE_END;
39299148eceSKris Buschelman   PetscFunctionReturn(0);
39399148eceSKris Buschelman }
39499148eceSKris Buschelman 
39599148eceSKris Buschelman #endif
39699148eceSKris Buschelman 
39799148eceSKris Buschelman 
398