1be1d678aSKris Buschelman #define PETSCMAT_DLL 2be1d678aSKris Buschelman 3ed3cc1f0SBarry Smith /* 4ed3cc1f0SBarry Smith Basic functions for basic parallel dense matrices. 5ed3cc1f0SBarry Smith */ 6ed3cc1f0SBarry Smith 704fea9ffSBarry Smith 87c4f633dSBarry Smith #include "../src/mat/impls/dense/mpi/mpidense.h" /*I "petscmat.h" I*/ 9eae6fb2eSBarry Smith #if defined(PETSC_HAVE_PLAPACK) 10eae6fb2eSBarry Smith static PetscMPIInt Plapack_nprows,Plapack_npcols,Plapack_ierror,Plapack_nb_alg; 11eae6fb2eSBarry Smith static MPI_Comm Plapack_comm_2d; 12d84338a6SBarry Smith EXTERN_C_BEGIN 13d84338a6SBarry Smith #include "PLA.h" 14d84338a6SBarry Smith EXTERN_C_END 15d84338a6SBarry Smith 16d84338a6SBarry Smith typedef struct { 17d84338a6SBarry Smith PLA_Obj A,pivots; 18d84338a6SBarry Smith PLA_Template templ; 19d84338a6SBarry Smith MPI_Datatype datatype; 20d84338a6SBarry Smith PetscInt nb,rstart; 21d84338a6SBarry Smith VecScatter ctx; 22d84338a6SBarry Smith IS is_pla,is_petsc; 23ace3abfcSBarry Smith PetscBool pla_solved; 24d84338a6SBarry Smith MatStructure mstruct; 25d84338a6SBarry Smith } Mat_Plapack; 26eae6fb2eSBarry Smith #endif 278965ea79SLois Curfman McInnes 28ba8c8a56SBarry Smith #undef __FUNCT__ 29ab92ecdeSBarry Smith #define __FUNCT__ "MatDenseGetLocalMatrix" 30ab92ecdeSBarry Smith /*@ 31ab92ecdeSBarry Smith 32ab92ecdeSBarry Smith MatDenseGetLocalMatrix - For a MATMPIDENSE or MATSEQDENSE matrix returns the sequential 33ab92ecdeSBarry Smith matrix that represents the operator. For sequential matrices it returns itself. 34ab92ecdeSBarry Smith 35ab92ecdeSBarry Smith Input Parameter: 36ab92ecdeSBarry Smith . A - the Seq or MPI dense matrix 37ab92ecdeSBarry Smith 38ab92ecdeSBarry Smith Output Parameter: 39ab92ecdeSBarry Smith . 
B - the inner matrix 40ab92ecdeSBarry Smith 418e6c10adSSatish Balay Level: intermediate 428e6c10adSSatish Balay 43ab92ecdeSBarry Smith @*/ 44ab92ecdeSBarry Smith PetscErrorCode MatDenseGetLocalMatrix(Mat A,Mat *B) 45ab92ecdeSBarry Smith { 46ab92ecdeSBarry Smith Mat_MPIDense *mat = (Mat_MPIDense*)A->data; 47ab92ecdeSBarry Smith PetscErrorCode ierr; 48ace3abfcSBarry Smith PetscBool flg; 49ab92ecdeSBarry Smith 50ab92ecdeSBarry Smith PetscFunctionBegin; 51ab92ecdeSBarry Smith ierr = PetscTypeCompare((PetscObject)A,MATMPIDENSE,&flg);CHKERRQ(ierr); 52ab92ecdeSBarry Smith if (flg) { 53ab92ecdeSBarry Smith *B = mat->A; 54ab92ecdeSBarry Smith } else { 55ab92ecdeSBarry Smith *B = A; 56ab92ecdeSBarry Smith } 57ab92ecdeSBarry Smith PetscFunctionReturn(0); 58ab92ecdeSBarry Smith } 59ab92ecdeSBarry Smith 60ab92ecdeSBarry Smith #undef __FUNCT__ 61ba8c8a56SBarry Smith #define __FUNCT__ "MatGetRow_MPIDense" 62ba8c8a56SBarry Smith PetscErrorCode MatGetRow_MPIDense(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v) 63ba8c8a56SBarry Smith { 64ba8c8a56SBarry Smith Mat_MPIDense *mat = (Mat_MPIDense*)A->data; 65ba8c8a56SBarry Smith PetscErrorCode ierr; 66d0f46423SBarry Smith PetscInt lrow,rstart = A->rmap->rstart,rend = A->rmap->rend; 67ba8c8a56SBarry Smith 68ba8c8a56SBarry Smith PetscFunctionBegin; 69e7e72b3dSBarry Smith if (row < rstart || row >= rend) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"only local rows"); 70ba8c8a56SBarry Smith lrow = row - rstart; 71ba8c8a56SBarry Smith ierr = MatGetRow(mat->A,lrow,nz,(const PetscInt **)idx,(const PetscScalar **)v);CHKERRQ(ierr); 72ba8c8a56SBarry Smith PetscFunctionReturn(0); 73ba8c8a56SBarry Smith } 74ba8c8a56SBarry Smith 75ba8c8a56SBarry Smith #undef __FUNCT__ 76ba8c8a56SBarry Smith #define __FUNCT__ "MatRestoreRow_MPIDense" 77ba8c8a56SBarry Smith PetscErrorCode MatRestoreRow_MPIDense(Mat mat,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v) 78ba8c8a56SBarry Smith { 79ba8c8a56SBarry Smith PetscErrorCode ierr; 
80ba8c8a56SBarry Smith 81ba8c8a56SBarry Smith PetscFunctionBegin; 82ba8c8a56SBarry Smith if (idx) {ierr = PetscFree(*idx);CHKERRQ(ierr);} 83ba8c8a56SBarry Smith if (v) {ierr = PetscFree(*v);CHKERRQ(ierr);} 84ba8c8a56SBarry Smith PetscFunctionReturn(0); 85ba8c8a56SBarry Smith } 86ba8c8a56SBarry Smith 870de54da6SSatish Balay EXTERN_C_BEGIN 884a2ae208SSatish Balay #undef __FUNCT__ 894a2ae208SSatish Balay #define __FUNCT__ "MatGetDiagonalBlock_MPIDense" 90ace3abfcSBarry Smith PetscErrorCode PETSCMAT_DLLEXPORT MatGetDiagonalBlock_MPIDense(Mat A,PetscBool *iscopy,MatReuse reuse,Mat *B) 910de54da6SSatish Balay { 920de54da6SSatish Balay Mat_MPIDense *mdn = (Mat_MPIDense*)A->data; 936849ba73SBarry Smith PetscErrorCode ierr; 94d0f46423SBarry Smith PetscInt m = A->rmap->n,rstart = A->rmap->rstart; 9587828ca2SBarry Smith PetscScalar *array; 960de54da6SSatish Balay MPI_Comm comm; 970de54da6SSatish Balay 980de54da6SSatish Balay PetscFunctionBegin; 99e32f2f54SBarry Smith if (A->rmap->N != A->cmap->N) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only square matrices supported."); 1000de54da6SSatish Balay 1010de54da6SSatish Balay /* The reuse aspect is not implemented efficiently */ 1020de54da6SSatish Balay if (reuse) { ierr = MatDestroy(*B);CHKERRQ(ierr);} 1030de54da6SSatish Balay 1040de54da6SSatish Balay ierr = PetscObjectGetComm((PetscObject)(mdn->A),&comm);CHKERRQ(ierr); 1050de54da6SSatish Balay ierr = MatGetArray(mdn->A,&array);CHKERRQ(ierr); 106f69a0ea3SMatthew Knepley ierr = MatCreate(comm,B);CHKERRQ(ierr); 107f69a0ea3SMatthew Knepley ierr = MatSetSizes(*B,m,m,m,m);CHKERRQ(ierr); 1087adad957SLisandro Dalcin ierr = MatSetType(*B,((PetscObject)mdn->A)->type_name);CHKERRQ(ierr); 1095c5985e7SKris Buschelman ierr = MatSeqDenseSetPreallocation(*B,array+m*rstart);CHKERRQ(ierr); 1100de54da6SSatish Balay ierr = MatRestoreArray(mdn->A,&array);CHKERRQ(ierr); 1110de54da6SSatish Balay ierr = MatAssemblyBegin(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1120de54da6SSatish Balay ierr = 
MatAssemblyEnd(*B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1130de54da6SSatish Balay 1140de54da6SSatish Balay *iscopy = PETSC_TRUE; 1150de54da6SSatish Balay PetscFunctionReturn(0); 1160de54da6SSatish Balay } 1170de54da6SSatish Balay EXTERN_C_END 1180de54da6SSatish Balay 1194a2ae208SSatish Balay #undef __FUNCT__ 1204a2ae208SSatish Balay #define __FUNCT__ "MatSetValues_MPIDense" 12113f74950SBarry Smith PetscErrorCode MatSetValues_MPIDense(Mat mat,PetscInt m,const PetscInt idxm[],PetscInt n,const PetscInt idxn[],const PetscScalar v[],InsertMode addv) 1228965ea79SLois Curfman McInnes { 12339b7565bSBarry Smith Mat_MPIDense *A = (Mat_MPIDense*)mat->data; 124dfbe8321SBarry Smith PetscErrorCode ierr; 125d0f46423SBarry Smith PetscInt i,j,rstart = mat->rmap->rstart,rend = mat->rmap->rend,row; 126ace3abfcSBarry Smith PetscBool roworiented = A->roworiented; 1278965ea79SLois Curfman McInnes 1283a40ed3dSBarry Smith PetscFunctionBegin; 12971fd2e92SBarry Smith if (v) PetscValidScalarPointer(v,6); 1308965ea79SLois Curfman McInnes for (i=0; i<m; i++) { 1315ef9f2a5SBarry Smith if (idxm[i] < 0) continue; 132e32f2f54SBarry Smith if (idxm[i] >= mat->rmap->N) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large"); 1338965ea79SLois Curfman McInnes if (idxm[i] >= rstart && idxm[i] < rend) { 1348965ea79SLois Curfman McInnes row = idxm[i] - rstart; 13539b7565bSBarry Smith if (roworiented) { 13639b7565bSBarry Smith ierr = MatSetValues(A->A,1,&row,n,idxn,v+i*n,addv);CHKERRQ(ierr); 1373a40ed3dSBarry Smith } else { 1388965ea79SLois Curfman McInnes for (j=0; j<n; j++) { 1395ef9f2a5SBarry Smith if (idxn[j] < 0) continue; 140e32f2f54SBarry Smith if (idxn[j] >= mat->cmap->N) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large"); 14139b7565bSBarry Smith ierr = MatSetValues(A->A,1,&row,1,&idxn[j],v+i+j*m,addv);CHKERRQ(ierr); 14239b7565bSBarry Smith } 1438965ea79SLois Curfman McInnes } 1443a40ed3dSBarry Smith } else { 1453782ba37SSatish Balay if (!A->donotstash) { 14639b7565bSBarry 
Smith if (roworiented) { 147b400d20cSBarry Smith ierr = MatStashValuesRow_Private(&mat->stash,idxm[i],n,idxn,v+i*n,PETSC_FALSE);CHKERRQ(ierr); 148d36fbae8SSatish Balay } else { 149b400d20cSBarry Smith ierr = MatStashValuesCol_Private(&mat->stash,idxm[i],n,idxn,v+i,m,PETSC_FALSE);CHKERRQ(ierr); 15039b7565bSBarry Smith } 151b49de8d1SLois Curfman McInnes } 152b49de8d1SLois Curfman McInnes } 1533782ba37SSatish Balay } 1543a40ed3dSBarry Smith PetscFunctionReturn(0); 155b49de8d1SLois Curfman McInnes } 156b49de8d1SLois Curfman McInnes 1574a2ae208SSatish Balay #undef __FUNCT__ 1584a2ae208SSatish Balay #define __FUNCT__ "MatGetValues_MPIDense" 15913f74950SBarry Smith PetscErrorCode MatGetValues_MPIDense(Mat mat,PetscInt m,const PetscInt idxm[],PetscInt n,const PetscInt idxn[],PetscScalar v[]) 160b49de8d1SLois Curfman McInnes { 161b49de8d1SLois Curfman McInnes Mat_MPIDense *mdn = (Mat_MPIDense*)mat->data; 162dfbe8321SBarry Smith PetscErrorCode ierr; 163d0f46423SBarry Smith PetscInt i,j,rstart = mat->rmap->rstart,rend = mat->rmap->rend,row; 164b49de8d1SLois Curfman McInnes 1653a40ed3dSBarry Smith PetscFunctionBegin; 166b49de8d1SLois Curfman McInnes for (i=0; i<m; i++) { 167e32f2f54SBarry Smith if (idxm[i] < 0) continue; /* SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative row"); */ 168e32f2f54SBarry Smith if (idxm[i] >= mat->rmap->N) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large"); 169b49de8d1SLois Curfman McInnes if (idxm[i] >= rstart && idxm[i] < rend) { 170b49de8d1SLois Curfman McInnes row = idxm[i] - rstart; 171b49de8d1SLois Curfman McInnes for (j=0; j<n; j++) { 172e32f2f54SBarry Smith if (idxn[j] < 0) continue; /* SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative column"); */ 173e7e72b3dSBarry Smith if (idxn[j] >= mat->cmap->N) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large"); 174b49de8d1SLois Curfman McInnes ierr = MatGetValues(mdn->A,1,&row,1,&idxn[j],v+i*n+j);CHKERRQ(ierr); 175b49de8d1SLois Curfman McInnes } 
176e7e72b3dSBarry Smith } else SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only local values currently supported"); 1778965ea79SLois Curfman McInnes } 1783a40ed3dSBarry Smith PetscFunctionReturn(0); 1798965ea79SLois Curfman McInnes } 1808965ea79SLois Curfman McInnes 1814a2ae208SSatish Balay #undef __FUNCT__ 1824a2ae208SSatish Balay #define __FUNCT__ "MatGetArray_MPIDense" 183dfbe8321SBarry Smith PetscErrorCode MatGetArray_MPIDense(Mat A,PetscScalar *array[]) 184ff14e315SSatish Balay { 185ff14e315SSatish Balay Mat_MPIDense *a = (Mat_MPIDense*)A->data; 186dfbe8321SBarry Smith PetscErrorCode ierr; 187ff14e315SSatish Balay 1883a40ed3dSBarry Smith PetscFunctionBegin; 189ff14e315SSatish Balay ierr = MatGetArray(a->A,array);CHKERRQ(ierr); 1903a40ed3dSBarry Smith PetscFunctionReturn(0); 191ff14e315SSatish Balay } 192ff14e315SSatish Balay 1934a2ae208SSatish Balay #undef __FUNCT__ 1944a2ae208SSatish Balay #define __FUNCT__ "MatGetSubMatrix_MPIDense" 1954aa3045dSJed Brown static PetscErrorCode MatGetSubMatrix_MPIDense(Mat A,IS isrow,IS iscol,MatReuse scall,Mat *B) 196ca3fa75bSLois Curfman McInnes { 197ca3fa75bSLois Curfman McInnes Mat_MPIDense *mat = (Mat_MPIDense*)A->data,*newmatd; 198ca3fa75bSLois Curfman McInnes Mat_SeqDense *lmat = (Mat_SeqDense*)mat->A->data; 1996849ba73SBarry Smith PetscErrorCode ierr; 2004aa3045dSJed Brown PetscInt i,j,rstart,rend,nrows,ncols,Ncols,nlrows,nlcols; 2015d0c19d7SBarry Smith const PetscInt *irow,*icol; 20287828ca2SBarry Smith PetscScalar *av,*bv,*v = lmat->v; 203ca3fa75bSLois Curfman McInnes Mat newmat; 2044aa3045dSJed Brown IS iscol_local; 205ca3fa75bSLois Curfman McInnes 206ca3fa75bSLois Curfman McInnes PetscFunctionBegin; 2074aa3045dSJed Brown ierr = ISAllGather(iscol,&iscol_local);CHKERRQ(ierr); 208ca3fa75bSLois Curfman McInnes ierr = ISGetIndices(isrow,&irow);CHKERRQ(ierr); 2094aa3045dSJed Brown ierr = ISGetIndices(iscol_local,&icol);CHKERRQ(ierr); 210b9b97703SBarry Smith ierr = ISGetLocalSize(isrow,&nrows);CHKERRQ(ierr); 211b9b97703SBarry 
Smith ierr = ISGetLocalSize(iscol,&ncols);CHKERRQ(ierr); 2124aa3045dSJed Brown ierr = ISGetSize(iscol,&Ncols);CHKERRQ(ierr); /* global number of columns, size of iscol_local */ 213ca3fa75bSLois Curfman McInnes 214ca3fa75bSLois Curfman McInnes /* No parallel redistribution currently supported! Should really check each index set 2157eba5e9cSLois Curfman McInnes to comfirm that it is OK. ... Currently supports only submatrix same partitioning as 2167eba5e9cSLois Curfman McInnes original matrix! */ 217ca3fa75bSLois Curfman McInnes 218ca3fa75bSLois Curfman McInnes ierr = MatGetLocalSize(A,&nlrows,&nlcols);CHKERRQ(ierr); 2197eba5e9cSLois Curfman McInnes ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr); 220ca3fa75bSLois Curfman McInnes 221ca3fa75bSLois Curfman McInnes /* Check submatrix call */ 222ca3fa75bSLois Curfman McInnes if (scall == MAT_REUSE_MATRIX) { 223e32f2f54SBarry Smith /* SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Reused submatrix wrong size"); */ 2247eba5e9cSLois Curfman McInnes /* Really need to test rows and column sizes! 
*/ 225ca3fa75bSLois Curfman McInnes newmat = *B; 226ca3fa75bSLois Curfman McInnes } else { 227ca3fa75bSLois Curfman McInnes /* Create and fill new matrix */ 2287adad957SLisandro Dalcin ierr = MatCreate(((PetscObject)A)->comm,&newmat);CHKERRQ(ierr); 2294aa3045dSJed Brown ierr = MatSetSizes(newmat,nrows,ncols,PETSC_DECIDE,Ncols);CHKERRQ(ierr); 2307adad957SLisandro Dalcin ierr = MatSetType(newmat,((PetscObject)A)->type_name);CHKERRQ(ierr); 231878740d9SKris Buschelman ierr = MatMPIDenseSetPreallocation(newmat,PETSC_NULL);CHKERRQ(ierr); 232ca3fa75bSLois Curfman McInnes } 233ca3fa75bSLois Curfman McInnes 234ca3fa75bSLois Curfman McInnes /* Now extract the data pointers and do the copy, column at a time */ 235ca3fa75bSLois Curfman McInnes newmatd = (Mat_MPIDense*)newmat->data; 236ca3fa75bSLois Curfman McInnes bv = ((Mat_SeqDense *)newmatd->A->data)->v; 237ca3fa75bSLois Curfman McInnes 2384aa3045dSJed Brown for (i=0; i<Ncols; i++) { 23925a33276SHong Zhang av = v + ((Mat_SeqDense *)mat->A->data)->lda*icol[i]; 240ca3fa75bSLois Curfman McInnes for (j=0; j<nrows; j++) { 2417eba5e9cSLois Curfman McInnes *bv++ = av[irow[j] - rstart]; 242ca3fa75bSLois Curfman McInnes } 243ca3fa75bSLois Curfman McInnes } 244ca3fa75bSLois Curfman McInnes 245ca3fa75bSLois Curfman McInnes /* Assemble the matrices so that the correct flags are set */ 246ca3fa75bSLois Curfman McInnes ierr = MatAssemblyBegin(newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 247ca3fa75bSLois Curfman McInnes ierr = MatAssemblyEnd(newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 248ca3fa75bSLois Curfman McInnes 249ca3fa75bSLois Curfman McInnes /* Free work space */ 250ca3fa75bSLois Curfman McInnes ierr = ISRestoreIndices(isrow,&irow);CHKERRQ(ierr); 251ca3fa75bSLois Curfman McInnes ierr = ISRestoreIndices(iscol,&icol);CHKERRQ(ierr); 252ca3fa75bSLois Curfman McInnes *B = newmat; 253ca3fa75bSLois Curfman McInnes PetscFunctionReturn(0); 254ca3fa75bSLois Curfman McInnes } 255ca3fa75bSLois Curfman McInnes 2564a2ae208SSatish Balay #undef 
__FUNCT__ 2574a2ae208SSatish Balay #define __FUNCT__ "MatRestoreArray_MPIDense" 258dfbe8321SBarry Smith PetscErrorCode MatRestoreArray_MPIDense(Mat A,PetscScalar *array[]) 259ff14e315SSatish Balay { 2603a40ed3dSBarry Smith PetscFunctionBegin; 2613a40ed3dSBarry Smith PetscFunctionReturn(0); 262ff14e315SSatish Balay } 263ff14e315SSatish Balay 2644a2ae208SSatish Balay #undef __FUNCT__ 2654a2ae208SSatish Balay #define __FUNCT__ "MatAssemblyBegin_MPIDense" 266dfbe8321SBarry Smith PetscErrorCode MatAssemblyBegin_MPIDense(Mat mat,MatAssemblyType mode) 2678965ea79SLois Curfman McInnes { 26839ddd567SLois Curfman McInnes Mat_MPIDense *mdn = (Mat_MPIDense*)mat->data; 2697adad957SLisandro Dalcin MPI_Comm comm = ((PetscObject)mat)->comm; 270dfbe8321SBarry Smith PetscErrorCode ierr; 27113f74950SBarry Smith PetscInt nstash,reallocs; 2728965ea79SLois Curfman McInnes InsertMode addv; 2738965ea79SLois Curfman McInnes 2743a40ed3dSBarry Smith PetscFunctionBegin; 2758965ea79SLois Curfman McInnes /* make sure all processors are either in INSERTMODE or ADDMODE */ 276ca161407SBarry Smith ierr = MPI_Allreduce(&mat->insertmode,&addv,1,MPI_INT,MPI_BOR,comm);CHKERRQ(ierr); 277e7e72b3dSBarry Smith if (addv == (ADD_VALUES|INSERT_VALUES)) SETERRQ(((PetscObject)mat)->comm,PETSC_ERR_ARG_WRONGSTATE,"Cannot mix adds/inserts on different procs"); 278e0fa3b82SLois Curfman McInnes mat->insertmode = addv; /* in case this processor had no cache */ 2798965ea79SLois Curfman McInnes 280d0f46423SBarry Smith ierr = MatStashScatterBegin_Private(mat,&mat->stash,mat->rmap->range);CHKERRQ(ierr); 2818798bf22SSatish Balay ierr = MatStashGetInfo_Private(&mat->stash,&nstash,&reallocs);CHKERRQ(ierr); 282ae15b995SBarry Smith ierr = PetscInfo2(mdn->A,"Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr); 2833a40ed3dSBarry Smith PetscFunctionReturn(0); 2848965ea79SLois Curfman McInnes } 2858965ea79SLois Curfman McInnes 2864a2ae208SSatish Balay #undef __FUNCT__ 2874a2ae208SSatish Balay #define 
__FUNCT__ "MatAssemblyEnd_MPIDense" 288dfbe8321SBarry Smith PetscErrorCode MatAssemblyEnd_MPIDense(Mat mat,MatAssemblyType mode) 2898965ea79SLois Curfman McInnes { 29039ddd567SLois Curfman McInnes Mat_MPIDense *mdn=(Mat_MPIDense*)mat->data; 2916849ba73SBarry Smith PetscErrorCode ierr; 29213f74950SBarry Smith PetscInt i,*row,*col,flg,j,rstart,ncols; 29313f74950SBarry Smith PetscMPIInt n; 29487828ca2SBarry Smith PetscScalar *val; 295e0fa3b82SLois Curfman McInnes InsertMode addv=mat->insertmode; 2968965ea79SLois Curfman McInnes 2973a40ed3dSBarry Smith PetscFunctionBegin; 2988965ea79SLois Curfman McInnes /* wait on receives */ 2997ef1d9bdSSatish Balay while (1) { 3008798bf22SSatish Balay ierr = MatStashScatterGetMesg_Private(&mat->stash,&n,&row,&col,&val,&flg);CHKERRQ(ierr); 3017ef1d9bdSSatish Balay if (!flg) break; 3028965ea79SLois Curfman McInnes 3037ef1d9bdSSatish Balay for (i=0; i<n;) { 3047ef1d9bdSSatish Balay /* Now identify the consecutive vals belonging to the same row */ 3057ef1d9bdSSatish Balay for (j=i,rstart=row[j]; j<n; j++) { if (row[j] != rstart) break; } 3067ef1d9bdSSatish Balay if (j < n) ncols = j-i; 3077ef1d9bdSSatish Balay else ncols = n-i; 3087ef1d9bdSSatish Balay /* Now assemble all these values with a single function call */ 3097ef1d9bdSSatish Balay ierr = MatSetValues_MPIDense(mat,1,row+i,ncols,col+i,val+i,addv);CHKERRQ(ierr); 3107ef1d9bdSSatish Balay i = j; 3118965ea79SLois Curfman McInnes } 3127ef1d9bdSSatish Balay } 3138798bf22SSatish Balay ierr = MatStashScatterEnd_Private(&mat->stash);CHKERRQ(ierr); 3148965ea79SLois Curfman McInnes 31539ddd567SLois Curfman McInnes ierr = MatAssemblyBegin(mdn->A,mode);CHKERRQ(ierr); 31639ddd567SLois Curfman McInnes ierr = MatAssemblyEnd(mdn->A,mode);CHKERRQ(ierr); 3178965ea79SLois Curfman McInnes 3186d4a8577SBarry Smith if (!mat->was_assembled && mode == MAT_FINAL_ASSEMBLY) { 31939ddd567SLois Curfman McInnes ierr = MatSetUpMultiply_MPIDense(mat);CHKERRQ(ierr); 3208965ea79SLois Curfman McInnes } 
3213a40ed3dSBarry Smith PetscFunctionReturn(0); 3228965ea79SLois Curfman McInnes } 3238965ea79SLois Curfman McInnes 3244a2ae208SSatish Balay #undef __FUNCT__ 3254a2ae208SSatish Balay #define __FUNCT__ "MatZeroEntries_MPIDense" 326dfbe8321SBarry Smith PetscErrorCode MatZeroEntries_MPIDense(Mat A) 3278965ea79SLois Curfman McInnes { 328dfbe8321SBarry Smith PetscErrorCode ierr; 32939ddd567SLois Curfman McInnes Mat_MPIDense *l = (Mat_MPIDense*)A->data; 3303a40ed3dSBarry Smith 3313a40ed3dSBarry Smith PetscFunctionBegin; 3323a40ed3dSBarry Smith ierr = MatZeroEntries(l->A);CHKERRQ(ierr); 3333a40ed3dSBarry Smith PetscFunctionReturn(0); 3348965ea79SLois Curfman McInnes } 3358965ea79SLois Curfman McInnes 3368965ea79SLois Curfman McInnes /* the code does not do the diagonal entries correctly unless the 3378965ea79SLois Curfman McInnes matrix is square and the column and row owerships are identical. 3388965ea79SLois Curfman McInnes This is a BUG. The only way to fix it seems to be to access 3393501a2bdSLois Curfman McInnes mdn->A and mdn->B directly and not through the MatZeroRows() 3408965ea79SLois Curfman McInnes routine. 
3418965ea79SLois Curfman McInnes */ 3424a2ae208SSatish Balay #undef __FUNCT__ 3434a2ae208SSatish Balay #define __FUNCT__ "MatZeroRows_MPIDense" 344*2b40b63fSBarry Smith PetscErrorCode MatZeroRows_MPIDense(Mat A,PetscInt N,const PetscInt rows[],PetscScalar diag,Vec x,Vec b) 3458965ea79SLois Curfman McInnes { 34639ddd567SLois Curfman McInnes Mat_MPIDense *l = (Mat_MPIDense*)A->data; 3476849ba73SBarry Smith PetscErrorCode ierr; 348d0f46423SBarry Smith PetscInt i,*owners = A->rmap->range; 34913f74950SBarry Smith PetscInt *nprocs,j,idx,nsends; 35013f74950SBarry Smith PetscInt nmax,*svalues,*starts,*owner,nrecvs; 3517adad957SLisandro Dalcin PetscInt *rvalues,tag = ((PetscObject)A)->tag,count,base,slen,*source; 35213f74950SBarry Smith PetscInt *lens,*lrows,*values; 35313f74950SBarry Smith PetscMPIInt n,imdex,rank = l->rank,size = l->size; 3547adad957SLisandro Dalcin MPI_Comm comm = ((PetscObject)A)->comm; 3558965ea79SLois Curfman McInnes MPI_Request *send_waits,*recv_waits; 3568965ea79SLois Curfman McInnes MPI_Status recv_status,*send_status; 357ace3abfcSBarry Smith PetscBool found; 3588965ea79SLois Curfman McInnes 3593a40ed3dSBarry Smith PetscFunctionBegin; 3608965ea79SLois Curfman McInnes /* first count number of contributors to each processor */ 36113f74950SBarry Smith ierr = PetscMalloc(2*size*sizeof(PetscInt),&nprocs);CHKERRQ(ierr); 36213f74950SBarry Smith ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr); 36313f74950SBarry Smith ierr = PetscMalloc((N+1)*sizeof(PetscInt),&owner);CHKERRQ(ierr); /* see note*/ 3648965ea79SLois Curfman McInnes for (i=0; i<N; i++) { 3658965ea79SLois Curfman McInnes idx = rows[i]; 36635d8aa7fSBarry Smith found = PETSC_FALSE; 3678965ea79SLois Curfman McInnes for (j=0; j<size; j++) { 3688965ea79SLois Curfman McInnes if (idx >= owners[j] && idx < owners[j+1]) { 369c1dc657dSBarry Smith nprocs[2*j]++; nprocs[2*j+1] = 1; owner[i] = j; found = PETSC_TRUE; break; 3708965ea79SLois Curfman McInnes } 3718965ea79SLois Curfman McInnes } 
372e32f2f54SBarry Smith if (!found) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Index out of range"); 3738965ea79SLois Curfman McInnes } 374c1dc657dSBarry Smith nsends = 0; for (i=0; i<size; i++) { nsends += nprocs[2*i+1];} 3758965ea79SLois Curfman McInnes 3768965ea79SLois Curfman McInnes /* inform other processors of number of messages and max length*/ 377c1dc657dSBarry Smith ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr); 3788965ea79SLois Curfman McInnes 3798965ea79SLois Curfman McInnes /* post receives: */ 38013f74950SBarry Smith ierr = PetscMalloc((nrecvs+1)*(nmax+1)*sizeof(PetscInt),&rvalues);CHKERRQ(ierr); 381b0a32e0cSBarry Smith ierr = PetscMalloc((nrecvs+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr); 3828965ea79SLois Curfman McInnes for (i=0; i<nrecvs; i++) { 38313f74950SBarry Smith ierr = MPI_Irecv(rvalues+nmax*i,nmax,MPIU_INT,MPI_ANY_SOURCE,tag,comm,recv_waits+i);CHKERRQ(ierr); 3848965ea79SLois Curfman McInnes } 3858965ea79SLois Curfman McInnes 3868965ea79SLois Curfman McInnes /* do sends: 3878965ea79SLois Curfman McInnes 1) starts[i] gives the starting index in svalues for stuff going to 3888965ea79SLois Curfman McInnes the ith processor 3898965ea79SLois Curfman McInnes */ 39013f74950SBarry Smith ierr = PetscMalloc((N+1)*sizeof(PetscInt),&svalues);CHKERRQ(ierr); 391b0a32e0cSBarry Smith ierr = PetscMalloc((nsends+1)*sizeof(MPI_Request),&send_waits);CHKERRQ(ierr); 39213f74950SBarry Smith ierr = PetscMalloc((size+1)*sizeof(PetscInt),&starts);CHKERRQ(ierr); 3938965ea79SLois Curfman McInnes starts[0] = 0; 394c1dc657dSBarry Smith for (i=1; i<size; i++) { starts[i] = starts[i-1] + nprocs[2*i-2];} 3958965ea79SLois Curfman McInnes for (i=0; i<N; i++) { 3968965ea79SLois Curfman McInnes svalues[starts[owner[i]]++] = rows[i]; 3978965ea79SLois Curfman McInnes } 3988965ea79SLois Curfman McInnes 3998965ea79SLois Curfman McInnes starts[0] = 0; 400c1dc657dSBarry Smith for (i=1; i<size+1; i++) { starts[i] = starts[i-1] + nprocs[2*i-2];} 
4018965ea79SLois Curfman McInnes count = 0; 4028965ea79SLois Curfman McInnes for (i=0; i<size; i++) { 403c1dc657dSBarry Smith if (nprocs[2*i+1]) { 40413f74950SBarry Smith ierr = MPI_Isend(svalues+starts[i],nprocs[2*i],MPIU_INT,i,tag,comm,send_waits+count++);CHKERRQ(ierr); 4058965ea79SLois Curfman McInnes } 4068965ea79SLois Curfman McInnes } 407606d414cSSatish Balay ierr = PetscFree(starts);CHKERRQ(ierr); 4088965ea79SLois Curfman McInnes 4098965ea79SLois Curfman McInnes base = owners[rank]; 4108965ea79SLois Curfman McInnes 4118965ea79SLois Curfman McInnes /* wait on receives */ 41274ed9c26SBarry Smith ierr = PetscMalloc2(nrecvs,PetscInt,&lens,nrecvs,PetscInt,&source);CHKERRQ(ierr); 41374ed9c26SBarry Smith count = nrecvs; 41474ed9c26SBarry Smith slen = 0; 4158965ea79SLois Curfman McInnes while (count) { 416ca161407SBarry Smith ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr); 4178965ea79SLois Curfman McInnes /* unpack receives into our local space */ 41813f74950SBarry Smith ierr = MPI_Get_count(&recv_status,MPIU_INT,&n);CHKERRQ(ierr); 4198965ea79SLois Curfman McInnes source[imdex] = recv_status.MPI_SOURCE; 4208965ea79SLois Curfman McInnes lens[imdex] = n; 4218965ea79SLois Curfman McInnes slen += n; 4228965ea79SLois Curfman McInnes count--; 4238965ea79SLois Curfman McInnes } 424606d414cSSatish Balay ierr = PetscFree(recv_waits);CHKERRQ(ierr); 4258965ea79SLois Curfman McInnes 4268965ea79SLois Curfman McInnes /* move the data into the send scatter */ 42713f74950SBarry Smith ierr = PetscMalloc((slen+1)*sizeof(PetscInt),&lrows);CHKERRQ(ierr); 4288965ea79SLois Curfman McInnes count = 0; 4298965ea79SLois Curfman McInnes for (i=0; i<nrecvs; i++) { 4308965ea79SLois Curfman McInnes values = rvalues + i*nmax; 4318965ea79SLois Curfman McInnes for (j=0; j<lens[i]; j++) { 4328965ea79SLois Curfman McInnes lrows[count++] = values[j] - base; 4338965ea79SLois Curfman McInnes } 4348965ea79SLois Curfman McInnes } 435606d414cSSatish Balay ierr = 
PetscFree(rvalues);CHKERRQ(ierr); 43674ed9c26SBarry Smith ierr = PetscFree2(lens,source);CHKERRQ(ierr); 437606d414cSSatish Balay ierr = PetscFree(owner);CHKERRQ(ierr); 438606d414cSSatish Balay ierr = PetscFree(nprocs);CHKERRQ(ierr); 4398965ea79SLois Curfman McInnes 4408965ea79SLois Curfman McInnes /* actually zap the local rows */ 441*2b40b63fSBarry Smith ierr = MatZeroRows(l->A,slen,lrows,diag,0,0);CHKERRQ(ierr); 442606d414cSSatish Balay ierr = PetscFree(lrows);CHKERRQ(ierr); 4438965ea79SLois Curfman McInnes 4448965ea79SLois Curfman McInnes /* wait on sends */ 4458965ea79SLois Curfman McInnes if (nsends) { 446b0a32e0cSBarry Smith ierr = PetscMalloc(nsends*sizeof(MPI_Status),&send_status);CHKERRQ(ierr); 447ca161407SBarry Smith ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr); 448606d414cSSatish Balay ierr = PetscFree(send_status);CHKERRQ(ierr); 4498965ea79SLois Curfman McInnes } 450606d414cSSatish Balay ierr = PetscFree(send_waits);CHKERRQ(ierr); 451606d414cSSatish Balay ierr = PetscFree(svalues);CHKERRQ(ierr); 4528965ea79SLois Curfman McInnes 4533a40ed3dSBarry Smith PetscFunctionReturn(0); 4548965ea79SLois Curfman McInnes } 4558965ea79SLois Curfman McInnes 4564a2ae208SSatish Balay #undef __FUNCT__ 4574a2ae208SSatish Balay #define __FUNCT__ "MatMult_MPIDense" 458dfbe8321SBarry Smith PetscErrorCode MatMult_MPIDense(Mat mat,Vec xx,Vec yy) 4598965ea79SLois Curfman McInnes { 46039ddd567SLois Curfman McInnes Mat_MPIDense *mdn = (Mat_MPIDense*)mat->data; 461dfbe8321SBarry Smith PetscErrorCode ierr; 462c456f294SBarry Smith 4633a40ed3dSBarry Smith PetscFunctionBegin; 464ca9f406cSSatish Balay ierr = VecScatterBegin(mdn->Mvctx,xx,mdn->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 465ca9f406cSSatish Balay ierr = VecScatterEnd(mdn->Mvctx,xx,mdn->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 46644cd7ae7SLois Curfman McInnes ierr = MatMult_SeqDense(mdn->A,mdn->lvec,yy);CHKERRQ(ierr); 4673a40ed3dSBarry Smith PetscFunctionReturn(0); 4688965ea79SLois 
Curfman McInnes } 4698965ea79SLois Curfman McInnes 4704a2ae208SSatish Balay #undef __FUNCT__ 4714a2ae208SSatish Balay #define __FUNCT__ "MatMultAdd_MPIDense" 472dfbe8321SBarry Smith PetscErrorCode MatMultAdd_MPIDense(Mat mat,Vec xx,Vec yy,Vec zz) 4738965ea79SLois Curfman McInnes { 47439ddd567SLois Curfman McInnes Mat_MPIDense *mdn = (Mat_MPIDense*)mat->data; 475dfbe8321SBarry Smith PetscErrorCode ierr; 476c456f294SBarry Smith 4773a40ed3dSBarry Smith PetscFunctionBegin; 478ca9f406cSSatish Balay ierr = VecScatterBegin(mdn->Mvctx,xx,mdn->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 479ca9f406cSSatish Balay ierr = VecScatterEnd(mdn->Mvctx,xx,mdn->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 48044cd7ae7SLois Curfman McInnes ierr = MatMultAdd_SeqDense(mdn->A,mdn->lvec,yy,zz);CHKERRQ(ierr); 4813a40ed3dSBarry Smith PetscFunctionReturn(0); 4828965ea79SLois Curfman McInnes } 4838965ea79SLois Curfman McInnes 4844a2ae208SSatish Balay #undef __FUNCT__ 4854a2ae208SSatish Balay #define __FUNCT__ "MatMultTranspose_MPIDense" 486dfbe8321SBarry Smith PetscErrorCode MatMultTranspose_MPIDense(Mat A,Vec xx,Vec yy) 487096963f5SLois Curfman McInnes { 488096963f5SLois Curfman McInnes Mat_MPIDense *a = (Mat_MPIDense*)A->data; 489dfbe8321SBarry Smith PetscErrorCode ierr; 49087828ca2SBarry Smith PetscScalar zero = 0.0; 491096963f5SLois Curfman McInnes 4923a40ed3dSBarry Smith PetscFunctionBegin; 4932dcb1b2aSMatthew Knepley ierr = VecSet(yy,zero);CHKERRQ(ierr); 4947c922b88SBarry Smith ierr = MatMultTranspose_SeqDense(a->A,xx,a->lvec);CHKERRQ(ierr); 495ca9f406cSSatish Balay ierr = VecScatterBegin(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 496ca9f406cSSatish Balay ierr = VecScatterEnd(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 4973a40ed3dSBarry Smith PetscFunctionReturn(0); 498096963f5SLois Curfman McInnes } 499096963f5SLois Curfman McInnes 5004a2ae208SSatish Balay #undef __FUNCT__ 5014a2ae208SSatish Balay #define __FUNCT__ 
"MatMultTransposeAdd_MPIDense" 502dfbe8321SBarry Smith PetscErrorCode MatMultTransposeAdd_MPIDense(Mat A,Vec xx,Vec yy,Vec zz) 503096963f5SLois Curfman McInnes { 504096963f5SLois Curfman McInnes Mat_MPIDense *a = (Mat_MPIDense*)A->data; 505dfbe8321SBarry Smith PetscErrorCode ierr; 506096963f5SLois Curfman McInnes 5073a40ed3dSBarry Smith PetscFunctionBegin; 5083501a2bdSLois Curfman McInnes ierr = VecCopy(yy,zz);CHKERRQ(ierr); 5097c922b88SBarry Smith ierr = MatMultTranspose_SeqDense(a->A,xx,a->lvec);CHKERRQ(ierr); 510ca9f406cSSatish Balay ierr = VecScatterBegin(a->Mvctx,a->lvec,zz,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 511ca9f406cSSatish Balay ierr = VecScatterEnd(a->Mvctx,a->lvec,zz,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 5123a40ed3dSBarry Smith PetscFunctionReturn(0); 513096963f5SLois Curfman McInnes } 514096963f5SLois Curfman McInnes 5154a2ae208SSatish Balay #undef __FUNCT__ 5164a2ae208SSatish Balay #define __FUNCT__ "MatGetDiagonal_MPIDense" 517dfbe8321SBarry Smith PetscErrorCode MatGetDiagonal_MPIDense(Mat A,Vec v) 5188965ea79SLois Curfman McInnes { 51939ddd567SLois Curfman McInnes Mat_MPIDense *a = (Mat_MPIDense*)A->data; 520096963f5SLois Curfman McInnes Mat_SeqDense *aloc = (Mat_SeqDense*)a->A->data; 521dfbe8321SBarry Smith PetscErrorCode ierr; 522d0f46423SBarry Smith PetscInt len,i,n,m = A->rmap->n,radd; 52387828ca2SBarry Smith PetscScalar *x,zero = 0.0; 524ed3cc1f0SBarry Smith 5253a40ed3dSBarry Smith PetscFunctionBegin; 5262dcb1b2aSMatthew Knepley ierr = VecSet(v,zero);CHKERRQ(ierr); 5271ebc52fbSHong Zhang ierr = VecGetArray(v,&x);CHKERRQ(ierr); 528096963f5SLois Curfman McInnes ierr = VecGetSize(v,&n);CHKERRQ(ierr); 529e32f2f54SBarry Smith if (n != A->rmap->N) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Nonconforming mat and vec"); 530d0f46423SBarry Smith len = PetscMin(a->A->rmap->n,a->A->cmap->n); 531d0f46423SBarry Smith radd = A->rmap->rstart*m; 53244cd7ae7SLois Curfman McInnes for (i=0; i<len; i++) { 533096963f5SLois Curfman McInnes x[i] = 
aloc->v[radd + i*m + i]; 534096963f5SLois Curfman McInnes } 5351ebc52fbSHong Zhang ierr = VecRestoreArray(v,&x);CHKERRQ(ierr); 5363a40ed3dSBarry Smith PetscFunctionReturn(0); 5378965ea79SLois Curfman McInnes } 5388965ea79SLois Curfman McInnes 5394a2ae208SSatish Balay #undef __FUNCT__ 5404a2ae208SSatish Balay #define __FUNCT__ "MatDestroy_MPIDense" 541dfbe8321SBarry Smith PetscErrorCode MatDestroy_MPIDense(Mat mat) 5428965ea79SLois Curfman McInnes { 5433501a2bdSLois Curfman McInnes Mat_MPIDense *mdn = (Mat_MPIDense*)mat->data; 544dfbe8321SBarry Smith PetscErrorCode ierr; 54501b82886SBarry Smith #if defined(PETSC_HAVE_PLAPACK) 54601b82886SBarry Smith Mat_Plapack *lu=(Mat_Plapack*)(mat->spptr); 54701b82886SBarry Smith #endif 548ed3cc1f0SBarry Smith 5493a40ed3dSBarry Smith PetscFunctionBegin; 55094d884c6SBarry Smith 551aa482453SBarry Smith #if defined(PETSC_USE_LOG) 552d0f46423SBarry Smith PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D",mat->rmap->N,mat->cmap->N); 5538965ea79SLois Curfman McInnes #endif 5548798bf22SSatish Balay ierr = MatStashDestroy_Private(&mat->stash);CHKERRQ(ierr); 5553501a2bdSLois Curfman McInnes ierr = MatDestroy(mdn->A);CHKERRQ(ierr); 55605b42c5fSBarry Smith if (mdn->lvec) {ierr = VecDestroy(mdn->lvec);CHKERRQ(ierr);} 55705b42c5fSBarry Smith if (mdn->Mvctx) {ierr = VecScatterDestroy(mdn->Mvctx);CHKERRQ(ierr);} 55801b82886SBarry Smith #if defined(PETSC_HAVE_PLAPACK) 5592fbe02b9SBarry Smith if (lu) { 56062b4c0b3SBarry Smith ierr = PLA_Obj_free(&lu->A);CHKERRQ(ierr); 56162b4c0b3SBarry Smith ierr = PLA_Obj_free (&lu->pivots);CHKERRQ(ierr); 56262b4c0b3SBarry Smith ierr = PLA_Temp_free(&lu->templ);CHKERRQ(ierr); 56309d27a7eSBarry Smith 5642fbe02b9SBarry Smith if (lu->is_pla) { 56501b82886SBarry Smith ierr = ISDestroy(lu->is_pla);CHKERRQ(ierr); 56601b82886SBarry Smith ierr = ISDestroy(lu->is_petsc);CHKERRQ(ierr); 56701b82886SBarry Smith ierr = VecScatterDestroy(lu->ctx);CHKERRQ(ierr); 568622d7880SLois Curfman McInnes } 5692fbe02b9SBarry Smith } 
57001b82886SBarry Smith #endif 57101b82886SBarry Smith 572606d414cSSatish Balay ierr = PetscFree(mdn);CHKERRQ(ierr); 573dbd8c25aSHong Zhang ierr = PetscObjectChangeTypeName((PetscObject)mat,0);CHKERRQ(ierr); 574901853e0SKris Buschelman ierr = PetscObjectComposeFunctionDynamic((PetscObject)mat,"MatGetDiagonalBlock_C","",PETSC_NULL);CHKERRQ(ierr); 575901853e0SKris Buschelman ierr = PetscObjectComposeFunctionDynamic((PetscObject)mat,"MatMPIDenseSetPreallocation_C","",PETSC_NULL);CHKERRQ(ierr); 5764ae313f4SHong Zhang ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMatMult_mpiaij_mpidense_C","",PETSC_NULL);CHKERRQ(ierr); 5774ae313f4SHong Zhang ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMatMultSymbolic_mpiaij_mpidense_C","",PETSC_NULL);CHKERRQ(ierr); 5784ae313f4SHong Zhang ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMatMultNumeric_mpiaij_mpidense_C","",PETSC_NULL);CHKERRQ(ierr); 5793a40ed3dSBarry Smith PetscFunctionReturn(0); 5808965ea79SLois Curfman McInnes } 58139ddd567SLois Curfman McInnes 5824a2ae208SSatish Balay #undef __FUNCT__ 5834a2ae208SSatish Balay #define __FUNCT__ "MatView_MPIDense_Binary" 5846849ba73SBarry Smith static PetscErrorCode MatView_MPIDense_Binary(Mat mat,PetscViewer viewer) 5858965ea79SLois Curfman McInnes { 58639ddd567SLois Curfman McInnes Mat_MPIDense *mdn = (Mat_MPIDense*)mat->data; 587dfbe8321SBarry Smith PetscErrorCode ierr; 588aa05aa95SBarry Smith PetscViewerFormat format; 589aa05aa95SBarry Smith int fd; 590d0f46423SBarry Smith PetscInt header[4],mmax,N = mat->cmap->N,i,j,m,k; 591aa05aa95SBarry Smith PetscMPIInt rank,tag = ((PetscObject)viewer)->tag,size; 592578230a0SSatish Balay PetscScalar *work,*v,*vv; 593aa05aa95SBarry Smith Mat_SeqDense *a = (Mat_SeqDense*)mdn->A->data; 594aa05aa95SBarry Smith MPI_Status status; 5957056b6fcSBarry Smith 5963a40ed3dSBarry Smith PetscFunctionBegin; 59739ddd567SLois Curfman McInnes if (mdn->size == 1) { 59839ddd567SLois Curfman McInnes ierr = 
MatView(mdn->A,viewer);CHKERRQ(ierr); 599aa05aa95SBarry Smith } else { 600aa05aa95SBarry Smith ierr = PetscViewerBinaryGetDescriptor(viewer,&fd);CHKERRQ(ierr); 6017adad957SLisandro Dalcin ierr = MPI_Comm_rank(((PetscObject)mat)->comm,&rank);CHKERRQ(ierr); 6027adad957SLisandro Dalcin ierr = MPI_Comm_size(((PetscObject)mat)->comm,&size);CHKERRQ(ierr); 603aa05aa95SBarry Smith 604aa05aa95SBarry Smith ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr); 605f4403165SShri Abhyankar if (format == PETSC_VIEWER_NATIVE) { 606aa05aa95SBarry Smith 607aa05aa95SBarry Smith if (!rank) { 608aa05aa95SBarry Smith /* store the matrix as a dense matrix */ 6090700a824SBarry Smith header[0] = MAT_FILE_CLASSID; 610d0f46423SBarry Smith header[1] = mat->rmap->N; 611aa05aa95SBarry Smith header[2] = N; 612aa05aa95SBarry Smith header[3] = MATRIX_BINARY_FORMAT_DENSE; 613aa05aa95SBarry Smith ierr = PetscBinaryWrite(fd,header,4,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr); 614aa05aa95SBarry Smith 615aa05aa95SBarry Smith /* get largest work array needed for transposing array */ 616d0f46423SBarry Smith mmax = mat->rmap->n; 617aa05aa95SBarry Smith for (i=1; i<size; i++) { 618d0f46423SBarry Smith mmax = PetscMax(mmax,mat->rmap->range[i+1] - mat->rmap->range[i]); 6198965ea79SLois Curfman McInnes } 620aa05aa95SBarry Smith ierr = PetscMalloc(mmax*N*sizeof(PetscScalar),&work);CHKERRQ(ierr); 621aa05aa95SBarry Smith 622aa05aa95SBarry Smith /* write out local array, by rows */ 623d0f46423SBarry Smith m = mat->rmap->n; 624aa05aa95SBarry Smith v = a->v; 625aa05aa95SBarry Smith for (j=0; j<N; j++) { 626aa05aa95SBarry Smith for (i=0; i<m; i++) { 627578230a0SSatish Balay work[j + i*N] = *v++; 628aa05aa95SBarry Smith } 629aa05aa95SBarry Smith } 630aa05aa95SBarry Smith ierr = PetscBinaryWrite(fd,work,m*N,PETSC_SCALAR,PETSC_FALSE);CHKERRQ(ierr); 631aa05aa95SBarry Smith /* get largest work array to receive messages from other processes, excludes process zero */ 632aa05aa95SBarry Smith mmax = 0; 633aa05aa95SBarry Smith 
for (i=1; i<size; i++) { 634d0f46423SBarry Smith mmax = PetscMax(mmax,mat->rmap->range[i+1] - mat->rmap->range[i]); 635aa05aa95SBarry Smith } 636578230a0SSatish Balay ierr = PetscMalloc(mmax*N*sizeof(PetscScalar),&vv);CHKERRQ(ierr); 637aa05aa95SBarry Smith for(k = 1; k < size; k++) { 638f8009846SMatthew Knepley v = vv; 639d0f46423SBarry Smith m = mat->rmap->range[k+1] - mat->rmap->range[k]; 6407adad957SLisandro Dalcin ierr = MPI_Recv(v,m*N,MPIU_SCALAR,k,tag,((PetscObject)mat)->comm,&status);CHKERRQ(ierr); 641aa05aa95SBarry Smith 642aa05aa95SBarry Smith for(j = 0; j < N; j++) { 643aa05aa95SBarry Smith for(i = 0; i < m; i++) { 644578230a0SSatish Balay work[j + i*N] = *v++; 645aa05aa95SBarry Smith } 646aa05aa95SBarry Smith } 647aa05aa95SBarry Smith ierr = PetscBinaryWrite(fd,work,m*N,PETSC_SCALAR,PETSC_FALSE);CHKERRQ(ierr); 648aa05aa95SBarry Smith } 649aa05aa95SBarry Smith ierr = PetscFree(work);CHKERRQ(ierr); 650578230a0SSatish Balay ierr = PetscFree(vv);CHKERRQ(ierr); 651aa05aa95SBarry Smith } else { 652d0f46423SBarry Smith ierr = MPI_Send(a->v,mat->rmap->n*mat->cmap->N,MPIU_SCALAR,0,tag,((PetscObject)mat)->comm);CHKERRQ(ierr); 653aa05aa95SBarry Smith } 654f4403165SShri Abhyankar } else { 655ea2e366bSHong Zhang SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"To store a parallel dense matrix you must first call PetscViewerSetFormat(viewer,PETSC_VIEWER_NATIVE)"); 656f4403165SShri Abhyankar } 657aa05aa95SBarry Smith } 6583a40ed3dSBarry Smith PetscFunctionReturn(0); 6598965ea79SLois Curfman McInnes } 6608965ea79SLois Curfman McInnes 6614a2ae208SSatish Balay #undef __FUNCT__ 6624a2ae208SSatish Balay #define __FUNCT__ "MatView_MPIDense_ASCIIorDraworSocket" 6636849ba73SBarry Smith static PetscErrorCode MatView_MPIDense_ASCIIorDraworSocket(Mat mat,PetscViewer viewer) 6648965ea79SLois Curfman McInnes { 66539ddd567SLois Curfman McInnes Mat_MPIDense *mdn = (Mat_MPIDense*)mat->data; 666dfbe8321SBarry Smith PetscErrorCode ierr; 66732dcc486SBarry Smith PetscMPIInt size = mdn->size,rank = 
mdn->rank; 668a313700dSBarry Smith const PetscViewerType vtype; 669ace3abfcSBarry Smith PetscBool iascii,isdraw; 670b0a32e0cSBarry Smith PetscViewer sviewer; 671f3ef73ceSBarry Smith PetscViewerFormat format; 67201b82886SBarry Smith #if defined(PETSC_HAVE_PLAPACK) 6735d00a290SHong Zhang Mat_Plapack *lu; 67401b82886SBarry Smith #endif 6758965ea79SLois Curfman McInnes 6763a40ed3dSBarry Smith PetscFunctionBegin; 6772692d6eeSBarry Smith ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr); 6782692d6eeSBarry Smith ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr); 67932077d6dSBarry Smith if (iascii) { 680b0a32e0cSBarry Smith ierr = PetscViewerGetType(viewer,&vtype);CHKERRQ(ierr); 681b0a32e0cSBarry Smith ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr); 682456192e2SBarry Smith if (format == PETSC_VIEWER_ASCII_INFO_DETAIL) { 6834e220ebcSLois Curfman McInnes MatInfo info; 684888f2ed8SSatish Balay ierr = MatGetInfo(mat,MAT_LOCAL,&info);CHKERRQ(ierr); 685d0f46423SBarry Smith ierr = PetscViewerASCIISynchronizedPrintf(viewer," [%d] local rows %D nz %D nz alloced %D mem %D \n",rank,mat->rmap->n, 68677431f27SBarry Smith (PetscInt)info.nz_used,(PetscInt)info.nz_allocated,(PetscInt)info.memory);CHKERRQ(ierr); 687b0a32e0cSBarry Smith ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 68801b82886SBarry Smith #if defined(PETSC_HAVE_PLAPACK) 68901b82886SBarry Smith ierr = PetscViewerASCIIPrintf(viewer,"PLAPACK run parameters:\n");CHKERRQ(ierr); 6905d00a290SHong Zhang ierr = PetscViewerASCIIPrintf(viewer," Processor mesh: nprows %d, npcols %d\n",Plapack_nprows, Plapack_npcols);CHKERRQ(ierr); 6915d00a290SHong Zhang ierr = PetscViewerASCIIPrintf(viewer," Error checking: %d\n",Plapack_ierror);CHKERRQ(ierr); 6925d00a290SHong Zhang ierr = PetscViewerASCIIPrintf(viewer," Algorithmic block size: %d\n",Plapack_nb_alg);CHKERRQ(ierr); 693d5f3da31SBarry Smith if (mat->factortype){ 6945d00a290SHong Zhang 
lu=(Mat_Plapack*)(mat->spptr); 69501b82886SBarry Smith ierr = PetscViewerASCIIPrintf(viewer," Distr. block size nb: %d \n",lu->nb);CHKERRQ(ierr); 6965d00a290SHong Zhang } 6975d00a290SHong Zhang #else 6985d00a290SHong Zhang ierr = VecScatterView(mdn->Mvctx,viewer);CHKERRQ(ierr); 69901b82886SBarry Smith #endif 7003a40ed3dSBarry Smith PetscFunctionReturn(0); 701fb9695e5SSatish Balay } else if (format == PETSC_VIEWER_ASCII_INFO) { 7023a40ed3dSBarry Smith PetscFunctionReturn(0); 7038965ea79SLois Curfman McInnes } 704f1af5d2fSBarry Smith } else if (isdraw) { 705b0a32e0cSBarry Smith PetscDraw draw; 706ace3abfcSBarry Smith PetscBool isnull; 707f1af5d2fSBarry Smith 708b0a32e0cSBarry Smith ierr = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr); 709b0a32e0cSBarry Smith ierr = PetscDrawIsNull(draw,&isnull);CHKERRQ(ierr); 710f1af5d2fSBarry Smith if (isnull) PetscFunctionReturn(0); 711f1af5d2fSBarry Smith } 71277ed5343SBarry Smith 7138965ea79SLois Curfman McInnes if (size == 1) { 71439ddd567SLois Curfman McInnes ierr = MatView(mdn->A,viewer);CHKERRQ(ierr); 7153a40ed3dSBarry Smith } else { 7168965ea79SLois Curfman McInnes /* assemble the entire matrix onto first processor. */ 7178965ea79SLois Curfman McInnes Mat A; 718d0f46423SBarry Smith PetscInt M = mat->rmap->N,N = mat->cmap->N,m,row,i,nz; 719ba8c8a56SBarry Smith PetscInt *cols; 720ba8c8a56SBarry Smith PetscScalar *vals; 7218965ea79SLois Curfman McInnes 7227adad957SLisandro Dalcin ierr = MatCreate(((PetscObject)mat)->comm,&A);CHKERRQ(ierr); 7238965ea79SLois Curfman McInnes if (!rank) { 724f69a0ea3SMatthew Knepley ierr = MatSetSizes(A,M,N,M,N);CHKERRQ(ierr); 7253a40ed3dSBarry Smith } else { 726f69a0ea3SMatthew Knepley ierr = MatSetSizes(A,0,0,M,N);CHKERRQ(ierr); 7278965ea79SLois Curfman McInnes } 7287adad957SLisandro Dalcin /* Since this is a temporary matrix, MATMPIDENSE instead of ((PetscObject)A)->type_name here is probably acceptable. 
*/ 729878740d9SKris Buschelman ierr = MatSetType(A,MATMPIDENSE);CHKERRQ(ierr); 730878740d9SKris Buschelman ierr = MatMPIDenseSetPreallocation(A,PETSC_NULL); 73152e6d16bSBarry Smith ierr = PetscLogObjectParent(mat,A);CHKERRQ(ierr); 7328965ea79SLois Curfman McInnes 73339ddd567SLois Curfman McInnes /* Copy the matrix ... This isn't the most efficient means, 73439ddd567SLois Curfman McInnes but it's quick for now */ 73551022da4SBarry Smith A->insertmode = INSERT_VALUES; 736d0f46423SBarry Smith row = mat->rmap->rstart; m = mdn->A->rmap->n; 7378965ea79SLois Curfman McInnes for (i=0; i<m; i++) { 738ba8c8a56SBarry Smith ierr = MatGetRow_MPIDense(mat,row,&nz,&cols,&vals);CHKERRQ(ierr); 739ba8c8a56SBarry Smith ierr = MatSetValues_MPIDense(A,1,&row,nz,cols,vals,INSERT_VALUES);CHKERRQ(ierr); 740ba8c8a56SBarry Smith ierr = MatRestoreRow_MPIDense(mat,row,&nz,&cols,&vals);CHKERRQ(ierr); 74139ddd567SLois Curfman McInnes row++; 7428965ea79SLois Curfman McInnes } 7438965ea79SLois Curfman McInnes 7446d4a8577SBarry Smith ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 7456d4a8577SBarry Smith ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 746b0a32e0cSBarry Smith ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr); 747b9b97703SBarry Smith if (!rank) { 7487566de4bSShri Abhyankar ierr = PetscObjectSetName((PetscObject)((Mat_MPIDense*)(A->data))->A,((PetscObject)mat)->name);CHKERRQ(ierr); 7497566de4bSShri Abhyankar /* Set the type name to MATMPIDense so that the correct type can be printed out by PetscObjectPrintClassNamePrefixType() in MatView_SeqDense_ASCII()*/ 7507566de4bSShri Abhyankar PetscStrcpy(((PetscObject)((Mat_MPIDense*)(A->data))->A)->type_name,MATMPIDENSE); 7516831982aSBarry Smith ierr = MatView(((Mat_MPIDense*)(A->data))->A,sviewer);CHKERRQ(ierr); 7528965ea79SLois Curfman McInnes } 753b0a32e0cSBarry Smith ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr); 754b0a32e0cSBarry Smith ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 
7558965ea79SLois Curfman McInnes ierr = MatDestroy(A);CHKERRQ(ierr); 7568965ea79SLois Curfman McInnes } 7573a40ed3dSBarry Smith PetscFunctionReturn(0); 7588965ea79SLois Curfman McInnes } 7598965ea79SLois Curfman McInnes 7604a2ae208SSatish Balay #undef __FUNCT__ 7614a2ae208SSatish Balay #define __FUNCT__ "MatView_MPIDense" 762dfbe8321SBarry Smith PetscErrorCode MatView_MPIDense(Mat mat,PetscViewer viewer) 7638965ea79SLois Curfman McInnes { 764dfbe8321SBarry Smith PetscErrorCode ierr; 765ace3abfcSBarry Smith PetscBool iascii,isbinary,isdraw,issocket; 7668965ea79SLois Curfman McInnes 767433994e6SBarry Smith PetscFunctionBegin; 7680f5bd95cSBarry Smith 7692692d6eeSBarry Smith ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr); 7702692d6eeSBarry Smith ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr); 7712692d6eeSBarry Smith ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERSOCKET,&issocket);CHKERRQ(ierr); 7722692d6eeSBarry Smith ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr); 7730f5bd95cSBarry Smith 77432077d6dSBarry Smith if (iascii || issocket || isdraw) { 775f1af5d2fSBarry Smith ierr = MatView_MPIDense_ASCIIorDraworSocket(mat,viewer);CHKERRQ(ierr); 7760f5bd95cSBarry Smith } else if (isbinary) { 7773a40ed3dSBarry Smith ierr = MatView_MPIDense_Binary(mat,viewer);CHKERRQ(ierr); 7785cd90555SBarry Smith } else { 779e32f2f54SBarry Smith SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Viewer type %s not supported by MPI dense matrix",((PetscObject)viewer)->type_name); 7808965ea79SLois Curfman McInnes } 7813a40ed3dSBarry Smith PetscFunctionReturn(0); 7828965ea79SLois Curfman McInnes } 7838965ea79SLois Curfman McInnes 7844a2ae208SSatish Balay #undef __FUNCT__ 7854a2ae208SSatish Balay #define __FUNCT__ "MatGetInfo_MPIDense" 786dfbe8321SBarry Smith PetscErrorCode MatGetInfo_MPIDense(Mat A,MatInfoType flag,MatInfo *info) 7878965ea79SLois Curfman McInnes { 7883501a2bdSLois 
Curfman McInnes Mat_MPIDense *mat = (Mat_MPIDense*)A->data; 7893501a2bdSLois Curfman McInnes Mat mdn = mat->A; 790dfbe8321SBarry Smith PetscErrorCode ierr; 791329f5518SBarry Smith PetscReal isend[5],irecv[5]; 7928965ea79SLois Curfman McInnes 7933a40ed3dSBarry Smith PetscFunctionBegin; 7944e220ebcSLois Curfman McInnes info->block_size = 1.0; 7954e220ebcSLois Curfman McInnes ierr = MatGetInfo(mdn,MAT_LOCAL,info);CHKERRQ(ierr); 7964e220ebcSLois Curfman McInnes isend[0] = info->nz_used; isend[1] = info->nz_allocated; isend[2] = info->nz_unneeded; 7974e220ebcSLois Curfman McInnes isend[3] = info->memory; isend[4] = info->mallocs; 7988965ea79SLois Curfman McInnes if (flag == MAT_LOCAL) { 7994e220ebcSLois Curfman McInnes info->nz_used = isend[0]; 8004e220ebcSLois Curfman McInnes info->nz_allocated = isend[1]; 8014e220ebcSLois Curfman McInnes info->nz_unneeded = isend[2]; 8024e220ebcSLois Curfman McInnes info->memory = isend[3]; 8034e220ebcSLois Curfman McInnes info->mallocs = isend[4]; 8048965ea79SLois Curfman McInnes } else if (flag == MAT_GLOBAL_MAX) { 8057adad957SLisandro Dalcin ierr = MPI_Allreduce(isend,irecv,5,MPIU_REAL,MPI_MAX,((PetscObject)A)->comm);CHKERRQ(ierr); 8064e220ebcSLois Curfman McInnes info->nz_used = irecv[0]; 8074e220ebcSLois Curfman McInnes info->nz_allocated = irecv[1]; 8084e220ebcSLois Curfman McInnes info->nz_unneeded = irecv[2]; 8094e220ebcSLois Curfman McInnes info->memory = irecv[3]; 8104e220ebcSLois Curfman McInnes info->mallocs = irecv[4]; 8118965ea79SLois Curfman McInnes } else if (flag == MAT_GLOBAL_SUM) { 8127adad957SLisandro Dalcin ierr = MPI_Allreduce(isend,irecv,5,MPIU_REAL,MPI_SUM,((PetscObject)A)->comm);CHKERRQ(ierr); 8134e220ebcSLois Curfman McInnes info->nz_used = irecv[0]; 8144e220ebcSLois Curfman McInnes info->nz_allocated = irecv[1]; 8154e220ebcSLois Curfman McInnes info->nz_unneeded = irecv[2]; 8164e220ebcSLois Curfman McInnes info->memory = irecv[3]; 8174e220ebcSLois Curfman McInnes info->mallocs = irecv[4]; 8188965ea79SLois 
Curfman McInnes } 8194e220ebcSLois Curfman McInnes info->fill_ratio_given = 0; /* no parallel LU/ILU/Cholesky */ 8204e220ebcSLois Curfman McInnes info->fill_ratio_needed = 0; 8214e220ebcSLois Curfman McInnes info->factor_mallocs = 0; 8223a40ed3dSBarry Smith PetscFunctionReturn(0); 8238965ea79SLois Curfman McInnes } 8248965ea79SLois Curfman McInnes 8254a2ae208SSatish Balay #undef __FUNCT__ 8264a2ae208SSatish Balay #define __FUNCT__ "MatSetOption_MPIDense" 827ace3abfcSBarry Smith PetscErrorCode MatSetOption_MPIDense(Mat A,MatOption op,PetscBool flg) 8288965ea79SLois Curfman McInnes { 82939ddd567SLois Curfman McInnes Mat_MPIDense *a = (Mat_MPIDense*)A->data; 830dfbe8321SBarry Smith PetscErrorCode ierr; 8318965ea79SLois Curfman McInnes 8323a40ed3dSBarry Smith PetscFunctionBegin; 83312c028f9SKris Buschelman switch (op) { 834512a5fc5SBarry Smith case MAT_NEW_NONZERO_LOCATIONS: 83512c028f9SKris Buschelman case MAT_NEW_NONZERO_LOCATION_ERR: 83612c028f9SKris Buschelman case MAT_NEW_NONZERO_ALLOCATION_ERR: 8374e0d8c25SBarry Smith ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr); 83812c028f9SKris Buschelman break; 83912c028f9SKris Buschelman case MAT_ROW_ORIENTED: 8404e0d8c25SBarry Smith a->roworiented = flg; 8414e0d8c25SBarry Smith ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr); 84212c028f9SKris Buschelman break; 8434e0d8c25SBarry Smith case MAT_NEW_DIAGONALS: 84412c028f9SKris Buschelman case MAT_USE_HASH_TABLE: 845290bbb0aSBarry Smith ierr = PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr); 84612c028f9SKris Buschelman break; 84712c028f9SKris Buschelman case MAT_IGNORE_OFF_PROC_ENTRIES: 8484e0d8c25SBarry Smith a->donotstash = flg; 84912c028f9SKris Buschelman break; 85077e54ba9SKris Buschelman case MAT_SYMMETRIC: 85177e54ba9SKris Buschelman case MAT_STRUCTURALLY_SYMMETRIC: 8529a4540c5SBarry Smith case MAT_HERMITIAN: 8539a4540c5SBarry Smith case MAT_SYMMETRY_ETERNAL: 854600fe468SBarry Smith case MAT_IGNORE_LOWER_TRIANGULAR: 855290bbb0aSBarry Smith ierr = 
PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr); 85677e54ba9SKris Buschelman break; 85712c028f9SKris Buschelman default: 858e32f2f54SBarry Smith SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"unknown option %s",MatOptions[op]); 8593a40ed3dSBarry Smith } 8603a40ed3dSBarry Smith PetscFunctionReturn(0); 8618965ea79SLois Curfman McInnes } 8628965ea79SLois Curfman McInnes 8638965ea79SLois Curfman McInnes 8644a2ae208SSatish Balay #undef __FUNCT__ 8654a2ae208SSatish Balay #define __FUNCT__ "MatDiagonalScale_MPIDense" 866dfbe8321SBarry Smith PetscErrorCode MatDiagonalScale_MPIDense(Mat A,Vec ll,Vec rr) 8675b2fa520SLois Curfman McInnes { 8685b2fa520SLois Curfman McInnes Mat_MPIDense *mdn = (Mat_MPIDense*)A->data; 8695b2fa520SLois Curfman McInnes Mat_SeqDense *mat = (Mat_SeqDense*)mdn->A->data; 87087828ca2SBarry Smith PetscScalar *l,*r,x,*v; 871dfbe8321SBarry Smith PetscErrorCode ierr; 872d0f46423SBarry Smith PetscInt i,j,s2a,s3a,s2,s3,m=mdn->A->rmap->n,n=mdn->A->cmap->n; 8735b2fa520SLois Curfman McInnes 8745b2fa520SLois Curfman McInnes PetscFunctionBegin; 87572d926a5SLois Curfman McInnes ierr = MatGetLocalSize(A,&s2,&s3);CHKERRQ(ierr); 8765b2fa520SLois Curfman McInnes if (ll) { 87772d926a5SLois Curfman McInnes ierr = VecGetLocalSize(ll,&s2a);CHKERRQ(ierr); 878e32f2f54SBarry Smith if (s2a != s2) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Left scaling vector non-conforming local size, %d != %d.", s2a, s2); 8791ebc52fbSHong Zhang ierr = VecGetArray(ll,&l);CHKERRQ(ierr); 8805b2fa520SLois Curfman McInnes for (i=0; i<m; i++) { 8815b2fa520SLois Curfman McInnes x = l[i]; 8825b2fa520SLois Curfman McInnes v = mat->v + i; 8835b2fa520SLois Curfman McInnes for (j=0; j<n; j++) { (*v) *= x; v+= m;} 8845b2fa520SLois Curfman McInnes } 8851ebc52fbSHong Zhang ierr = VecRestoreArray(ll,&l);CHKERRQ(ierr); 886efee365bSSatish Balay ierr = PetscLogFlops(n*m);CHKERRQ(ierr); 8875b2fa520SLois Curfman McInnes } 8885b2fa520SLois Curfman McInnes if (rr) { 889175be7b4SMatthew Knepley ierr = 
VecGetLocalSize(rr,&s3a);CHKERRQ(ierr); 890e32f2f54SBarry Smith if (s3a != s3) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Right scaling vec non-conforming local size, %d != %d.", s3a, s3); 891ca9f406cSSatish Balay ierr = VecScatterBegin(mdn->Mvctx,rr,mdn->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 892ca9f406cSSatish Balay ierr = VecScatterEnd(mdn->Mvctx,rr,mdn->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 8931ebc52fbSHong Zhang ierr = VecGetArray(mdn->lvec,&r);CHKERRQ(ierr); 8945b2fa520SLois Curfman McInnes for (i=0; i<n; i++) { 8955b2fa520SLois Curfman McInnes x = r[i]; 8965b2fa520SLois Curfman McInnes v = mat->v + i*m; 8975b2fa520SLois Curfman McInnes for (j=0; j<m; j++) { (*v++) *= x;} 8985b2fa520SLois Curfman McInnes } 8991ebc52fbSHong Zhang ierr = VecRestoreArray(mdn->lvec,&r);CHKERRQ(ierr); 900efee365bSSatish Balay ierr = PetscLogFlops(n*m);CHKERRQ(ierr); 9015b2fa520SLois Curfman McInnes } 9025b2fa520SLois Curfman McInnes PetscFunctionReturn(0); 9035b2fa520SLois Curfman McInnes } 9045b2fa520SLois Curfman McInnes 9054a2ae208SSatish Balay #undef __FUNCT__ 9064a2ae208SSatish Balay #define __FUNCT__ "MatNorm_MPIDense" 907dfbe8321SBarry Smith PetscErrorCode MatNorm_MPIDense(Mat A,NormType type,PetscReal *nrm) 908096963f5SLois Curfman McInnes { 9093501a2bdSLois Curfman McInnes Mat_MPIDense *mdn = (Mat_MPIDense*)A->data; 9103501a2bdSLois Curfman McInnes Mat_SeqDense *mat = (Mat_SeqDense*)mdn->A->data; 911dfbe8321SBarry Smith PetscErrorCode ierr; 91213f74950SBarry Smith PetscInt i,j; 913329f5518SBarry Smith PetscReal sum = 0.0; 91487828ca2SBarry Smith PetscScalar *v = mat->v; 9153501a2bdSLois Curfman McInnes 9163a40ed3dSBarry Smith PetscFunctionBegin; 9173501a2bdSLois Curfman McInnes if (mdn->size == 1) { 918064f8208SBarry Smith ierr = MatNorm(mdn->A,type,nrm);CHKERRQ(ierr); 9193501a2bdSLois Curfman McInnes } else { 9203501a2bdSLois Curfman McInnes if (type == NORM_FROBENIUS) { 921d0f46423SBarry Smith for (i=0; i<mdn->A->cmap->n*mdn->A->rmap->n; i++) { 
922aa482453SBarry Smith #if defined(PETSC_USE_COMPLEX) 923329f5518SBarry Smith sum += PetscRealPart(PetscConj(*v)*(*v)); v++; 9243501a2bdSLois Curfman McInnes #else 9253501a2bdSLois Curfman McInnes sum += (*v)*(*v); v++; 9263501a2bdSLois Curfman McInnes #endif 9273501a2bdSLois Curfman McInnes } 9287adad957SLisandro Dalcin ierr = MPI_Allreduce(&sum,nrm,1,MPIU_REAL,MPI_SUM,((PetscObject)A)->comm);CHKERRQ(ierr); 929064f8208SBarry Smith *nrm = sqrt(*nrm); 930dc0b31edSSatish Balay ierr = PetscLogFlops(2.0*mdn->A->cmap->n*mdn->A->rmap->n);CHKERRQ(ierr); 9313a40ed3dSBarry Smith } else if (type == NORM_1) { 932329f5518SBarry Smith PetscReal *tmp,*tmp2; 93374ed9c26SBarry Smith ierr = PetscMalloc2(A->cmap->N,PetscReal,&tmp,A->cmap->N,PetscReal,&tmp2);CHKERRQ(ierr); 93474ed9c26SBarry Smith ierr = PetscMemzero(tmp,A->cmap->N*sizeof(PetscReal));CHKERRQ(ierr); 93574ed9c26SBarry Smith ierr = PetscMemzero(tmp2,A->cmap->N*sizeof(PetscReal));CHKERRQ(ierr); 936064f8208SBarry Smith *nrm = 0.0; 9373501a2bdSLois Curfman McInnes v = mat->v; 938d0f46423SBarry Smith for (j=0; j<mdn->A->cmap->n; j++) { 939d0f46423SBarry Smith for (i=0; i<mdn->A->rmap->n; i++) { 94067e560aaSBarry Smith tmp[j] += PetscAbsScalar(*v); v++; 9413501a2bdSLois Curfman McInnes } 9423501a2bdSLois Curfman McInnes } 943d0f46423SBarry Smith ierr = MPI_Allreduce(tmp,tmp2,A->cmap->N,MPIU_REAL,MPI_SUM,((PetscObject)A)->comm);CHKERRQ(ierr); 944d0f46423SBarry Smith for (j=0; j<A->cmap->N; j++) { 945064f8208SBarry Smith if (tmp2[j] > *nrm) *nrm = tmp2[j]; 9463501a2bdSLois Curfman McInnes } 94774ed9c26SBarry Smith ierr = PetscFree2(tmp,tmp);CHKERRQ(ierr); 948d0f46423SBarry Smith ierr = PetscLogFlops(A->cmap->n*A->rmap->n);CHKERRQ(ierr); 9493a40ed3dSBarry Smith } else if (type == NORM_INFINITY) { /* max row norm */ 950329f5518SBarry Smith PetscReal ntemp; 9513501a2bdSLois Curfman McInnes ierr = MatNorm(mdn->A,type,&ntemp);CHKERRQ(ierr); 9527adad957SLisandro Dalcin ierr = 
MPI_Allreduce(&ntemp,nrm,1,MPIU_REAL,MPI_MAX,((PetscObject)A)->comm);CHKERRQ(ierr); 953e7e72b3dSBarry Smith } else SETERRQ(((PetscObject)A)->comm,PETSC_ERR_SUP,"No support for two norm"); 9543501a2bdSLois Curfman McInnes } 9553a40ed3dSBarry Smith PetscFunctionReturn(0); 9563501a2bdSLois Curfman McInnes } 9573501a2bdSLois Curfman McInnes 9584a2ae208SSatish Balay #undef __FUNCT__ 9594a2ae208SSatish Balay #define __FUNCT__ "MatTranspose_MPIDense" 960fc4dec0aSBarry Smith PetscErrorCode MatTranspose_MPIDense(Mat A,MatReuse reuse,Mat *matout) 9613501a2bdSLois Curfman McInnes { 9623501a2bdSLois Curfman McInnes Mat_MPIDense *a = (Mat_MPIDense*)A->data; 9633501a2bdSLois Curfman McInnes Mat_SeqDense *Aloc = (Mat_SeqDense*)a->A->data; 9643501a2bdSLois Curfman McInnes Mat B; 965d0f46423SBarry Smith PetscInt M = A->rmap->N,N = A->cmap->N,m,n,*rwork,rstart = A->rmap->rstart; 9666849ba73SBarry Smith PetscErrorCode ierr; 96713f74950SBarry Smith PetscInt j,i; 96887828ca2SBarry Smith PetscScalar *v; 9693501a2bdSLois Curfman McInnes 9703a40ed3dSBarry Smith PetscFunctionBegin; 971e7e72b3dSBarry Smith if (reuse == MAT_REUSE_MATRIX && A == *matout && M != N) SETERRQ(((PetscObject)A)->comm,PETSC_ERR_SUP,"Supports square matrix only in-place"); 972fc4dec0aSBarry Smith if (reuse == MAT_INITIAL_MATRIX || A == *matout) { 9737adad957SLisandro Dalcin ierr = MatCreate(((PetscObject)A)->comm,&B);CHKERRQ(ierr); 974d0f46423SBarry Smith ierr = MatSetSizes(B,A->cmap->n,A->rmap->n,N,M);CHKERRQ(ierr); 9757adad957SLisandro Dalcin ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr); 976878740d9SKris Buschelman ierr = MatMPIDenseSetPreallocation(B,PETSC_NULL);CHKERRQ(ierr); 977fc4dec0aSBarry Smith } else { 978fc4dec0aSBarry Smith B = *matout; 979fc4dec0aSBarry Smith } 9803501a2bdSLois Curfman McInnes 981d0f46423SBarry Smith m = a->A->rmap->n; n = a->A->cmap->n; v = Aloc->v; 9821acff37aSSatish Balay ierr = PetscMalloc(m*sizeof(PetscInt),&rwork);CHKERRQ(ierr); 9833501a2bdSLois Curfman McInnes 
for (i=0; i<m; i++) rwork[i] = rstart + i; 9841acff37aSSatish Balay for (j=0; j<n; j++) { 9853501a2bdSLois Curfman McInnes ierr = MatSetValues(B,1,&j,m,rwork,v,INSERT_VALUES);CHKERRQ(ierr); 9863501a2bdSLois Curfman McInnes v += m; 9873501a2bdSLois Curfman McInnes } 988606d414cSSatish Balay ierr = PetscFree(rwork);CHKERRQ(ierr); 9896d4a8577SBarry Smith ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 9906d4a8577SBarry Smith ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 991815cbec1SBarry Smith if (reuse == MAT_INITIAL_MATRIX || *matout != A) { 9923501a2bdSLois Curfman McInnes *matout = B; 9933501a2bdSLois Curfman McInnes } else { 994eb6b5d47SBarry Smith ierr = MatHeaderMerge(A,B);CHKERRQ(ierr); 9953501a2bdSLois Curfman McInnes } 9963a40ed3dSBarry Smith PetscFunctionReturn(0); 997096963f5SLois Curfman McInnes } 998096963f5SLois Curfman McInnes 99944cd7ae7SLois Curfman McInnes 10006849ba73SBarry Smith static PetscErrorCode MatDuplicate_MPIDense(Mat,MatDuplicateOption,Mat *); 1001d84338a6SBarry Smith extern PetscErrorCode MatScale_MPIDense(Mat,PetscScalar); 10028965ea79SLois Curfman McInnes 10034a2ae208SSatish Balay #undef __FUNCT__ 10044a2ae208SSatish Balay #define __FUNCT__ "MatSetUpPreallocation_MPIDense" 1005dfbe8321SBarry Smith PetscErrorCode MatSetUpPreallocation_MPIDense(Mat A) 1006273d9f13SBarry Smith { 1007dfbe8321SBarry Smith PetscErrorCode ierr; 1008273d9f13SBarry Smith 1009273d9f13SBarry Smith PetscFunctionBegin; 1010273d9f13SBarry Smith ierr = MatMPIDenseSetPreallocation(A,0);CHKERRQ(ierr); 1011273d9f13SBarry Smith PetscFunctionReturn(0); 1012273d9f13SBarry Smith } 1013273d9f13SBarry Smith 101401b82886SBarry Smith #if defined(PETSC_HAVE_PLAPACK) 1015eae6fb2eSBarry Smith 1016eae6fb2eSBarry Smith #undef __FUNCT__ 1017eae6fb2eSBarry Smith #define __FUNCT__ "MatMPIDenseCopyToPlapack" 1018eae6fb2eSBarry Smith PetscErrorCode MatMPIDenseCopyToPlapack(Mat A,Mat F) 1019eae6fb2eSBarry Smith { 1020eae6fb2eSBarry Smith Mat_Plapack *lu = 
(Mat_Plapack*)(F)->spptr; 1021eae6fb2eSBarry Smith PetscErrorCode ierr; 1022d0f46423SBarry Smith PetscInt M=A->cmap->N,m=A->rmap->n,rstart; 1023eae6fb2eSBarry Smith PetscScalar *array; 1024eae6fb2eSBarry Smith PetscReal one = 1.0; 1025eae6fb2eSBarry Smith 1026eae6fb2eSBarry Smith PetscFunctionBegin; 10272fbe02b9SBarry Smith /* Copy A into F->lu->A */ 1028eae6fb2eSBarry Smith ierr = PLA_Obj_set_to_zero(lu->A);CHKERRQ(ierr); 1029eae6fb2eSBarry Smith ierr = PLA_API_begin();CHKERRQ(ierr); 1030eae6fb2eSBarry Smith ierr = PLA_Obj_API_open(lu->A);CHKERRQ(ierr); 103179b0a62dSBarry Smith ierr = MatGetOwnershipRange(A,&rstart,PETSC_NULL);CHKERRQ(ierr); 1032eae6fb2eSBarry Smith ierr = MatGetArray(A,&array);CHKERRQ(ierr); 1033eae6fb2eSBarry Smith ierr = PLA_API_axpy_matrix_to_global(m,M, &one,(void *)array,m,lu->A,rstart,0);CHKERRQ(ierr); 1034eae6fb2eSBarry Smith ierr = MatRestoreArray(A,&array);CHKERRQ(ierr); 1035eae6fb2eSBarry Smith ierr = PLA_Obj_API_close(lu->A);CHKERRQ(ierr); 1036eae6fb2eSBarry Smith ierr = PLA_API_end();CHKERRQ(ierr); 1037eae6fb2eSBarry Smith lu->rstart = rstart; 1038eae6fb2eSBarry Smith PetscFunctionReturn(0); 1039eae6fb2eSBarry Smith } 1040eae6fb2eSBarry Smith 104101b82886SBarry Smith #undef __FUNCT__ 10422fbe02b9SBarry Smith #define __FUNCT__ "MatMPIDenseCopyFromPlapack" 10432fbe02b9SBarry Smith PetscErrorCode MatMPIDenseCopyFromPlapack(Mat F,Mat A) 10442fbe02b9SBarry Smith { 10452fbe02b9SBarry Smith Mat_Plapack *lu = (Mat_Plapack*)(F)->spptr; 10462fbe02b9SBarry Smith PetscErrorCode ierr; 1047d0f46423SBarry Smith PetscInt M=A->cmap->N,m=A->rmap->n,rstart; 10482fbe02b9SBarry Smith PetscScalar *array; 10492fbe02b9SBarry Smith PetscReal one = 1.0; 10502fbe02b9SBarry Smith 10512fbe02b9SBarry Smith PetscFunctionBegin; 10522fbe02b9SBarry Smith /* Copy F into A->lu->A */ 105379b0a62dSBarry Smith ierr = MatZeroEntries(A);CHKERRQ(ierr); 10542fbe02b9SBarry Smith ierr = PLA_API_begin();CHKERRQ(ierr); 10552fbe02b9SBarry Smith ierr = 
PLA_Obj_API_open(lu->A);CHKERRQ(ierr); 10562fbe02b9SBarry Smith ierr = MatGetOwnershipRange(A,&rstart,PETSC_NULL);CHKERRQ(ierr); 10572fbe02b9SBarry Smith ierr = MatGetArray(A,&array);CHKERRQ(ierr); 10582fbe02b9SBarry Smith ierr = PLA_API_axpy_global_to_matrix(m,M, &one,lu->A,rstart,0,(void *)array,m);CHKERRQ(ierr); 10592fbe02b9SBarry Smith ierr = MatRestoreArray(A,&array);CHKERRQ(ierr); 10602fbe02b9SBarry Smith ierr = PLA_Obj_API_close(lu->A);CHKERRQ(ierr); 10612fbe02b9SBarry Smith ierr = PLA_API_end();CHKERRQ(ierr); 10622fbe02b9SBarry Smith lu->rstart = rstart; 10632fbe02b9SBarry Smith PetscFunctionReturn(0); 10642fbe02b9SBarry Smith } 10652fbe02b9SBarry Smith 10662fbe02b9SBarry Smith #undef __FUNCT__ 10672fbe02b9SBarry Smith #define __FUNCT__ "MatMatMultNumeric_MPIDense_MPIDense" 10682fbe02b9SBarry Smith PetscErrorCode MatMatMultNumeric_MPIDense_MPIDense(Mat A,Mat B,Mat C) 10692fbe02b9SBarry Smith { 10702fbe02b9SBarry Smith PetscErrorCode ierr; 10712fbe02b9SBarry Smith Mat_Plapack *luA = (Mat_Plapack*)A->spptr; 10722fbe02b9SBarry Smith Mat_Plapack *luB = (Mat_Plapack*)B->spptr; 10732fbe02b9SBarry Smith Mat_Plapack *luC = (Mat_Plapack*)C->spptr; 10742fbe02b9SBarry Smith PLA_Obj alpha = NULL,beta = NULL; 10752fbe02b9SBarry Smith 10762fbe02b9SBarry Smith PetscFunctionBegin; 10772fbe02b9SBarry Smith ierr = MatMPIDenseCopyToPlapack(A,A);CHKERRQ(ierr); 10782fbe02b9SBarry Smith ierr = MatMPIDenseCopyToPlapack(B,B);CHKERRQ(ierr); 10792fbe02b9SBarry Smith 10806e6f9017SBarry Smith /* 1081cb2480eaSBarry Smith ierr = PLA_Global_show("A = ",luA->A,"%g ","");CHKERRQ(ierr); 1082cb2480eaSBarry Smith ierr = PLA_Global_show("B = ",luB->A,"%g ","");CHKERRQ(ierr); 10836e6f9017SBarry Smith */ 1084cb2480eaSBarry Smith 10852fbe02b9SBarry Smith /* do the multiply in PLA */ 108679b0a62dSBarry Smith ierr = PLA_Create_constants_conf_to(luA->A,NULL,NULL,&alpha);CHKERRQ(ierr); 108779b0a62dSBarry Smith ierr = PLA_Create_constants_conf_to(luC->A,NULL,&beta,NULL);CHKERRQ(ierr); 
108879b0a62dSBarry Smith CHKMEMQ; 10892fbe02b9SBarry Smith 1090f0e3846dSSatish Balay ierr = PLA_Gemm(PLA_NO_TRANSPOSE,PLA_NO_TRANSPOSE,alpha,luA->A,luB->A,beta,luC->A); /* CHKERRQ(ierr); */ 109179b0a62dSBarry Smith CHKMEMQ; 10922fbe02b9SBarry Smith ierr = PLA_Obj_free(&alpha);CHKERRQ(ierr); 10932fbe02b9SBarry Smith ierr = PLA_Obj_free(&beta);CHKERRQ(ierr); 10942fbe02b9SBarry Smith 10956e6f9017SBarry Smith /* 1096cb2480eaSBarry Smith ierr = PLA_Global_show("C = ",luC->A,"%g ","");CHKERRQ(ierr); 10976e6f9017SBarry Smith */ 10982fbe02b9SBarry Smith ierr = MatMPIDenseCopyFromPlapack(C,C);CHKERRQ(ierr); 10992fbe02b9SBarry Smith PetscFunctionReturn(0); 11002fbe02b9SBarry Smith } 11012fbe02b9SBarry Smith 11022fbe02b9SBarry Smith #undef __FUNCT__ 11032fbe02b9SBarry Smith #define __FUNCT__ "MatMatMultSymbolic_MPIDense_MPIDense" 11042fbe02b9SBarry Smith PetscErrorCode MatMatMultSymbolic_MPIDense_MPIDense(Mat A,Mat B,PetscReal fill,Mat *C) 11052fbe02b9SBarry Smith { 11062fbe02b9SBarry Smith PetscErrorCode ierr; 1107d0f46423SBarry Smith PetscInt m=A->rmap->n,n=B->cmap->n; 11082fbe02b9SBarry Smith Mat Cmat; 11092fbe02b9SBarry Smith 11102fbe02b9SBarry Smith PetscFunctionBegin; 1111e7e72b3dSBarry Smith if (A->cmap->n != B->rmap->n) SETERRQ2(((PetscObject)A)->comm,PETSC_ERR_ARG_SIZ,"A->cmap->n %d != B->rmap->n %d\n",A->cmap->n,B->rmap->n); 1112e7e72b3dSBarry Smith SETERRQ(((PetscObject)A)->comm,PETSC_ERR_LIB,"Due to apparent bugs in PLAPACK,this is not currently supported"); 11132fbe02b9SBarry Smith ierr = MatCreate(((PetscObject)B)->comm,&Cmat);CHKERRQ(ierr); 1114d0f46423SBarry Smith ierr = MatSetSizes(Cmat,m,n,A->rmap->N,B->cmap->N);CHKERRQ(ierr); 11152fbe02b9SBarry Smith ierr = MatSetType(Cmat,MATMPIDENSE);CHKERRQ(ierr); 11162fbe02b9SBarry Smith ierr = MatAssemblyBegin(Cmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 11172fbe02b9SBarry Smith ierr = MatAssemblyEnd(Cmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 111885bd4cefSHong Zhang 11192fbe02b9SBarry Smith *C = Cmat; 11202fbe02b9SBarry Smith 
PetscFunctionReturn(0); 11212fbe02b9SBarry Smith } 11222fbe02b9SBarry Smith 11232fbe02b9SBarry Smith #undef __FUNCT__ 11242fbe02b9SBarry Smith #define __FUNCT__ "MatMatMult_MPIDense_MPIDense" 11252fbe02b9SBarry Smith PetscErrorCode MatMatMult_MPIDense_MPIDense(Mat A,Mat B,MatReuse scall,PetscReal fill,Mat *C) 11262fbe02b9SBarry Smith { 11272fbe02b9SBarry Smith PetscErrorCode ierr; 11282fbe02b9SBarry Smith 11292fbe02b9SBarry Smith PetscFunctionBegin; 11302fbe02b9SBarry Smith if (scall == MAT_INITIAL_MATRIX){ 11312fbe02b9SBarry Smith ierr = MatMatMultSymbolic_MPIDense_MPIDense(A,B,fill,C);CHKERRQ(ierr); 11322fbe02b9SBarry Smith } 11332fbe02b9SBarry Smith ierr = MatMatMultNumeric_MPIDense_MPIDense(A,B,*C);CHKERRQ(ierr); 11342fbe02b9SBarry Smith PetscFunctionReturn(0); 11352fbe02b9SBarry Smith } 11362fbe02b9SBarry Smith 1137ab235cb6SHong Zhang #undef __FUNCT__ 11386c995c7dSHong Zhang #define __FUNCT__ "MatSolve_MPIDense" 11396c995c7dSHong Zhang PetscErrorCode MatSolve_MPIDense(Mat A,Vec b,Vec x) 11406c995c7dSHong Zhang { 11416c995c7dSHong Zhang MPI_Comm comm = ((PetscObject)A)->comm; 11426c995c7dSHong Zhang Mat_Plapack *lu = (Mat_Plapack*)A->spptr; 11436c995c7dSHong Zhang PetscErrorCode ierr; 1144d0f46423SBarry Smith PetscInt M=A->rmap->N,m=A->rmap->n,rstart,i,j,*idx_pla,*idx_petsc,loc_m,loc_stride; 11456c995c7dSHong Zhang PetscScalar *array; 11466c995c7dSHong Zhang PetscReal one = 1.0; 11476c995c7dSHong Zhang PetscMPIInt size,rank,r_rank,r_nproc,c_rank,c_nproc;; 11486c995c7dSHong Zhang PLA_Obj v_pla = NULL; 11496c995c7dSHong Zhang PetscScalar *loc_buf; 11506c995c7dSHong Zhang Vec loc_x; 11516c995c7dSHong Zhang 11526c995c7dSHong Zhang PetscFunctionBegin; 11536c995c7dSHong Zhang ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 11546c995c7dSHong Zhang ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 11556c995c7dSHong Zhang 11566c995c7dSHong Zhang /* Create PLAPACK vector objects, then copy b into PLAPACK b */ 11576c995c7dSHong Zhang 
PLA_Mvector_create(lu->datatype,M,1,lu->templ,PLA_ALIGN_FIRST,&v_pla); 11586c995c7dSHong Zhang PLA_Obj_set_to_zero(v_pla); 11596c995c7dSHong Zhang 11606c995c7dSHong Zhang /* Copy b into rhs_pla */ 11616c995c7dSHong Zhang PLA_API_begin(); 11626c995c7dSHong Zhang PLA_Obj_API_open(v_pla); 11636c995c7dSHong Zhang ierr = VecGetArray(b,&array);CHKERRQ(ierr); 11646c995c7dSHong Zhang PLA_API_axpy_vector_to_global(m,&one,(void *)array,1,v_pla,lu->rstart); 11656c995c7dSHong Zhang ierr = VecRestoreArray(b,&array);CHKERRQ(ierr); 11666c995c7dSHong Zhang PLA_Obj_API_close(v_pla); 11676c995c7dSHong Zhang PLA_API_end(); 11686c995c7dSHong Zhang 1169d5f3da31SBarry Smith if (A->factortype == MAT_FACTOR_LU){ 11706c995c7dSHong Zhang /* Apply the permutations to the right hand sides */ 11716c995c7dSHong Zhang PLA_Apply_pivots_to_rows (v_pla,lu->pivots); 11726c995c7dSHong Zhang 11736c995c7dSHong Zhang /* Solve L y = b, overwriting b with y */ 11746c995c7dSHong Zhang PLA_Trsv( PLA_LOWER_TRIANGULAR,PLA_NO_TRANSPOSE,PLA_UNIT_DIAG,lu->A,v_pla ); 11756c995c7dSHong Zhang 11766c995c7dSHong Zhang /* Solve U x = y (=b), overwriting b with x */ 11776c995c7dSHong Zhang PLA_Trsv( PLA_UPPER_TRIANGULAR,PLA_NO_TRANSPOSE,PLA_NONUNIT_DIAG,lu->A,v_pla ); 11786c995c7dSHong Zhang } else { /* MAT_FACTOR_CHOLESKY */ 11796c995c7dSHong Zhang PLA_Trsv( PLA_LOWER_TRIANGULAR,PLA_NO_TRANSPOSE,PLA_NONUNIT_DIAG,lu->A,v_pla); 11806c995c7dSHong Zhang PLA_Trsv( PLA_LOWER_TRIANGULAR,(lu->datatype == MPI_DOUBLE ? 
PLA_TRANSPOSE : PLA_CONJUGATE_TRANSPOSE), 11816c995c7dSHong Zhang PLA_NONUNIT_DIAG,lu->A,v_pla); 11826c995c7dSHong Zhang } 11836c995c7dSHong Zhang 11846c995c7dSHong Zhang /* Copy PLAPACK x into Petsc vector x */ 11856c995c7dSHong Zhang PLA_Obj_local_length(v_pla, &loc_m); 11866c995c7dSHong Zhang PLA_Obj_local_buffer(v_pla, (void**)&loc_buf); 11876c995c7dSHong Zhang PLA_Obj_local_stride(v_pla, &loc_stride); 11886c995c7dSHong Zhang /* 11896c995c7dSHong Zhang PetscPrintf(PETSC_COMM_SELF," [%d] b - local_m %d local_stride %d, loc_buf: %g %g, nb: %d\n",rank,loc_m,loc_stride,loc_buf[0],loc_buf[(loc_m-1)*loc_stride],lu->nb); 11906c995c7dSHong Zhang */ 11916c995c7dSHong Zhang ierr = VecCreateSeqWithArray(PETSC_COMM_SELF,loc_m*loc_stride,loc_buf,&loc_x);CHKERRQ(ierr); 11926c995c7dSHong Zhang if (!lu->pla_solved){ 11936c995c7dSHong Zhang 11945d00a290SHong Zhang PLA_Temp_comm_row_info(lu->templ,&Plapack_comm_2d,&r_rank,&r_nproc); 11955d00a290SHong Zhang PLA_Temp_comm_col_info(lu->templ,&Plapack_comm_2d,&c_rank,&c_nproc); 11966c995c7dSHong Zhang 11976c995c7dSHong Zhang /* Create IS and cts for VecScatterring */ 11986c995c7dSHong Zhang PLA_Obj_local_length(v_pla, &loc_m); 11996c995c7dSHong Zhang PLA_Obj_local_stride(v_pla, &loc_stride); 120074ed9c26SBarry Smith ierr = PetscMalloc2(loc_m,PetscInt,&idx_pla,loc_m,PetscInt,&idx_petsc);CHKERRQ(ierr); 12016c995c7dSHong Zhang 12026c995c7dSHong Zhang rstart = (r_rank*c_nproc+c_rank)*lu->nb; 12036c995c7dSHong Zhang for (i=0; i<loc_m; i+=lu->nb){ 12046c995c7dSHong Zhang j = 0; 12056c995c7dSHong Zhang while (j < lu->nb && i+j < loc_m){ 12066c995c7dSHong Zhang idx_petsc[i+j] = rstart + j; j++; 12076c995c7dSHong Zhang } 12086c995c7dSHong Zhang rstart += size*lu->nb; 12096c995c7dSHong Zhang } 12106c995c7dSHong Zhang 12116c995c7dSHong Zhang for (i=0; i<loc_m; i++) idx_pla[i] = i*loc_stride; 12126c995c7dSHong Zhang 121370b3c8c7SBarry Smith ierr = ISCreateGeneral(PETSC_COMM_SELF,loc_m,idx_pla,PETSC_COPY_VALUES,&lu->is_pla);CHKERRQ(ierr); 
121470b3c8c7SBarry Smith ierr = ISCreateGeneral(PETSC_COMM_SELF,loc_m,idx_petsc,PETSC_COPY_VALUES,&lu->is_petsc);CHKERRQ(ierr); 121574ed9c26SBarry Smith ierr = PetscFree2(idx_pla,idx_petsc);CHKERRQ(ierr); 12166c995c7dSHong Zhang ierr = VecScatterCreate(loc_x,lu->is_pla,x,lu->is_petsc,&lu->ctx);CHKERRQ(ierr); 12176c995c7dSHong Zhang } 12186c995c7dSHong Zhang ierr = VecScatterBegin(lu->ctx,loc_x,x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 12196c995c7dSHong Zhang ierr = VecScatterEnd(lu->ctx,loc_x,x,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 12206c995c7dSHong Zhang 12216c995c7dSHong Zhang /* Free data */ 12226c995c7dSHong Zhang ierr = VecDestroy(loc_x);CHKERRQ(ierr); 12236c995c7dSHong Zhang PLA_Obj_free(&v_pla); 12246c995c7dSHong Zhang 12256c995c7dSHong Zhang lu->pla_solved = PETSC_TRUE; 12266c995c7dSHong Zhang PetscFunctionReturn(0); 12276c995c7dSHong Zhang } 12286c995c7dSHong Zhang 12296c995c7dSHong Zhang #undef __FUNCT__ 12306c995c7dSHong Zhang #define __FUNCT__ "MatLUFactorNumeric_MPIDense" 12310481f469SBarry Smith PetscErrorCode MatLUFactorNumeric_MPIDense(Mat F,Mat A,const MatFactorInfo *info) 12326c995c7dSHong Zhang { 1233719d5645SBarry Smith Mat_Plapack *lu = (Mat_Plapack*)(F)->spptr; 12346c995c7dSHong Zhang PetscErrorCode ierr; 1235d0f46423SBarry Smith PetscInt M=A->rmap->N,m=A->rmap->n,rstart,rend; 12366c995c7dSHong Zhang PetscInt info_pla=0; 12376c995c7dSHong Zhang PetscScalar *array,one = 1.0; 12386c995c7dSHong Zhang 12396c995c7dSHong Zhang PetscFunctionBegin; 12406c995c7dSHong Zhang if (lu->mstruct == SAME_NONZERO_PATTERN){ 12416c995c7dSHong Zhang PLA_Obj_free(&lu->A); 12426c995c7dSHong Zhang PLA_Obj_free (&lu->pivots); 12436c995c7dSHong Zhang } 12446c995c7dSHong Zhang /* Create PLAPACK matrix object */ 12456c995c7dSHong Zhang lu->A = NULL; lu->pivots = NULL; 12466c995c7dSHong Zhang PLA_Matrix_create(lu->datatype,M,M,lu->templ,PLA_ALIGN_FIRST,PLA_ALIGN_FIRST,&lu->A); 12476c995c7dSHong Zhang PLA_Obj_set_to_zero(lu->A); 12486c995c7dSHong Zhang 
PLA_Mvector_create(MPI_INT,M,1,lu->templ,PLA_ALIGN_FIRST,&lu->pivots); 12496c995c7dSHong Zhang 12506c995c7dSHong Zhang /* Copy A into lu->A */ 12516c995c7dSHong Zhang PLA_API_begin(); 12526c995c7dSHong Zhang PLA_Obj_API_open(lu->A); 12536c995c7dSHong Zhang ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr); 12546c995c7dSHong Zhang ierr = MatGetArray(A,&array);CHKERRQ(ierr); 12556c995c7dSHong Zhang PLA_API_axpy_matrix_to_global(m,M, &one,(void *)array,m,lu->A,rstart,0); 12566c995c7dSHong Zhang ierr = MatRestoreArray(A,&array);CHKERRQ(ierr); 12576c995c7dSHong Zhang PLA_Obj_API_close(lu->A); 12586c995c7dSHong Zhang PLA_API_end(); 12596c995c7dSHong Zhang 12606c995c7dSHong Zhang /* Factor P A -> L U overwriting lower triangular portion of A with L, upper, U */ 12616c995c7dSHong Zhang info_pla = PLA_LU(lu->A,lu->pivots); 1262effbc4beSBarry Smith if (info_pla != 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_MAT_LU_ZRPVT,"Zero pivot encountered at row %d from PLA_LU()",info_pla); 12636c995c7dSHong Zhang 12646c995c7dSHong Zhang lu->rstart = rstart; 12656c995c7dSHong Zhang lu->mstruct = SAME_NONZERO_PATTERN; 1266719d5645SBarry Smith F->ops->solve = MatSolve_MPIDense; 1267719d5645SBarry Smith F->assembled = PETSC_TRUE; /* required by -ksp_view */ 12686c995c7dSHong Zhang PetscFunctionReturn(0); 12696c995c7dSHong Zhang } 12706c995c7dSHong Zhang 12716c995c7dSHong Zhang #undef __FUNCT__ 12726c995c7dSHong Zhang #define __FUNCT__ "MatCholeskyFactorNumeric_MPIDense" 12730481f469SBarry Smith PetscErrorCode MatCholeskyFactorNumeric_MPIDense(Mat F,Mat A,const MatFactorInfo *info) 12746c995c7dSHong Zhang { 1275719d5645SBarry Smith Mat_Plapack *lu = (Mat_Plapack*)F->spptr; 12766c995c7dSHong Zhang PetscErrorCode ierr; 1277d0f46423SBarry Smith PetscInt M=A->rmap->N,m=A->rmap->n,rstart,rend; 12786c995c7dSHong Zhang PetscInt info_pla=0; 12796c995c7dSHong Zhang PetscScalar *array,one = 1.0; 12806c995c7dSHong Zhang 12816c995c7dSHong Zhang PetscFunctionBegin; 12826c995c7dSHong Zhang if 
(lu->mstruct == SAME_NONZERO_PATTERN){ 12836c995c7dSHong Zhang PLA_Obj_free(&lu->A); 12846c995c7dSHong Zhang } 12856c995c7dSHong Zhang /* Create PLAPACK matrix object */ 12866c995c7dSHong Zhang lu->A = NULL; 12876c995c7dSHong Zhang lu->pivots = NULL; 12886c995c7dSHong Zhang PLA_Matrix_create(lu->datatype,M,M,lu->templ,PLA_ALIGN_FIRST,PLA_ALIGN_FIRST,&lu->A); 12896c995c7dSHong Zhang 12906c995c7dSHong Zhang /* Copy A into lu->A */ 12916c995c7dSHong Zhang PLA_API_begin(); 12926c995c7dSHong Zhang PLA_Obj_API_open(lu->A); 12936c995c7dSHong Zhang ierr = MatGetOwnershipRange(A,&rstart,&rend);CHKERRQ(ierr); 12946c995c7dSHong Zhang ierr = MatGetArray(A,&array);CHKERRQ(ierr); 12956c995c7dSHong Zhang PLA_API_axpy_matrix_to_global(m,M, &one,(void *)array,m,lu->A,rstart,0); 12966c995c7dSHong Zhang ierr = MatRestoreArray(A,&array);CHKERRQ(ierr); 12976c995c7dSHong Zhang PLA_Obj_API_close(lu->A); 12986c995c7dSHong Zhang PLA_API_end(); 12996c995c7dSHong Zhang 13006c995c7dSHong Zhang /* Factor P A -> Chol */ 13016c995c7dSHong Zhang info_pla = PLA_Chol(PLA_LOWER_TRIANGULAR,lu->A); 1302effbc4beSBarry Smith if (info_pla != 0) SETERRQ1(PETSC_COMM_SELF, PETSC_ERR_MAT_CH_ZRPVT,"Nonpositive definite matrix detected at row %d from PLA_Chol()",info_pla); 13036c995c7dSHong Zhang 13046c995c7dSHong Zhang lu->rstart = rstart; 13056c995c7dSHong Zhang lu->mstruct = SAME_NONZERO_PATTERN; 1306719d5645SBarry Smith F->ops->solve = MatSolve_MPIDense; 1307719d5645SBarry Smith F->assembled = PETSC_TRUE; /* required by -ksp_view */ 13086c995c7dSHong Zhang PetscFunctionReturn(0); 13096c995c7dSHong Zhang } 13106c995c7dSHong Zhang 1311b24902e0SBarry Smith /* Note the Petsc perm permutation is ignored */ 13122fbe02b9SBarry Smith #undef __FUNCT__ 13136c995c7dSHong Zhang #define __FUNCT__ "MatCholeskyFactorSymbolic_MPIDense" 13140481f469SBarry Smith PetscErrorCode MatCholeskyFactorSymbolic_MPIDense(Mat F,Mat A,IS perm,const MatFactorInfo *info) 131501b82886SBarry Smith { 131601b82886SBarry Smith PetscErrorCode 
ierr; 1317ace3abfcSBarry Smith PetscBool issymmetric,set; 131801b82886SBarry Smith 131901b82886SBarry Smith PetscFunctionBegin; 1320b24902e0SBarry Smith ierr = MatIsSymmetricKnown(A,&set,&issymmetric);CHKERRQ(ierr); 1321e7e72b3dSBarry Smith if (!set || !issymmetric) SETERRQ(((PetscObject)A)->comm,PETSC_ERR_USER,"Matrix must be set as MAT_SYMMETRIC for CholeskyFactor()"); 1322719d5645SBarry Smith F->ops->choleskyfactornumeric = MatCholeskyFactorNumeric_MPIDense; 1323b24902e0SBarry Smith PetscFunctionReturn(0); 132401b82886SBarry Smith } 132501b82886SBarry Smith 1326b24902e0SBarry Smith /* Note the Petsc r and c permutations are ignored */ 1327b24902e0SBarry Smith #undef __FUNCT__ 13286c995c7dSHong Zhang #define __FUNCT__ "MatLUFactorSymbolic_MPIDense" 13290481f469SBarry Smith PetscErrorCode MatLUFactorSymbolic_MPIDense(Mat F,Mat A,IS r,IS c,const MatFactorInfo *info) 1330b24902e0SBarry Smith { 1331b24902e0SBarry Smith PetscErrorCode ierr; 1332d0f46423SBarry Smith PetscInt M = A->rmap->N; 1333b24902e0SBarry Smith Mat_Plapack *lu; 133401b82886SBarry Smith 1335b24902e0SBarry Smith PetscFunctionBegin; 1336719d5645SBarry Smith lu = (Mat_Plapack*)F->spptr; 1337b24902e0SBarry Smith ierr = PLA_Mvector_create(MPI_INT,M,1,lu->templ,PLA_ALIGN_FIRST,&lu->pivots);CHKERRQ(ierr); 1338719d5645SBarry Smith F->ops->lufactornumeric = MatLUFactorNumeric_MPIDense; 133901b82886SBarry Smith PetscFunctionReturn(0); 134001b82886SBarry Smith } 134101b82886SBarry Smith 1342b6806ab0SHong Zhang EXTERN_C_BEGIN 134301b82886SBarry Smith #undef __FUNCT__ 1344b6806ab0SHong Zhang #define __FUNCT__ "MatFactorGetSolverPackage_mpidense_plapack" 1345b6806ab0SHong Zhang PetscErrorCode MatFactorGetSolverPackage_mpidense_plapack(Mat A,const MatSolverPackage *type) 1346b6806ab0SHong Zhang { 1347b6806ab0SHong Zhang PetscFunctionBegin; 13482692d6eeSBarry Smith *type = MATSOLVERPLAPACK; 1349b6806ab0SHong Zhang PetscFunctionReturn(0); 1350b6806ab0SHong Zhang } 1351b6806ab0SHong Zhang EXTERN_C_END 
1352b6806ab0SHong Zhang 1353bb5747d9SMatthew Knepley EXTERN_C_BEGIN 1354b6806ab0SHong Zhang #undef __FUNCT__ 1355b6806ab0SHong Zhang #define __FUNCT__ "MatGetFactor_mpidense_plapack" 1356b6806ab0SHong Zhang PetscErrorCode MatGetFactor_mpidense_plapack(Mat A,MatFactorType ftype,Mat *F) 135701b82886SBarry Smith { 135801b82886SBarry Smith PetscErrorCode ierr; 13596c995c7dSHong Zhang Mat_Plapack *lu; 13606c995c7dSHong Zhang PetscMPIInt size; 136185bd4cefSHong Zhang PetscInt M=A->rmap->N; 136201b82886SBarry Smith 136301b82886SBarry Smith PetscFunctionBegin; 136401b82886SBarry Smith /* Create the factorization matrix */ 1365eae6fb2eSBarry Smith ierr = MatCreate(((PetscObject)A)->comm,F);CHKERRQ(ierr); 1366d0f46423SBarry Smith ierr = MatSetSizes(*F,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr); 1367eae6fb2eSBarry Smith ierr = MatSetType(*F,((PetscObject)A)->type_name);CHKERRQ(ierr); 136885bd4cefSHong Zhang ierr = PetscNewLog(*F,Mat_Plapack,&lu);CHKERRQ(ierr); 136985bd4cefSHong Zhang (*F)->spptr = (void*)lu; 137001b82886SBarry Smith 1371b24902e0SBarry Smith /* Set default Plapack parameters */ 13726c995c7dSHong Zhang ierr = MPI_Comm_size(((PetscObject)A)->comm,&size);CHKERRQ(ierr); 1373b24902e0SBarry Smith lu->nb = M/size; 1374b24902e0SBarry Smith if (M - lu->nb*size) lu->nb++; /* without cyclic distribution */ 137501b82886SBarry Smith 1376b24902e0SBarry Smith /* Set runtime options */ 1377b24902e0SBarry Smith ierr = PetscOptionsBegin(((PetscObject)A)->comm,((PetscObject)A)->prefix,"PLAPACK Options","Mat");CHKERRQ(ierr); 1378b24902e0SBarry Smith ierr = PetscOptionsInt("-mat_plapack_nb","block size of template vector","None",lu->nb,&lu->nb,PETSC_NULL);CHKERRQ(ierr); 1379b24902e0SBarry Smith PetscOptionsEnd(); 138001b82886SBarry Smith 1381b24902e0SBarry Smith /* Create object distribution template */ 1382b24902e0SBarry Smith lu->templ = NULL; 1383b24902e0SBarry Smith ierr = PLA_Temp_create(lu->nb, 0, &lu->templ);CHKERRQ(ierr); 1384b24902e0SBarry Smith 
1385b24902e0SBarry Smith /* Set the datatype */ 1386b24902e0SBarry Smith #if defined(PETSC_USE_COMPLEX) 1387b24902e0SBarry Smith lu->datatype = MPI_DOUBLE_COMPLEX; 1388b24902e0SBarry Smith #else 1389b24902e0SBarry Smith lu->datatype = MPI_DOUBLE; 1390b24902e0SBarry Smith #endif 1391b24902e0SBarry Smith 1392d0f46423SBarry Smith ierr = PLA_Matrix_create(lu->datatype,M,A->cmap->N,lu->templ,PLA_ALIGN_FIRST,PLA_ALIGN_FIRST,&lu->A);CHKERRQ(ierr); 1393b24902e0SBarry Smith 1394b24902e0SBarry Smith 1395b24902e0SBarry Smith lu->pla_solved = PETSC_FALSE; /* MatSolve_Plapack() is called yet */ 13965d00a290SHong Zhang lu->mstruct = DIFFERENT_NONZERO_PATTERN; 1397b24902e0SBarry Smith 1398b24902e0SBarry Smith if (ftype == MAT_FACTOR_LU) { 1399b24902e0SBarry Smith (*F)->ops->lufactorsymbolic = MatLUFactorSymbolic_MPIDense; 1400b24902e0SBarry Smith } else if (ftype == MAT_FACTOR_CHOLESKY) { 14016c995c7dSHong Zhang (*F)->ops->choleskyfactorsymbolic = MatCholeskyFactorSymbolic_MPIDense; 1402e7e72b3dSBarry Smith } else SETERRQ(((PetscObject)A)->comm,PETSC_ERR_SUP,"No incomplete factorizations for dense matrices"); 1403d5f3da31SBarry Smith (*F)->factortype = ftype; 1404b6806ab0SHong Zhang ierr = PetscObjectComposeFunctionDynamic((PetscObject)(*F),"MatFactorGetSolverPackage_C","MatFactorGetSolverPackage_mpidense_plapack",MatFactorGetSolverPackage_mpidense_plapack);CHKERRQ(ierr); 140501b82886SBarry Smith PetscFunctionReturn(0); 140601b82886SBarry Smith } 1407bb5747d9SMatthew Knepley EXTERN_C_END 140801b82886SBarry Smith #endif 140901b82886SBarry Smith 1410488007eeSBarry Smith #undef __FUNCT__ 141130716080SHong Zhang #define __FUNCT__ "MatGetFactor_mpidense_petsc" 141230716080SHong Zhang PetscErrorCode MatGetFactor_mpidense_petsc(Mat A,MatFactorType ftype,Mat *F) 141330716080SHong Zhang { 141430716080SHong Zhang #if defined(PETSC_HAVE_PLAPACK) 141530716080SHong Zhang PetscErrorCode ierr; 141630716080SHong Zhang #endif 141730716080SHong Zhang 141830716080SHong Zhang PetscFunctionBegin; 
141930716080SHong Zhang #if defined(PETSC_HAVE_PLAPACK) 142030716080SHong Zhang ierr = MatGetFactor_mpidense_plapack(A,ftype,F);CHKERRQ(ierr); 142130716080SHong Zhang #else 1422e32f2f54SBarry Smith SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Matrix format %s uses PLAPACK direct solver. Install PLAPACK",((PetscObject)A)->type_name); 142330716080SHong Zhang #endif 142430716080SHong Zhang PetscFunctionReturn(0); 142530716080SHong Zhang } 142630716080SHong Zhang 142730716080SHong Zhang #undef __FUNCT__ 1428488007eeSBarry Smith #define __FUNCT__ "MatAXPY_MPIDense" 1429488007eeSBarry Smith PetscErrorCode MatAXPY_MPIDense(Mat Y,PetscScalar alpha,Mat X,MatStructure str) 1430488007eeSBarry Smith { 1431488007eeSBarry Smith PetscErrorCode ierr; 1432488007eeSBarry Smith Mat_MPIDense *A = (Mat_MPIDense*)Y->data, *B = (Mat_MPIDense*)X->data; 1433488007eeSBarry Smith 1434488007eeSBarry Smith PetscFunctionBegin; 1435488007eeSBarry Smith ierr = MatAXPY(A->A,alpha,B->A,str);CHKERRQ(ierr); 1436488007eeSBarry Smith PetscFunctionReturn(0); 1437488007eeSBarry Smith } 1438488007eeSBarry Smith 1439ba337c44SJed Brown #undef __FUNCT__ 1440ba337c44SJed Brown #define __FUNCT__ "MatConjugate_MPIDense" 1441ba337c44SJed Brown PetscErrorCode PETSCMAT_DLLEXPORT MatConjugate_MPIDense(Mat mat) 1442ba337c44SJed Brown { 1443ba337c44SJed Brown Mat_MPIDense *a = (Mat_MPIDense *)mat->data; 1444ba337c44SJed Brown PetscErrorCode ierr; 1445ba337c44SJed Brown 1446ba337c44SJed Brown PetscFunctionBegin; 1447ba337c44SJed Brown ierr = MatConjugate(a->A);CHKERRQ(ierr); 1448ba337c44SJed Brown PetscFunctionReturn(0); 1449ba337c44SJed Brown } 1450ba337c44SJed Brown 1451ba337c44SJed Brown #undef __FUNCT__ 1452ba337c44SJed Brown #define __FUNCT__ "MatRealPart_MPIDense" 1453ba337c44SJed Brown PetscErrorCode MatRealPart_MPIDense(Mat A) 1454ba337c44SJed Brown { 1455ba337c44SJed Brown Mat_MPIDense *a = (Mat_MPIDense*)A->data; 1456ba337c44SJed Brown PetscErrorCode ierr; 1457ba337c44SJed Brown 1458ba337c44SJed Brown 
PetscFunctionBegin; 1459ba337c44SJed Brown ierr = MatRealPart(a->A);CHKERRQ(ierr); 1460ba337c44SJed Brown PetscFunctionReturn(0); 1461ba337c44SJed Brown } 1462ba337c44SJed Brown 1463ba337c44SJed Brown #undef __FUNCT__ 1464ba337c44SJed Brown #define __FUNCT__ "MatImaginaryPart_MPIDense" 1465ba337c44SJed Brown PetscErrorCode MatImaginaryPart_MPIDense(Mat A) 1466ba337c44SJed Brown { 1467ba337c44SJed Brown Mat_MPIDense *a = (Mat_MPIDense*)A->data; 1468ba337c44SJed Brown PetscErrorCode ierr; 1469ba337c44SJed Brown 1470ba337c44SJed Brown PetscFunctionBegin; 1471ba337c44SJed Brown ierr = MatImaginaryPart(a->A);CHKERRQ(ierr); 1472ba337c44SJed Brown PetscFunctionReturn(0); 1473ba337c44SJed Brown } 1474ba337c44SJed Brown 14758965ea79SLois Curfman McInnes /* -------------------------------------------------------------------*/ 147609dc0095SBarry Smith static struct _MatOps MatOps_Values = {MatSetValues_MPIDense, 147709dc0095SBarry Smith MatGetRow_MPIDense, 147809dc0095SBarry Smith MatRestoreRow_MPIDense, 147909dc0095SBarry Smith MatMult_MPIDense, 148097304618SKris Buschelman /* 4*/ MatMultAdd_MPIDense, 14817c922b88SBarry Smith MatMultTranspose_MPIDense, 14827c922b88SBarry Smith MatMultTransposeAdd_MPIDense, 14838965ea79SLois Curfman McInnes 0, 148409dc0095SBarry Smith 0, 148509dc0095SBarry Smith 0, 148697304618SKris Buschelman /*10*/ 0, 148709dc0095SBarry Smith 0, 148809dc0095SBarry Smith 0, 148909dc0095SBarry Smith 0, 149009dc0095SBarry Smith MatTranspose_MPIDense, 149197304618SKris Buschelman /*15*/ MatGetInfo_MPIDense, 14926e4ee0c6SHong Zhang MatEqual_MPIDense, 149309dc0095SBarry Smith MatGetDiagonal_MPIDense, 14945b2fa520SLois Curfman McInnes MatDiagonalScale_MPIDense, 149509dc0095SBarry Smith MatNorm_MPIDense, 149697304618SKris Buschelman /*20*/ MatAssemblyBegin_MPIDense, 149709dc0095SBarry Smith MatAssemblyEnd_MPIDense, 149809dc0095SBarry Smith MatSetOption_MPIDense, 149909dc0095SBarry Smith MatZeroEntries_MPIDense, 1500d519adbfSMatthew Knepley /*24*/ 
MatZeroRows_MPIDense, 1501919b68f7SBarry Smith 0, 150201b82886SBarry Smith 0, 150301b82886SBarry Smith 0, 150401b82886SBarry Smith 0, 1505d519adbfSMatthew Knepley /*29*/ MatSetUpPreallocation_MPIDense, 1506273d9f13SBarry Smith 0, 150709dc0095SBarry Smith 0, 150809dc0095SBarry Smith MatGetArray_MPIDense, 150909dc0095SBarry Smith MatRestoreArray_MPIDense, 1510d519adbfSMatthew Knepley /*34*/ MatDuplicate_MPIDense, 151109dc0095SBarry Smith 0, 151209dc0095SBarry Smith 0, 151309dc0095SBarry Smith 0, 151409dc0095SBarry Smith 0, 1515d519adbfSMatthew Knepley /*39*/ MatAXPY_MPIDense, 15162ce60cd0SSatish Balay MatGetSubMatrices_MPIDense, 151709dc0095SBarry Smith 0, 151809dc0095SBarry Smith MatGetValues_MPIDense, 151909dc0095SBarry Smith 0, 1520d519adbfSMatthew Knepley /*44*/ 0, 152109dc0095SBarry Smith MatScale_MPIDense, 152209dc0095SBarry Smith 0, 152309dc0095SBarry Smith 0, 152409dc0095SBarry Smith 0, 1525d519adbfSMatthew Knepley /*49*/ 0, 152609dc0095SBarry Smith 0, 152709dc0095SBarry Smith 0, 152809dc0095SBarry Smith 0, 152909dc0095SBarry Smith 0, 1530d519adbfSMatthew Knepley /*54*/ 0, 153109dc0095SBarry Smith 0, 153209dc0095SBarry Smith 0, 153309dc0095SBarry Smith 0, 153409dc0095SBarry Smith 0, 1535d519adbfSMatthew Knepley /*59*/ MatGetSubMatrix_MPIDense, 1536b9b97703SBarry Smith MatDestroy_MPIDense, 1537b9b97703SBarry Smith MatView_MPIDense, 1538357abbc8SBarry Smith 0, 153997304618SKris Buschelman 0, 1540d519adbfSMatthew Knepley /*64*/ 0, 154197304618SKris Buschelman 0, 154297304618SKris Buschelman 0, 154397304618SKris Buschelman 0, 154497304618SKris Buschelman 0, 1545d519adbfSMatthew Knepley /*69*/ 0, 154697304618SKris Buschelman 0, 154797304618SKris Buschelman 0, 154897304618SKris Buschelman 0, 154997304618SKris Buschelman 0, 1550d519adbfSMatthew Knepley /*74*/ 0, 155197304618SKris Buschelman 0, 155297304618SKris Buschelman 0, 155397304618SKris Buschelman 0, 155497304618SKris Buschelman 0, 1555d519adbfSMatthew Knepley /*79*/ 0, 155697304618SKris Buschelman 0, 
155797304618SKris Buschelman 0, 155897304618SKris Buschelman 0, 15595bba2384SShri Abhyankar /*83*/ MatLoad_MPIDense, 1560865e5f61SKris Buschelman 0, 1561865e5f61SKris Buschelman 0, 1562865e5f61SKris Buschelman 0, 1563865e5f61SKris Buschelman 0, 1564865e5f61SKris Buschelman 0, 1565d519adbfSMatthew Knepley /*89*/ 15662fbe02b9SBarry Smith #if defined(PETSC_HAVE_PLAPACK) 15672fbe02b9SBarry Smith MatMatMult_MPIDense_MPIDense, 15684ae313f4SHong Zhang MatMatMultSymbolic_MPIDense_MPIDense, 15692fbe02b9SBarry Smith MatMatMultNumeric_MPIDense_MPIDense, 15702fbe02b9SBarry Smith #else 1571865e5f61SKris Buschelman 0, 1572865e5f61SKris Buschelman 0, 1573865e5f61SKris Buschelman 0, 15742fbe02b9SBarry Smith #endif 15752fbe02b9SBarry Smith 0, 1576ba337c44SJed Brown 0, 1577d519adbfSMatthew Knepley /*94*/ 0, 1578865e5f61SKris Buschelman 0, 1579865e5f61SKris Buschelman 0, 1580ba337c44SJed Brown 0, 1581ba337c44SJed Brown 0, 1582ba337c44SJed Brown /*99*/ 0, 1583ba337c44SJed Brown 0, 1584ba337c44SJed Brown 0, 1585ba337c44SJed Brown MatConjugate_MPIDense, 1586ba337c44SJed Brown 0, 1587ba337c44SJed Brown /*104*/0, 1588ba337c44SJed Brown MatRealPart_MPIDense, 1589ba337c44SJed Brown MatImaginaryPart_MPIDense, 159086d161a7SShri Abhyankar 0, 159186d161a7SShri Abhyankar 0, 159286d161a7SShri Abhyankar /*109*/0, 159386d161a7SShri Abhyankar 0, 159486d161a7SShri Abhyankar 0, 159586d161a7SShri Abhyankar 0, 159686d161a7SShri Abhyankar 0, 159786d161a7SShri Abhyankar /*114*/0, 159886d161a7SShri Abhyankar 0, 159986d161a7SShri Abhyankar 0, 160086d161a7SShri Abhyankar 0, 160186d161a7SShri Abhyankar 0, 160286d161a7SShri Abhyankar /*119*/0, 160386d161a7SShri Abhyankar 0, 160486d161a7SShri Abhyankar 0, 16055bba2384SShri Abhyankar 0 1606ba337c44SJed Brown }; 16078965ea79SLois Curfman McInnes 1608273d9f13SBarry Smith EXTERN_C_BEGIN 16094a2ae208SSatish Balay #undef __FUNCT__ 1610a23d5eceSKris Buschelman #define __FUNCT__ "MatMPIDenseSetPreallocation_MPIDense" 1611be1d678aSKris Buschelman PetscErrorCode 
PETSCMAT_DLLEXPORT MatMPIDenseSetPreallocation_MPIDense(Mat mat,PetscScalar *data) 1612a23d5eceSKris Buschelman { 1613a23d5eceSKris Buschelman Mat_MPIDense *a; 1614dfbe8321SBarry Smith PetscErrorCode ierr; 1615a23d5eceSKris Buschelman 1616a23d5eceSKris Buschelman PetscFunctionBegin; 1617a23d5eceSKris Buschelman mat->preallocated = PETSC_TRUE; 1618a23d5eceSKris Buschelman /* Note: For now, when data is specified above, this assumes the user correctly 1619a23d5eceSKris Buschelman allocates the local dense storage space. We should add error checking. */ 1620a23d5eceSKris Buschelman 1621a23d5eceSKris Buschelman a = (Mat_MPIDense*)mat->data; 162234ef9618SShri Abhyankar ierr = PetscLayoutSetBlockSize(mat->rmap,1);CHKERRQ(ierr); 162334ef9618SShri Abhyankar ierr = PetscLayoutSetBlockSize(mat->cmap,1);CHKERRQ(ierr); 162434ef9618SShri Abhyankar ierr = PetscLayoutSetUp(mat->rmap);CHKERRQ(ierr); 162534ef9618SShri Abhyankar ierr = PetscLayoutSetUp(mat->cmap);CHKERRQ(ierr); 162634ef9618SShri Abhyankar a->nvec = mat->cmap->n; 162734ef9618SShri Abhyankar 1628f69a0ea3SMatthew Knepley ierr = MatCreate(PETSC_COMM_SELF,&a->A);CHKERRQ(ierr); 1629d0f46423SBarry Smith ierr = MatSetSizes(a->A,mat->rmap->n,mat->cmap->N,mat->rmap->n,mat->cmap->N);CHKERRQ(ierr); 16305c5985e7SKris Buschelman ierr = MatSetType(a->A,MATSEQDENSE);CHKERRQ(ierr); 16315c5985e7SKris Buschelman ierr = MatSeqDenseSetPreallocation(a->A,data);CHKERRQ(ierr); 163252e6d16bSBarry Smith ierr = PetscLogObjectParent(mat,a->A);CHKERRQ(ierr); 1633a23d5eceSKris Buschelman PetscFunctionReturn(0); 1634a23d5eceSKris Buschelman } 1635a23d5eceSKris Buschelman EXTERN_C_END 1636a23d5eceSKris Buschelman 16370bad9183SKris Buschelman /*MC 16382692d6eeSBarry Smith MATSOLVERPLAPACK = "mpidense" - Parallel LU and Cholesky factorization for MATMPIDENSE matrices 16390bad9183SKris Buschelman 1640e2e64c6bSBarry Smith run ./configure with the option --download-plapack 16417878bbefSBarry Smith 16427878bbefSBarry Smith 16437878bbefSBarry Smith 
Options Database Keys: 16447878bbefSBarry Smith . -mat_plapack_nprows <n> - number of rows in processor partition 16457878bbefSBarry Smith . -mat_plapack_npcols <n> - number of columns in processor partition 16467878bbefSBarry Smith . -mat_plapack_nb <n> - block size of template vector 16477878bbefSBarry Smith . -mat_plapack_nb_alg <n> - algorithmic block size 16487878bbefSBarry Smith - -mat_plapack_ckerror <n> - error checking flag 16497878bbefSBarry Smith 1650f6680f47SSatish Balay Level: intermediate 1651f6680f47SSatish Balay 165241c8de11SBarry Smith .seealso: MatCreateMPIDense(), MATDENSE, MATSEQDENSE, PCFactorSetSolverPackage(), MatSolverPackage 165341c8de11SBarry Smith 16540bad9183SKris Buschelman M*/ 16550bad9183SKris Buschelman 1656a23d5eceSKris Buschelman EXTERN_C_BEGIN 1657a23d5eceSKris Buschelman #undef __FUNCT__ 16584a2ae208SSatish Balay #define __FUNCT__ "MatCreate_MPIDense" 1659be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatCreate_MPIDense(Mat mat) 1660273d9f13SBarry Smith { 1661273d9f13SBarry Smith Mat_MPIDense *a; 1662dfbe8321SBarry Smith PetscErrorCode ierr; 1663273d9f13SBarry Smith 1664273d9f13SBarry Smith PetscFunctionBegin; 166538f2d2fdSLisandro Dalcin ierr = PetscNewLog(mat,Mat_MPIDense,&a);CHKERRQ(ierr); 1666b0a32e0cSBarry Smith mat->data = (void*)a; 1667273d9f13SBarry Smith ierr = PetscMemcpy(mat->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr); 1668273d9f13SBarry Smith mat->mapping = 0; 1669273d9f13SBarry Smith 1670273d9f13SBarry Smith mat->insertmode = NOT_SET_VALUES; 16717adad957SLisandro Dalcin ierr = MPI_Comm_rank(((PetscObject)mat)->comm,&a->rank);CHKERRQ(ierr); 16727adad957SLisandro Dalcin ierr = MPI_Comm_size(((PetscObject)mat)->comm,&a->size);CHKERRQ(ierr); 1673273d9f13SBarry Smith 1674273d9f13SBarry Smith /* build cache for off array entries formed */ 1675273d9f13SBarry Smith a->donotstash = PETSC_FALSE; 16767adad957SLisandro Dalcin ierr = 
MatStashCreate_Private(((PetscObject)mat)->comm,1,&mat->stash);CHKERRQ(ierr);

  /* stuff used for matrix vector multiply */
  a->lvec        = 0;
  a->Mvctx       = 0;
  a->roworiented = PETSC_TRUE;

  /* Register type-specific operations so generic Mat entry points can dispatch here */
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)mat,"MatGetDiagonalBlock_C",
                                           "MatGetDiagonalBlock_MPIDense",
                                           MatGetDiagonalBlock_MPIDense);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)mat,"MatMPIDenseSetPreallocation_C",
                                           "MatMPIDenseSetPreallocation_MPIDense",
                                           MatMPIDenseSetPreallocation_MPIDense);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)mat,"MatMatMult_mpiaij_mpidense_C",
                                           "MatMatMult_MPIAIJ_MPIDense",
                                           MatMatMult_MPIAIJ_MPIDense);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)mat,"MatMatMultSymbolic_mpiaij_mpidense_C",
                                           "MatMatMultSymbolic_MPIAIJ_MPIDense",
                                           MatMatMultSymbolic_MPIAIJ_MPIDense);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)mat,"MatMatMultNumeric_mpiaij_mpidense_C",
                                           "MatMatMultNumeric_MPIAIJ_MPIDense",
                                           MatMatMultNumeric_MPIAIJ_MPIDense);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)mat,"MatGetFactor_petsc_C",
                                           "MatGetFactor_mpidense_petsc",
                                           MatGetFactor_mpidense_petsc);CHKERRQ(ierr);
#if defined(PETSC_HAVE_PLAPACK)
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)mat,"MatGetFactor_plapack_C",
                                           "MatGetFactor_mpidense_plapack",
                                           MatGetFactor_mpidense_plapack);CHKERRQ(ierr);
  /* PLAPACK keeps global state; initialize it once, on the first MPIDense matrix created */
  ierr = PetscPLAPACKInitializePackage(((PetscObject)mat)->comm);CHKERRQ(ierr);
#endif
  ierr = PetscObjectChangeTypeName((PetscObject)mat,MATMPIDENSE);CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
EXTERN_C_END

/*MC
   MATDENSE - MATDENSE = "dense" - A matrix type to be used for dense matrices.

   This matrix type is identical to MATSEQDENSE when constructed with a single process communicator,
   and MATMPIDENSE otherwise.

   Options Database Keys:
. -mat_type dense - sets the matrix type to "dense" during a call to MatSetFromOptions()

  Level: beginner


.seealso: MatCreateMPIDense,MATSEQDENSE,MATMPIDENSE
M*/

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatCreate_Dense"
/* Constructor for the generic "dense" type: delegates to the sequential or
   parallel dense implementation depending on the communicator size. */
PetscErrorCode PETSCMAT_DLLEXPORT MatCreate_Dense(Mat A)
{
  PetscErrorCode ierr;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(((PetscObject)A)->comm,&size);CHKERRQ(ierr);
  if (size == 1) {
    ierr = MatSetType(A,MATSEQDENSE);CHKERRQ(ierr);
  } else {
    ierr = MatSetType(A,MATMPIDENSE);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "MatMPIDenseSetPreallocation"
/*@C
   MatMPIDenseSetPreallocation - Sets the array used to store the matrix entries

   Not collective

   Input Parameters:
.  A - the matrix
-  data - optional location of matrix data.  Set data=PETSC_NULL for PETSc
   to control all matrix memory allocation.

   Notes:
   The dense format is fully compatible with standard Fortran 77
   storage by columns.

   The data input variable is intended primarily for Fortran programmers
   who wish to allocate their own matrix memory space. Most users should
   set data=PETSC_NULL.

   Level: intermediate

.keywords: matrix,dense, parallel

.seealso: MatCreate(), MatCreateSeqDense(), MatSetValues()
@*/
PetscErrorCode PETSCMAT_DLLEXPORT MatMPIDenseSetPreallocation(Mat mat,PetscScalar *data)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* No-op (rather than error) if the matrix type does not implement this method */
  ierr = PetscTryMethod(mat,"MatMPIDenseSetPreallocation_C",(Mat,PetscScalar *),(mat,data));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatCreateMPIDense"
/*@C
   MatCreateMPIDense - Creates a sparse parallel matrix in dense format.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of local rows (or PETSC_DECIDE to have calculated if M is given)
.  n - number of local columns (or PETSC_DECIDE to have calculated if N is given)
.  M - number of global rows (or PETSC_DECIDE to have calculated if m is given)
.  N - number of global columns (or PETSC_DECIDE to have calculated if n is given)
-  data - optional location of matrix data.  Set data=PETSC_NULL (PETSC_NULL_SCALAR for Fortran users) for PETSc
   to control all matrix memory allocation.

   Output Parameter:
.  A - the matrix

   Notes:
   The dense format is fully compatible with standard Fortran 77
   storage by columns.

   The data input variable is intended primarily for Fortran programmers
   who wish to allocate their own matrix memory space. Most users should
   set data=PETSC_NULL (PETSC_NULL_SCALAR for Fortran users).

   The user MUST specify either the local or global matrix dimensions
   (possibly both).

   Level: intermediate

.keywords: matrix,dense, parallel

.seealso: MatCreate(), MatCreateSeqDense(), MatSetValues()
@*/
PetscErrorCode PETSCMAT_DLLEXPORT MatCreateMPIDense(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscScalar *data,Mat *A)
{
  PetscErrorCode ierr;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = MatCreate(comm,A);CHKERRQ(ierr);
  ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  /* Pick the concrete type from the communicator size, matching MatCreate_Dense() */
  if (size > 1) {
    ierr = MatSetType(*A,MATMPIDENSE);CHKERRQ(ierr);
    ierr = MatMPIDenseSetPreallocation(*A,data);CHKERRQ(ierr);
  } else {
    ierr = MatSetType(*A,MATSEQDENSE);CHKERRQ(ierr);
    ierr = MatSeqDenseSetPreallocation(*A,data);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatDuplicate_MPIDense"
/* Deep copy of an MPIDense matrix: clones layouts, the ops table, and the
   local dense block (values copied or not according to cpvalues). */
static PetscErrorCode MatDuplicate_MPIDense(Mat A,MatDuplicateOption cpvalues,Mat *newmat)
{
  Mat            mat;
  Mat_MPIDense   *a,*oldmat = (Mat_MPIDense*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  *newmat = 0;
  ierr = MatCreate(((PetscObject)A)->comm,&mat);CHKERRQ(ierr);
  ierr = MatSetSizes(mat,A->rmap->n,A->cmap->n,A->rmap->N,A->cmap->N);CHKERRQ(ierr);
  ierr = MatSetType(mat,((PetscObject)A)->type_name);CHKERRQ(ierr);
  a = (Mat_MPIDense*)mat->data;
  /* Copy the whole function table so any overridden ops carry over */
  ierr = PetscMemcpy(mat->ops,A->ops,sizeof(struct _MatOps));CHKERRQ(ierr);

  mat->factortype   = A->factortype;
  mat->assembled    = PETSC_TRUE;
  mat->preallocated = PETSC_TRUE;

  a->size         = oldmat->size;
  a->rank         = oldmat->rank;
  mat->insertmode = NOT_SET_VALUES;
  a->nvec         = oldmat->nvec;
  a->donotstash   = oldmat->donotstash;

  ierr = PetscLayoutCopy(A->rmap,&mat->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutCopy(A->cmap,&mat->cmap);CHKERRQ(ierr);

  ierr = MatSetUpMultiply_MPIDense(mat);CHKERRQ(ierr);
  ierr = MatDuplicate(oldmat->A,cpvalues,&a->A);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(mat,a->A);CHKERRQ(ierr);

  *newmat = mat;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatLoad_MPIDense_DenseInFile"
/* Loads a matrix stored in the binary file in *dense* (column-count-free) format:
   rank 0 reads each process's row block and ships it with MPI_Send; the values
   are transposed row-major (file) -> column-major (local array) on insertion. */
PetscErrorCode MatLoad_MPIDense_DenseInFile(MPI_Comm comm,PetscInt fd,PetscInt M,PetscInt N,Mat newmat,PetscInt sizesset)
{
  PetscErrorCode ierr;
  PetscMPIInt    rank,size;
  PetscInt       *rowners,i,m,nz,j;
  PetscScalar    *array,*vals,*vals_ptr;
  MPI_Status     status;

  PetscFunctionBegin;
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);

  /* determine ownership of all rows */
  if (newmat->rmap->n < 0) m = M/size + ((M % size) > rank);
  else m = newmat->rmap->n;
  ierr = PetscMalloc((size+2)*sizeof(PetscInt),&rowners);CHKERRQ(ierr);
  ierr = MPI_Allgather(&m,1,MPIU_INT,rowners+1,1,MPIU_INT,comm);CHKERRQ(ierr);
  rowners[0] = 0;
  for (i=2; i<=size; i++) {
    rowners[i] += rowners[i-1];
  }

  if (!sizesset) {
    ierr = MatSetSizes(newmat,m,PETSC_DECIDE,M,N);CHKERRQ(ierr);
  }
  ierr = MatMPIDenseSetPreallocation(newmat,PETSC_NULL);CHKERRQ(ierr);
  ierr = MatGetArray(newmat,&array);CHKERRQ(ierr);

  if (!rank) {
    /* NOTE(review): vals holds m*N entries, where m is rank 0's row count; the
       default split gives rank 0 the largest block, but if the caller preset
       uneven local sizes a later process could own more rows than rank 0 and
       overflow this buffer on the read below -- verify against callers. */
    ierr = PetscMalloc(m*N*sizeof(PetscScalar),&vals);CHKERRQ(ierr);

    /* read in my part of the matrix numerical values */
    ierr = PetscBinaryRead(fd,vals,m*N,PETSC_SCALAR);CHKERRQ(ierr);

    /* insert into matrix-by row (this is why cannot directly read into array */
    vals_ptr = vals;
    for (i=0; i<m; i++) {
      for (j=0; j<N; j++) {
        array[i + j*m] = *vals_ptr++;
      }
    }

    /* read in other processors and ship out */
    for (i=1; i<size; i++) {
      nz   = (rowners[i+1] - rowners[i])*N;
      ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr);
      ierr = MPI_Send(vals,nz,MPIU_SCALAR,i,((PetscObject)(newmat))->tag,comm);CHKERRQ(ierr);
    }
  } else {
    /* receive numeric values */
    ierr = PetscMalloc(m*N*sizeof(PetscScalar),&vals);CHKERRQ(ierr);

    /* receive message of values*/
    ierr = MPI_Recv(vals,m*N,MPIU_SCALAR,0,((PetscObject)(newmat))->tag,comm,&status);CHKERRQ(ierr);

    /* insert into matrix-by row (this is why cannot directly read into array */
    vals_ptr = vals;
    for (i=0; i<m; i++) {
      for (j=0; j<N; j++) {
        array[i + j*m] = *vals_ptr++;
      }
    }
  }
  ierr = PetscFree(rowners);CHKERRQ(ierr);
  ierr = PetscFree(vals);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatLoad_MPIDense"
/* Loads an MPIDense matrix from a PETSc binary viewer.  Rank 0 reads the
   header, row lengths, column indices, and values, scattering/shipping each
   process's share; delegates to MatLoad_MPIDense_DenseInFile when the file
   was written in dense format (nz == MATRIX_BINARY_FORMAT_DENSE). */
PetscErrorCode MatLoad_MPIDense(Mat newmat,PetscViewer viewer)
{
  PetscScalar    *vals,*svals;
  MPI_Comm       comm = ((PetscObject)viewer)->comm;
  MPI_Status     status;
  PetscMPIInt    rank,size,tag = ((PetscObject)viewer)->tag,*rowners,*sndcounts,m,maxnz;
  PetscInt       header[4],*rowlengths = 0,M,N,*cols;
  PetscInt       *ourlens,*procsnz = 0,*offlens,jj,*mycols,*smycols;
  PetscInt       i,nz,j,rstart,rend,sizesset=1,grows,gcols;
  int            fd;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  if (!rank) {
    ierr = PetscViewerBinaryGetDescriptor(viewer,&fd);CHKERRQ(ierr);
    ierr = PetscBinaryRead(fd,(char *)header,4,PETSC_INT);CHKERRQ(ierr);
    if (header[0] != MAT_FILE_CLASSID) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED,"not matrix object");
  }
  /* sizesset==0 means the caller gave no sizes at all; they come from the file */
  if (newmat->rmap->n < 0 && newmat->rmap->N < 0 && newmat->cmap->n < 0 && newmat->cmap->N < 0) sizesset = 0;

  ierr = MPI_Bcast(header+1,3,MPIU_INT,0,comm);CHKERRQ(ierr);
  M = header[1]; N = header[2]; nz = header[3];

  /* If global rows/cols are set to PETSC_DECIDE, set it to the sizes given in the file */
  if (sizesset && newmat->rmap->N < 0) newmat->rmap->N = M;
  if (sizesset && newmat->cmap->N < 0) newmat->cmap->N = N;

  /* If global sizes are set, check if they are consistent with that given in the file */
  if (sizesset) {
    ierr = MatGetSize(newmat,&grows,&gcols);CHKERRQ(ierr);
  }
  if (sizesset && newmat->rmap->N != grows) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED, "Inconsistent # of rows:Matrix in file has (%d) and input matrix has (%d)",M,grows);
  if (sizesset && newmat->cmap->N != gcols) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED, "Inconsistent # of cols:Matrix in file has (%d) and input matrix has (%d)",N,gcols);

  /*
       Handle case where matrix is stored on disk as a dense matrix
  */
  if (nz == MATRIX_BINARY_FORMAT_DENSE) {
    ierr = MatLoad_MPIDense_DenseInFile(comm,fd,M,N,newmat,sizesset);CHKERRQ(ierr);
    PetscFunctionReturn(0);
  }

  /* determine ownership of all rows */
  if (newmat->rmap->n < 0) m = PetscMPIIntCast(M/size + ((M % size) > rank));
  else m = PetscMPIIntCast(newmat->rmap->n);
  ierr = PetscMalloc((size+2)*sizeof(PetscMPIInt),&rowners);CHKERRQ(ierr);
  ierr = MPI_Allgather(&m,1,MPI_INT,rowners+1,1,MPI_INT,comm);CHKERRQ(ierr);
  rowners[0] = 0;
  for (i=2; i<=size; i++) {
    rowners[i] += rowners[i-1];
  }
  rstart = rowners[rank];
  rend   = rowners[rank+1];

  /* distribute row lengths to all processors */
  ierr = PetscMalloc2(rend-rstart,PetscInt,&ourlens,rend-rstart,PetscInt,&offlens);CHKERRQ(ierr);
  if (!rank) {
    ierr = PetscMalloc(M*sizeof(PetscInt),&rowlengths);CHKERRQ(ierr);
    ierr = PetscBinaryRead(fd,rowlengths,M,PETSC_INT);CHKERRQ(ierr);
    ierr = PetscMalloc(size*sizeof(PetscMPIInt),&sndcounts);CHKERRQ(ierr);
    for (i=0; i<size; i++) sndcounts[i] = rowners[i+1] - rowners[i];
    ierr = MPI_Scatterv(rowlengths,sndcounts,rowners,MPIU_INT,ourlens,rend-rstart,MPIU_INT,0,comm);CHKERRQ(ierr);
    ierr = PetscFree(sndcounts);CHKERRQ(ierr);
  } else {
    ierr = MPI_Scatterv(0,0,0,MPIU_INT,ourlens,rend-rstart,MPIU_INT,0,comm);CHKERRQ(ierr);
  }

  if (!rank) {
    /* calculate the number of nonzeros on each processor */
    ierr = PetscMalloc(size*sizeof(PetscInt),&procsnz);CHKERRQ(ierr);
    ierr = PetscMemzero(procsnz,size*sizeof(PetscInt));CHKERRQ(ierr);
    for (i=0; i<size; i++) {
      for (j=rowners[i]; j< rowners[i+1]; j++) {
        procsnz[i] += rowlengths[j];
      }
    }
    ierr = PetscFree(rowlengths);CHKERRQ(ierr);

    /* determine max buffer needed and allocate it */
    maxnz = 0;
    for (i=0; i<size; i++) {
      maxnz = PetscMax(maxnz,procsnz[i]);
    }
    ierr = PetscMalloc(maxnz*sizeof(PetscInt),&cols);CHKERRQ(ierr);

    /* read in my part of the matrix column indices  */
    nz   = procsnz[0];
    ierr = PetscMalloc(nz*sizeof(PetscInt),&mycols);CHKERRQ(ierr);
    ierr = PetscBinaryRead(fd,mycols,nz,PETSC_INT);CHKERRQ(ierr);

    /* read in every one elses and ship off */
    for (i=1; i<size; i++) {
      nz   = procsnz[i];
      ierr = PetscBinaryRead(fd,cols,nz,PETSC_INT);CHKERRQ(ierr);
      ierr = MPI_Send(cols,nz,MPIU_INT,i,tag,comm);CHKERRQ(ierr);
    }
    ierr = PetscFree(cols);CHKERRQ(ierr);
  } else {
    /* determine buffer space needed for message */
    nz = 0;
    for (i=0; i<m; i++) {
      nz += ourlens[i];
    }
    ierr = PetscMalloc((nz+1)*sizeof(PetscInt),&mycols);CHKERRQ(ierr);

    /* receive message of column indices*/
    ierr = MPI_Recv(mycols,nz,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr);
    ierr = MPI_Get_count(&status,MPIU_INT,&maxnz);CHKERRQ(ierr);
    if (maxnz != nz) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED,"something is wrong with file");
  }

  /* loop over local rows, determining number of off diagonal entries */
  ierr = PetscMemzero(offlens,m*sizeof(PetscInt));CHKERRQ(ierr);
  jj   = 0;
  for (i=0; i<m; i++) {
    for (j=0; j<ourlens[i]; j++) {
      if (mycols[jj] < rstart || mycols[jj] >= rend) offlens[i]++;
      jj++;
    }
  }

  /* create our matrix */
  for (i=0; i<m; i++) {
    ourlens[i] -= offlens[i];
  }

  if (!sizesset) {
    ierr = MatSetSizes(newmat,m,PETSC_DECIDE,M,N);CHKERRQ(ierr);
  }
  ierr = MatMPIDenseSetPreallocation(newmat,PETSC_NULL);CHKERRQ(ierr);
  /* restore full row lengths after the (unused for dense) diag/off-diag split */
  for (i=0; i<m; i++) {
    ourlens[i] += offlens[i];
  }

  if (!rank) {
    ierr = PetscMalloc(maxnz*sizeof(PetscScalar),&vals);CHKERRQ(ierr);

    /* read in my part of the matrix numerical values  */
    nz   = procsnz[0];
    ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr);

    /* insert into matrix */
    jj      = rstart;
    smycols = mycols;
    svals   = vals;
    for (i=0; i<m; i++) {
      ierr = MatSetValues(newmat,1,&jj,ourlens[i],smycols,svals,INSERT_VALUES);CHKERRQ(ierr);
      smycols += ourlens[i];
      svals   += ourlens[i];
      jj++;
    }

    /* read in other processors and ship out */
    for (i=1; i<size; i++) {
      nz   = procsnz[i];
      ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr);
      ierr = MPI_Send(vals,nz,MPIU_SCALAR,i,((PetscObject)newmat)->tag,comm);CHKERRQ(ierr);
    }
    ierr = PetscFree(procsnz);CHKERRQ(ierr);
  } else {
    /* receive numeric values */
    ierr = PetscMalloc((nz+1)*sizeof(PetscScalar),&vals);CHKERRQ(ierr);

    /* receive message of values*/
    ierr = MPI_Recv(vals,nz,MPIU_SCALAR,0,((PetscObject)newmat)->tag,comm,&status);CHKERRQ(ierr);
    ierr = MPI_Get_count(&status,MPIU_SCALAR,&maxnz);CHKERRQ(ierr);
    if (maxnz != nz) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED,"something is wrong with file");

    /* insert into matrix */
    jj      = rstart;
    smycols = mycols;
    svals   = vals;
    for (i=0; i<m; i++) {
      ierr = MatSetValues(newmat,1,&jj,ourlens[i],smycols,svals,INSERT_VALUES);CHKERRQ(ierr);
      smycols += ourlens[i];
      svals   += ourlens[i];
      jj++;
    }
  }
  ierr = PetscFree2(ourlens,offlens);CHKERRQ(ierr);
  ierr = PetscFree(vals);CHKERRQ(ierr);
  ierr = PetscFree(mycols);CHKERRQ(ierr);
  ierr = PetscFree(rowners);CHKERRQ(ierr);

  ierr = MatAssemblyBegin(newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(newmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatEqual_MPIDense"
/* Two MPIDense matrices are equal iff every process's local block is equal;
   the local results are combined with a logical-AND reduction. */
PetscErrorCode MatEqual_MPIDense(Mat A,Mat B,PetscBool  *flag)
{
  Mat_MPIDense   *matB = (Mat_MPIDense*)B->data,*matA = (Mat_MPIDense*)A->data;
  Mat            a,b;
  PetscBool      flg;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  a    = matA->A;
  b    = matB->A;
  ierr = MatEqual(a,b,&flg);CHKERRQ(ierr);
  /* NOTE(review): reduces a PetscBool with MPI_INT/MPI_LAND -- relies on
     PetscBool being int-sized; MPIU_BOOL would be the self-documenting choice */
  ierr = MPI_Allreduce(&flg,flag,1,MPI_INT,MPI_LAND,((PetscObject)A)->comm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#if defined(PETSC_HAVE_PLAPACK)

#undef __FUNCT__
#define __FUNCT__ "PetscPLAPACKFinalizePackage"
/*@C
  PetscPLAPACKFinalizePackage - This function destroys everything in the Petsc interface to PLAPACK.
  Level: developer

.keywords: Petsc, destroy, package, PLAPACK
.seealso: PetscFinalize()
@*/
PetscErrorCode PETSCMAT_DLLEXPORT PetscPLAPACKFinalizePackage(void)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Tears down the PLAPACK global state created by PLA_Init() */
  ierr = PLA_Finalize();CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PetscPLAPACKInitializePackage"
/*@C
  PetscPLAPACKInitializePackage - This function initializes everything in the Petsc interface to PLAPACK. It is
     called from MatCreate_MPIDense() the first time an MPI dense matrix is called.

  Input Parameter:
.   comm - the communicator the matrix lives on

  Level: developer

   Notes: PLAPACK does not have a good fit with MPI communicators; all (parallel) PLAPACK objects have to live in the
     same communicator (because there is some global state that is initialized and used for all matrices). In addition if
     PLAPACK is initialized (that is the initial matrices created) are on subcommunicators of MPI_COMM_WORLD, these subcommunicators
     cannot overlap.

.keywords: Petsc, initialize, package, PLAPACK
.seealso: PetscSysInitializePackage(), PetscInitialize()
@*/
PetscErrorCode PETSCMAT_DLLEXPORT PetscPLAPACKInitializePackage(MPI_Comm comm)
{
  PetscMPIInt    size;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* Guard: PLAPACK keeps global state, so initialize at most once per run */
  if (!PLA_Initialized(PETSC_NULL)) {

    ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
    /* Default 2D process mesh is 1 x size; user can reshape it via options below */
    Plapack_nprows = 1;
    Plapack_npcols = size;

    ierr = PetscOptionsBegin(comm,PETSC_NULL,"PLAPACK Options","Mat");CHKERRQ(ierr);
    ierr = PetscOptionsInt("-mat_plapack_nprows","row dimension of 2D processor mesh","None",Plapack_nprows,&Plapack_nprows,PETSC_NULL);CHKERRQ(ierr);
    ierr = PetscOptionsInt("-mat_plapack_npcols","column dimension of 2D processor mesh","None",Plapack_npcols,&Plapack_npcols,PETSC_NULL);CHKERRQ(ierr);
    /* Error checking defaults on (level 3) in debug builds, off otherwise */
#if defined(PETSC_USE_DEBUG)
    Plapack_ierror = 3;
#else
    Plapack_ierror = 0;
#endif
    ierr = PetscOptionsInt("-mat_plapack_ckerror","error checking flag","None",Plapack_ierror,&Plapack_ierror,PETSC_NULL);CHKERRQ(ierr);
    if (Plapack_ierror){
      ierr = PLA_Set_error_checking(Plapack_ierror,PETSC_TRUE,PETSC_TRUE,PETSC_FALSE );CHKERRQ(ierr);
    } else {
      ierr = PLA_Set_error_checking(Plapack_ierror,PETSC_FALSE,PETSC_FALSE,PETSC_FALSE );CHKERRQ(ierr);
    }

    /* 0 means "let PLAPACK choose" the algorithmic block size */
    Plapack_nb_alg = 0;
    ierr = PetscOptionsInt("-mat_plapack_nb_alg","algorithmic block size","None",Plapack_nb_alg,&Plapack_nb_alg,PETSC_NULL);CHKERRQ(ierr);
    if (Plapack_nb_alg) {
      ierr = pla_Environ_set_nb_alg (PLA_OP_ALL_ALG,Plapack_nb_alg);CHKERRQ(ierr);
    }
    PetscOptionsEnd();

    ierr = PLA_Comm_1D_to_2D(comm,Plapack_nprows,Plapack_npcols,&Plapack_comm_2d);CHKERRQ(ierr);
    ierr = PLA_Init(Plapack_comm_2d);CHKERRQ(ierr);
    /* Ensure PLA_Finalize() runs during PetscFinalize() */
    ierr = PetscRegisterFinalize(PetscPLAPACKFinalizePackage);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#endif