1be1d678aSKris Buschelman #define PETSCMAT_DLL 28a729477SBarry Smith 37c4f633dSBarry Smith #include "../src/mat/impls/aij/mpi/mpiaij.h" /*I "petscmat.h" I*/ 48a729477SBarry Smith 5dd6ea824SBarry Smith #undef __FUNCT__ 6dd6ea824SBarry Smith #define __FUNCT__ "MatDistribute_MPIAIJ" 7dd6ea824SBarry Smith /* 8dd6ea824SBarry Smith Distributes a SeqAIJ matrix across a set of processes. Code stolen from 9dd6ea824SBarry Smith MatLoad_MPIAIJ(). Horrible lack of reuse. Should be a routine for each matrix type. 10dd6ea824SBarry Smith 11dd6ea824SBarry Smith Only for square matrices 12dd6ea824SBarry Smith */ 13dd6ea824SBarry Smith PetscErrorCode MatDistribute_MPIAIJ(MPI_Comm comm,Mat gmat,PetscInt m,MatReuse reuse,Mat *inmat) 14dd6ea824SBarry Smith { 15dd6ea824SBarry Smith PetscMPIInt rank,size; 16dd6ea824SBarry Smith PetscInt *rowners,*dlens,*olens,i,rstart,rend,j,jj,nz,*gmataj,cnt,row,*ld; 17dd6ea824SBarry Smith PetscErrorCode ierr; 18dd6ea824SBarry Smith Mat mat; 19dd6ea824SBarry Smith Mat_SeqAIJ *gmata; 20dd6ea824SBarry Smith PetscMPIInt tag; 21dd6ea824SBarry Smith MPI_Status status; 22dd6ea824SBarry Smith PetscTruth aij; 23dd6ea824SBarry Smith MatScalar *gmataa,*ao,*ad,*gmataarestore=0; 24dd6ea824SBarry Smith 25dd6ea824SBarry Smith PetscFunctionBegin; 26dd6ea824SBarry Smith CHKMEMQ; 27dd6ea824SBarry Smith ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 28dd6ea824SBarry Smith ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 29dd6ea824SBarry Smith if (!rank) { 30dd6ea824SBarry Smith ierr = PetscTypeCompare((PetscObject)gmat,MATSEQAIJ,&aij);CHKERRQ(ierr); 31dd6ea824SBarry Smith if (!aij) SETERRQ1(PETSC_ERR_SUP,"Currently no support for input matrix of type %s\n",((PetscObject)gmat)->type_name); 32dd6ea824SBarry Smith } 33dd6ea824SBarry Smith if (reuse == MAT_INITIAL_MATRIX) { 34dd6ea824SBarry Smith ierr = MatCreate(comm,&mat);CHKERRQ(ierr); 35dd6ea824SBarry Smith ierr = MatSetSizes(mat,m,m,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr); 36dd6ea824SBarry Smith ierr = 
MatSetType(mat,MATAIJ);CHKERRQ(ierr); 37dd6ea824SBarry Smith ierr = PetscMalloc((size+1)*sizeof(PetscInt),&rowners);CHKERRQ(ierr); 38dd6ea824SBarry Smith ierr = PetscMalloc2(m,PetscInt,&dlens,m,PetscInt,&olens);CHKERRQ(ierr); 39dd6ea824SBarry Smith ierr = MPI_Allgather(&m,1,MPIU_INT,rowners+1,1,MPIU_INT,comm);CHKERRQ(ierr); 40dd6ea824SBarry Smith rowners[0] = 0; 41dd6ea824SBarry Smith for (i=2; i<=size; i++) { 42dd6ea824SBarry Smith rowners[i] += rowners[i-1]; 43dd6ea824SBarry Smith } 44dd6ea824SBarry Smith rstart = rowners[rank]; 45dd6ea824SBarry Smith rend = rowners[rank+1]; 46dd6ea824SBarry Smith ierr = PetscObjectGetNewTag((PetscObject)mat,&tag);CHKERRQ(ierr); 47dd6ea824SBarry Smith if (!rank) { 48dd6ea824SBarry Smith gmata = (Mat_SeqAIJ*) gmat->data; 49dd6ea824SBarry Smith /* send row lengths to all processors */ 50dd6ea824SBarry Smith for (i=0; i<m; i++) dlens[i] = gmata->ilen[i]; 51dd6ea824SBarry Smith for (i=1; i<size; i++) { 52dd6ea824SBarry Smith ierr = MPI_Send(gmata->ilen + rowners[i],rowners[i+1]-rowners[i],MPIU_INT,i,tag,comm);CHKERRQ(ierr); 53dd6ea824SBarry Smith } 54dd6ea824SBarry Smith /* determine number diagonal and off-diagonal counts */ 55dd6ea824SBarry Smith ierr = PetscMemzero(olens,m*sizeof(PetscInt));CHKERRQ(ierr); 56dd6ea824SBarry Smith ierr = PetscMalloc(m*sizeof(PetscInt),&ld);CHKERRQ(ierr); 57dd6ea824SBarry Smith ierr = PetscMemzero(ld,m*sizeof(PetscInt));CHKERRQ(ierr); 58dd6ea824SBarry Smith jj = 0; 59dd6ea824SBarry Smith for (i=0; i<m; i++) { 60dd6ea824SBarry Smith for (j=0; j<dlens[i]; j++) { 61dd6ea824SBarry Smith if (gmata->j[jj] < rstart) ld[i]++; 62dd6ea824SBarry Smith if (gmata->j[jj] < rstart || gmata->j[jj] >= rend) olens[i]++; 63dd6ea824SBarry Smith jj++; 64dd6ea824SBarry Smith } 65dd6ea824SBarry Smith } 66dd6ea824SBarry Smith /* send column indices to other processes */ 67dd6ea824SBarry Smith for (i=1; i<size; i++) { 68dd6ea824SBarry Smith nz = gmata->i[rowners[i+1]]-gmata->i[rowners[i]]; 69dd6ea824SBarry Smith ierr = 
MPI_Send(&nz,1,MPIU_INT,i,tag,comm);CHKERRQ(ierr); 70dd6ea824SBarry Smith ierr = MPI_Send(gmata->j + gmata->i[rowners[i]],nz,MPIU_INT,i,tag,comm);CHKERRQ(ierr); 71dd6ea824SBarry Smith } 72dd6ea824SBarry Smith 73dd6ea824SBarry Smith /* send numerical values to other processes */ 74dd6ea824SBarry Smith for (i=1; i<size; i++) { 75dd6ea824SBarry Smith nz = gmata->i[rowners[i+1]]-gmata->i[rowners[i]]; 76dd6ea824SBarry Smith ierr = MPI_Send(gmata->a + gmata->i[rowners[i]],nz,MPIU_SCALAR,i,tag,comm);CHKERRQ(ierr); 77dd6ea824SBarry Smith } 78dd6ea824SBarry Smith gmataa = gmata->a; 79dd6ea824SBarry Smith gmataj = gmata->j; 80dd6ea824SBarry Smith 81dd6ea824SBarry Smith } else { 82dd6ea824SBarry Smith /* receive row lengths */ 83dd6ea824SBarry Smith ierr = MPI_Recv(dlens,m,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr); 84dd6ea824SBarry Smith /* receive column indices */ 85dd6ea824SBarry Smith ierr = MPI_Recv(&nz,1,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr); 86dd6ea824SBarry Smith ierr = PetscMalloc2(nz,PetscScalar,&gmataa,nz,PetscInt,&gmataj);CHKERRQ(ierr); 87dd6ea824SBarry Smith ierr = MPI_Recv(gmataj,nz,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr); 88dd6ea824SBarry Smith /* determine number diagonal and off-diagonal counts */ 89dd6ea824SBarry Smith ierr = PetscMemzero(olens,m*sizeof(PetscInt));CHKERRQ(ierr); 90dd6ea824SBarry Smith ierr = PetscMalloc(m*sizeof(PetscInt),&ld);CHKERRQ(ierr); 91dd6ea824SBarry Smith ierr = PetscMemzero(ld,m*sizeof(PetscInt));CHKERRQ(ierr); 92dd6ea824SBarry Smith jj = 0; 93dd6ea824SBarry Smith for (i=0; i<m; i++) { 94dd6ea824SBarry Smith for (j=0; j<dlens[i]; j++) { 95dd6ea824SBarry Smith if (gmataj[jj] < rstart) ld[i]++; 96dd6ea824SBarry Smith if (gmataj[jj] < rstart || gmataj[jj] >= rend) olens[i]++; 97dd6ea824SBarry Smith jj++; 98dd6ea824SBarry Smith } 99dd6ea824SBarry Smith } 100dd6ea824SBarry Smith /* receive numerical values */ 101dd6ea824SBarry Smith ierr = PetscMemzero(gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr); 102dd6ea824SBarry Smith ierr = 
MPI_Recv(gmataa,nz,MPIU_SCALAR,0,tag,comm,&status);CHKERRQ(ierr); 103dd6ea824SBarry Smith } 104dd6ea824SBarry Smith /* set preallocation */ 105dd6ea824SBarry Smith for (i=0; i<m; i++) { 106dd6ea824SBarry Smith dlens[i] -= olens[i]; 107dd6ea824SBarry Smith } 108dd6ea824SBarry Smith ierr = MatSeqAIJSetPreallocation(mat,0,dlens);CHKERRQ(ierr); 109dd6ea824SBarry Smith ierr = MatMPIAIJSetPreallocation(mat,0,dlens,0,olens);CHKERRQ(ierr); 110dd6ea824SBarry Smith 111dd6ea824SBarry Smith for (i=0; i<m; i++) { 112dd6ea824SBarry Smith dlens[i] += olens[i]; 113dd6ea824SBarry Smith } 114dd6ea824SBarry Smith cnt = 0; 115dd6ea824SBarry Smith for (i=0; i<m; i++) { 116dd6ea824SBarry Smith row = rstart + i; 117dd6ea824SBarry Smith ierr = MatSetValues(mat,1,&row,dlens[i],gmataj+cnt,gmataa+cnt,INSERT_VALUES);CHKERRQ(ierr); 118dd6ea824SBarry Smith cnt += dlens[i]; 119dd6ea824SBarry Smith } 120dd6ea824SBarry Smith if (rank) { 121dd6ea824SBarry Smith ierr = PetscFree2(gmataa,gmataj);CHKERRQ(ierr); 122dd6ea824SBarry Smith } 123dd6ea824SBarry Smith ierr = PetscFree2(dlens,olens);CHKERRQ(ierr); 124dd6ea824SBarry Smith ierr = PetscFree(rowners);CHKERRQ(ierr); 125dd6ea824SBarry Smith ((Mat_MPIAIJ*)(mat->data))->ld = ld; 126dd6ea824SBarry Smith *inmat = mat; 127dd6ea824SBarry Smith } else { /* column indices are already set; only need to move over numerical values from process 0 */ 128dd6ea824SBarry Smith Mat_SeqAIJ *Ad = (Mat_SeqAIJ*)((Mat_MPIAIJ*)((*inmat)->data))->A->data; 129dd6ea824SBarry Smith Mat_SeqAIJ *Ao = (Mat_SeqAIJ*)((Mat_MPIAIJ*)((*inmat)->data))->B->data; 130dd6ea824SBarry Smith mat = *inmat; 131dd6ea824SBarry Smith ierr = PetscObjectGetNewTag((PetscObject)mat,&tag);CHKERRQ(ierr); 132dd6ea824SBarry Smith if (!rank) { 133dd6ea824SBarry Smith /* send numerical values to other processes */ 134dd6ea824SBarry Smith gmata = (Mat_SeqAIJ*) gmat->data; 135dd6ea824SBarry Smith ierr = MatGetOwnershipRanges(mat,(const PetscInt**)&rowners);CHKERRQ(ierr); 136dd6ea824SBarry Smith gmataa = 
gmata->a; 137dd6ea824SBarry Smith for (i=1; i<size; i++) { 138dd6ea824SBarry Smith nz = gmata->i[rowners[i+1]]-gmata->i[rowners[i]]; 139dd6ea824SBarry Smith ierr = MPI_Send(gmataa + gmata->i[rowners[i]],nz,MPIU_SCALAR,i,tag,comm);CHKERRQ(ierr); 140dd6ea824SBarry Smith } 141dd6ea824SBarry Smith nz = gmata->i[rowners[1]]-gmata->i[rowners[0]]; 142dd6ea824SBarry Smith } else { 143dd6ea824SBarry Smith /* receive numerical values from process 0*/ 144dd6ea824SBarry Smith nz = Ad->nz + Ao->nz; 145dd6ea824SBarry Smith ierr = PetscMalloc(nz*sizeof(PetscScalar),&gmataa);CHKERRQ(ierr); gmataarestore = gmataa; 146dd6ea824SBarry Smith ierr = MPI_Recv(gmataa,nz,MPIU_SCALAR,0,tag,comm,&status);CHKERRQ(ierr); 147dd6ea824SBarry Smith } 148dd6ea824SBarry Smith /* transfer numerical values into the diagonal A and off diagonal B parts of mat */ 149dd6ea824SBarry Smith ld = ((Mat_MPIAIJ*)(mat->data))->ld; 150dd6ea824SBarry Smith ad = Ad->a; 151dd6ea824SBarry Smith ao = Ao->a; 152d0f46423SBarry Smith if (mat->rmap->n) { 153dd6ea824SBarry Smith i = 0; 154dd6ea824SBarry Smith nz = ld[i]; ierr = PetscMemcpy(ao,gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr); ao += nz; gmataa += nz; 155dd6ea824SBarry Smith nz = Ad->i[i+1] - Ad->i[i]; ierr = PetscMemcpy(ad,gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr); ad += nz; gmataa += nz; 156dd6ea824SBarry Smith } 157d0f46423SBarry Smith for (i=1; i<mat->rmap->n; i++) { 158dd6ea824SBarry Smith nz = Ao->i[i] - Ao->i[i-1] - ld[i-1] + ld[i]; ierr = PetscMemcpy(ao,gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr); ao += nz; gmataa += nz; 159dd6ea824SBarry Smith nz = Ad->i[i+1] - Ad->i[i]; ierr = PetscMemcpy(ad,gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr); ad += nz; gmataa += nz; 160dd6ea824SBarry Smith } 161dd6ea824SBarry Smith i--; 162d0f46423SBarry Smith if (mat->rmap->n) { 163dd6ea824SBarry Smith nz = Ao->i[i+1] - Ao->i[i] - ld[i]; ierr = PetscMemcpy(ao,gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr); ao += nz; gmataa += nz; 164dd6ea824SBarry Smith } 165dd6ea824SBarry 
Smith if (rank) { 166dd6ea824SBarry Smith ierr = PetscFree(gmataarestore);CHKERRQ(ierr); 167dd6ea824SBarry Smith } 168dd6ea824SBarry Smith } 169dd6ea824SBarry Smith ierr = MatAssemblyBegin(mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 170dd6ea824SBarry Smith ierr = MatAssemblyEnd(mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 171dd6ea824SBarry Smith CHKMEMQ; 172dd6ea824SBarry Smith PetscFunctionReturn(0); 173dd6ea824SBarry Smith } 174dd6ea824SBarry Smith 1750f5bd95cSBarry Smith /* 1760f5bd95cSBarry Smith Local utility routine that creates a mapping from the global column 1779e25ed09SBarry Smith number to the local number in the off-diagonal part of the local 1780f5bd95cSBarry Smith storage of the matrix. When PETSC_USE_CTABLE is used this is scalable at 1790f5bd95cSBarry Smith a slightly higher hash table cost; without it it is not scalable (each processor 1800f5bd95cSBarry Smith has an order N integer array but is fast to acess. 1819e25ed09SBarry Smith */ 1824a2ae208SSatish Balay #undef __FUNCT__ 1834a2ae208SSatish Balay #define __FUNCT__ "CreateColmap_MPIAIJ_Private" 184dfbe8321SBarry Smith PetscErrorCode CreateColmap_MPIAIJ_Private(Mat mat) 1859e25ed09SBarry Smith { 18644a69424SLois Curfman McInnes Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 1876849ba73SBarry Smith PetscErrorCode ierr; 188d0f46423SBarry Smith PetscInt n = aij->B->cmap->n,i; 189dbb450caSBarry Smith 1903a40ed3dSBarry Smith PetscFunctionBegin; 191aa482453SBarry Smith #if defined (PETSC_USE_CTABLE) 192273d9f13SBarry Smith ierr = PetscTableCreate(n,&aij->colmap);CHKERRQ(ierr); 193b1fc9764SSatish Balay for (i=0; i<n; i++){ 1940f5bd95cSBarry Smith ierr = PetscTableAdd(aij->colmap,aij->garray[i]+1,i+1);CHKERRQ(ierr); 195b1fc9764SSatish Balay } 196b1fc9764SSatish Balay #else 197d0f46423SBarry Smith ierr = PetscMalloc((mat->cmap->N+1)*sizeof(PetscInt),&aij->colmap);CHKERRQ(ierr); 198d0f46423SBarry Smith ierr = PetscLogObjectMemory(mat,mat->cmap->N*sizeof(PetscInt));CHKERRQ(ierr); 199d0f46423SBarry Smith ierr = 
PetscMemzero(aij->colmap,mat->cmap->N*sizeof(PetscInt));CHKERRQ(ierr); 200905e6a2fSBarry Smith for (i=0; i<n; i++) aij->colmap[aij->garray[i]] = i+1; 201b1fc9764SSatish Balay #endif 2023a40ed3dSBarry Smith PetscFunctionReturn(0); 2039e25ed09SBarry Smith } 2049e25ed09SBarry Smith 205085a36d4SBarry Smith 2060520107fSSatish Balay #define CHUNKSIZE 15 20730770e4dSSatish Balay #define MatSetValues_SeqAIJ_A_Private(row,col,value,addv) \ 2080520107fSSatish Balay { \ 2097cd84e04SBarry Smith if (col <= lastcol1) low1 = 0; else high1 = nrow1; \ 210fd3458f5SBarry Smith lastcol1 = col;\ 211fd3458f5SBarry Smith while (high1-low1 > 5) { \ 212fd3458f5SBarry Smith t = (low1+high1)/2; \ 213fd3458f5SBarry Smith if (rp1[t] > col) high1 = t; \ 214fd3458f5SBarry Smith else low1 = t; \ 215ba4e3ef2SSatish Balay } \ 216fd3458f5SBarry Smith for (_i=low1; _i<high1; _i++) { \ 217fd3458f5SBarry Smith if (rp1[_i] > col) break; \ 218fd3458f5SBarry Smith if (rp1[_i] == col) { \ 219fd3458f5SBarry Smith if (addv == ADD_VALUES) ap1[_i] += value; \ 220fd3458f5SBarry Smith else ap1[_i] = value; \ 22130770e4dSSatish Balay goto a_noinsert; \ 2220520107fSSatish Balay } \ 2230520107fSSatish Balay } \ 224e44c0bd4SBarry Smith if (value == 0.0 && ignorezeroentries) {low1 = 0; high1 = nrow1;goto a_noinsert;} \ 225e44c0bd4SBarry Smith if (nonew == 1) {low1 = 0; high1 = nrow1; goto a_noinsert;} \ 226085a36d4SBarry Smith if (nonew == -1) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, %D) into matrix", row, col); \ 227421e10b8SBarry Smith MatSeqXAIJReallocateAIJ(A,am,1,nrow1,row,col,rmax1,aa,ai,aj,rp1,ap1,aimax,nonew,MatScalar); \ 228669a8dbcSSatish Balay N = nrow1++ - 1; a->nz++; high1++; \ 2290520107fSSatish Balay /* shift up all the later entries in this row */ \ 2300520107fSSatish Balay for (ii=N; ii>=_i; ii--) { \ 231fd3458f5SBarry Smith rp1[ii+1] = rp1[ii]; \ 232fd3458f5SBarry Smith ap1[ii+1] = ap1[ii]; \ 2330520107fSSatish Balay } \ 234fd3458f5SBarry Smith rp1[_i] = col; \ 
235fd3458f5SBarry Smith ap1[_i] = value; \ 23630770e4dSSatish Balay a_noinsert: ; \ 237fd3458f5SBarry Smith ailen[row] = nrow1; \ 2380520107fSSatish Balay } 2390a198c4cSBarry Smith 240085a36d4SBarry Smith 24130770e4dSSatish Balay #define MatSetValues_SeqAIJ_B_Private(row,col,value,addv) \ 24230770e4dSSatish Balay { \ 2437cd84e04SBarry Smith if (col <= lastcol2) low2 = 0; else high2 = nrow2; \ 244fd3458f5SBarry Smith lastcol2 = col;\ 245fd3458f5SBarry Smith while (high2-low2 > 5) { \ 246fd3458f5SBarry Smith t = (low2+high2)/2; \ 247fd3458f5SBarry Smith if (rp2[t] > col) high2 = t; \ 248fd3458f5SBarry Smith else low2 = t; \ 249ba4e3ef2SSatish Balay } \ 250fd3458f5SBarry Smith for (_i=low2; _i<high2; _i++) { \ 251fd3458f5SBarry Smith if (rp2[_i] > col) break; \ 252fd3458f5SBarry Smith if (rp2[_i] == col) { \ 253fd3458f5SBarry Smith if (addv == ADD_VALUES) ap2[_i] += value; \ 254fd3458f5SBarry Smith else ap2[_i] = value; \ 25530770e4dSSatish Balay goto b_noinsert; \ 25630770e4dSSatish Balay } \ 25730770e4dSSatish Balay } \ 258e44c0bd4SBarry Smith if (value == 0.0 && ignorezeroentries) {low2 = 0; high2 = nrow2; goto b_noinsert;} \ 259e44c0bd4SBarry Smith if (nonew == 1) {low2 = 0; high2 = nrow2; goto b_noinsert;} \ 260085a36d4SBarry Smith if (nonew == -1) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, %D) into matrix", row, col); \ 261421e10b8SBarry Smith MatSeqXAIJReallocateAIJ(B,bm,1,nrow2,row,col,rmax2,ba,bi,bj,rp2,ap2,bimax,nonew,MatScalar); \ 262669a8dbcSSatish Balay N = nrow2++ - 1; b->nz++; high2++; \ 26330770e4dSSatish Balay /* shift up all the later entries in this row */ \ 26430770e4dSSatish Balay for (ii=N; ii>=_i; ii--) { \ 265fd3458f5SBarry Smith rp2[ii+1] = rp2[ii]; \ 266fd3458f5SBarry Smith ap2[ii+1] = ap2[ii]; \ 26730770e4dSSatish Balay } \ 268fd3458f5SBarry Smith rp2[_i] = col; \ 269fd3458f5SBarry Smith ap2[_i] = value; \ 27030770e4dSSatish Balay b_noinsert: ; \ 271fd3458f5SBarry Smith bilen[row] = nrow2; \ 27230770e4dSSatish Balay } 
27330770e4dSSatish Balay 2744a2ae208SSatish Balay #undef __FUNCT__ 2752fd7e33dSBarry Smith #define __FUNCT__ "MatSetValuesRow_MPIAIJ" 2762fd7e33dSBarry Smith PetscErrorCode MatSetValuesRow_MPIAIJ(Mat A,PetscInt row,const PetscScalar v[]) 2772fd7e33dSBarry Smith { 2782fd7e33dSBarry Smith Mat_MPIAIJ *mat = (Mat_MPIAIJ*)A->data; 2792fd7e33dSBarry Smith Mat_SeqAIJ *a = (Mat_SeqAIJ*)mat->A->data,*b = (Mat_SeqAIJ*)mat->B->data; 2802fd7e33dSBarry Smith PetscErrorCode ierr; 2812fd7e33dSBarry Smith PetscInt l,*garray = mat->garray,diag; 2822fd7e33dSBarry Smith 2832fd7e33dSBarry Smith PetscFunctionBegin; 2842fd7e33dSBarry Smith /* code only works for square matrices A */ 2852fd7e33dSBarry Smith 2862fd7e33dSBarry Smith /* find size of row to the left of the diagonal part */ 2872fd7e33dSBarry Smith ierr = MatGetOwnershipRange(A,&diag,0);CHKERRQ(ierr); 2882fd7e33dSBarry Smith row = row - diag; 2892fd7e33dSBarry Smith for (l=0; l<b->i[row+1]-b->i[row]; l++) { 2902fd7e33dSBarry Smith if (garray[b->j[b->i[row]+l]] > diag) break; 2912fd7e33dSBarry Smith } 2922fd7e33dSBarry Smith ierr = PetscMemcpy(b->a+b->i[row],v,l*sizeof(PetscScalar));CHKERRQ(ierr); 2932fd7e33dSBarry Smith 2942fd7e33dSBarry Smith /* diagonal part */ 2952fd7e33dSBarry Smith ierr = PetscMemcpy(a->a+a->i[row],v+l,(a->i[row+1]-a->i[row])*sizeof(PetscScalar));CHKERRQ(ierr); 2962fd7e33dSBarry Smith 2972fd7e33dSBarry Smith /* right of diagonal part */ 2982fd7e33dSBarry Smith ierr = PetscMemcpy(b->a+b->i[row]+l,v+l+a->i[row+1]-a->i[row],(b->i[row+1]-b->i[row]-l)*sizeof(PetscScalar));CHKERRQ(ierr); 2992fd7e33dSBarry Smith PetscFunctionReturn(0); 3002fd7e33dSBarry Smith } 3012fd7e33dSBarry Smith 3022fd7e33dSBarry Smith #undef __FUNCT__ 3034a2ae208SSatish Balay #define __FUNCT__ "MatSetValues_MPIAIJ" 304b1d57f15SBarry Smith PetscErrorCode MatSetValues_MPIAIJ(Mat mat,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode addv) 3058a729477SBarry Smith { 30644a69424SLois Curfman McInnes 
Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 30787828ca2SBarry Smith PetscScalar value; 308dfbe8321SBarry Smith PetscErrorCode ierr; 309d0f46423SBarry Smith PetscInt i,j,rstart = mat->rmap->rstart,rend = mat->rmap->rend; 310d0f46423SBarry Smith PetscInt cstart = mat->cmap->rstart,cend = mat->cmap->rend,row,col; 311273d9f13SBarry Smith PetscTruth roworiented = aij->roworiented; 3128a729477SBarry Smith 3130520107fSSatish Balay /* Some Variables required in the macro */ 3144ee7247eSSatish Balay Mat A = aij->A; 3154ee7247eSSatish Balay Mat_SeqAIJ *a = (Mat_SeqAIJ*)A->data; 31657809a77SBarry Smith PetscInt *aimax = a->imax,*ai = a->i,*ailen = a->ilen,*aj = a->j; 317a77337e4SBarry Smith MatScalar *aa = a->a; 318edb03aefSBarry Smith PetscTruth ignorezeroentries = a->ignorezeroentries; 31930770e4dSSatish Balay Mat B = aij->B; 32030770e4dSSatish Balay Mat_SeqAIJ *b = (Mat_SeqAIJ*)B->data; 321d0f46423SBarry Smith PetscInt *bimax = b->imax,*bi = b->i,*bilen = b->ilen,*bj = b->j,bm = aij->B->rmap->n,am = aij->A->rmap->n; 322a77337e4SBarry Smith MatScalar *ba = b->a; 32330770e4dSSatish Balay 324fd3458f5SBarry Smith PetscInt *rp1,*rp2,ii,nrow1,nrow2,_i,rmax1,rmax2,N,low1,high1,low2,high2,t,lastcol1,lastcol2; 325fd3458f5SBarry Smith PetscInt nonew = a->nonew; 326a77337e4SBarry Smith MatScalar *ap1,*ap2; 3274ee7247eSSatish Balay 3283a40ed3dSBarry Smith PetscFunctionBegin; 32971fd2e92SBarry Smith if (v) PetscValidScalarPointer(v,6); 3308a729477SBarry Smith for (i=0; i<m; i++) { 3315ef9f2a5SBarry Smith if (im[i] < 0) continue; 3322515c552SBarry Smith #if defined(PETSC_USE_DEBUG) 333d0f46423SBarry Smith if (im[i] >= mat->rmap->N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",im[i],mat->rmap->N-1); 3340a198c4cSBarry Smith #endif 3354b0e389bSBarry Smith if (im[i] >= rstart && im[i] < rend) { 3364b0e389bSBarry Smith row = im[i] - rstart; 337fd3458f5SBarry Smith lastcol1 = -1; 338fd3458f5SBarry Smith rp1 = aj + ai[row]; 339fd3458f5SBarry Smith ap1 = aa + ai[row]; 
340fd3458f5SBarry Smith rmax1 = aimax[row]; 341fd3458f5SBarry Smith nrow1 = ailen[row]; 342fd3458f5SBarry Smith low1 = 0; 343fd3458f5SBarry Smith high1 = nrow1; 344fd3458f5SBarry Smith lastcol2 = -1; 345fd3458f5SBarry Smith rp2 = bj + bi[row]; 346d498b1e9SBarry Smith ap2 = ba + bi[row]; 347fd3458f5SBarry Smith rmax2 = bimax[row]; 348d498b1e9SBarry Smith nrow2 = bilen[row]; 349fd3458f5SBarry Smith low2 = 0; 350fd3458f5SBarry Smith high2 = nrow2; 351fd3458f5SBarry Smith 3521eb62cbbSBarry Smith for (j=0; j<n; j++) { 35316371a99SBarry Smith if (v) {if (roworiented) value = v[i*n+j]; else value = v[i+j*m];} else value = 0.0; 354abc0a331SBarry Smith if (ignorezeroentries && value == 0.0 && (addv == ADD_VALUES)) continue; 355fd3458f5SBarry Smith if (in[j] >= cstart && in[j] < cend){ 356fd3458f5SBarry Smith col = in[j] - cstart; 35730770e4dSSatish Balay MatSetValues_SeqAIJ_A_Private(row,col,value,addv); 358273d9f13SBarry Smith } else if (in[j] < 0) continue; 3592515c552SBarry Smith #if defined(PETSC_USE_DEBUG) 360d0f46423SBarry Smith else if (in[j] >= mat->cmap->N) {SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",in[j],mat->cmap->N-1);} 3610a198c4cSBarry Smith #endif 3621eb62cbbSBarry Smith else { 363227d817aSBarry Smith if (mat->was_assembled) { 364905e6a2fSBarry Smith if (!aij->colmap) { 365905e6a2fSBarry Smith ierr = CreateColmap_MPIAIJ_Private(mat);CHKERRQ(ierr); 366905e6a2fSBarry Smith } 367aa482453SBarry Smith #if defined (PETSC_USE_CTABLE) 3680f5bd95cSBarry Smith ierr = PetscTableFind(aij->colmap,in[j]+1,&col);CHKERRQ(ierr); 369fa46199cSSatish Balay col--; 370b1fc9764SSatish Balay #else 371905e6a2fSBarry Smith col = aij->colmap[in[j]] - 1; 372b1fc9764SSatish Balay #endif 373ec8511deSBarry Smith if (col < 0 && !((Mat_SeqAIJ*)(aij->A->data))->nonew) { 3742493cbb0SBarry Smith ierr = DisAssemble_MPIAIJ(mat);CHKERRQ(ierr); 3754b0e389bSBarry Smith col = in[j]; 3769bf004c3SSatish Balay /* Reinitialize the variables required by 
MatSetValues_SeqAIJ_B_Private() */ 377f9508a3cSSatish Balay B = aij->B; 378f9508a3cSSatish Balay b = (Mat_SeqAIJ*)B->data; 379e44c0bd4SBarry Smith bimax = b->imax; bi = b->i; bilen = b->ilen; bj = b->j; ba = b->a; 380d498b1e9SBarry Smith rp2 = bj + bi[row]; 381d498b1e9SBarry Smith ap2 = ba + bi[row]; 382d498b1e9SBarry Smith rmax2 = bimax[row]; 383d498b1e9SBarry Smith nrow2 = bilen[row]; 384d498b1e9SBarry Smith low2 = 0; 385d498b1e9SBarry Smith high2 = nrow2; 386d0f46423SBarry Smith bm = aij->B->rmap->n; 387f9508a3cSSatish Balay ba = b->a; 388d6dfbf8fSBarry Smith } 389c48de900SBarry Smith } else col = in[j]; 39030770e4dSSatish Balay MatSetValues_SeqAIJ_B_Private(row,col,value,addv); 3911eb62cbbSBarry Smith } 3921eb62cbbSBarry Smith } 3935ef9f2a5SBarry Smith } else { 39490f02eecSBarry Smith if (!aij->donotstash) { 395d36fbae8SSatish Balay if (roworiented) { 3963b024144SHong Zhang ierr = MatStashValuesRow_Private(&mat->stash,im[i],n,in,v+i*n,(PetscTruth)(ignorezeroentries && (addv == ADD_VALUES)));CHKERRQ(ierr); 397d36fbae8SSatish Balay } else { 3983b024144SHong Zhang ierr = MatStashValuesCol_Private(&mat->stash,im[i],n,in,v+i,m,(PetscTruth)(ignorezeroentries && (addv == ADD_VALUES)));CHKERRQ(ierr); 3994b0e389bSBarry Smith } 4001eb62cbbSBarry Smith } 4018a729477SBarry Smith } 40290f02eecSBarry Smith } 4033a40ed3dSBarry Smith PetscFunctionReturn(0); 4048a729477SBarry Smith } 4058a729477SBarry Smith 4064a2ae208SSatish Balay #undef __FUNCT__ 4074a2ae208SSatish Balay #define __FUNCT__ "MatGetValues_MPIAIJ" 408b1d57f15SBarry Smith PetscErrorCode MatGetValues_MPIAIJ(Mat mat,PetscInt m,const PetscInt idxm[],PetscInt n,const PetscInt idxn[],PetscScalar v[]) 409b49de8d1SLois Curfman McInnes { 410b49de8d1SLois Curfman McInnes Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 411dfbe8321SBarry Smith PetscErrorCode ierr; 412d0f46423SBarry Smith PetscInt i,j,rstart = mat->rmap->rstart,rend = mat->rmap->rend; 413d0f46423SBarry Smith PetscInt cstart = mat->cmap->rstart,cend = 
mat->cmap->rend,row,col; 414b49de8d1SLois Curfman McInnes 4153a40ed3dSBarry Smith PetscFunctionBegin; 416b49de8d1SLois Curfman McInnes for (i=0; i<m; i++) { 41797e567efSBarry Smith if (idxm[i] < 0) continue; /* SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Negative row: %D",idxm[i]);*/ 418d0f46423SBarry Smith if (idxm[i] >= mat->rmap->N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",idxm[i],mat->rmap->N-1); 419b49de8d1SLois Curfman McInnes if (idxm[i] >= rstart && idxm[i] < rend) { 420b49de8d1SLois Curfman McInnes row = idxm[i] - rstart; 421b49de8d1SLois Curfman McInnes for (j=0; j<n; j++) { 42297e567efSBarry Smith if (idxn[j] < 0) continue; /* SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Negative column: %D",idxn[j]); */ 423d0f46423SBarry Smith if (idxn[j] >= mat->cmap->N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",idxn[j],mat->cmap->N-1); 424b49de8d1SLois Curfman McInnes if (idxn[j] >= cstart && idxn[j] < cend){ 425b49de8d1SLois Curfman McInnes col = idxn[j] - cstart; 426b49de8d1SLois Curfman McInnes ierr = MatGetValues(aij->A,1,&row,1,&col,v+i*n+j);CHKERRQ(ierr); 427fa852ad4SSatish Balay } else { 428905e6a2fSBarry Smith if (!aij->colmap) { 429905e6a2fSBarry Smith ierr = CreateColmap_MPIAIJ_Private(mat);CHKERRQ(ierr); 430905e6a2fSBarry Smith } 431aa482453SBarry Smith #if defined (PETSC_USE_CTABLE) 4320f5bd95cSBarry Smith ierr = PetscTableFind(aij->colmap,idxn[j]+1,&col);CHKERRQ(ierr); 433fa46199cSSatish Balay col --; 434b1fc9764SSatish Balay #else 435905e6a2fSBarry Smith col = aij->colmap[idxn[j]] - 1; 436b1fc9764SSatish Balay #endif 437e60e1c95SSatish Balay if ((col < 0) || (aij->garray[col] != idxn[j])) *(v+i*n+j) = 0.0; 438d9d09a02SSatish Balay else { 439b49de8d1SLois Curfman McInnes ierr = MatGetValues(aij->B,1,&row,1,&col,v+i*n+j);CHKERRQ(ierr); 440b49de8d1SLois Curfman McInnes } 441b49de8d1SLois Curfman McInnes } 442b49de8d1SLois Curfman McInnes } 443a8c6a408SBarry Smith } else { 44429bbc08cSBarry Smith SETERRQ(PETSC_ERR_SUP,"Only 
local values currently supported"); 445b49de8d1SLois Curfman McInnes } 446b49de8d1SLois Curfman McInnes } 4473a40ed3dSBarry Smith PetscFunctionReturn(0); 448b49de8d1SLois Curfman McInnes } 449bc5ccf88SSatish Balay 450bd0c2dcbSBarry Smith extern PetscErrorCode MatMultDiagonalBlock_MPIAIJ(Mat,Vec,Vec); 451bd0c2dcbSBarry Smith 4524a2ae208SSatish Balay #undef __FUNCT__ 4534a2ae208SSatish Balay #define __FUNCT__ "MatAssemblyBegin_MPIAIJ" 454dfbe8321SBarry Smith PetscErrorCode MatAssemblyBegin_MPIAIJ(Mat mat,MatAssemblyType mode) 455bc5ccf88SSatish Balay { 456bc5ccf88SSatish Balay Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 457dfbe8321SBarry Smith PetscErrorCode ierr; 458b1d57f15SBarry Smith PetscInt nstash,reallocs; 459bc5ccf88SSatish Balay InsertMode addv; 460bc5ccf88SSatish Balay 461bc5ccf88SSatish Balay PetscFunctionBegin; 462bc5ccf88SSatish Balay if (aij->donotstash) { 463bc5ccf88SSatish Balay PetscFunctionReturn(0); 464bc5ccf88SSatish Balay } 465bc5ccf88SSatish Balay 466bc5ccf88SSatish Balay /* make sure all processors are either in INSERTMODE or ADDMODE */ 4677adad957SLisandro Dalcin ierr = MPI_Allreduce(&mat->insertmode,&addv,1,MPI_INT,MPI_BOR,((PetscObject)mat)->comm);CHKERRQ(ierr); 468bc5ccf88SSatish Balay if (addv == (ADD_VALUES|INSERT_VALUES)) { 46929bbc08cSBarry Smith SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Some processors inserted others added"); 470bc5ccf88SSatish Balay } 471bc5ccf88SSatish Balay mat->insertmode = addv; /* in case this processor had no cache */ 472bc5ccf88SSatish Balay 473d0f46423SBarry Smith ierr = MatStashScatterBegin_Private(mat,&mat->stash,mat->rmap->range);CHKERRQ(ierr); 4748798bf22SSatish Balay ierr = MatStashGetInfo_Private(&mat->stash,&nstash,&reallocs);CHKERRQ(ierr); 475ae15b995SBarry Smith ierr = PetscInfo2(aij->A,"Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr); 476bc5ccf88SSatish Balay PetscFunctionReturn(0); 477bc5ccf88SSatish Balay } 478bc5ccf88SSatish Balay 4794a2ae208SSatish Balay #undef __FUNCT__ 
4804a2ae208SSatish Balay #define __FUNCT__ "MatAssemblyEnd_MPIAIJ" 481dfbe8321SBarry Smith PetscErrorCode MatAssemblyEnd_MPIAIJ(Mat mat,MatAssemblyType mode) 482bc5ccf88SSatish Balay { 483bc5ccf88SSatish Balay Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 48491c97fd4SSatish Balay Mat_SeqAIJ *a=(Mat_SeqAIJ *)aij->A->data; 4856849ba73SBarry Smith PetscErrorCode ierr; 486b1d57f15SBarry Smith PetscMPIInt n; 487b1d57f15SBarry Smith PetscInt i,j,rstart,ncols,flg; 488e44c0bd4SBarry Smith PetscInt *row,*col; 489e44c0bd4SBarry Smith PetscTruth other_disassembled; 49087828ca2SBarry Smith PetscScalar *val; 491bc5ccf88SSatish Balay InsertMode addv = mat->insertmode; 492bc5ccf88SSatish Balay 49391c97fd4SSatish Balay /* do not use 'b = (Mat_SeqAIJ *)aij->B->data' as B can be reset in disassembly */ 494bc5ccf88SSatish Balay PetscFunctionBegin; 495bc5ccf88SSatish Balay if (!aij->donotstash) { 496a2d1c673SSatish Balay while (1) { 4978798bf22SSatish Balay ierr = MatStashScatterGetMesg_Private(&mat->stash,&n,&row,&col,&val,&flg);CHKERRQ(ierr); 498a2d1c673SSatish Balay if (!flg) break; 499a2d1c673SSatish Balay 500bc5ccf88SSatish Balay for (i=0; i<n;) { 501bc5ccf88SSatish Balay /* Now identify the consecutive vals belonging to the same row */ 502bc5ccf88SSatish Balay for (j=i,rstart=row[j]; j<n; j++) { if (row[j] != rstart) break; } 503bc5ccf88SSatish Balay if (j < n) ncols = j-i; 504bc5ccf88SSatish Balay else ncols = n-i; 505bc5ccf88SSatish Balay /* Now assemble all these values with a single function call */ 506bc5ccf88SSatish Balay ierr = MatSetValues_MPIAIJ(mat,1,row+i,ncols,col+i,val+i,addv);CHKERRQ(ierr); 507bc5ccf88SSatish Balay i = j; 508bc5ccf88SSatish Balay } 509bc5ccf88SSatish Balay } 5108798bf22SSatish Balay ierr = MatStashScatterEnd_Private(&mat->stash);CHKERRQ(ierr); 511bc5ccf88SSatish Balay } 5122f53aa61SHong Zhang a->compressedrow.use = PETSC_FALSE; 513bc5ccf88SSatish Balay ierr = MatAssemblyBegin(aij->A,mode);CHKERRQ(ierr); 514bc5ccf88SSatish Balay ierr = 
MatAssemblyEnd(aij->A,mode);CHKERRQ(ierr); 515bc5ccf88SSatish Balay 516bc5ccf88SSatish Balay /* determine if any processor has disassembled, if so we must 517bc5ccf88SSatish Balay also disassemble ourselfs, in order that we may reassemble. */ 518bc5ccf88SSatish Balay /* 519bc5ccf88SSatish Balay if nonzero structure of submatrix B cannot change then we know that 520bc5ccf88SSatish Balay no processor disassembled thus we can skip this stuff 521bc5ccf88SSatish Balay */ 522bc5ccf88SSatish Balay if (!((Mat_SeqAIJ*)aij->B->data)->nonew) { 5237adad957SLisandro Dalcin ierr = MPI_Allreduce(&mat->was_assembled,&other_disassembled,1,MPI_INT,MPI_PROD,((PetscObject)mat)->comm);CHKERRQ(ierr); 524bc5ccf88SSatish Balay if (mat->was_assembled && !other_disassembled) { 525bc5ccf88SSatish Balay ierr = DisAssemble_MPIAIJ(mat);CHKERRQ(ierr); 526ad59fb31SSatish Balay } 527ad59fb31SSatish Balay } 528bc5ccf88SSatish Balay if (!mat->was_assembled && mode == MAT_FINAL_ASSEMBLY) { 529bc5ccf88SSatish Balay ierr = MatSetUpMultiply_MPIAIJ(mat);CHKERRQ(ierr); 530bc5ccf88SSatish Balay } 5314e0d8c25SBarry Smith ierr = MatSetOption(aij->B,MAT_USE_INODES,PETSC_FALSE);CHKERRQ(ierr); 53291c97fd4SSatish Balay ((Mat_SeqAIJ *)aij->B->data)->compressedrow.use = PETSC_TRUE; /* b->compressedrow.use */ 533bc5ccf88SSatish Balay ierr = MatAssemblyBegin(aij->B,mode);CHKERRQ(ierr); 534bc5ccf88SSatish Balay ierr = MatAssemblyEnd(aij->B,mode);CHKERRQ(ierr); 535bc5ccf88SSatish Balay 536606d414cSSatish Balay ierr = PetscFree(aij->rowvalues);CHKERRQ(ierr); 537606d414cSSatish Balay aij->rowvalues = 0; 538a30b2313SHong Zhang 539a30b2313SHong Zhang /* used by MatAXPY() */ 54091c97fd4SSatish Balay a->xtoy = 0; ((Mat_SeqAIJ *)aij->B->data)->xtoy = 0; /* b->xtoy = 0 */ 54191c97fd4SSatish Balay a->XtoY = 0; ((Mat_SeqAIJ *)aij->B->data)->XtoY = 0; /* b->XtoY = 0 */ 542a30b2313SHong Zhang 543a7420bb7SBarry Smith if (aij->diag) {ierr = VecDestroy(aij->diag);CHKERRQ(ierr);aij->diag = 0;} 544bd0c2dcbSBarry Smith if 
(a->inode.size) mat->ops->multdiagonalblock = MatMultDiagonalBlock_MPIAIJ; 545bc5ccf88SSatish Balay PetscFunctionReturn(0); 546bc5ccf88SSatish Balay } 547bc5ccf88SSatish Balay 5484a2ae208SSatish Balay #undef __FUNCT__ 5494a2ae208SSatish Balay #define __FUNCT__ "MatZeroEntries_MPIAIJ" 550dfbe8321SBarry Smith PetscErrorCode MatZeroEntries_MPIAIJ(Mat A) 5511eb62cbbSBarry Smith { 55244a69424SLois Curfman McInnes Mat_MPIAIJ *l = (Mat_MPIAIJ*)A->data; 553dfbe8321SBarry Smith PetscErrorCode ierr; 5543a40ed3dSBarry Smith 5553a40ed3dSBarry Smith PetscFunctionBegin; 55678b31e54SBarry Smith ierr = MatZeroEntries(l->A);CHKERRQ(ierr); 55778b31e54SBarry Smith ierr = MatZeroEntries(l->B);CHKERRQ(ierr); 5583a40ed3dSBarry Smith PetscFunctionReturn(0); 5591eb62cbbSBarry Smith } 5601eb62cbbSBarry Smith 5614a2ae208SSatish Balay #undef __FUNCT__ 5624a2ae208SSatish Balay #define __FUNCT__ "MatZeroRows_MPIAIJ" 563f4df32b1SMatthew Knepley PetscErrorCode MatZeroRows_MPIAIJ(Mat A,PetscInt N,const PetscInt rows[],PetscScalar diag) 5641eb62cbbSBarry Smith { 56544a69424SLois Curfman McInnes Mat_MPIAIJ *l = (Mat_MPIAIJ*)A->data; 5666849ba73SBarry Smith PetscErrorCode ierr; 5677adad957SLisandro Dalcin PetscMPIInt size = l->size,imdex,n,rank = l->rank,tag = ((PetscObject)A)->tag,lastidx = -1; 568d0f46423SBarry Smith PetscInt i,*owners = A->rmap->range; 569b1d57f15SBarry Smith PetscInt *nprocs,j,idx,nsends,row; 570b1d57f15SBarry Smith PetscInt nmax,*svalues,*starts,*owner,nrecvs; 571b1d57f15SBarry Smith PetscInt *rvalues,count,base,slen,*source; 572d0f46423SBarry Smith PetscInt *lens,*lrows,*values,rstart=A->rmap->rstart; 5737adad957SLisandro Dalcin MPI_Comm comm = ((PetscObject)A)->comm; 5741eb62cbbSBarry Smith MPI_Request *send_waits,*recv_waits; 5751eb62cbbSBarry Smith MPI_Status recv_status,*send_status; 5766543fbbaSBarry Smith #if defined(PETSC_DEBUG) 5776543fbbaSBarry Smith PetscTruth found = PETSC_FALSE; 5786543fbbaSBarry Smith #endif 5791eb62cbbSBarry Smith 5803a40ed3dSBarry Smith 
PetscFunctionBegin; 5811eb62cbbSBarry Smith /* first count number of contributors to each processor */ 582b1d57f15SBarry Smith ierr = PetscMalloc(2*size*sizeof(PetscInt),&nprocs);CHKERRQ(ierr); 583b1d57f15SBarry Smith ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr); 584b1d57f15SBarry Smith ierr = PetscMalloc((N+1)*sizeof(PetscInt),&owner);CHKERRQ(ierr); /* see note*/ 5856543fbbaSBarry Smith j = 0; 5861eb62cbbSBarry Smith for (i=0; i<N; i++) { 5876543fbbaSBarry Smith if (lastidx > (idx = rows[i])) j = 0; 5886543fbbaSBarry Smith lastidx = idx; 5896543fbbaSBarry Smith for (; j<size; j++) { 5901eb62cbbSBarry Smith if (idx >= owners[j] && idx < owners[j+1]) { 5916543fbbaSBarry Smith nprocs[2*j]++; 5926543fbbaSBarry Smith nprocs[2*j+1] = 1; 5936543fbbaSBarry Smith owner[i] = j; 5946543fbbaSBarry Smith #if defined(PETSC_DEBUG) 5956543fbbaSBarry Smith found = PETSC_TRUE; 5966543fbbaSBarry Smith #endif 5976543fbbaSBarry Smith break; 5981eb62cbbSBarry Smith } 5991eb62cbbSBarry Smith } 6006543fbbaSBarry Smith #if defined(PETSC_DEBUG) 60129bbc08cSBarry Smith if (!found) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Index out of range"); 6026543fbbaSBarry Smith found = PETSC_FALSE; 6036543fbbaSBarry Smith #endif 6041eb62cbbSBarry Smith } 605c1dc657dSBarry Smith nsends = 0; for (i=0; i<size; i++) { nsends += nprocs[2*i+1];} 6061eb62cbbSBarry Smith 6071eb62cbbSBarry Smith /* inform other processors of number of messages and max length*/ 608c1dc657dSBarry Smith ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr); 6091eb62cbbSBarry Smith 6101eb62cbbSBarry Smith /* post receives: */ 611b1d57f15SBarry Smith ierr = PetscMalloc((nrecvs+1)*(nmax+1)*sizeof(PetscInt),&rvalues);CHKERRQ(ierr); 612b0a32e0cSBarry Smith ierr = PetscMalloc((nrecvs+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr); 6131eb62cbbSBarry Smith for (i=0; i<nrecvs; i++) { 614b1d57f15SBarry Smith ierr = MPI_Irecv(rvalues+nmax*i,nmax,MPIU_INT,MPI_ANY_SOURCE,tag,comm,recv_waits+i);CHKERRQ(ierr); 
6151eb62cbbSBarry Smith } 6161eb62cbbSBarry Smith 6171eb62cbbSBarry Smith /* do sends: 6181eb62cbbSBarry Smith 1) starts[i] gives the starting index in svalues for stuff going to 6191eb62cbbSBarry Smith the ith processor 6201eb62cbbSBarry Smith */ 621b1d57f15SBarry Smith ierr = PetscMalloc((N+1)*sizeof(PetscInt),&svalues);CHKERRQ(ierr); 622b0a32e0cSBarry Smith ierr = PetscMalloc((nsends+1)*sizeof(MPI_Request),&send_waits);CHKERRQ(ierr); 623b1d57f15SBarry Smith ierr = PetscMalloc((size+1)*sizeof(PetscInt),&starts);CHKERRQ(ierr); 6241eb62cbbSBarry Smith starts[0] = 0; 625c1dc657dSBarry Smith for (i=1; i<size; i++) { starts[i] = starts[i-1] + nprocs[2*i-2];} 6261eb62cbbSBarry Smith for (i=0; i<N; i++) { 6271eb62cbbSBarry Smith svalues[starts[owner[i]]++] = rows[i]; 6281eb62cbbSBarry Smith } 6291eb62cbbSBarry Smith 6301eb62cbbSBarry Smith starts[0] = 0; 631c1dc657dSBarry Smith for (i=1; i<size+1; i++) { starts[i] = starts[i-1] + nprocs[2*i-2];} 6321eb62cbbSBarry Smith count = 0; 63317699dbbSLois Curfman McInnes for (i=0; i<size; i++) { 634c1dc657dSBarry Smith if (nprocs[2*i+1]) { 635b1d57f15SBarry Smith ierr = MPI_Isend(svalues+starts[i],nprocs[2*i],MPIU_INT,i,tag,comm,send_waits+count++);CHKERRQ(ierr); 6361eb62cbbSBarry Smith } 6371eb62cbbSBarry Smith } 638606d414cSSatish Balay ierr = PetscFree(starts);CHKERRQ(ierr); 6391eb62cbbSBarry Smith 64017699dbbSLois Curfman McInnes base = owners[rank]; 6411eb62cbbSBarry Smith 6421eb62cbbSBarry Smith /* wait on receives */ 643b1d57f15SBarry Smith ierr = PetscMalloc(2*(nrecvs+1)*sizeof(PetscInt),&lens);CHKERRQ(ierr); 6441eb62cbbSBarry Smith source = lens + nrecvs; 6451eb62cbbSBarry Smith count = nrecvs; slen = 0; 6461eb62cbbSBarry Smith while (count) { 647ca161407SBarry Smith ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr); 6481eb62cbbSBarry Smith /* unpack receives into our local space */ 649b1d57f15SBarry Smith ierr = MPI_Get_count(&recv_status,MPIU_INT,&n);CHKERRQ(ierr); 650d6dfbf8fSBarry Smith 
source[imdex] = recv_status.MPI_SOURCE; 651d6dfbf8fSBarry Smith lens[imdex] = n; 6521eb62cbbSBarry Smith slen += n; 6531eb62cbbSBarry Smith count--; 6541eb62cbbSBarry Smith } 655606d414cSSatish Balay ierr = PetscFree(recv_waits);CHKERRQ(ierr); 6561eb62cbbSBarry Smith 6571eb62cbbSBarry Smith /* move the data into the send scatter */ 658b1d57f15SBarry Smith ierr = PetscMalloc((slen+1)*sizeof(PetscInt),&lrows);CHKERRQ(ierr); 6591eb62cbbSBarry Smith count = 0; 6601eb62cbbSBarry Smith for (i=0; i<nrecvs; i++) { 6611eb62cbbSBarry Smith values = rvalues + i*nmax; 6621eb62cbbSBarry Smith for (j=0; j<lens[i]; j++) { 6631eb62cbbSBarry Smith lrows[count++] = values[j] - base; 6641eb62cbbSBarry Smith } 6651eb62cbbSBarry Smith } 666606d414cSSatish Balay ierr = PetscFree(rvalues);CHKERRQ(ierr); 667606d414cSSatish Balay ierr = PetscFree(lens);CHKERRQ(ierr); 668606d414cSSatish Balay ierr = PetscFree(owner);CHKERRQ(ierr); 669606d414cSSatish Balay ierr = PetscFree(nprocs);CHKERRQ(ierr); 6701eb62cbbSBarry Smith 6711eb62cbbSBarry Smith /* actually zap the local rows */ 6726eb55b6aSBarry Smith /* 6736eb55b6aSBarry Smith Zero the required rows. If the "diagonal block" of the matrix 674a8c7a070SBarry Smith is square and the user wishes to set the diagonal we use separate 6756eb55b6aSBarry Smith code so that MatSetValues() is not called for each diagonal allocating 6766eb55b6aSBarry Smith new memory, thus calling lots of mallocs and slowing things down. 
6776eb55b6aSBarry Smith 6786eb55b6aSBarry Smith */ 679e2d53e46SBarry Smith /* must zero l->B before l->A because the (diag) case below may put values into l->B*/ 680f4df32b1SMatthew Knepley ierr = MatZeroRows(l->B,slen,lrows,0.0);CHKERRQ(ierr); 681d0f46423SBarry Smith if ((diag != 0.0) && (l->A->rmap->N == l->A->cmap->N)) { 682f4df32b1SMatthew Knepley ierr = MatZeroRows(l->A,slen,lrows,diag);CHKERRQ(ierr); 683f4df32b1SMatthew Knepley } else if (diag != 0.0) { 684f4df32b1SMatthew Knepley ierr = MatZeroRows(l->A,slen,lrows,0.0);CHKERRQ(ierr); 685fa46199cSSatish Balay if (((Mat_SeqAIJ*)l->A->data)->nonew) { 68629bbc08cSBarry Smith SETERRQ(PETSC_ERR_SUP,"MatZeroRows() on rectangular matrices cannot be used with the Mat options\n\ 687512a5fc5SBarry Smith MAT_NEW_NONZERO_LOCATIONS,MAT_NEW_NONZERO_LOCATION_ERR,MAT_NEW_NONZERO_ALLOCATION_ERR"); 6886525c446SSatish Balay } 689e2d53e46SBarry Smith for (i = 0; i < slen; i++) { 690e2d53e46SBarry Smith row = lrows[i] + rstart; 691f4df32b1SMatthew Knepley ierr = MatSetValues(A,1,&row,1,&row,&diag,INSERT_VALUES);CHKERRQ(ierr); 692e2d53e46SBarry Smith } 693e2d53e46SBarry Smith ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 694e2d53e46SBarry Smith ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 6956eb55b6aSBarry Smith } else { 696f4df32b1SMatthew Knepley ierr = MatZeroRows(l->A,slen,lrows,0.0);CHKERRQ(ierr); 6976eb55b6aSBarry Smith } 698606d414cSSatish Balay ierr = PetscFree(lrows);CHKERRQ(ierr); 69972dacd9aSBarry Smith 7001eb62cbbSBarry Smith /* wait on sends */ 7011eb62cbbSBarry Smith if (nsends) { 702b0a32e0cSBarry Smith ierr = PetscMalloc(nsends*sizeof(MPI_Status),&send_status);CHKERRQ(ierr); 703ca161407SBarry Smith ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr); 704606d414cSSatish Balay ierr = PetscFree(send_status);CHKERRQ(ierr); 7051eb62cbbSBarry Smith } 706606d414cSSatish Balay ierr = PetscFree(send_waits);CHKERRQ(ierr); 707606d414cSSatish Balay ierr = 
PetscFree(svalues);CHKERRQ(ierr); 7081eb62cbbSBarry Smith 7093a40ed3dSBarry Smith PetscFunctionReturn(0); 7101eb62cbbSBarry Smith } 7111eb62cbbSBarry Smith 7124a2ae208SSatish Balay #undef __FUNCT__ 7134a2ae208SSatish Balay #define __FUNCT__ "MatMult_MPIAIJ" 714dfbe8321SBarry Smith PetscErrorCode MatMult_MPIAIJ(Mat A,Vec xx,Vec yy) 7151eb62cbbSBarry Smith { 716416022c9SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 717dfbe8321SBarry Smith PetscErrorCode ierr; 718b1d57f15SBarry Smith PetscInt nt; 719416022c9SBarry Smith 7203a40ed3dSBarry Smith PetscFunctionBegin; 721a2ce50c7SBarry Smith ierr = VecGetLocalSize(xx,&nt);CHKERRQ(ierr); 722d0f46423SBarry Smith if (nt != A->cmap->n) { 723d0f46423SBarry Smith SETERRQ2(PETSC_ERR_ARG_SIZ,"Incompatible partition of A (%D) and xx (%D)",A->cmap->n,nt); 724fbd6ef76SBarry Smith } 725ca9f406cSSatish Balay ierr = VecScatterBegin(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 726f830108cSBarry Smith ierr = (*a->A->ops->mult)(a->A,xx,yy);CHKERRQ(ierr); 727ca9f406cSSatish Balay ierr = VecScatterEnd(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 728f830108cSBarry Smith ierr = (*a->B->ops->multadd)(a->B,a->lvec,yy,yy);CHKERRQ(ierr); 7293a40ed3dSBarry Smith PetscFunctionReturn(0); 7301eb62cbbSBarry Smith } 7311eb62cbbSBarry Smith 7324a2ae208SSatish Balay #undef __FUNCT__ 733bd0c2dcbSBarry Smith #define __FUNCT__ "MatMultDiagonalBlock_MPIAIJ" 734bd0c2dcbSBarry Smith PetscErrorCode MatMultDiagonalBlock_MPIAIJ(Mat A,Vec bb,Vec xx) 735bd0c2dcbSBarry Smith { 736bd0c2dcbSBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 737bd0c2dcbSBarry Smith PetscErrorCode ierr; 738bd0c2dcbSBarry Smith 739bd0c2dcbSBarry Smith PetscFunctionBegin; 740bd0c2dcbSBarry Smith ierr = MatMultDiagonalBlock(a->A,bb,xx);CHKERRQ(ierr); 741bd0c2dcbSBarry Smith PetscFunctionReturn(0); 742bd0c2dcbSBarry Smith } 743bd0c2dcbSBarry Smith 744bd0c2dcbSBarry Smith #undef __FUNCT__ 7454a2ae208SSatish Balay #define __FUNCT__ 
"MatMultAdd_MPIAIJ" 746dfbe8321SBarry Smith PetscErrorCode MatMultAdd_MPIAIJ(Mat A,Vec xx,Vec yy,Vec zz) 747da3a660dSBarry Smith { 748416022c9SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 749dfbe8321SBarry Smith PetscErrorCode ierr; 7503a40ed3dSBarry Smith 7513a40ed3dSBarry Smith PetscFunctionBegin; 752ca9f406cSSatish Balay ierr = VecScatterBegin(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 753f830108cSBarry Smith ierr = (*a->A->ops->multadd)(a->A,xx,yy,zz);CHKERRQ(ierr); 754ca9f406cSSatish Balay ierr = VecScatterEnd(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 755f830108cSBarry Smith ierr = (*a->B->ops->multadd)(a->B,a->lvec,zz,zz);CHKERRQ(ierr); 7563a40ed3dSBarry Smith PetscFunctionReturn(0); 757da3a660dSBarry Smith } 758da3a660dSBarry Smith 7594a2ae208SSatish Balay #undef __FUNCT__ 7604a2ae208SSatish Balay #define __FUNCT__ "MatMultTranspose_MPIAIJ" 761dfbe8321SBarry Smith PetscErrorCode MatMultTranspose_MPIAIJ(Mat A,Vec xx,Vec yy) 762da3a660dSBarry Smith { 763416022c9SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 764dfbe8321SBarry Smith PetscErrorCode ierr; 765a5ff213dSBarry Smith PetscTruth merged; 766da3a660dSBarry Smith 7673a40ed3dSBarry Smith PetscFunctionBegin; 768a5ff213dSBarry Smith ierr = VecScatterGetMerged(a->Mvctx,&merged);CHKERRQ(ierr); 769da3a660dSBarry Smith /* do nondiagonal part */ 7707c922b88SBarry Smith ierr = (*a->B->ops->multtranspose)(a->B,xx,a->lvec);CHKERRQ(ierr); 771a5ff213dSBarry Smith if (!merged) { 772da3a660dSBarry Smith /* send it on its way */ 773ca9f406cSSatish Balay ierr = VecScatterBegin(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 774da3a660dSBarry Smith /* do local part */ 7757c922b88SBarry Smith ierr = (*a->A->ops->multtranspose)(a->A,xx,yy);CHKERRQ(ierr); 776da3a660dSBarry Smith /* receive remote parts: note this assumes the values are not actually */ 777a5ff213dSBarry Smith /* added in yy until the next line, */ 778ca9f406cSSatish Balay ierr = 
VecScatterEnd(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 779a5ff213dSBarry Smith } else { 780a5ff213dSBarry Smith /* do local part */ 781a5ff213dSBarry Smith ierr = (*a->A->ops->multtranspose)(a->A,xx,yy);CHKERRQ(ierr); 782a5ff213dSBarry Smith /* send it on its way */ 783ca9f406cSSatish Balay ierr = VecScatterBegin(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 784a5ff213dSBarry Smith /* values actually were received in the Begin() but we need to call this nop */ 785ca9f406cSSatish Balay ierr = VecScatterEnd(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 786a5ff213dSBarry Smith } 7873a40ed3dSBarry Smith PetscFunctionReturn(0); 788da3a660dSBarry Smith } 789da3a660dSBarry Smith 790cd0d46ebSvictorle EXTERN_C_BEGIN 791cd0d46ebSvictorle #undef __FUNCT__ 7925fbd3699SBarry Smith #define __FUNCT__ "MatIsTranspose_MPIAIJ" 79313c77408SMatthew Knepley PetscErrorCode PETSCMAT_DLLEXPORT MatIsTranspose_MPIAIJ(Mat Amat,Mat Bmat,PetscReal tol,PetscTruth *f) 794cd0d46ebSvictorle { 7954f423910Svictorle MPI_Comm comm; 796cd0d46ebSvictorle Mat_MPIAIJ *Aij = (Mat_MPIAIJ *) Amat->data, *Bij; 79766501d38Svictorle Mat Adia = Aij->A, Bdia, Aoff,Boff,*Aoffs,*Boffs; 798cd0d46ebSvictorle IS Me,Notme; 7996849ba73SBarry Smith PetscErrorCode ierr; 800b1d57f15SBarry Smith PetscInt M,N,first,last,*notme,i; 801b1d57f15SBarry Smith PetscMPIInt size; 802cd0d46ebSvictorle 803cd0d46ebSvictorle PetscFunctionBegin; 80442e5f5b4Svictorle 80542e5f5b4Svictorle /* Easy test: symmetric diagonal block */ 80666501d38Svictorle Bij = (Mat_MPIAIJ *) Bmat->data; Bdia = Bij->A; 8075485867bSBarry Smith ierr = MatIsTranspose(Adia,Bdia,tol,f);CHKERRQ(ierr); 808cd0d46ebSvictorle if (!*f) PetscFunctionReturn(0); 8094f423910Svictorle ierr = PetscObjectGetComm((PetscObject)Amat,&comm);CHKERRQ(ierr); 810b1d57f15SBarry Smith ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 811b1d57f15SBarry Smith if (size == 1) PetscFunctionReturn(0); 81242e5f5b4Svictorle 81342e5f5b4Svictorle 
/* Hard test: off-diagonal block. This takes a MatGetSubMatrix. */ 814cd0d46ebSvictorle ierr = MatGetSize(Amat,&M,&N);CHKERRQ(ierr); 815cd0d46ebSvictorle ierr = MatGetOwnershipRange(Amat,&first,&last);CHKERRQ(ierr); 816b1d57f15SBarry Smith ierr = PetscMalloc((N-last+first)*sizeof(PetscInt),¬me);CHKERRQ(ierr); 817cd0d46ebSvictorle for (i=0; i<first; i++) notme[i] = i; 818cd0d46ebSvictorle for (i=last; i<M; i++) notme[i-last+first] = i; 819268466fbSBarry Smith ierr = ISCreateGeneral(MPI_COMM_SELF,N-last+first,notme,&Notme);CHKERRQ(ierr); 820268466fbSBarry Smith ierr = ISCreateStride(MPI_COMM_SELF,last-first,first,1,&Me);CHKERRQ(ierr); 821268466fbSBarry Smith ierr = MatGetSubMatrices(Amat,1,&Me,&Notme,MAT_INITIAL_MATRIX,&Aoffs);CHKERRQ(ierr); 82266501d38Svictorle Aoff = Aoffs[0]; 823268466fbSBarry Smith ierr = MatGetSubMatrices(Bmat,1,&Notme,&Me,MAT_INITIAL_MATRIX,&Boffs);CHKERRQ(ierr); 82466501d38Svictorle Boff = Boffs[0]; 8255485867bSBarry Smith ierr = MatIsTranspose(Aoff,Boff,tol,f);CHKERRQ(ierr); 82666501d38Svictorle ierr = MatDestroyMatrices(1,&Aoffs);CHKERRQ(ierr); 82766501d38Svictorle ierr = MatDestroyMatrices(1,&Boffs);CHKERRQ(ierr); 82842e5f5b4Svictorle ierr = ISDestroy(Me);CHKERRQ(ierr); 82942e5f5b4Svictorle ierr = ISDestroy(Notme);CHKERRQ(ierr); 83042e5f5b4Svictorle 831cd0d46ebSvictorle PetscFunctionReturn(0); 832cd0d46ebSvictorle } 833cd0d46ebSvictorle EXTERN_C_END 834cd0d46ebSvictorle 8354a2ae208SSatish Balay #undef __FUNCT__ 8364a2ae208SSatish Balay #define __FUNCT__ "MatMultTransposeAdd_MPIAIJ" 837dfbe8321SBarry Smith PetscErrorCode MatMultTransposeAdd_MPIAIJ(Mat A,Vec xx,Vec yy,Vec zz) 838da3a660dSBarry Smith { 839416022c9SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 840dfbe8321SBarry Smith PetscErrorCode ierr; 841da3a660dSBarry Smith 8423a40ed3dSBarry Smith PetscFunctionBegin; 843da3a660dSBarry Smith /* do nondiagonal part */ 8447c922b88SBarry Smith ierr = (*a->B->ops->multtranspose)(a->B,xx,a->lvec);CHKERRQ(ierr); 845da3a660dSBarry Smith /* send 
it on its way */ 846ca9f406cSSatish Balay ierr = VecScatterBegin(a->Mvctx,a->lvec,zz,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 847da3a660dSBarry Smith /* do local part */ 8487c922b88SBarry Smith ierr = (*a->A->ops->multtransposeadd)(a->A,xx,yy,zz);CHKERRQ(ierr); 849a5ff213dSBarry Smith /* receive remote parts */ 850ca9f406cSSatish Balay ierr = VecScatterEnd(a->Mvctx,a->lvec,zz,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 8513a40ed3dSBarry Smith PetscFunctionReturn(0); 852da3a660dSBarry Smith } 853da3a660dSBarry Smith 8541eb62cbbSBarry Smith /* 8551eb62cbbSBarry Smith This only works correctly for square matrices where the subblock A->A is the 8561eb62cbbSBarry Smith diagonal block 8571eb62cbbSBarry Smith */ 8584a2ae208SSatish Balay #undef __FUNCT__ 8594a2ae208SSatish Balay #define __FUNCT__ "MatGetDiagonal_MPIAIJ" 860dfbe8321SBarry Smith PetscErrorCode MatGetDiagonal_MPIAIJ(Mat A,Vec v) 8611eb62cbbSBarry Smith { 862dfbe8321SBarry Smith PetscErrorCode ierr; 863416022c9SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 8643a40ed3dSBarry Smith 8653a40ed3dSBarry Smith PetscFunctionBegin; 866d0f46423SBarry Smith if (A->rmap->N != A->cmap->N) SETERRQ(PETSC_ERR_SUP,"Supports only square matrix where A->A is diag block"); 867d0f46423SBarry Smith if (A->rmap->rstart != A->cmap->rstart || A->rmap->rend != A->cmap->rend) { 86829bbc08cSBarry Smith SETERRQ(PETSC_ERR_ARG_SIZ,"row partition must equal col partition"); 8693a40ed3dSBarry Smith } 8703a40ed3dSBarry Smith ierr = MatGetDiagonal(a->A,v);CHKERRQ(ierr); 8713a40ed3dSBarry Smith PetscFunctionReturn(0); 8721eb62cbbSBarry Smith } 8731eb62cbbSBarry Smith 8744a2ae208SSatish Balay #undef __FUNCT__ 8754a2ae208SSatish Balay #define __FUNCT__ "MatScale_MPIAIJ" 876f4df32b1SMatthew Knepley PetscErrorCode MatScale_MPIAIJ(Mat A,PetscScalar aa) 877052efed2SBarry Smith { 878052efed2SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 879dfbe8321SBarry Smith PetscErrorCode ierr; 8803a40ed3dSBarry Smith 8813a40ed3dSBarry Smith 
PetscFunctionBegin; 882f4df32b1SMatthew Knepley ierr = MatScale(a->A,aa);CHKERRQ(ierr); 883f4df32b1SMatthew Knepley ierr = MatScale(a->B,aa);CHKERRQ(ierr); 8843a40ed3dSBarry Smith PetscFunctionReturn(0); 885052efed2SBarry Smith } 886052efed2SBarry Smith 8874a2ae208SSatish Balay #undef __FUNCT__ 8884a2ae208SSatish Balay #define __FUNCT__ "MatDestroy_MPIAIJ" 889dfbe8321SBarry Smith PetscErrorCode MatDestroy_MPIAIJ(Mat mat) 8901eb62cbbSBarry Smith { 89144a69424SLois Curfman McInnes Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 892dfbe8321SBarry Smith PetscErrorCode ierr; 89383e2fdc7SBarry Smith 8943a40ed3dSBarry Smith PetscFunctionBegin; 895aa482453SBarry Smith #if defined(PETSC_USE_LOG) 896d0f46423SBarry Smith PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D",mat->rmap->N,mat->cmap->N); 897a5a9c739SBarry Smith #endif 8988798bf22SSatish Balay ierr = MatStashDestroy_Private(&mat->stash);CHKERRQ(ierr); 899a7420bb7SBarry Smith if (aij->diag) {ierr = VecDestroy(aij->diag);CHKERRQ(ierr);} 90078b31e54SBarry Smith ierr = MatDestroy(aij->A);CHKERRQ(ierr); 90178b31e54SBarry Smith ierr = MatDestroy(aij->B);CHKERRQ(ierr); 902aa482453SBarry Smith #if defined (PETSC_USE_CTABLE) 9039c666560SBarry Smith if (aij->colmap) {ierr = PetscTableDestroy(aij->colmap);CHKERRQ(ierr);} 904b1fc9764SSatish Balay #else 90505b42c5fSBarry Smith ierr = PetscFree(aij->colmap);CHKERRQ(ierr); 906b1fc9764SSatish Balay #endif 90705b42c5fSBarry Smith ierr = PetscFree(aij->garray);CHKERRQ(ierr); 9087c922b88SBarry Smith if (aij->lvec) {ierr = VecDestroy(aij->lvec);CHKERRQ(ierr);} 9097c922b88SBarry Smith if (aij->Mvctx) {ierr = VecScatterDestroy(aij->Mvctx);CHKERRQ(ierr);} 91005b42c5fSBarry Smith ierr = PetscFree(aij->rowvalues);CHKERRQ(ierr); 9118aa348c1SBarry Smith ierr = PetscFree(aij->ld);CHKERRQ(ierr); 912606d414cSSatish Balay ierr = PetscFree(aij);CHKERRQ(ierr); 913901853e0SKris Buschelman 914dbd8c25aSHong Zhang ierr = PetscObjectChangeTypeName((PetscObject)mat,0);CHKERRQ(ierr); 915901853e0SKris 
Buschelman ierr = PetscObjectComposeFunction((PetscObject)mat,"MatStoreValues_C","",PETSC_NULL);CHKERRQ(ierr); 916901853e0SKris Buschelman ierr = PetscObjectComposeFunction((PetscObject)mat,"MatRetrieveValues_C","",PETSC_NULL);CHKERRQ(ierr); 917901853e0SKris Buschelman ierr = PetscObjectComposeFunction((PetscObject)mat,"MatGetDiagonalBlock_C","",PETSC_NULL);CHKERRQ(ierr); 918901853e0SKris Buschelman ierr = PetscObjectComposeFunction((PetscObject)mat,"MatIsTranspose_C","",PETSC_NULL);CHKERRQ(ierr); 919901853e0SKris Buschelman ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAIJSetPreallocation_C","",PETSC_NULL);CHKERRQ(ierr); 920ff69c46cSKris Buschelman ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAIJSetPreallocationCSR_C","",PETSC_NULL);CHKERRQ(ierr); 921901853e0SKris Buschelman ierr = PetscObjectComposeFunction((PetscObject)mat,"MatDiagonalScaleLocal_C","",PETSC_NULL);CHKERRQ(ierr); 922471cc821SHong Zhang ierr = PetscObjectComposeFunction((PetscObject)mat,"MatConvert_mpiaij_mpisbaij_C","",PETSC_NULL);CHKERRQ(ierr); 9233a40ed3dSBarry Smith PetscFunctionReturn(0); 9241eb62cbbSBarry Smith } 925ee50ffe9SBarry Smith 9264a2ae208SSatish Balay #undef __FUNCT__ 9278e2fed03SBarry Smith #define __FUNCT__ "MatView_MPIAIJ_Binary" 928dfbe8321SBarry Smith PetscErrorCode MatView_MPIAIJ_Binary(Mat mat,PetscViewer viewer) 9298e2fed03SBarry Smith { 9308e2fed03SBarry Smith Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 9318e2fed03SBarry Smith Mat_SeqAIJ* A = (Mat_SeqAIJ*)aij->A->data; 9328e2fed03SBarry Smith Mat_SeqAIJ* B = (Mat_SeqAIJ*)aij->B->data; 9336849ba73SBarry Smith PetscErrorCode ierr; 93432dcc486SBarry Smith PetscMPIInt rank,size,tag = ((PetscObject)viewer)->tag; 9356f69ff64SBarry Smith int fd; 936a788621eSSatish Balay PetscInt nz,header[4],*row_lengths,*range=0,rlen,i; 937d0f46423SBarry Smith PetscInt nzmax,*column_indices,j,k,col,*garray = aij->garray,cnt,cstart = mat->cmap->rstart,rnz; 9388e2fed03SBarry Smith PetscScalar *column_values; 9398e2fed03SBarry 
Smith 9408e2fed03SBarry Smith PetscFunctionBegin; 9417adad957SLisandro Dalcin ierr = MPI_Comm_rank(((PetscObject)mat)->comm,&rank);CHKERRQ(ierr); 9427adad957SLisandro Dalcin ierr = MPI_Comm_size(((PetscObject)mat)->comm,&size);CHKERRQ(ierr); 9438e2fed03SBarry Smith nz = A->nz + B->nz; 944958c9bccSBarry Smith if (!rank) { 9458e2fed03SBarry Smith header[0] = MAT_FILE_COOKIE; 946d0f46423SBarry Smith header[1] = mat->rmap->N; 947d0f46423SBarry Smith header[2] = mat->cmap->N; 9487adad957SLisandro Dalcin ierr = MPI_Reduce(&nz,&header[3],1,MPIU_INT,MPI_SUM,0,((PetscObject)mat)->comm);CHKERRQ(ierr); 9498e2fed03SBarry Smith ierr = PetscViewerBinaryGetDescriptor(viewer,&fd);CHKERRQ(ierr); 9506f69ff64SBarry Smith ierr = PetscBinaryWrite(fd,header,4,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr); 9518e2fed03SBarry Smith /* get largest number of rows any processor has */ 952d0f46423SBarry Smith rlen = mat->rmap->n; 953d0f46423SBarry Smith range = mat->rmap->range; 9548e2fed03SBarry Smith for (i=1; i<size; i++) { 9558e2fed03SBarry Smith rlen = PetscMax(rlen,range[i+1] - range[i]); 9568e2fed03SBarry Smith } 9578e2fed03SBarry Smith } else { 9587adad957SLisandro Dalcin ierr = MPI_Reduce(&nz,0,1,MPIU_INT,MPI_SUM,0,((PetscObject)mat)->comm);CHKERRQ(ierr); 959d0f46423SBarry Smith rlen = mat->rmap->n; 9608e2fed03SBarry Smith } 9618e2fed03SBarry Smith 9628e2fed03SBarry Smith /* load up the local row counts */ 963b1d57f15SBarry Smith ierr = PetscMalloc((rlen+1)*sizeof(PetscInt),&row_lengths);CHKERRQ(ierr); 964d0f46423SBarry Smith for (i=0; i<mat->rmap->n; i++) { 9658e2fed03SBarry Smith row_lengths[i] = A->i[i+1] - A->i[i] + B->i[i+1] - B->i[i]; 9668e2fed03SBarry Smith } 9678e2fed03SBarry Smith 9688e2fed03SBarry Smith /* store the row lengths to the file */ 969958c9bccSBarry Smith if (!rank) { 9708e2fed03SBarry Smith MPI_Status status; 971d0f46423SBarry Smith ierr = PetscBinaryWrite(fd,row_lengths,mat->rmap->n,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr); 9728e2fed03SBarry Smith for (i=1; i<size; i++) { 
9738e2fed03SBarry Smith rlen = range[i+1] - range[i]; 9747adad957SLisandro Dalcin ierr = MPI_Recv(row_lengths,rlen,MPIU_INT,i,tag,((PetscObject)mat)->comm,&status);CHKERRQ(ierr); 9756f69ff64SBarry Smith ierr = PetscBinaryWrite(fd,row_lengths,rlen,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr); 9768e2fed03SBarry Smith } 9778e2fed03SBarry Smith } else { 978d0f46423SBarry Smith ierr = MPI_Send(row_lengths,mat->rmap->n,MPIU_INT,0,tag,((PetscObject)mat)->comm);CHKERRQ(ierr); 9798e2fed03SBarry Smith } 9808e2fed03SBarry Smith ierr = PetscFree(row_lengths);CHKERRQ(ierr); 9818e2fed03SBarry Smith 9828e2fed03SBarry Smith /* load up the local column indices */ 9838e2fed03SBarry Smith nzmax = nz; /* )th processor needs space a largest processor needs */ 9847adad957SLisandro Dalcin ierr = MPI_Reduce(&nz,&nzmax,1,MPIU_INT,MPI_MAX,0,((PetscObject)mat)->comm);CHKERRQ(ierr); 985b1d57f15SBarry Smith ierr = PetscMalloc((nzmax+1)*sizeof(PetscInt),&column_indices);CHKERRQ(ierr); 9868e2fed03SBarry Smith cnt = 0; 987d0f46423SBarry Smith for (i=0; i<mat->rmap->n; i++) { 9888e2fed03SBarry Smith for (j=B->i[i]; j<B->i[i+1]; j++) { 9898e2fed03SBarry Smith if ( (col = garray[B->j[j]]) > cstart) break; 9908e2fed03SBarry Smith column_indices[cnt++] = col; 9918e2fed03SBarry Smith } 9928e2fed03SBarry Smith for (k=A->i[i]; k<A->i[i+1]; k++) { 9938e2fed03SBarry Smith column_indices[cnt++] = A->j[k] + cstart; 9948e2fed03SBarry Smith } 9958e2fed03SBarry Smith for (; j<B->i[i+1]; j++) { 9968e2fed03SBarry Smith column_indices[cnt++] = garray[B->j[j]]; 9978e2fed03SBarry Smith } 9988e2fed03SBarry Smith } 99977431f27SBarry Smith if (cnt != A->nz + B->nz) SETERRQ2(PETSC_ERR_LIB,"Internal PETSc error: cnt = %D nz = %D",cnt,A->nz+B->nz); 10008e2fed03SBarry Smith 10018e2fed03SBarry Smith /* store the column indices to the file */ 1002958c9bccSBarry Smith if (!rank) { 10038e2fed03SBarry Smith MPI_Status status; 10046f69ff64SBarry Smith ierr = PetscBinaryWrite(fd,column_indices,nz,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr); 
10058e2fed03SBarry Smith for (i=1; i<size; i++) { 10067adad957SLisandro Dalcin ierr = MPI_Recv(&rnz,1,MPIU_INT,i,tag,((PetscObject)mat)->comm,&status);CHKERRQ(ierr); 100777431f27SBarry Smith if (rnz > nzmax) SETERRQ2(PETSC_ERR_LIB,"Internal PETSc error: nz = %D nzmax = %D",nz,nzmax); 10087adad957SLisandro Dalcin ierr = MPI_Recv(column_indices,rnz,MPIU_INT,i,tag,((PetscObject)mat)->comm,&status);CHKERRQ(ierr); 10096f69ff64SBarry Smith ierr = PetscBinaryWrite(fd,column_indices,rnz,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr); 10108e2fed03SBarry Smith } 10118e2fed03SBarry Smith } else { 10127adad957SLisandro Dalcin ierr = MPI_Send(&nz,1,MPIU_INT,0,tag,((PetscObject)mat)->comm);CHKERRQ(ierr); 10137adad957SLisandro Dalcin ierr = MPI_Send(column_indices,nz,MPIU_INT,0,tag,((PetscObject)mat)->comm);CHKERRQ(ierr); 10148e2fed03SBarry Smith } 10158e2fed03SBarry Smith ierr = PetscFree(column_indices);CHKERRQ(ierr); 10168e2fed03SBarry Smith 10178e2fed03SBarry Smith /* load up the local column values */ 10188e2fed03SBarry Smith ierr = PetscMalloc((nzmax+1)*sizeof(PetscScalar),&column_values);CHKERRQ(ierr); 10198e2fed03SBarry Smith cnt = 0; 1020d0f46423SBarry Smith for (i=0; i<mat->rmap->n; i++) { 10218e2fed03SBarry Smith for (j=B->i[i]; j<B->i[i+1]; j++) { 10228e2fed03SBarry Smith if ( garray[B->j[j]] > cstart) break; 10238e2fed03SBarry Smith column_values[cnt++] = B->a[j]; 10248e2fed03SBarry Smith } 10258e2fed03SBarry Smith for (k=A->i[i]; k<A->i[i+1]; k++) { 10268e2fed03SBarry Smith column_values[cnt++] = A->a[k]; 10278e2fed03SBarry Smith } 10288e2fed03SBarry Smith for (; j<B->i[i+1]; j++) { 10298e2fed03SBarry Smith column_values[cnt++] = B->a[j]; 10308e2fed03SBarry Smith } 10318e2fed03SBarry Smith } 103277431f27SBarry Smith if (cnt != A->nz + B->nz) SETERRQ2(PETSC_ERR_PLIB,"Internal PETSc error: cnt = %D nz = %D",cnt,A->nz+B->nz); 10338e2fed03SBarry Smith 10348e2fed03SBarry Smith /* store the column values to the file */ 1035958c9bccSBarry Smith if (!rank) { 10368e2fed03SBarry Smith 
MPI_Status status; 10376f69ff64SBarry Smith ierr = PetscBinaryWrite(fd,column_values,nz,PETSC_SCALAR,PETSC_TRUE);CHKERRQ(ierr); 10388e2fed03SBarry Smith for (i=1; i<size; i++) { 10397adad957SLisandro Dalcin ierr = MPI_Recv(&rnz,1,MPIU_INT,i,tag,((PetscObject)mat)->comm,&status);CHKERRQ(ierr); 104077431f27SBarry Smith if (rnz > nzmax) SETERRQ2(PETSC_ERR_LIB,"Internal PETSc error: nz = %D nzmax = %D",nz,nzmax); 10417adad957SLisandro Dalcin ierr = MPI_Recv(column_values,rnz,MPIU_SCALAR,i,tag,((PetscObject)mat)->comm,&status);CHKERRQ(ierr); 10426f69ff64SBarry Smith ierr = PetscBinaryWrite(fd,column_values,rnz,PETSC_SCALAR,PETSC_TRUE);CHKERRQ(ierr); 10438e2fed03SBarry Smith } 10448e2fed03SBarry Smith } else { 10457adad957SLisandro Dalcin ierr = MPI_Send(&nz,1,MPIU_INT,0,tag,((PetscObject)mat)->comm);CHKERRQ(ierr); 10467adad957SLisandro Dalcin ierr = MPI_Send(column_values,nz,MPIU_SCALAR,0,tag,((PetscObject)mat)->comm);CHKERRQ(ierr); 10478e2fed03SBarry Smith } 10488e2fed03SBarry Smith ierr = PetscFree(column_values);CHKERRQ(ierr); 10498e2fed03SBarry Smith PetscFunctionReturn(0); 10508e2fed03SBarry Smith } 10518e2fed03SBarry Smith 10528e2fed03SBarry Smith #undef __FUNCT__ 10534a2ae208SSatish Balay #define __FUNCT__ "MatView_MPIAIJ_ASCIIorDraworSocket" 1054dfbe8321SBarry Smith PetscErrorCode MatView_MPIAIJ_ASCIIorDraworSocket(Mat mat,PetscViewer viewer) 1055416022c9SBarry Smith { 105644a69424SLois Curfman McInnes Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 1057dfbe8321SBarry Smith PetscErrorCode ierr; 105832dcc486SBarry Smith PetscMPIInt rank = aij->rank,size = aij->size; 1059d38fa0fbSBarry Smith PetscTruth isdraw,iascii,isbinary; 1060b0a32e0cSBarry Smith PetscViewer sviewer; 1061f3ef73ceSBarry Smith PetscViewerFormat format; 1062416022c9SBarry Smith 10633a40ed3dSBarry Smith PetscFunctionBegin; 1064fb9695e5SSatish Balay ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_DRAW,&isdraw);CHKERRQ(ierr); 106532077d6dSBarry Smith ierr = 
PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_BINARY,&isbinary);CHKERRQ(ierr);
  if (iascii) {
    ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
    if (format == PETSC_VIEWER_ASCII_INFO_DETAIL) {
      /* per-process summary: row count, nonzero usage and I-node state of the
         local diagonal (A) and off-diagonal (B) blocks */
      MatInfo    info;
      PetscTruth inodes;

      ierr = MPI_Comm_rank(((PetscObject)mat)->comm,&rank);CHKERRQ(ierr);
      ierr = MatGetInfo(mat,MAT_LOCAL,&info);CHKERRQ(ierr);
      ierr = MatInodeGetInodeSizes(aij->A,PETSC_NULL,(PetscInt **)&inodes,PETSC_NULL);CHKERRQ(ierr);
      if (!inodes) {
        ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] Local rows %D nz %D nz alloced %D mem %D, not using I-node routines\n",
               rank,mat->rmap->n,(PetscInt)info.nz_used,(PetscInt)info.nz_allocated,(PetscInt)info.memory);CHKERRQ(ierr);
      } else {
        ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] Local rows %D nz %D nz alloced %D mem %D, using I-node routines\n",
               rank,mat->rmap->n,(PetscInt)info.nz_used,(PetscInt)info.nz_allocated,(PetscInt)info.memory);CHKERRQ(ierr);
      }
      ierr = MatGetInfo(aij->A,MAT_LOCAL,&info);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] on-diagonal part: nz %D \n",rank,(PetscInt)info.nz_used);CHKERRQ(ierr);
      ierr = MatGetInfo(aij->B,MAT_LOCAL,&info);CHKERRQ(ierr);
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] off-diagonal part: nz %D \n",rank,(PetscInt)info.nz_used);CHKERRQ(ierr);
      ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
      ierr = PetscViewerASCIIPrintf(viewer,"Information on VecScatter used in matrix-vector product: \n");CHKERRQ(ierr);
      ierr = VecScatterView(aij->Mvctx,viewer);CHKERRQ(ierr);
      PetscFunctionReturn(0);
    } else if (format == PETSC_VIEWER_ASCII_INFO) {
      /* short I-node summary for the process-0 diagonal block only */
      PetscInt inodecount,inodelimit,*inodes;
      ierr = MatInodeGetInodeSizes(aij->A,&inodecount,&inodes,&inodelimit);CHKERRQ(ierr);
      if (inodes) {
        ierr = PetscViewerASCIIPrintf(viewer,"using I-node (on process 0) routines: found %D nodes, limit used is %D\n",inodecount,inodelimit);CHKERRQ(ierr);
      } else {
        ierr = PetscViewerASCIIPrintf(viewer,"not using I-node (on process 0) routines\n");CHKERRQ(ierr);
      }
      PetscFunctionReturn(0);
    } else if (format == PETSC_VIEWER_ASCII_FACTOR_INFO) {
      /* nothing to print for factored-matrix info on an unfactored AIJ matrix */
      PetscFunctionReturn(0);
    }
  } else if (isbinary) {
    if (size == 1) {
      /* uniprocessor: the diagonal block IS the whole matrix */
      ierr = PetscObjectSetName((PetscObject)aij->A,((PetscObject)mat)->name);CHKERRQ(ierr);
      ierr = MatView(aij->A,viewer);CHKERRQ(ierr);
    } else {
      ierr = MatView_MPIAIJ_Binary(mat,viewer);CHKERRQ(ierr);
    }
    PetscFunctionReturn(0);
  } else if (isdraw) {
    PetscDraw  draw;
    PetscTruth isnull;
    ierr = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr);
    /* a "null" draw context means graphics are disabled; skip all work */
    ierr = PetscDrawIsNull(draw,&isnull);CHKERRQ(ierr); if (isnull) PetscFunctionReturn(0);
  }

  if (size == 1) {
    ierr = PetscObjectSetName((PetscObject)aij->A,((PetscObject)mat)->name);CHKERRQ(ierr);
    ierr = MatView(aij->A,viewer);CHKERRQ(ierr);
  } else {
    /* assemble the entire matrix onto first processor. */
    Mat        A;
    Mat_SeqAIJ *Aloc;
    PetscInt   M = mat->rmap->N,N = mat->cmap->N,m,*ai,*aj,row,*cols,i,*ct;
    MatScalar  *a;

    if (mat->rmap->N > 1024) {
      PetscTruth flg = PETSC_FALSE;

      ierr = PetscOptionsGetTruth(((PetscObject) mat)->prefix, "-mat_ascii_output_large", &flg,PETSC_NULL);CHKERRQ(ierr);
      if (!flg) {
        SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"ASCII matrix output not allowed for matrices with more than 1024 rows, use binary format instead.\nYou can override this restriction using -mat_ascii_output_large.");
      }
    }

    /* rank 0 owns all M rows of the gathered copy; everyone else owns none */
    ierr = MatCreate(((PetscObject)mat)->comm,&A);CHKERRQ(ierr);
    if (!rank) {
      ierr = MatSetSizes(A,M,N,M,N);CHKERRQ(ierr);
    } else {
      ierr = MatSetSizes(A,0,0,M,N);CHKERRQ(ierr);
    }
    /* This is just a temporary matrix, so explicitly using MATMPIAIJ is probably best */
    ierr = MatSetType(A,MATMPIAIJ);CHKERRQ(ierr);
    ierr = MatMPIAIJSetPreallocation(A,0,PETSC_NULL,0,PETSC_NULL);CHKERRQ(ierr);
    ierr = PetscLogObjectParent(mat,A);CHKERRQ(ierr);

    /* copy over the A part */
    Aloc = (Mat_SeqAIJ*)aij->A->data;
    m = aij->A->rmap->n; ai = Aloc->i; aj = Aloc->j; a = Aloc->a;
    row = mat->rmap->rstart;
    /* temporarily shift A's local column indices to global numbering for MatSetValues() */
    for (i=0; i<ai[m]; i++) {aj[i] += mat->cmap->rstart ;}
    for (i=0; i<m; i++) {
      ierr = MatSetValues(A,1,&row,ai[i+1]-ai[i],aj,a,INSERT_VALUES);CHKERRQ(ierr);
      row++; a += ai[i+1]-ai[i]; aj += ai[i+1]-ai[i];
    }
    aj = Aloc->j;
    /* undo the shift so aij->A is left unchanged */
    for (i=0; i<ai[m]; i++) {aj[i] -= mat->cmap->rstart;}

    /* copy over the B part */
    Aloc = (Mat_SeqAIJ*)aij->B->data;
    m = aij->B->rmap->n;  ai = Aloc->i; aj = Aloc->j; a = Aloc->a;
    row = mat->rmap->rstart;
    ierr = PetscMalloc((ai[m]+1)*sizeof(PetscInt),&cols);CHKERRQ(ierr);
    ct   = cols;  /* ct keeps the base pointer; cols is advanced row-by-row below */
    /* map B's compressed local column indices to global via garray */
    for (i=0; i<ai[m]; i++) {cols[i] = aij->garray[aj[i]];}
    for (i=0; i<m; i++) {
      ierr = MatSetValues(A,1,&row,ai[i+1]-ai[i],cols,a,INSERT_VALUES);CHKERRQ(ierr);
      row++; a += ai[i+1]-ai[i]; cols += ai[i+1]-ai[i];
    }
    ierr = PetscFree(ct);CHKERRQ(ierr);
    ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    /*
       Everyone has to call to draw the matrix since the graphics waits are
       synchronized across all processors that share the PetscDraw object
    */
    ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr);
    if (!rank) {
      ierr = PetscObjectSetName((PetscObject)((Mat_MPIAIJ*)(A->data))->A,((PetscObject)mat)->name);CHKERRQ(ierr);
      ierr = MatView(((Mat_MPIAIJ*)(A->data))->A,sviewer);CHKERRQ(ierr);
    }
    ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr);
    ierr = MatDestroy(A);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIAIJ"
/*
   MatView_MPIAIJ - Viewer dispatch for MPIAIJ matrices.

   Supports ASCII, draw, binary and socket viewers, all of which are
   forwarded to MatView_MPIAIJ_ASCIIorDraworSocket(); any other viewer
   type raises PETSC_ERR_SUP.
*/
PetscErrorCode MatView_MPIAIJ(Mat mat,PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscTruth     iascii,isdraw,issocket,isbinary;

  PetscFunctionBegin;
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_DRAW,&isdraw);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_BINARY,&isbinary);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_SOCKET,&issocket);CHKERRQ(ierr);
  if (iascii || isdraw || isbinary || issocket) {
    ierr = MatView_MPIAIJ_ASCIIorDraworSocket(mat,viewer);CHKERRQ(ierr);
  } else {
    SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported by MPIAIJ matrices",((PetscObject)viewer)->type_name);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
121041f059aeSBarry Smith #define __FUNCT__ "MatSOR_MPIAIJ" 121141f059aeSBarry Smith PetscErrorCode MatSOR_MPIAIJ(Mat matin,Vec bb,PetscReal omega,MatSORType flag,PetscReal fshift,PetscInt its,PetscInt lits,Vec xx) 12128a729477SBarry Smith { 121344a69424SLois Curfman McInnes Mat_MPIAIJ *mat = (Mat_MPIAIJ*)matin->data; 1214dfbe8321SBarry Smith PetscErrorCode ierr; 12156987fefcSBarry Smith Vec bb1 = 0; 1216bd0c2dcbSBarry Smith PetscTruth hasop; 12178a729477SBarry Smith 12183a40ed3dSBarry Smith PetscFunctionBegin; 121985911e72SJed Brown if (its > 1 || ~flag & SOR_ZERO_INITIAL_GUESS || flag & SOR_EISENSTAT) { 122085911e72SJed Brown ierr = VecDuplicate(bb,&bb1);CHKERRQ(ierr); 122185911e72SJed Brown } 12222798e883SHong Zhang 1223a2b30743SBarry Smith if (flag == SOR_APPLY_UPPER) { 122441f059aeSBarry Smith ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr); 1225a2b30743SBarry Smith PetscFunctionReturn(0); 1226a2b30743SBarry Smith } 1227a2b30743SBarry Smith 1228c16cb8f2SBarry Smith if ((flag & SOR_LOCAL_SYMMETRIC_SWEEP) == SOR_LOCAL_SYMMETRIC_SWEEP){ 1229da3a660dSBarry Smith if (flag & SOR_ZERO_INITIAL_GUESS) { 123041f059aeSBarry Smith ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr); 12312798e883SHong Zhang its--; 1232da3a660dSBarry Smith } 12332798e883SHong Zhang 12342798e883SHong Zhang while (its--) { 1235ca9f406cSSatish Balay ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1236ca9f406cSSatish Balay ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 12372798e883SHong Zhang 1238c14dc6b6SHong Zhang /* update rhs: bb1 = bb - B*x */ 1239efb30889SBarry Smith ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr); 1240c14dc6b6SHong Zhang ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr); 12412798e883SHong Zhang 1242c14dc6b6SHong Zhang /* local sweep */ 124341f059aeSBarry Smith ierr = 
(*mat->A->ops->sor)(mat->A,bb1,omega,SOR_SYMMETRIC_SWEEP,fshift,lits,1,xx);CHKERRQ(ierr); 12442798e883SHong Zhang } 12453a40ed3dSBarry Smith } else if (flag & SOR_LOCAL_FORWARD_SWEEP){ 1246da3a660dSBarry Smith if (flag & SOR_ZERO_INITIAL_GUESS) { 124741f059aeSBarry Smith ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr); 12482798e883SHong Zhang its--; 1249da3a660dSBarry Smith } 12502798e883SHong Zhang while (its--) { 1251ca9f406cSSatish Balay ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1252ca9f406cSSatish Balay ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 12532798e883SHong Zhang 1254c14dc6b6SHong Zhang /* update rhs: bb1 = bb - B*x */ 1255efb30889SBarry Smith ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr); 1256c14dc6b6SHong Zhang ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr); 1257c14dc6b6SHong Zhang 1258c14dc6b6SHong Zhang /* local sweep */ 125941f059aeSBarry Smith ierr = (*mat->A->ops->sor)(mat->A,bb1,omega,SOR_FORWARD_SWEEP,fshift,lits,1,xx);CHKERRQ(ierr); 12602798e883SHong Zhang } 12613a40ed3dSBarry Smith } else if (flag & SOR_LOCAL_BACKWARD_SWEEP){ 1262da3a660dSBarry Smith if (flag & SOR_ZERO_INITIAL_GUESS) { 126341f059aeSBarry Smith ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr); 12642798e883SHong Zhang its--; 1265da3a660dSBarry Smith } 12662798e883SHong Zhang while (its--) { 1267ca9f406cSSatish Balay ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1268ca9f406cSSatish Balay ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 12692798e883SHong Zhang 1270c14dc6b6SHong Zhang /* update rhs: bb1 = bb - B*x */ 1271efb30889SBarry Smith ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr); 1272c14dc6b6SHong Zhang ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr); 12732798e883SHong Zhang 
1274c14dc6b6SHong Zhang /* local sweep */ 127541f059aeSBarry Smith ierr = (*mat->A->ops->sor)(mat->A,bb1,omega,SOR_BACKWARD_SWEEP,fshift,lits,1,xx);CHKERRQ(ierr); 12762798e883SHong Zhang } 1277a7420bb7SBarry Smith } else if (flag & SOR_EISENSTAT) { 1278a7420bb7SBarry Smith Vec xx1; 1279a7420bb7SBarry Smith 1280a7420bb7SBarry Smith ierr = VecDuplicate(bb,&xx1);CHKERRQ(ierr); 128141f059aeSBarry Smith ierr = (*mat->A->ops->sor)(mat->A,bb,omega,(MatSORType)(SOR_ZERO_INITIAL_GUESS | SOR_LOCAL_BACKWARD_SWEEP),fshift,lits,1,xx);CHKERRQ(ierr); 1282a7420bb7SBarry Smith 1283a7420bb7SBarry Smith ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1284a7420bb7SBarry Smith ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1285a7420bb7SBarry Smith if (!mat->diag) { 1286a7420bb7SBarry Smith ierr = MatGetVecs(matin,&mat->diag,PETSC_NULL);CHKERRQ(ierr); 1287a7420bb7SBarry Smith ierr = MatGetDiagonal(matin,mat->diag);CHKERRQ(ierr); 1288a7420bb7SBarry Smith } 1289bd0c2dcbSBarry Smith ierr = MatHasOperation(matin,MATOP_MULT_DIAGONAL_BLOCK,&hasop);CHKERRQ(ierr); 1290bd0c2dcbSBarry Smith if (hasop) { 1291bd0c2dcbSBarry Smith ierr = MatMultDiagonalBlock(matin,xx,bb1);CHKERRQ(ierr); 1292bd0c2dcbSBarry Smith } else { 1293a7420bb7SBarry Smith ierr = VecPointwiseMult(bb1,mat->diag,xx);CHKERRQ(ierr); 1294bd0c2dcbSBarry Smith } 1295887ee2caSBarry Smith ierr = VecAYPX(bb1,(omega-2.0)/omega,bb);CHKERRQ(ierr); 1296887ee2caSBarry Smith 1297a7420bb7SBarry Smith ierr = MatMultAdd(mat->B,mat->lvec,bb1,bb1);CHKERRQ(ierr); 1298a7420bb7SBarry Smith 1299a7420bb7SBarry Smith /* local sweep */ 130041f059aeSBarry Smith ierr = (*mat->A->ops->sor)(mat->A,bb1,omega,(MatSORType)(SOR_ZERO_INITIAL_GUESS | SOR_LOCAL_FORWARD_SWEEP),fshift,lits,1,xx1);CHKERRQ(ierr); 1301a7420bb7SBarry Smith ierr = VecAXPY(xx,1.0,xx1);CHKERRQ(ierr); 1302a7420bb7SBarry Smith ierr = VecDestroy(xx1);CHKERRQ(ierr); 13033a40ed3dSBarry Smith } else { 
130429bbc08cSBarry Smith SETERRQ(PETSC_ERR_SUP,"Parallel SOR not supported"); 1305c16cb8f2SBarry Smith } 1306c14dc6b6SHong Zhang 13076987fefcSBarry Smith if (bb1) {ierr = VecDestroy(bb1);CHKERRQ(ierr);} 13083a40ed3dSBarry Smith PetscFunctionReturn(0); 13098a729477SBarry Smith } 1310a66be287SLois Curfman McInnes 13114a2ae208SSatish Balay #undef __FUNCT__ 131242e855d1Svictor #define __FUNCT__ "MatPermute_MPIAIJ" 131342e855d1Svictor PetscErrorCode MatPermute_MPIAIJ(Mat A,IS rowp,IS colp,Mat *B) 131442e855d1Svictor { 131542e855d1Svictor MPI_Comm comm,pcomm; 13165d0c19d7SBarry Smith PetscInt first,local_size,nrows; 13175d0c19d7SBarry Smith const PetscInt *rows; 1318dbf0e21dSBarry Smith PetscMPIInt size; 131942e855d1Svictor IS crowp,growp,irowp,lrowp,lcolp,icolp; 132042e855d1Svictor PetscErrorCode ierr; 132142e855d1Svictor 132242e855d1Svictor PetscFunctionBegin; 132342e855d1Svictor ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr); 132442e855d1Svictor /* make a collective version of 'rowp' */ 132542e855d1Svictor ierr = PetscObjectGetComm((PetscObject)rowp,&pcomm);CHKERRQ(ierr); 132642e855d1Svictor if (pcomm==comm) { 132742e855d1Svictor crowp = rowp; 132842e855d1Svictor } else { 132942e855d1Svictor ierr = ISGetSize(rowp,&nrows);CHKERRQ(ierr); 133042e855d1Svictor ierr = ISGetIndices(rowp,&rows);CHKERRQ(ierr); 133142e855d1Svictor ierr = ISCreateGeneral(comm,nrows,rows,&crowp);CHKERRQ(ierr); 133242e855d1Svictor ierr = ISRestoreIndices(rowp,&rows);CHKERRQ(ierr); 133342e855d1Svictor } 133442e855d1Svictor /* collect the global row permutation and invert it */ 133542e855d1Svictor ierr = ISAllGather(crowp,&growp);CHKERRQ(ierr); 133642e855d1Svictor ierr = ISSetPermutation(growp);CHKERRQ(ierr); 133742e855d1Svictor if (pcomm!=comm) { 133842e855d1Svictor ierr = ISDestroy(crowp);CHKERRQ(ierr); 133942e855d1Svictor } 134042e855d1Svictor ierr = ISInvertPermutation(growp,PETSC_DECIDE,&irowp);CHKERRQ(ierr); 134142e855d1Svictor /* get the local target indices */ 
134242e855d1Svictor ierr = MatGetOwnershipRange(A,&first,PETSC_NULL);CHKERRQ(ierr); 134342e855d1Svictor ierr = MatGetLocalSize(A,&local_size,PETSC_NULL);CHKERRQ(ierr); 134442e855d1Svictor ierr = ISGetIndices(irowp,&rows);CHKERRQ(ierr); 134542e855d1Svictor ierr = ISCreateGeneral(MPI_COMM_SELF,local_size,rows+first,&lrowp);CHKERRQ(ierr); 134642e855d1Svictor ierr = ISRestoreIndices(irowp,&rows);CHKERRQ(ierr); 134742e855d1Svictor ierr = ISDestroy(irowp);CHKERRQ(ierr); 134842e855d1Svictor /* the column permutation is so much easier; 134942e855d1Svictor make a local version of 'colp' and invert it */ 135042e855d1Svictor ierr = PetscObjectGetComm((PetscObject)colp,&pcomm);CHKERRQ(ierr); 1351dbf0e21dSBarry Smith ierr = MPI_Comm_size(pcomm,&size);CHKERRQ(ierr); 1352dbf0e21dSBarry Smith if (size==1) { 135342e855d1Svictor lcolp = colp; 135442e855d1Svictor } else { 135542e855d1Svictor ierr = ISGetSize(colp,&nrows);CHKERRQ(ierr); 135642e855d1Svictor ierr = ISGetIndices(colp,&rows);CHKERRQ(ierr); 135742e855d1Svictor ierr = ISCreateGeneral(MPI_COMM_SELF,nrows,rows,&lcolp);CHKERRQ(ierr); 135842e855d1Svictor } 1359dbf0e21dSBarry Smith ierr = ISSetPermutation(lcolp);CHKERRQ(ierr); 136042e855d1Svictor ierr = ISInvertPermutation(lcolp,PETSC_DECIDE,&icolp);CHKERRQ(ierr); 13614aa3045dSJed Brown ierr = ISSetPermutation(icolp);CHKERRQ(ierr); 1362dbf0e21dSBarry Smith if (size>1) { 136342e855d1Svictor ierr = ISRestoreIndices(colp,&rows);CHKERRQ(ierr); 136442e855d1Svictor ierr = ISDestroy(lcolp);CHKERRQ(ierr); 136542e855d1Svictor } 136642e855d1Svictor /* now we just get the submatrix */ 13674aa3045dSJed Brown ierr = MatGetSubMatrix_MPIAIJ_Private(A,lrowp,icolp,local_size,MAT_INITIAL_MATRIX,B);CHKERRQ(ierr); 136842e855d1Svictor /* clean up */ 136942e855d1Svictor ierr = ISDestroy(lrowp);CHKERRQ(ierr); 137042e855d1Svictor ierr = ISDestroy(icolp);CHKERRQ(ierr); 137142e855d1Svictor PetscFunctionReturn(0); 137242e855d1Svictor } 137342e855d1Svictor 137442e855d1Svictor #undef __FUNCT__ 
#define __FUNCT__ "MatGetInfo_MPIAIJ"
/*
   MatGetInfo_MPIAIJ - Gathers matrix statistics (nonzeros, memory, mallocs)
   for an MPIAIJ matrix by summing its diagonal (A) and off-diagonal (B)
   block info, then reducing across the communicator for the GLOBAL flags.
*/
PetscErrorCode MatGetInfo_MPIAIJ(Mat matin,MatInfoType flag,MatInfo *info)
{
  Mat_MPIAIJ     *mat = (Mat_MPIAIJ*)matin->data;
  Mat            A = mat->A,B = mat->B;
  PetscErrorCode ierr;
  PetscReal      isend[5],irecv[5];

  PetscFunctionBegin;
  info->block_size     = 1.0;
  /* accumulate A-block and B-block local statistics into isend[] */
  ierr = MatGetInfo(A,MAT_LOCAL,info);CHKERRQ(ierr);
  isend[0] = info->nz_used; isend[1] = info->nz_allocated; isend[2] = info->nz_unneeded;
  isend[3] = info->memory;  isend[4] = info->mallocs;
  ierr = MatGetInfo(B,MAT_LOCAL,info);CHKERRQ(ierr);
  isend[0] += info->nz_used; isend[1] += info->nz_allocated; isend[2] += info->nz_unneeded;
  isend[3] += info->memory;  isend[4] += info->mallocs;
  if (flag == MAT_LOCAL) {
    info->nz_used      = isend[0];
    info->nz_allocated = isend[1];
    info->nz_unneeded  = isend[2];
    info->memory       = isend[3];
    info->mallocs      = isend[4];
  } else if (flag == MAT_GLOBAL_MAX) {
    ierr = MPI_Allreduce(isend,irecv,5,MPIU_REAL,MPI_MAX,((PetscObject)matin)->comm);CHKERRQ(ierr);
    info->nz_used      = irecv[0];
    info->nz_allocated = irecv[1];
    info->nz_unneeded  = irecv[2];
    info->memory       = irecv[3];
    info->mallocs      = irecv[4];
  } else if (flag == MAT_GLOBAL_SUM) {
    ierr = MPI_Allreduce(isend,irecv,5,MPIU_REAL,MPI_SUM,((PetscObject)matin)->comm);CHKERRQ(ierr);
    info->nz_used      = irecv[0];
    info->nz_allocated = irecv[1];
    info->nz_unneeded  = irecv[2];
    info->memory       = irecv[3];
    info->mallocs      = irecv[4];
  }
  info->fill_ratio_given  = 0; /* no parallel LU/ILU/Cholesky */
  info->fill_ratio_needed = 0;
  info->factor_mallocs    = 0;

  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetOption_MPIAIJ"
/*
   MatSetOption_MPIAIJ - Applies a MatOption to an MPIAIJ matrix, forwarding
   it to the local A (diagonal) and/or B (off-diagonal) blocks as appropriate.
   Symmetry-related options only make sense for the square diagonal block,
   so they are forwarded to A alone.
*/
PetscErrorCode MatSetOption_MPIAIJ(Mat A,MatOption op,PetscTruth flg)
{
  Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  switch (op) {
  /* options that affect both local blocks identically */
  case MAT_NEW_NONZERO_LOCATIONS:
  case MAT_NEW_NONZERO_ALLOCATION_ERR:
  case MAT_UNUSED_NONZERO_LOCATION_ERR:
  case MAT_KEEP_NONZERO_PATTERN:
  case MAT_NEW_NONZERO_LOCATION_ERR:
  case MAT_USE_INODES:
  case MAT_IGNORE_ZERO_ENTRIES:
    ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
    ierr = MatSetOption(a->B,op,flg);CHKERRQ(ierr);
    break;
  case MAT_ROW_ORIENTED:
    a->roworiented = flg;
    ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
    ierr = MatSetOption(a->B,op,flg);CHKERRQ(ierr);
    break;
  case MAT_NEW_DIAGONALS:
    ierr = PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr);
    break;
  case MAT_IGNORE_OFF_PROC_ENTRIES:
    /* NOTE(review): sets donotstash unconditionally; one would expect
       `a->donotstash = flg` so the option can also be turned off — confirm */
    a->donotstash = PETSC_TRUE;
    break;
  /* symmetry options apply to the (square) diagonal block only */
  case MAT_SYMMETRIC:
    ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
    break;
  case MAT_STRUCTURALLY_SYMMETRIC:
    ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
    break;
  case MAT_HERMITIAN:
    ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
    break;
  case MAT_SYMMETRY_ETERNAL:
    ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr);
    break;
  default:
    SETERRQ1(PETSC_ERR_SUP,"unknown option %d",op);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetRow_MPIAIJ"
/*
   MatGetRow_MPIAIJ - Returns one locally-owned row of an MPIAIJ matrix in
   global column numbering, merging the diagonal (A) and off-diagonal (B)
   block entries into a single array sorted by increasing global column.

   The merged values/indices are copied into per-matrix scratch buffers
   (mat->rowvalues / mat->rowindices) sized for the longest local row, so
   only one row may be "gotten" at a time (guarded by getrowactive).
*/
PetscErrorCode MatGetRow_MPIAIJ(Mat matin,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  Mat_MPIAIJ     *mat = (Mat_MPIAIJ*)matin->data;
  PetscScalar    *vworkA,*vworkB,**pvA,**pvB,*v_p;
  PetscErrorCode ierr;
  PetscInt       i,*cworkA,*cworkB,**pcA,**pcB,cstart = matin->cmap->rstart;
  PetscInt       nztot,nzA,nzB,lrow,rstart = matin->rmap->rstart,rend = matin->rmap->rend;
  PetscInt       *cmap,*idx_p;

  PetscFunctionBegin;
  if (mat->getrowactive) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Already active");
  mat->getrowactive = PETSC_TRUE;

  if (!mat->rowvalues && (idx || v)) {
    /*
        allocate enough space to hold information from the longest row.
    */
    Mat_SeqAIJ *Aa = (Mat_SeqAIJ*)mat->A->data,*Ba = (Mat_SeqAIJ*)mat->B->data;
    PetscInt   max = 1,tmp;
    for (i=0; i<matin->rmap->n; i++) {
      tmp = Aa->i[i+1] - Aa->i[i] + Ba->i[i+1] - Ba->i[i];
      if (max < tmp) { max = tmp; }
    }
    /* single allocation: values first, indices aliased after them */
    ierr = PetscMalloc(max*(sizeof(PetscInt)+sizeof(PetscScalar)),&mat->rowvalues);CHKERRQ(ierr);
    mat->rowindices = (PetscInt*)(mat->rowvalues + max);
  }

  /* SETERRQ expands to a return statement, hence no trailing semicolon here */
  if (row < rstart || row >= rend) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Only local rows")
  lrow = row - rstart;

  /* pass NULL sub-requests when the caller did not ask for values/indices */
  pvA = &vworkA; pcA = &cworkA; pvB = &vworkB; pcB = &cworkB;
  if (!v)   {pvA = 0; pvB = 0;}
  if (!idx) {pcA = 0; if (!v) pcB = 0;}
  ierr = (*mat->A->ops->getrow)(mat->A,lrow,&nzA,pcA,pvA);CHKERRQ(ierr);
  ierr = (*mat->B->ops->getrow)(mat->B,lrow,&nzB,pcB,pvB);CHKERRQ(ierr);
  nztot = nzA + nzB;

  cmap  = mat->garray; /* maps B's compressed column indices to global columns */
  if (v  || idx) {
    if (nztot) {
      /* Sort by increasing column numbers, assuming A and B already sorted */
      PetscInt imark = -1;
      if (v) {
        *v = v_p = mat->rowvalues;
        /* B entries with global column < cstart come before the A block */
        for (i=0; i<nzB; i++) {
          if (cmap[cworkB[i]] < cstart)   v_p[i] = vworkB[i];
          else break;
        }
        imark = i;
        for (i=0; i<nzA; i++)     v_p[imark+i] = vworkA[i];
        for (i=imark; i<nzB; i++) v_p[nzA+i]   = vworkB[i];
      }
      if (idx) {
        *idx = idx_p = mat->rowindices;
        if (imark > -1) {
          /* split point already found while copying values */
          for (i=0; i<imark; i++) {
            idx_p[i] = cmap[cworkB[i]];
          }
        } else {
          for (i=0; i<nzB; i++) {
            if (cmap[cworkB[i]] < cstart)   idx_p[i] = cmap[cworkB[i]];
            else break;
          }
          imark = i;
        }
        for (i=0; i<nzA; i++)     idx_p[imark+i] = cstart + cworkA[i];
        for (i=imark; i<nzB; i++) idx_p[nzA+i]   = cmap[cworkB[i]];
      }
    } else {
      if (idx) *idx = 0;
      if (v)   *v   = 0;
    }
  }
  *nz = nztot;
  ierr = (*mat->A->ops->restorerow)(mat->A,lrow,&nzA,pcA,pvA);CHKERRQ(ierr);
  ierr = (*mat->B->ops->restorerow)(mat->B,lrow,&nzB,pcB,pvB);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatRestoreRow_MPIAIJ"
/*
   MatRestoreRow_MPIAIJ - Releases the one-row lock taken by MatGetRow_MPIAIJ.
   The scratch buffers are retained on the matrix for reuse, so nothing is
   freed here; only the getrowactive guard is cleared.
*/
PetscErrorCode MatRestoreRow_MPIAIJ(Mat mat,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data;

  PetscFunctionBegin;
  if (!aij->getrowactive) {
    SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"MatGetRow() must be called first");
  }
  aij->getrowactive = PETSC_FALSE;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatNorm_MPIAIJ"
/*
   MatNorm_MPIAIJ - Computes the Frobenius, 1- (max column sum) or
   infinity- (max row sum) norm of an MPIAIJ matrix.  Local contributions
   from the A and B blocks are combined, then reduced with MPI_Allreduce
   (SUM for Frobenius/1-norm, MAX for infinity-norm).  The 2-norm is not
   supported.
*/
PetscErrorCode MatNorm_MPIAIJ(Mat mat,NormType type,PetscReal *norm)
{
  Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
  Mat_SeqAIJ     *amat = (Mat_SeqAIJ*)aij->A->data,*bmat = (Mat_SeqAIJ*)aij->B->data;
  PetscErrorCode ierr;
  PetscInt       i,j,cstart = mat->cmap->rstart;
  PetscReal      sum = 0.0;
  MatScalar      *v;

  PetscFunctionBegin;
  if (aij->size == 1) {
    /* uniprocessor: delegate to the sequential norm of the A block */
    ierr =  MatNorm(aij->A,type,norm);CHKERRQ(ierr);
  } else {
    if (type == NORM_FROBENIUS) {
      v = amat->a;
      for (i=0; i<amat->nz; i++) {
#if defined(PETSC_USE_COMPLEX)
        sum += PetscRealPart(PetscConj(*v)*(*v)); v++;
#else
        sum += (*v)*(*v); v++;
#endif
      }
      v = bmat->a;
      for (i=0; i<bmat->nz; i++) {
#if defined(PETSC_USE_COMPLEX)
        sum += PetscRealPart(PetscConj(*v)*(*v)); v++;
#else
        sum += (*v)*(*v); v++;
#endif
      }
      ierr = MPI_Allreduce(&sum,norm,1,MPIU_REAL,MPI_SUM,((PetscObject)mat)->comm);CHKERRQ(ierr);
      *norm = sqrt(*norm);
    } else if (type == NORM_1) { /* max column norm */
      /* full global-width work arrays: tmp holds this process's column sums */
      PetscReal *tmp,*tmp2;
      PetscInt  *jj,*garray = aij->garray;
      ierr = PetscMalloc((mat->cmap->N+1)*sizeof(PetscReal),&tmp);CHKERRQ(ierr);
      ierr = PetscMalloc((mat->cmap->N+1)*sizeof(PetscReal),&tmp2);CHKERRQ(ierr);
      ierr = PetscMemzero(tmp,mat->cmap->N*sizeof(PetscReal));CHKERRQ(ierr);
      *norm = 0.0;
      v = amat->a; jj = amat->j;
      for (j=0; j<amat->nz; j++) {
        tmp[cstart + *jj++ ] += PetscAbsScalar(*v);  v++;
      }
      v = bmat->a; jj = bmat->j;
      for (j=0; j<bmat->nz; j++) {
        tmp[garray[*jj++]] += PetscAbsScalar(*v); v++;
      }
      ierr = MPI_Allreduce(tmp,tmp2,mat->cmap->N,MPIU_REAL,MPI_SUM,((PetscObject)mat)->comm);CHKERRQ(ierr);
      for (j=0; j<mat->cmap->N; j++) {
        if (tmp2[j] > *norm) *norm = tmp2[j];
      }
      ierr = PetscFree(tmp);CHKERRQ(ierr);
      ierr = PetscFree(tmp2);CHKERRQ(ierr);
    } else if (type == NORM_INFINITY) { /* max row norm */
      PetscReal ntemp = 0.0;
      for (j=0; j<aij->A->rmap->n; j++) {
        /* row sum = |A row j| + |B row j| (rows are local, so no reduction needed per row) */
        v = amat->a + amat->i[j];
        sum = 0.0;
        for (i=0; i<amat->i[j+1]-amat->i[j]; i++) {
          sum += PetscAbsScalar(*v); v++;
        }
        v = bmat->a + bmat->i[j];
        for (i=0; i<bmat->i[j+1]-bmat->i[j]; i++) {
          sum += PetscAbsScalar(*v); v++;
        }
        if (sum > ntemp) ntemp = sum;
      }
      ierr = MPI_Allreduce(&ntemp,norm,1,MPIU_REAL,MPI_MAX,((PetscObject)mat)->comm);CHKERRQ(ierr);
    } else {
      SETERRQ(PETSC_ERR_SUP,"No support for two norm");
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatTranspose_MPIAIJ"
/*
   MatTranspose_MPIAIJ - Forms the transpose of an MPIAIJ matrix by inserting
   each local CSR entry (row i, col j) as (row j, col i) of the result with
   MatSetValues(), then assembling.  Supports MAT_INITIAL_MATRIX (new matrix),
   reuse of a previously created transpose, and in-place transpose (A == *matout,
   square only) via MatHeaderCopy().
*/
PetscErrorCode MatTranspose_MPIAIJ(Mat A,MatReuse reuse,Mat *matout)
{
  Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
  Mat_SeqAIJ     *Aloc=(Mat_SeqAIJ*)a->A->data,*Bloc=(Mat_SeqAIJ*)a->B->data;
  PetscErrorCode ierr;
  PetscInt       M = A->rmap->N,N = A->cmap->N,ma,na,mb,*ai,*aj,*bi,*bj,row,*cols,*cols_tmp,i,*d_nnz;
  PetscInt       cstart=A->cmap->rstart,ncol;
  Mat            B;
  MatScalar      *array;

  PetscFunctionBegin;
  if (reuse == MAT_REUSE_MATRIX && A == *matout && M != N) SETERRQ(PETSC_ERR_ARG_SIZ,"Square matrix only for in-place");

  ma = A->rmap->n; na = A->cmap->n; mb = a->B->rmap->n;
  ai = Aloc->i; aj = Aloc->j;
  bi = Bloc->i; bj = Bloc->j;
  if (reuse == MAT_INITIAL_MATRIX || *matout == A) {
    /* compute d_nnz for preallocation; o_nnz is approximated by d_nnz to avoid communication */
    ierr = PetscMalloc((1+na)*sizeof(PetscInt),&d_nnz);CHKERRQ(ierr);
    ierr = PetscMemzero(d_nnz,(1+na)*sizeof(PetscInt));CHKERRQ(ierr);
    for (i=0; i<ai[ma]; i++){
      d_nnz[aj[i]] ++;
      aj[i] += cstart; /* global col index to be used by MatSetValues() */
    }

    ierr = MatCreate(((PetscObject)A)->comm,&B);CHKERRQ(ierr);
    ierr = MatSetSizes(B,A->cmap->n,A->rmap->n,N,M);CHKERRQ(ierr);
    ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr);
    ierr = MatMPIAIJSetPreallocation(B,0,d_nnz,0,d_nnz);CHKERRQ(ierr);
    ierr = PetscFree(d_nnz);CHKERRQ(ierr);
  } else {
    B = *matout;
  }

  /* copy over the A part */
  array = Aloc->a;
  row = A->rmap->rstart;
  for (i=0; i<ma; i++) {
    ncol = ai[i+1]-ai[i];
    /* insert row `row` of A as column `row` of B (note swapped m/n arguments) */
    ierr = MatSetValues(B,ncol,aj,1,&row,array,INSERT_VALUES);CHKERRQ(ierr);
    row++; array += ncol; aj += ncol;
  }
  aj = Aloc->j;
  for (i=0; i<ai[ma]; i++) aj[i] -= cstart; /* resume local col index */

  /* copy over the B part */
  ierr  = PetscMalloc(bi[mb]*sizeof(PetscInt),&cols);CHKERRQ(ierr);
  ierr  = PetscMemzero(cols,bi[mb]*sizeof(PetscInt));CHKERRQ(ierr);
  array = Bloc->a;
  row   = A->rmap->rstart;
  /* translate B's compressed column indices to global columns via garray */
  for (i=0; i<bi[mb]; i++) {cols[i] = a->garray[bj[i]];}
  cols_tmp = cols;
  for (i=0; i<mb; i++) {
    ncol = bi[i+1]-bi[i];
    ierr = MatSetValues(B,ncol,cols_tmp,1,&row,array,INSERT_VALUES);CHKERRQ(ierr);
    row++; array += ncol; cols_tmp += ncol;
  }
  ierr = PetscFree(cols);CHKERRQ(ierr);

  ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (reuse == MAT_INITIAL_MATRIX || *matout != A) {
    *matout = B;
  } else {
    /* in-place: replace A's contents with B's, destroying the B shell */
    ierr = MatHeaderCopy(A,B);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatDiagonalScale_MPIAIJ"
/*
   MatDiagonalScale_MPIAIJ - Computes mat = diag(ll)*mat*diag(rr).
   Left scaling applies directly to both local blocks; right scaling of the
   off-diagonal block needs the ghost values of rr, so the scatter into
   aij->lvec is started first and overlapped with the left/diagonal scaling.
*/
PetscErrorCode MatDiagonalScale_MPIAIJ(Mat mat,Vec ll,Vec rr)
{
  Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
  Mat            a = aij->A,b = aij->B;
  PetscErrorCode ierr;
  PetscInt       s1,s2,s3;

  PetscFunctionBegin;
  ierr = MatGetLocalSize(mat,&s2,&s3);CHKERRQ(ierr);
  if (rr) {
    ierr = VecGetLocalSize(rr,&s1);CHKERRQ(ierr);
    if (s1!=s3) SETERRQ(PETSC_ERR_ARG_SIZ,"right vector non-conforming local size");
    /* Overlap communication with computation. */
    ierr = VecScatterBegin(aij->Mvctx,rr,aij->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  }
  if (ll) {
    ierr = VecGetLocalSize(ll,&s1);CHKERRQ(ierr);
    if (s1!=s2) SETERRQ(PETSC_ERR_ARG_SIZ,"left vector non-conforming local size");
    ierr = (*b->ops->diagonalscale)(b,ll,0);CHKERRQ(ierr);
  }
  /* scale  the diagonal block */
  ierr = (*a->ops->diagonalscale)(a,ll,rr);CHKERRQ(ierr);

  if (rr) {
    /* Do a scatter end and then right scale the off-diagonal block */
    ierr = VecScatterEnd(aij->Mvctx,rr,aij->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = (*b->ops->diagonalscale)(b,0,aij->lvec);CHKERRQ(ierr);
  }

  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetBlockSize_MPIAIJ"
PetscErrorCode MatSetBlockSize_MPIAIJ(Mat A,PetscInt bs)
17475a838052SSatish Balay { 1748521d7252SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 1749521d7252SBarry Smith PetscErrorCode ierr; 1750521d7252SBarry Smith 17513a40ed3dSBarry Smith PetscFunctionBegin; 1752521d7252SBarry Smith ierr = MatSetBlockSize(a->A,bs);CHKERRQ(ierr); 1753521d7252SBarry Smith ierr = MatSetBlockSize(a->B,bs);CHKERRQ(ierr); 1754*829b6ff0SJed Brown ierr = PetscLayoutSetBlockSize(A->rmap,bs);CHKERRQ(ierr); 1755*829b6ff0SJed Brown ierr = PetscLayoutSetBlockSize(A->cmap,bs);CHKERRQ(ierr); 17563a40ed3dSBarry Smith PetscFunctionReturn(0); 17575a838052SSatish Balay } 17584a2ae208SSatish Balay #undef __FUNCT__ 17594a2ae208SSatish Balay #define __FUNCT__ "MatSetUnfactored_MPIAIJ" 1760dfbe8321SBarry Smith PetscErrorCode MatSetUnfactored_MPIAIJ(Mat A) 1761bb5a7306SBarry Smith { 1762bb5a7306SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 1763dfbe8321SBarry Smith PetscErrorCode ierr; 17643a40ed3dSBarry Smith 17653a40ed3dSBarry Smith PetscFunctionBegin; 1766bb5a7306SBarry Smith ierr = MatSetUnfactored(a->A);CHKERRQ(ierr); 17673a40ed3dSBarry Smith PetscFunctionReturn(0); 1768bb5a7306SBarry Smith } 1769bb5a7306SBarry Smith 17704a2ae208SSatish Balay #undef __FUNCT__ 17714a2ae208SSatish Balay #define __FUNCT__ "MatEqual_MPIAIJ" 1772dfbe8321SBarry Smith PetscErrorCode MatEqual_MPIAIJ(Mat A,Mat B,PetscTruth *flag) 1773d4bb536fSBarry Smith { 1774d4bb536fSBarry Smith Mat_MPIAIJ *matB = (Mat_MPIAIJ*)B->data,*matA = (Mat_MPIAIJ*)A->data; 1775d4bb536fSBarry Smith Mat a,b,c,d; 1776d4bb536fSBarry Smith PetscTruth flg; 1777dfbe8321SBarry Smith PetscErrorCode ierr; 1778d4bb536fSBarry Smith 17793a40ed3dSBarry Smith PetscFunctionBegin; 1780d4bb536fSBarry Smith a = matA->A; b = matA->B; 1781d4bb536fSBarry Smith c = matB->A; d = matB->B; 1782d4bb536fSBarry Smith 1783d4bb536fSBarry Smith ierr = MatEqual(a,c,&flg);CHKERRQ(ierr); 1784abc0a331SBarry Smith if (flg) { 1785d4bb536fSBarry Smith ierr = MatEqual(b,d,&flg);CHKERRQ(ierr); 1786d4bb536fSBarry Smith } 
17877adad957SLisandro Dalcin ierr = MPI_Allreduce(&flg,flag,1,MPI_INT,MPI_LAND,((PetscObject)A)->comm);CHKERRQ(ierr); 17883a40ed3dSBarry Smith PetscFunctionReturn(0); 1789d4bb536fSBarry Smith } 1790d4bb536fSBarry Smith 17914a2ae208SSatish Balay #undef __FUNCT__ 17924a2ae208SSatish Balay #define __FUNCT__ "MatCopy_MPIAIJ" 1793dfbe8321SBarry Smith PetscErrorCode MatCopy_MPIAIJ(Mat A,Mat B,MatStructure str) 1794cb5b572fSBarry Smith { 1795dfbe8321SBarry Smith PetscErrorCode ierr; 1796cb5b572fSBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ *)A->data; 1797cb5b572fSBarry Smith Mat_MPIAIJ *b = (Mat_MPIAIJ *)B->data; 1798cb5b572fSBarry Smith 1799cb5b572fSBarry Smith PetscFunctionBegin; 180033f4a19fSKris Buschelman /* If the two matrices don't have the same copy implementation, they aren't compatible for fast copy. */ 180133f4a19fSKris Buschelman if ((str != SAME_NONZERO_PATTERN) || (A->ops->copy != B->ops->copy)) { 1802cb5b572fSBarry Smith /* because of the column compression in the off-processor part of the matrix a->B, 1803cb5b572fSBarry Smith the number of columns in a->B and b->B may be different, hence we cannot call 1804cb5b572fSBarry Smith the MatCopy() directly on the two parts. 
If need be, we can provide a more 1805cb5b572fSBarry Smith efficient copy than the MatCopy_Basic() by first uncompressing the a->B matrices 1806cb5b572fSBarry Smith then copying the submatrices */ 1807cb5b572fSBarry Smith ierr = MatCopy_Basic(A,B,str);CHKERRQ(ierr); 1808cb5b572fSBarry Smith } else { 1809cb5b572fSBarry Smith ierr = MatCopy(a->A,b->A,str);CHKERRQ(ierr); 1810cb5b572fSBarry Smith ierr = MatCopy(a->B,b->B,str);CHKERRQ(ierr); 1811cb5b572fSBarry Smith } 1812cb5b572fSBarry Smith PetscFunctionReturn(0); 1813cb5b572fSBarry Smith } 1814cb5b572fSBarry Smith 18154a2ae208SSatish Balay #undef __FUNCT__ 18164a2ae208SSatish Balay #define __FUNCT__ "MatSetUpPreallocation_MPIAIJ" 1817dfbe8321SBarry Smith PetscErrorCode MatSetUpPreallocation_MPIAIJ(Mat A) 1818273d9f13SBarry Smith { 1819dfbe8321SBarry Smith PetscErrorCode ierr; 1820273d9f13SBarry Smith 1821273d9f13SBarry Smith PetscFunctionBegin; 1822273d9f13SBarry Smith ierr = MatMPIAIJSetPreallocation(A,PETSC_DEFAULT,0,PETSC_DEFAULT,0);CHKERRQ(ierr); 1823273d9f13SBarry Smith PetscFunctionReturn(0); 1824273d9f13SBarry Smith } 1825273d9f13SBarry Smith 1826ac90fabeSBarry Smith #include "petscblaslapack.h" 1827ac90fabeSBarry Smith #undef __FUNCT__ 1828ac90fabeSBarry Smith #define __FUNCT__ "MatAXPY_MPIAIJ" 1829f4df32b1SMatthew Knepley PetscErrorCode MatAXPY_MPIAIJ(Mat Y,PetscScalar a,Mat X,MatStructure str) 1830ac90fabeSBarry Smith { 1831dfbe8321SBarry Smith PetscErrorCode ierr; 1832b1d57f15SBarry Smith PetscInt i; 1833ac90fabeSBarry Smith Mat_MPIAIJ *xx = (Mat_MPIAIJ *)X->data,*yy = (Mat_MPIAIJ *)Y->data; 18344ce68768SBarry Smith PetscBLASInt bnz,one=1; 1835ac90fabeSBarry Smith Mat_SeqAIJ *x,*y; 1836ac90fabeSBarry Smith 1837ac90fabeSBarry Smith PetscFunctionBegin; 1838ac90fabeSBarry Smith if (str == SAME_NONZERO_PATTERN) { 1839f4df32b1SMatthew Knepley PetscScalar alpha = a; 1840ac90fabeSBarry Smith x = (Mat_SeqAIJ *)xx->A->data; 1841ac90fabeSBarry Smith y = (Mat_SeqAIJ *)yy->A->data; 18420805154bSBarry Smith bnz = 
PetscBLASIntCast(x->nz); 1843f4df32b1SMatthew Knepley BLASaxpy_(&bnz,&alpha,x->a,&one,y->a,&one); 1844ac90fabeSBarry Smith x = (Mat_SeqAIJ *)xx->B->data; 1845ac90fabeSBarry Smith y = (Mat_SeqAIJ *)yy->B->data; 18460805154bSBarry Smith bnz = PetscBLASIntCast(x->nz); 1847f4df32b1SMatthew Knepley BLASaxpy_(&bnz,&alpha,x->a,&one,y->a,&one); 1848a30b2313SHong Zhang } else if (str == SUBSET_NONZERO_PATTERN) { 1849f4df32b1SMatthew Knepley ierr = MatAXPY_SeqAIJ(yy->A,a,xx->A,str);CHKERRQ(ierr); 1850c537a176SHong Zhang 1851c537a176SHong Zhang x = (Mat_SeqAIJ *)xx->B->data; 1852a30b2313SHong Zhang y = (Mat_SeqAIJ *)yy->B->data; 1853a30b2313SHong Zhang if (y->xtoy && y->XtoY != xx->B) { 1854a30b2313SHong Zhang ierr = PetscFree(y->xtoy);CHKERRQ(ierr); 1855a30b2313SHong Zhang ierr = MatDestroy(y->XtoY);CHKERRQ(ierr); 1856c537a176SHong Zhang } 1857a30b2313SHong Zhang if (!y->xtoy) { /* get xtoy */ 1858d0f46423SBarry Smith ierr = MatAXPYGetxtoy_Private(xx->B->rmap->n,x->i,x->j,xx->garray,y->i,y->j,yy->garray,&y->xtoy);CHKERRQ(ierr); 1859a30b2313SHong Zhang y->XtoY = xx->B; 1860407f6b05SHong Zhang ierr = PetscObjectReference((PetscObject)xx->B);CHKERRQ(ierr); 1861c537a176SHong Zhang } 1862f4df32b1SMatthew Knepley for (i=0; i<x->nz; i++) y->a[y->xtoy[i]] += a*(x->a[i]); 1863ac90fabeSBarry Smith } else { 1864f4df32b1SMatthew Knepley ierr = MatAXPY_Basic(Y,a,X,str);CHKERRQ(ierr); 1865ac90fabeSBarry Smith } 1866ac90fabeSBarry Smith PetscFunctionReturn(0); 1867ac90fabeSBarry Smith } 1868ac90fabeSBarry Smith 1869354c94deSBarry Smith EXTERN PetscErrorCode PETSCMAT_DLLEXPORT MatConjugate_SeqAIJ(Mat); 1870354c94deSBarry Smith 1871354c94deSBarry Smith #undef __FUNCT__ 1872354c94deSBarry Smith #define __FUNCT__ "MatConjugate_MPIAIJ" 1873354c94deSBarry Smith PetscErrorCode PETSCMAT_DLLEXPORT MatConjugate_MPIAIJ(Mat mat) 1874354c94deSBarry Smith { 1875354c94deSBarry Smith #if defined(PETSC_USE_COMPLEX) 1876354c94deSBarry Smith PetscErrorCode ierr; 1877354c94deSBarry Smith Mat_MPIAIJ *aij = 
(Mat_MPIAIJ *)mat->data; 1878354c94deSBarry Smith 1879354c94deSBarry Smith PetscFunctionBegin; 1880354c94deSBarry Smith ierr = MatConjugate_SeqAIJ(aij->A);CHKERRQ(ierr); 1881354c94deSBarry Smith ierr = MatConjugate_SeqAIJ(aij->B);CHKERRQ(ierr); 1882354c94deSBarry Smith #else 1883354c94deSBarry Smith PetscFunctionBegin; 1884354c94deSBarry Smith #endif 1885354c94deSBarry Smith PetscFunctionReturn(0); 1886354c94deSBarry Smith } 1887354c94deSBarry Smith 188899cafbc1SBarry Smith #undef __FUNCT__ 188999cafbc1SBarry Smith #define __FUNCT__ "MatRealPart_MPIAIJ" 189099cafbc1SBarry Smith PetscErrorCode MatRealPart_MPIAIJ(Mat A) 189199cafbc1SBarry Smith { 189299cafbc1SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 189399cafbc1SBarry Smith PetscErrorCode ierr; 189499cafbc1SBarry Smith 189599cafbc1SBarry Smith PetscFunctionBegin; 189699cafbc1SBarry Smith ierr = MatRealPart(a->A);CHKERRQ(ierr); 189799cafbc1SBarry Smith ierr = MatRealPart(a->B);CHKERRQ(ierr); 189899cafbc1SBarry Smith PetscFunctionReturn(0); 189999cafbc1SBarry Smith } 190099cafbc1SBarry Smith 190199cafbc1SBarry Smith #undef __FUNCT__ 190299cafbc1SBarry Smith #define __FUNCT__ "MatImaginaryPart_MPIAIJ" 190399cafbc1SBarry Smith PetscErrorCode MatImaginaryPart_MPIAIJ(Mat A) 190499cafbc1SBarry Smith { 190599cafbc1SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 190699cafbc1SBarry Smith PetscErrorCode ierr; 190799cafbc1SBarry Smith 190899cafbc1SBarry Smith PetscFunctionBegin; 190999cafbc1SBarry Smith ierr = MatImaginaryPart(a->A);CHKERRQ(ierr); 191099cafbc1SBarry Smith ierr = MatImaginaryPart(a->B);CHKERRQ(ierr); 191199cafbc1SBarry Smith PetscFunctionReturn(0); 191299cafbc1SBarry Smith } 191399cafbc1SBarry Smith 1914103bf8bdSMatthew Knepley #ifdef PETSC_HAVE_PBGL 1915103bf8bdSMatthew Knepley 1916103bf8bdSMatthew Knepley #include <boost/parallel/mpi/bsp_process_group.hpp> 1917a2c909beSMatthew Knepley #include <boost/graph/distributed/ilu_default_graph.hpp> 1918a2c909beSMatthew Knepley #include 
<boost/graph/distributed/ilu_0_block.hpp> 1919a2c909beSMatthew Knepley #include <boost/graph/distributed/ilu_preconditioner.hpp> 1920103bf8bdSMatthew Knepley #include <boost/graph/distributed/petsc/interface.hpp> 1921a2c909beSMatthew Knepley #include <boost/multi_array.hpp> 1922d0f46423SBarry Smith #include <boost/parallel/distributed_property_map->hpp> 1923103bf8bdSMatthew Knepley 1924103bf8bdSMatthew Knepley #undef __FUNCT__ 1925103bf8bdSMatthew Knepley #define __FUNCT__ "MatILUFactorSymbolic_MPIAIJ" 1926103bf8bdSMatthew Knepley /* 1927103bf8bdSMatthew Knepley This uses the parallel ILU factorization of Peter Gottschling <pgottsch@osl.iu.edu> 1928103bf8bdSMatthew Knepley */ 19290481f469SBarry Smith PetscErrorCode MatILUFactorSymbolic_MPIAIJ(Mat fact,Mat A, IS isrow, IS iscol, const MatFactorInfo *info) 1930103bf8bdSMatthew Knepley { 1931a2c909beSMatthew Knepley namespace petsc = boost::distributed::petsc; 1932a2c909beSMatthew Knepley 1933a2c909beSMatthew Knepley namespace graph_dist = boost::graph::distributed; 1934a2c909beSMatthew Knepley using boost::graph::distributed::ilu_default::process_group_type; 1935a2c909beSMatthew Knepley using boost::graph::ilu_permuted; 1936a2c909beSMatthew Knepley 1937103bf8bdSMatthew Knepley PetscTruth row_identity, col_identity; 1938776b82aeSLisandro Dalcin PetscContainer c; 1939103bf8bdSMatthew Knepley PetscInt m, n, M, N; 1940103bf8bdSMatthew Knepley PetscErrorCode ierr; 1941103bf8bdSMatthew Knepley 1942103bf8bdSMatthew Knepley PetscFunctionBegin; 1943103bf8bdSMatthew Knepley if (info->levels != 0) SETERRQ(PETSC_ERR_SUP,"Only levels = 0 supported for parallel ilu"); 1944103bf8bdSMatthew Knepley ierr = ISIdentity(isrow, &row_identity);CHKERRQ(ierr); 1945103bf8bdSMatthew Knepley ierr = ISIdentity(iscol, &col_identity);CHKERRQ(ierr); 1946103bf8bdSMatthew Knepley if (!row_identity || !col_identity) { 1947103bf8bdSMatthew Knepley SETERRQ(PETSC_ERR_ARG_WRONG,"Row and column permutations must be identity for parallel ILU"); 
1948103bf8bdSMatthew Knepley } 1949103bf8bdSMatthew Knepley 1950103bf8bdSMatthew Knepley process_group_type pg; 1951a2c909beSMatthew Knepley typedef graph_dist::ilu_default::ilu_level_graph_type lgraph_type; 1952a2c909beSMatthew Knepley lgraph_type* lgraph_p = new lgraph_type(petsc::num_global_vertices(A), pg, petsc::matrix_distribution(A, pg)); 1953a2c909beSMatthew Knepley lgraph_type& level_graph = *lgraph_p; 1954a2c909beSMatthew Knepley graph_dist::ilu_default::graph_type& graph(level_graph.graph); 1955a2c909beSMatthew Knepley 1956103bf8bdSMatthew Knepley petsc::read_matrix(A, graph, get(boost::edge_weight, graph)); 1957a2c909beSMatthew Knepley ilu_permuted(level_graph); 1958103bf8bdSMatthew Knepley 1959103bf8bdSMatthew Knepley /* put together the new matrix */ 19607adad957SLisandro Dalcin ierr = MatCreate(((PetscObject)A)->comm, fact);CHKERRQ(ierr); 1961103bf8bdSMatthew Knepley ierr = MatGetLocalSize(A, &m, &n);CHKERRQ(ierr); 1962103bf8bdSMatthew Knepley ierr = MatGetSize(A, &M, &N);CHKERRQ(ierr); 1963719d5645SBarry Smith ierr = MatSetSizes(fact, m, n, M, N);CHKERRQ(ierr); 1964719d5645SBarry Smith ierr = MatSetType(fact, ((PetscObject)A)->type_name);CHKERRQ(ierr); 1965719d5645SBarry Smith ierr = MatAssemblyBegin(fact, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1966719d5645SBarry Smith ierr = MatAssemblyEnd(fact, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1967103bf8bdSMatthew Knepley 19687adad957SLisandro Dalcin ierr = PetscContainerCreate(((PetscObject)A)->comm, &c); 1969776b82aeSLisandro Dalcin ierr = PetscContainerSetPointer(c, lgraph_p); 1970719d5645SBarry Smith ierr = PetscObjectCompose((PetscObject) (fact), "graph", (PetscObject) c); 1971103bf8bdSMatthew Knepley PetscFunctionReturn(0); 1972103bf8bdSMatthew Knepley } 1973103bf8bdSMatthew Knepley 1974103bf8bdSMatthew Knepley #undef __FUNCT__ 1975103bf8bdSMatthew Knepley #define __FUNCT__ "MatLUFactorNumeric_MPIAIJ" 19760481f469SBarry Smith PetscErrorCode MatLUFactorNumeric_MPIAIJ(Mat B,Mat A, const MatFactorInfo *info) 
1977103bf8bdSMatthew Knepley { 1978103bf8bdSMatthew Knepley PetscFunctionBegin; 1979103bf8bdSMatthew Knepley PetscFunctionReturn(0); 1980103bf8bdSMatthew Knepley } 1981103bf8bdSMatthew Knepley 1982103bf8bdSMatthew Knepley #undef __FUNCT__ 1983103bf8bdSMatthew Knepley #define __FUNCT__ "MatSolve_MPIAIJ" 1984103bf8bdSMatthew Knepley /* 1985103bf8bdSMatthew Knepley This uses the parallel ILU factorization of Peter Gottschling <pgottsch@osl.iu.edu> 1986103bf8bdSMatthew Knepley */ 1987103bf8bdSMatthew Knepley PetscErrorCode MatSolve_MPIAIJ(Mat A, Vec b, Vec x) 1988103bf8bdSMatthew Knepley { 1989a2c909beSMatthew Knepley namespace graph_dist = boost::graph::distributed; 1990a2c909beSMatthew Knepley 1991a2c909beSMatthew Knepley typedef graph_dist::ilu_default::ilu_level_graph_type lgraph_type; 1992a2c909beSMatthew Knepley lgraph_type* lgraph_p; 1993776b82aeSLisandro Dalcin PetscContainer c; 1994103bf8bdSMatthew Knepley PetscErrorCode ierr; 1995103bf8bdSMatthew Knepley 1996103bf8bdSMatthew Knepley PetscFunctionBegin; 1997103bf8bdSMatthew Knepley ierr = PetscObjectQuery((PetscObject) A, "graph", (PetscObject *) &c);CHKERRQ(ierr); 1998776b82aeSLisandro Dalcin ierr = PetscContainerGetPointer(c, (void **) &lgraph_p);CHKERRQ(ierr); 1999103bf8bdSMatthew Knepley ierr = VecCopy(b, x);CHKERRQ(ierr); 2000a2c909beSMatthew Knepley 2001a2c909beSMatthew Knepley PetscScalar* array_x; 2002a2c909beSMatthew Knepley ierr = VecGetArray(x, &array_x);CHKERRQ(ierr); 2003a2c909beSMatthew Knepley PetscInt sx; 2004a2c909beSMatthew Knepley ierr = VecGetSize(x, &sx);CHKERRQ(ierr); 2005a2c909beSMatthew Knepley 2006a2c909beSMatthew Knepley PetscScalar* array_b; 2007a2c909beSMatthew Knepley ierr = VecGetArray(b, &array_b);CHKERRQ(ierr); 2008a2c909beSMatthew Knepley PetscInt sb; 2009a2c909beSMatthew Knepley ierr = VecGetSize(b, &sb);CHKERRQ(ierr); 2010a2c909beSMatthew Knepley 2011a2c909beSMatthew Knepley lgraph_type& level_graph = *lgraph_p; 2012a2c909beSMatthew Knepley 
graph_dist::ilu_default::graph_type& graph(level_graph.graph); 2013a2c909beSMatthew Knepley 2014a2c909beSMatthew Knepley typedef boost::multi_array_ref<PetscScalar, 1> array_ref_type; 2015a2c909beSMatthew Knepley array_ref_type ref_b(array_b, boost::extents[num_vertices(graph)]), 2016a2c909beSMatthew Knepley ref_x(array_x, boost::extents[num_vertices(graph)]); 2017a2c909beSMatthew Knepley 2018a2c909beSMatthew Knepley typedef boost::iterator_property_map<array_ref_type::iterator, 2019a2c909beSMatthew Knepley boost::property_map<graph_dist::ilu_default::graph_type, boost::vertex_index_t>::type> gvector_type; 2020a2c909beSMatthew Knepley gvector_type vector_b(ref_b.begin(), get(boost::vertex_index, graph)), 2021a2c909beSMatthew Knepley vector_x(ref_x.begin(), get(boost::vertex_index, graph)); 2022a2c909beSMatthew Knepley 2023a2c909beSMatthew Knepley ilu_set_solve(*lgraph_p, vector_b, vector_x); 2024a2c909beSMatthew Knepley 2025103bf8bdSMatthew Knepley PetscFunctionReturn(0); 2026103bf8bdSMatthew Knepley } 2027103bf8bdSMatthew Knepley #endif 2028103bf8bdSMatthew Knepley 202969db28dcSHong Zhang typedef struct { /* used by MatGetRedundantMatrix() for reusing matredundant */ 203069db28dcSHong Zhang PetscInt nzlocal,nsends,nrecvs; 2031aa5bb8c0SSatish Balay PetscMPIInt *send_rank; 2032aa5bb8c0SSatish Balay PetscInt *sbuf_nz,*sbuf_j,**rbuf_j; 203369db28dcSHong Zhang PetscScalar *sbuf_a,**rbuf_a; 203469db28dcSHong Zhang PetscErrorCode (*MatDestroy)(Mat); 203569db28dcSHong Zhang } Mat_Redundant; 203669db28dcSHong Zhang 203769db28dcSHong Zhang #undef __FUNCT__ 203869db28dcSHong Zhang #define __FUNCT__ "PetscContainerDestroy_MatRedundant" 203969db28dcSHong Zhang PetscErrorCode PetscContainerDestroy_MatRedundant(void *ptr) 204069db28dcSHong Zhang { 204169db28dcSHong Zhang PetscErrorCode ierr; 204269db28dcSHong Zhang Mat_Redundant *redund=(Mat_Redundant*)ptr; 204369db28dcSHong Zhang PetscInt i; 204469db28dcSHong Zhang 204569db28dcSHong Zhang PetscFunctionBegin; 204669db28dcSHong 
Zhang ierr = PetscFree(redund->send_rank);CHKERRQ(ierr); 204769db28dcSHong Zhang ierr = PetscFree(redund->sbuf_j);CHKERRQ(ierr); 204869db28dcSHong Zhang ierr = PetscFree(redund->sbuf_a);CHKERRQ(ierr); 204969db28dcSHong Zhang for (i=0; i<redund->nrecvs; i++){ 205069db28dcSHong Zhang ierr = PetscFree(redund->rbuf_j[i]);CHKERRQ(ierr); 205169db28dcSHong Zhang ierr = PetscFree(redund->rbuf_a[i]);CHKERRQ(ierr); 205269db28dcSHong Zhang } 205369db28dcSHong Zhang ierr = PetscFree3(redund->sbuf_nz,redund->rbuf_j,redund->rbuf_a);CHKERRQ(ierr); 205469db28dcSHong Zhang ierr = PetscFree(redund);CHKERRQ(ierr); 205569db28dcSHong Zhang PetscFunctionReturn(0); 205669db28dcSHong Zhang } 205769db28dcSHong Zhang 205869db28dcSHong Zhang #undef __FUNCT__ 205969db28dcSHong Zhang #define __FUNCT__ "MatDestroy_MatRedundant" 206069db28dcSHong Zhang PetscErrorCode MatDestroy_MatRedundant(Mat A) 206169db28dcSHong Zhang { 206269db28dcSHong Zhang PetscErrorCode ierr; 206369db28dcSHong Zhang PetscContainer container; 206469db28dcSHong Zhang Mat_Redundant *redund=PETSC_NULL; 206569db28dcSHong Zhang 206669db28dcSHong Zhang PetscFunctionBegin; 206769db28dcSHong Zhang ierr = PetscObjectQuery((PetscObject)A,"Mat_Redundant",(PetscObject *)&container);CHKERRQ(ierr); 206869db28dcSHong Zhang if (container) { 206969db28dcSHong Zhang ierr = PetscContainerGetPointer(container,(void **)&redund);CHKERRQ(ierr); 207069db28dcSHong Zhang } else { 207169db28dcSHong Zhang SETERRQ(PETSC_ERR_PLIB,"Container does not exit"); 207269db28dcSHong Zhang } 207369db28dcSHong Zhang A->ops->destroy = redund->MatDestroy; 207469db28dcSHong Zhang ierr = PetscObjectCompose((PetscObject)A,"Mat_Redundant",0);CHKERRQ(ierr); 207569db28dcSHong Zhang ierr = (*A->ops->destroy)(A);CHKERRQ(ierr); 207669db28dcSHong Zhang ierr = PetscContainerDestroy(container);CHKERRQ(ierr); 207769db28dcSHong Zhang PetscFunctionReturn(0); 207869db28dcSHong Zhang } 207969db28dcSHong Zhang 208069db28dcSHong Zhang #undef __FUNCT__ 208169db28dcSHong Zhang 
#define __FUNCT__ "MatGetRedundantMatrix_MPIAIJ" 208269db28dcSHong Zhang PetscErrorCode MatGetRedundantMatrix_MPIAIJ(Mat mat,PetscInt nsubcomm,MPI_Comm subcomm,PetscInt mlocal_sub,MatReuse reuse,Mat *matredundant) 208369db28dcSHong Zhang { 208469db28dcSHong Zhang PetscMPIInt rank,size; 20857adad957SLisandro Dalcin MPI_Comm comm=((PetscObject)mat)->comm; 208669db28dcSHong Zhang PetscErrorCode ierr; 208769db28dcSHong Zhang PetscInt nsends=0,nrecvs=0,i,rownz_max=0; 208869db28dcSHong Zhang PetscMPIInt *send_rank=PETSC_NULL,*recv_rank=PETSC_NULL; 2089d0f46423SBarry Smith PetscInt *rowrange=mat->rmap->range; 209069db28dcSHong Zhang Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 209169db28dcSHong Zhang Mat A=aij->A,B=aij->B,C=*matredundant; 209269db28dcSHong Zhang Mat_SeqAIJ *a=(Mat_SeqAIJ*)A->data,*b=(Mat_SeqAIJ*)B->data; 209369db28dcSHong Zhang PetscScalar *sbuf_a; 209469db28dcSHong Zhang PetscInt nzlocal=a->nz+b->nz; 2095d0f46423SBarry Smith PetscInt j,cstart=mat->cmap->rstart,cend=mat->cmap->rend,row,nzA,nzB,ncols,*cworkA,*cworkB; 2096d0f46423SBarry Smith PetscInt rstart=mat->rmap->rstart,rend=mat->rmap->rend,*bmap=aij->garray,M,N; 209769db28dcSHong Zhang PetscInt *cols,ctmp,lwrite,*rptr,l,*sbuf_j; 2098a77337e4SBarry Smith MatScalar *aworkA,*aworkB; 2099a77337e4SBarry Smith PetscScalar *vals; 210069db28dcSHong Zhang PetscMPIInt tag1,tag2,tag3,imdex; 210169db28dcSHong Zhang MPI_Request *s_waits1=PETSC_NULL,*s_waits2=PETSC_NULL,*s_waits3=PETSC_NULL, 210269db28dcSHong Zhang *r_waits1=PETSC_NULL,*r_waits2=PETSC_NULL,*r_waits3=PETSC_NULL; 210369db28dcSHong Zhang MPI_Status recv_status,*send_status; 210469db28dcSHong Zhang PetscInt *sbuf_nz=PETSC_NULL,*rbuf_nz=PETSC_NULL,count; 210569db28dcSHong Zhang PetscInt **rbuf_j=PETSC_NULL; 210669db28dcSHong Zhang PetscScalar **rbuf_a=PETSC_NULL; 210769db28dcSHong Zhang Mat_Redundant *redund=PETSC_NULL; 210869db28dcSHong Zhang PetscContainer container; 210969db28dcSHong Zhang 211069db28dcSHong Zhang PetscFunctionBegin; 211169db28dcSHong 
Zhang ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 211269db28dcSHong Zhang ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 211369db28dcSHong Zhang 211469db28dcSHong Zhang if (reuse == MAT_REUSE_MATRIX) { 211569db28dcSHong Zhang ierr = MatGetSize(C,&M,&N);CHKERRQ(ierr); 2116d0f46423SBarry Smith if (M != N || M != mat->rmap->N) SETERRQ(PETSC_ERR_ARG_SIZ,"Cannot reuse matrix. Wrong global size"); 211769db28dcSHong Zhang ierr = MatGetLocalSize(C,&M,&N);CHKERRQ(ierr); 211869db28dcSHong Zhang if (M != N || M != mlocal_sub) SETERRQ(PETSC_ERR_ARG_SIZ,"Cannot reuse matrix. Wrong local size"); 211969db28dcSHong Zhang ierr = PetscObjectQuery((PetscObject)C,"Mat_Redundant",(PetscObject *)&container);CHKERRQ(ierr); 212069db28dcSHong Zhang if (container) { 212169db28dcSHong Zhang ierr = PetscContainerGetPointer(container,(void **)&redund);CHKERRQ(ierr); 212269db28dcSHong Zhang } else { 212369db28dcSHong Zhang SETERRQ(PETSC_ERR_PLIB,"Container does not exit"); 212469db28dcSHong Zhang } 212569db28dcSHong Zhang if (nzlocal != redund->nzlocal) SETERRQ(PETSC_ERR_ARG_SIZ,"Cannot reuse matrix. 
Wrong nzlocal"); 212669db28dcSHong Zhang 212769db28dcSHong Zhang nsends = redund->nsends; 212869db28dcSHong Zhang nrecvs = redund->nrecvs; 212969db28dcSHong Zhang send_rank = redund->send_rank; recv_rank = send_rank + size; 213069db28dcSHong Zhang sbuf_nz = redund->sbuf_nz; rbuf_nz = sbuf_nz + nsends; 213169db28dcSHong Zhang sbuf_j = redund->sbuf_j; 213269db28dcSHong Zhang sbuf_a = redund->sbuf_a; 213369db28dcSHong Zhang rbuf_j = redund->rbuf_j; 213469db28dcSHong Zhang rbuf_a = redund->rbuf_a; 213569db28dcSHong Zhang } 213669db28dcSHong Zhang 213769db28dcSHong Zhang if (reuse == MAT_INITIAL_MATRIX){ 213869db28dcSHong Zhang PetscMPIInt subrank,subsize; 213969db28dcSHong Zhang PetscInt nleftover,np_subcomm; 214069db28dcSHong Zhang /* get the destination processors' id send_rank, nsends and nrecvs */ 214169db28dcSHong Zhang ierr = MPI_Comm_rank(subcomm,&subrank);CHKERRQ(ierr); 214269db28dcSHong Zhang ierr = MPI_Comm_size(subcomm,&subsize);CHKERRQ(ierr); 214369db28dcSHong Zhang ierr = PetscMalloc((2*size+1)*sizeof(PetscMPIInt),&send_rank); 214469db28dcSHong Zhang recv_rank = send_rank + size; 214569db28dcSHong Zhang np_subcomm = size/nsubcomm; 214669db28dcSHong Zhang nleftover = size - nsubcomm*np_subcomm; 214769db28dcSHong Zhang nsends = 0; nrecvs = 0; 214869db28dcSHong Zhang for (i=0; i<size; i++){ /* i=rank*/ 214969db28dcSHong Zhang if (subrank == i/nsubcomm && rank != i){ /* my_subrank == other's subrank */ 215069db28dcSHong Zhang send_rank[nsends] = i; nsends++; 215169db28dcSHong Zhang recv_rank[nrecvs++] = i; 215269db28dcSHong Zhang } 215369db28dcSHong Zhang } 215469db28dcSHong Zhang if (rank >= size - nleftover){/* this proc is a leftover processor */ 215569db28dcSHong Zhang i = size-nleftover-1; 215669db28dcSHong Zhang j = 0; 215769db28dcSHong Zhang while (j < nsubcomm - nleftover){ 215869db28dcSHong Zhang send_rank[nsends++] = i; 215969db28dcSHong Zhang i--; j++; 216069db28dcSHong Zhang } 216169db28dcSHong Zhang } 216269db28dcSHong Zhang 216369db28dcSHong 
Zhang if (nleftover && subsize == size/nsubcomm && subrank==subsize-1){ /* this proc recvs from leftover processors */ 216469db28dcSHong Zhang for (i=0; i<nleftover; i++){ 216569db28dcSHong Zhang recv_rank[nrecvs++] = size-nleftover+i; 216669db28dcSHong Zhang } 216769db28dcSHong Zhang } 216869db28dcSHong Zhang 216969db28dcSHong Zhang /* allocate sbuf_j, sbuf_a */ 217069db28dcSHong Zhang i = nzlocal + rowrange[rank+1] - rowrange[rank] + 2; 217169db28dcSHong Zhang ierr = PetscMalloc(i*sizeof(PetscInt),&sbuf_j);CHKERRQ(ierr); 217269db28dcSHong Zhang ierr = PetscMalloc((nzlocal+1)*sizeof(PetscScalar),&sbuf_a);CHKERRQ(ierr); 217369db28dcSHong Zhang } /* endof if (reuse == MAT_INITIAL_MATRIX) */ 217469db28dcSHong Zhang 217569db28dcSHong Zhang /* copy mat's local entries into the buffers */ 217669db28dcSHong Zhang if (reuse == MAT_INITIAL_MATRIX){ 217769db28dcSHong Zhang rownz_max = 0; 217869db28dcSHong Zhang rptr = sbuf_j; 217969db28dcSHong Zhang cols = sbuf_j + rend-rstart + 1; 218069db28dcSHong Zhang vals = sbuf_a; 218169db28dcSHong Zhang rptr[0] = 0; 218269db28dcSHong Zhang for (i=0; i<rend-rstart; i++){ 218369db28dcSHong Zhang row = i + rstart; 218469db28dcSHong Zhang nzA = a->i[i+1] - a->i[i]; nzB = b->i[i+1] - b->i[i]; 218569db28dcSHong Zhang ncols = nzA + nzB; 218669db28dcSHong Zhang cworkA = a->j + a->i[i]; cworkB = b->j + b->i[i]; 218769db28dcSHong Zhang aworkA = a->a + a->i[i]; aworkB = b->a + b->i[i]; 218869db28dcSHong Zhang /* load the column indices for this row into cols */ 218969db28dcSHong Zhang lwrite = 0; 219069db28dcSHong Zhang for (l=0; l<nzB; l++) { 219169db28dcSHong Zhang if ((ctmp = bmap[cworkB[l]]) < cstart){ 219269db28dcSHong Zhang vals[lwrite] = aworkB[l]; 219369db28dcSHong Zhang cols[lwrite++] = ctmp; 219469db28dcSHong Zhang } 219569db28dcSHong Zhang } 219669db28dcSHong Zhang for (l=0; l<nzA; l++){ 219769db28dcSHong Zhang vals[lwrite] = aworkA[l]; 219869db28dcSHong Zhang cols[lwrite++] = cstart + cworkA[l]; 219969db28dcSHong Zhang } 
220069db28dcSHong Zhang for (l=0; l<nzB; l++) { 220169db28dcSHong Zhang if ((ctmp = bmap[cworkB[l]]) >= cend){ 220269db28dcSHong Zhang vals[lwrite] = aworkB[l]; 220369db28dcSHong Zhang cols[lwrite++] = ctmp; 220469db28dcSHong Zhang } 220569db28dcSHong Zhang } 220669db28dcSHong Zhang vals += ncols; 220769db28dcSHong Zhang cols += ncols; 220869db28dcSHong Zhang rptr[i+1] = rptr[i] + ncols; 220969db28dcSHong Zhang if (rownz_max < ncols) rownz_max = ncols; 221069db28dcSHong Zhang } 221169db28dcSHong Zhang if (rptr[rend-rstart] != a->nz + b->nz) SETERRQ4(1, "rptr[%d] %d != %d + %d",rend-rstart,rptr[rend-rstart+1],a->nz,b->nz); 221269db28dcSHong Zhang } else { /* only copy matrix values into sbuf_a */ 221369db28dcSHong Zhang rptr = sbuf_j; 221469db28dcSHong Zhang vals = sbuf_a; 221569db28dcSHong Zhang rptr[0] = 0; 221669db28dcSHong Zhang for (i=0; i<rend-rstart; i++){ 221769db28dcSHong Zhang row = i + rstart; 221869db28dcSHong Zhang nzA = a->i[i+1] - a->i[i]; nzB = b->i[i+1] - b->i[i]; 221969db28dcSHong Zhang ncols = nzA + nzB; 222069db28dcSHong Zhang cworkA = a->j + a->i[i]; cworkB = b->j + b->i[i]; 222169db28dcSHong Zhang aworkA = a->a + a->i[i]; aworkB = b->a + b->i[i]; 222269db28dcSHong Zhang lwrite = 0; 222369db28dcSHong Zhang for (l=0; l<nzB; l++) { 222469db28dcSHong Zhang if ((ctmp = bmap[cworkB[l]]) < cstart) vals[lwrite++] = aworkB[l]; 222569db28dcSHong Zhang } 222669db28dcSHong Zhang for (l=0; l<nzA; l++) vals[lwrite++] = aworkA[l]; 222769db28dcSHong Zhang for (l=0; l<nzB; l++) { 222869db28dcSHong Zhang if ((ctmp = bmap[cworkB[l]]) >= cend) vals[lwrite++] = aworkB[l]; 222969db28dcSHong Zhang } 223069db28dcSHong Zhang vals += ncols; 223169db28dcSHong Zhang rptr[i+1] = rptr[i] + ncols; 223269db28dcSHong Zhang } 223369db28dcSHong Zhang } /* endof if (reuse == MAT_INITIAL_MATRIX) */ 223469db28dcSHong Zhang 223569db28dcSHong Zhang /* send nzlocal to others, and recv other's nzlocal */ 223669db28dcSHong Zhang /*--------------------------------------------------*/ 
223769db28dcSHong Zhang if (reuse == MAT_INITIAL_MATRIX){ 223869db28dcSHong Zhang ierr = PetscMalloc2(3*(nsends + nrecvs)+1,MPI_Request,&s_waits3,nsends+1,MPI_Status,&send_status);CHKERRQ(ierr); 223969db28dcSHong Zhang s_waits2 = s_waits3 + nsends; 224069db28dcSHong Zhang s_waits1 = s_waits2 + nsends; 224169db28dcSHong Zhang r_waits1 = s_waits1 + nsends; 224269db28dcSHong Zhang r_waits2 = r_waits1 + nrecvs; 224369db28dcSHong Zhang r_waits3 = r_waits2 + nrecvs; 224469db28dcSHong Zhang } else { 224569db28dcSHong Zhang ierr = PetscMalloc2(nsends + nrecvs +1,MPI_Request,&s_waits3,nsends+1,MPI_Status,&send_status);CHKERRQ(ierr); 224669db28dcSHong Zhang r_waits3 = s_waits3 + nsends; 224769db28dcSHong Zhang } 224869db28dcSHong Zhang 224969db28dcSHong Zhang ierr = PetscObjectGetNewTag((PetscObject)mat,&tag3);CHKERRQ(ierr); 225069db28dcSHong Zhang if (reuse == MAT_INITIAL_MATRIX){ 225169db28dcSHong Zhang /* get new tags to keep the communication clean */ 225269db28dcSHong Zhang ierr = PetscObjectGetNewTag((PetscObject)mat,&tag1);CHKERRQ(ierr); 225369db28dcSHong Zhang ierr = PetscObjectGetNewTag((PetscObject)mat,&tag2);CHKERRQ(ierr); 225469db28dcSHong Zhang ierr = PetscMalloc3(nsends+nrecvs+1,PetscInt,&sbuf_nz,nrecvs,PetscInt*,&rbuf_j,nrecvs,PetscScalar*,&rbuf_a);CHKERRQ(ierr); 225569db28dcSHong Zhang rbuf_nz = sbuf_nz + nsends; 225669db28dcSHong Zhang 225769db28dcSHong Zhang /* post receives of other's nzlocal */ 225869db28dcSHong Zhang for (i=0; i<nrecvs; i++){ 225969db28dcSHong Zhang ierr = MPI_Irecv(rbuf_nz+i,1,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,r_waits1+i);CHKERRQ(ierr); 226069db28dcSHong Zhang } 226169db28dcSHong Zhang /* send nzlocal to others */ 226269db28dcSHong Zhang for (i=0; i<nsends; i++){ 226369db28dcSHong Zhang sbuf_nz[i] = nzlocal; 226469db28dcSHong Zhang ierr = MPI_Isend(sbuf_nz+i,1,MPIU_INT,send_rank[i],tag1,comm,s_waits1+i);CHKERRQ(ierr); 226569db28dcSHong Zhang } 226669db28dcSHong Zhang /* wait on receives of nzlocal; allocate space for rbuf_j, rbuf_a */ 
226769db28dcSHong Zhang count = nrecvs; 226869db28dcSHong Zhang while (count) { 226969db28dcSHong Zhang ierr = MPI_Waitany(nrecvs,r_waits1,&imdex,&recv_status);CHKERRQ(ierr); 227069db28dcSHong Zhang recv_rank[imdex] = recv_status.MPI_SOURCE; 227169db28dcSHong Zhang /* allocate rbuf_a and rbuf_j; then post receives of rbuf_j */ 227269db28dcSHong Zhang ierr = PetscMalloc((rbuf_nz[imdex]+1)*sizeof(PetscScalar),&rbuf_a[imdex]);CHKERRQ(ierr); 227369db28dcSHong Zhang 227469db28dcSHong Zhang i = rowrange[recv_status.MPI_SOURCE+1] - rowrange[recv_status.MPI_SOURCE]; /* number of expected mat->i */ 227569db28dcSHong Zhang rbuf_nz[imdex] += i + 2; 227669db28dcSHong Zhang ierr = PetscMalloc(rbuf_nz[imdex]*sizeof(PetscInt),&rbuf_j[imdex]);CHKERRQ(ierr); 227769db28dcSHong Zhang ierr = MPI_Irecv(rbuf_j[imdex],rbuf_nz[imdex],MPIU_INT,recv_status.MPI_SOURCE,tag2,comm,r_waits2+imdex);CHKERRQ(ierr); 227869db28dcSHong Zhang count--; 227969db28dcSHong Zhang } 228069db28dcSHong Zhang /* wait on sends of nzlocal */ 228169db28dcSHong Zhang if (nsends) {ierr = MPI_Waitall(nsends,s_waits1,send_status);CHKERRQ(ierr);} 228269db28dcSHong Zhang /* send mat->i,j to others, and recv from other's */ 228369db28dcSHong Zhang /*------------------------------------------------*/ 228469db28dcSHong Zhang for (i=0; i<nsends; i++){ 228569db28dcSHong Zhang j = nzlocal + rowrange[rank+1] - rowrange[rank] + 1; 228669db28dcSHong Zhang ierr = MPI_Isend(sbuf_j,j,MPIU_INT,send_rank[i],tag2,comm,s_waits2+i);CHKERRQ(ierr); 228769db28dcSHong Zhang } 228869db28dcSHong Zhang /* wait on receives of mat->i,j */ 228969db28dcSHong Zhang /*------------------------------*/ 229069db28dcSHong Zhang count = nrecvs; 229169db28dcSHong Zhang while (count) { 229269db28dcSHong Zhang ierr = MPI_Waitany(nrecvs,r_waits2,&imdex,&recv_status);CHKERRQ(ierr); 229369db28dcSHong Zhang if (recv_rank[imdex] != recv_status.MPI_SOURCE) SETERRQ2(1, "recv_rank %d != MPI_SOURCE %d",recv_rank[imdex],recv_status.MPI_SOURCE); 229469db28dcSHong 
Zhang count--; 229569db28dcSHong Zhang } 229669db28dcSHong Zhang /* wait on sends of mat->i,j */ 229769db28dcSHong Zhang /*---------------------------*/ 229869db28dcSHong Zhang if (nsends) { 229969db28dcSHong Zhang ierr = MPI_Waitall(nsends,s_waits2,send_status);CHKERRQ(ierr); 230069db28dcSHong Zhang } 230169db28dcSHong Zhang } /* endof if (reuse == MAT_INITIAL_MATRIX) */ 230269db28dcSHong Zhang 230369db28dcSHong Zhang /* post receives, send and receive mat->a */ 230469db28dcSHong Zhang /*----------------------------------------*/ 230569db28dcSHong Zhang for (imdex=0; imdex<nrecvs; imdex++) { 230669db28dcSHong Zhang ierr = MPI_Irecv(rbuf_a[imdex],rbuf_nz[imdex],MPIU_SCALAR,recv_rank[imdex],tag3,comm,r_waits3+imdex);CHKERRQ(ierr); 230769db28dcSHong Zhang } 230869db28dcSHong Zhang for (i=0; i<nsends; i++){ 230969db28dcSHong Zhang ierr = MPI_Isend(sbuf_a,nzlocal,MPIU_SCALAR,send_rank[i],tag3,comm,s_waits3+i);CHKERRQ(ierr); 231069db28dcSHong Zhang } 231169db28dcSHong Zhang count = nrecvs; 231269db28dcSHong Zhang while (count) { 231369db28dcSHong Zhang ierr = MPI_Waitany(nrecvs,r_waits3,&imdex,&recv_status);CHKERRQ(ierr); 231469db28dcSHong Zhang if (recv_rank[imdex] != recv_status.MPI_SOURCE) SETERRQ2(1, "recv_rank %d != MPI_SOURCE %d",recv_rank[imdex],recv_status.MPI_SOURCE); 231569db28dcSHong Zhang count--; 231669db28dcSHong Zhang } 231769db28dcSHong Zhang if (nsends) { 231869db28dcSHong Zhang ierr = MPI_Waitall(nsends,s_waits3,send_status);CHKERRQ(ierr); 231969db28dcSHong Zhang } 232069db28dcSHong Zhang 232169db28dcSHong Zhang ierr = PetscFree2(s_waits3,send_status);CHKERRQ(ierr); 232269db28dcSHong Zhang 232369db28dcSHong Zhang /* create redundant matrix */ 232469db28dcSHong Zhang /*-------------------------*/ 232569db28dcSHong Zhang if (reuse == MAT_INITIAL_MATRIX){ 232669db28dcSHong Zhang /* compute rownz_max for preallocation */ 232769db28dcSHong Zhang for (imdex=0; imdex<nrecvs; imdex++){ 232869db28dcSHong Zhang j = rowrange[recv_rank[imdex]+1] - 
rowrange[recv_rank[imdex]]; 232969db28dcSHong Zhang rptr = rbuf_j[imdex]; 233069db28dcSHong Zhang for (i=0; i<j; i++){ 233169db28dcSHong Zhang ncols = rptr[i+1] - rptr[i]; 233269db28dcSHong Zhang if (rownz_max < ncols) rownz_max = ncols; 233369db28dcSHong Zhang } 233469db28dcSHong Zhang } 233569db28dcSHong Zhang 233669db28dcSHong Zhang ierr = MatCreate(subcomm,&C);CHKERRQ(ierr); 233769db28dcSHong Zhang ierr = MatSetSizes(C,mlocal_sub,mlocal_sub,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr); 233869db28dcSHong Zhang ierr = MatSetFromOptions(C);CHKERRQ(ierr); 233969db28dcSHong Zhang ierr = MatSeqAIJSetPreallocation(C,rownz_max,PETSC_NULL);CHKERRQ(ierr); 234069db28dcSHong Zhang ierr = MatMPIAIJSetPreallocation(C,rownz_max,PETSC_NULL,rownz_max,PETSC_NULL);CHKERRQ(ierr); 234169db28dcSHong Zhang } else { 234269db28dcSHong Zhang C = *matredundant; 234369db28dcSHong Zhang } 234469db28dcSHong Zhang 234569db28dcSHong Zhang /* insert local matrix entries */ 234669db28dcSHong Zhang rptr = sbuf_j; 234769db28dcSHong Zhang cols = sbuf_j + rend-rstart + 1; 234869db28dcSHong Zhang vals = sbuf_a; 234969db28dcSHong Zhang for (i=0; i<rend-rstart; i++){ 235069db28dcSHong Zhang row = i + rstart; 235169db28dcSHong Zhang ncols = rptr[i+1] - rptr[i]; 235269db28dcSHong Zhang ierr = MatSetValues(C,1,&row,ncols,cols,vals,INSERT_VALUES);CHKERRQ(ierr); 235369db28dcSHong Zhang vals += ncols; 235469db28dcSHong Zhang cols += ncols; 235569db28dcSHong Zhang } 235669db28dcSHong Zhang /* insert received matrix entries */ 235769db28dcSHong Zhang for (imdex=0; imdex<nrecvs; imdex++){ 235869db28dcSHong Zhang rstart = rowrange[recv_rank[imdex]]; 235969db28dcSHong Zhang rend = rowrange[recv_rank[imdex]+1]; 236069db28dcSHong Zhang rptr = rbuf_j[imdex]; 236169db28dcSHong Zhang cols = rbuf_j[imdex] + rend-rstart + 1; 236269db28dcSHong Zhang vals = rbuf_a[imdex]; 236369db28dcSHong Zhang for (i=0; i<rend-rstart; i++){ 236469db28dcSHong Zhang row = i + rstart; 236569db28dcSHong Zhang ncols = rptr[i+1] - rptr[i]; 
236669db28dcSHong Zhang ierr = MatSetValues(C,1,&row,ncols,cols,vals,INSERT_VALUES);CHKERRQ(ierr); 236769db28dcSHong Zhang vals += ncols; 236869db28dcSHong Zhang cols += ncols; 236969db28dcSHong Zhang } 237069db28dcSHong Zhang } 237169db28dcSHong Zhang ierr = MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 237269db28dcSHong Zhang ierr = MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 237369db28dcSHong Zhang ierr = MatGetSize(C,&M,&N);CHKERRQ(ierr); 2374d0f46423SBarry Smith if (M != mat->rmap->N || N != mat->cmap->N) SETERRQ2(PETSC_ERR_ARG_INCOMP,"redundant mat size %d != input mat size %d",M,mat->rmap->N); 237569db28dcSHong Zhang if (reuse == MAT_INITIAL_MATRIX){ 237669db28dcSHong Zhang PetscContainer container; 237769db28dcSHong Zhang *matredundant = C; 237869db28dcSHong Zhang /* create a supporting struct and attach it to C for reuse */ 237938f2d2fdSLisandro Dalcin ierr = PetscNewLog(C,Mat_Redundant,&redund);CHKERRQ(ierr); 238069db28dcSHong Zhang ierr = PetscContainerCreate(PETSC_COMM_SELF,&container);CHKERRQ(ierr); 238169db28dcSHong Zhang ierr = PetscContainerSetPointer(container,redund);CHKERRQ(ierr); 238269db28dcSHong Zhang ierr = PetscObjectCompose((PetscObject)C,"Mat_Redundant",(PetscObject)container);CHKERRQ(ierr); 238369db28dcSHong Zhang ierr = PetscContainerSetUserDestroy(container,PetscContainerDestroy_MatRedundant);CHKERRQ(ierr); 238469db28dcSHong Zhang 238569db28dcSHong Zhang redund->nzlocal = nzlocal; 238669db28dcSHong Zhang redund->nsends = nsends; 238769db28dcSHong Zhang redund->nrecvs = nrecvs; 238869db28dcSHong Zhang redund->send_rank = send_rank; 238969db28dcSHong Zhang redund->sbuf_nz = sbuf_nz; 239069db28dcSHong Zhang redund->sbuf_j = sbuf_j; 239169db28dcSHong Zhang redund->sbuf_a = sbuf_a; 239269db28dcSHong Zhang redund->rbuf_j = rbuf_j; 239369db28dcSHong Zhang redund->rbuf_a = rbuf_a; 239469db28dcSHong Zhang 239569db28dcSHong Zhang redund->MatDestroy = C->ops->destroy; 239669db28dcSHong Zhang C->ops->destroy = 
MatDestroy_MatRedundant; 239769db28dcSHong Zhang } 239869db28dcSHong Zhang PetscFunctionReturn(0); 239969db28dcSHong Zhang } 240069db28dcSHong Zhang 240103bc72f1SMatthew Knepley #undef __FUNCT__ 2402c91732d9SHong Zhang #define __FUNCT__ "MatGetRowMaxAbs_MPIAIJ" 2403c91732d9SHong Zhang PetscErrorCode MatGetRowMaxAbs_MPIAIJ(Mat A, Vec v, PetscInt idx[]) 2404c91732d9SHong Zhang { 2405c91732d9SHong Zhang Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 2406c91732d9SHong Zhang PetscErrorCode ierr; 2407c91732d9SHong Zhang PetscInt i,*idxb = 0; 2408c91732d9SHong Zhang PetscScalar *va,*vb; 2409c91732d9SHong Zhang Vec vtmp; 2410c91732d9SHong Zhang 2411c91732d9SHong Zhang PetscFunctionBegin; 2412c91732d9SHong Zhang ierr = MatGetRowMaxAbs(a->A,v,idx);CHKERRQ(ierr); 2413c91732d9SHong Zhang ierr = VecGetArray(v,&va);CHKERRQ(ierr); 2414c91732d9SHong Zhang if (idx) { 2415192daf7cSBarry Smith for (i=0; i<A->rmap->n; i++) { 2416d0f46423SBarry Smith if (PetscAbsScalar(va[i])) idx[i] += A->cmap->rstart; 2417c91732d9SHong Zhang } 2418c91732d9SHong Zhang } 2419c91732d9SHong Zhang 2420d0f46423SBarry Smith ierr = VecCreateSeq(PETSC_COMM_SELF,A->rmap->n,&vtmp);CHKERRQ(ierr); 2421c91732d9SHong Zhang if (idx) { 2422d0f46423SBarry Smith ierr = PetscMalloc(A->rmap->n*sizeof(PetscInt),&idxb);CHKERRQ(ierr); 2423c91732d9SHong Zhang } 2424c91732d9SHong Zhang ierr = MatGetRowMaxAbs(a->B,vtmp,idxb);CHKERRQ(ierr); 2425c91732d9SHong Zhang ierr = VecGetArray(vtmp,&vb);CHKERRQ(ierr); 2426c91732d9SHong Zhang 2427d0f46423SBarry Smith for (i=0; i<A->rmap->n; i++){ 2428c91732d9SHong Zhang if (PetscAbsScalar(va[i]) < PetscAbsScalar(vb[i])) { 2429c91732d9SHong Zhang va[i] = vb[i]; 2430c91732d9SHong Zhang if (idx) idx[i] = a->garray[idxb[i]]; 2431c91732d9SHong Zhang } 2432c91732d9SHong Zhang } 2433c91732d9SHong Zhang 2434c91732d9SHong Zhang ierr = VecRestoreArray(v,&va);CHKERRQ(ierr); 2435c91732d9SHong Zhang ierr = VecRestoreArray(vtmp,&vb);CHKERRQ(ierr); 2436c91732d9SHong Zhang if (idxb) { 2437c91732d9SHong Zhang ierr 
= PetscFree(idxb);CHKERRQ(ierr); 2438c91732d9SHong Zhang } 2439c91732d9SHong Zhang ierr = VecDestroy(vtmp);CHKERRQ(ierr); 2440c91732d9SHong Zhang PetscFunctionReturn(0); 2441c91732d9SHong Zhang } 2442c91732d9SHong Zhang 2443c91732d9SHong Zhang #undef __FUNCT__ 2444c87e5d42SMatthew Knepley #define __FUNCT__ "MatGetRowMinAbs_MPIAIJ" 2445c87e5d42SMatthew Knepley PetscErrorCode MatGetRowMinAbs_MPIAIJ(Mat A, Vec v, PetscInt idx[]) 2446c87e5d42SMatthew Knepley { 2447c87e5d42SMatthew Knepley Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 2448c87e5d42SMatthew Knepley PetscErrorCode ierr; 2449c87e5d42SMatthew Knepley PetscInt i,*idxb = 0; 2450c87e5d42SMatthew Knepley PetscScalar *va,*vb; 2451c87e5d42SMatthew Knepley Vec vtmp; 2452c87e5d42SMatthew Knepley 2453c87e5d42SMatthew Knepley PetscFunctionBegin; 2454c87e5d42SMatthew Knepley ierr = MatGetRowMinAbs(a->A,v,idx);CHKERRQ(ierr); 2455c87e5d42SMatthew Knepley ierr = VecGetArray(v,&va);CHKERRQ(ierr); 2456c87e5d42SMatthew Knepley if (idx) { 2457c87e5d42SMatthew Knepley for (i=0; i<A->cmap->n; i++) { 2458c87e5d42SMatthew Knepley if (PetscAbsScalar(va[i])) idx[i] += A->cmap->rstart; 2459c87e5d42SMatthew Knepley } 2460c87e5d42SMatthew Knepley } 2461c87e5d42SMatthew Knepley 2462c87e5d42SMatthew Knepley ierr = VecCreateSeq(PETSC_COMM_SELF,A->rmap->n,&vtmp);CHKERRQ(ierr); 2463c87e5d42SMatthew Knepley if (idx) { 2464c87e5d42SMatthew Knepley ierr = PetscMalloc(A->rmap->n*sizeof(PetscInt),&idxb);CHKERRQ(ierr); 2465c87e5d42SMatthew Knepley } 2466c87e5d42SMatthew Knepley ierr = MatGetRowMinAbs(a->B,vtmp,idxb);CHKERRQ(ierr); 2467c87e5d42SMatthew Knepley ierr = VecGetArray(vtmp,&vb);CHKERRQ(ierr); 2468c87e5d42SMatthew Knepley 2469c87e5d42SMatthew Knepley for (i=0; i<A->rmap->n; i++){ 2470c87e5d42SMatthew Knepley if (PetscAbsScalar(va[i]) > PetscAbsScalar(vb[i])) { 2471c87e5d42SMatthew Knepley va[i] = vb[i]; 2472c87e5d42SMatthew Knepley if (idx) idx[i] = a->garray[idxb[i]]; 2473c87e5d42SMatthew Knepley } 2474c87e5d42SMatthew Knepley } 
2475c87e5d42SMatthew Knepley 2476c87e5d42SMatthew Knepley ierr = VecRestoreArray(v,&va);CHKERRQ(ierr); 2477c87e5d42SMatthew Knepley ierr = VecRestoreArray(vtmp,&vb);CHKERRQ(ierr); 2478c87e5d42SMatthew Knepley if (idxb) { 2479c87e5d42SMatthew Knepley ierr = PetscFree(idxb);CHKERRQ(ierr); 2480c87e5d42SMatthew Knepley } 2481c87e5d42SMatthew Knepley ierr = VecDestroy(vtmp);CHKERRQ(ierr); 2482c87e5d42SMatthew Knepley PetscFunctionReturn(0); 2483c87e5d42SMatthew Knepley } 2484c87e5d42SMatthew Knepley 2485c87e5d42SMatthew Knepley #undef __FUNCT__ 248603bc72f1SMatthew Knepley #define __FUNCT__ "MatGetRowMin_MPIAIJ" 248703bc72f1SMatthew Knepley PetscErrorCode MatGetRowMin_MPIAIJ(Mat A, Vec v, PetscInt idx[]) 248803bc72f1SMatthew Knepley { 248903bc72f1SMatthew Knepley Mat_MPIAIJ *mat = (Mat_MPIAIJ *) A->data; 2490d0f46423SBarry Smith PetscInt n = A->rmap->n; 2491d0f46423SBarry Smith PetscInt cstart = A->cmap->rstart; 249203bc72f1SMatthew Knepley PetscInt *cmap = mat->garray; 249303bc72f1SMatthew Knepley PetscInt *diagIdx, *offdiagIdx; 249403bc72f1SMatthew Knepley Vec diagV, offdiagV; 249503bc72f1SMatthew Knepley PetscScalar *a, *diagA, *offdiagA; 249603bc72f1SMatthew Knepley PetscInt r; 249703bc72f1SMatthew Knepley PetscErrorCode ierr; 249803bc72f1SMatthew Knepley 249903bc72f1SMatthew Knepley PetscFunctionBegin; 250003bc72f1SMatthew Knepley ierr = PetscMalloc2(n,PetscInt,&diagIdx,n,PetscInt,&offdiagIdx);CHKERRQ(ierr); 2501e64afeacSLisandro Dalcin ierr = VecCreateSeq(((PetscObject)A)->comm, n, &diagV);CHKERRQ(ierr); 2502e64afeacSLisandro Dalcin ierr = VecCreateSeq(((PetscObject)A)->comm, n, &offdiagV);CHKERRQ(ierr); 250303bc72f1SMatthew Knepley ierr = MatGetRowMin(mat->A, diagV, diagIdx);CHKERRQ(ierr); 250403bc72f1SMatthew Knepley ierr = MatGetRowMin(mat->B, offdiagV, offdiagIdx);CHKERRQ(ierr); 250503bc72f1SMatthew Knepley ierr = VecGetArray(v, &a);CHKERRQ(ierr); 250603bc72f1SMatthew Knepley ierr = VecGetArray(diagV, &diagA);CHKERRQ(ierr); 250703bc72f1SMatthew Knepley ierr = 
VecGetArray(offdiagV, &offdiagA);CHKERRQ(ierr); 250803bc72f1SMatthew Knepley for(r = 0; r < n; ++r) { 2509028cd4eaSSatish Balay if (PetscAbsScalar(diagA[r]) <= PetscAbsScalar(offdiagA[r])) { 251003bc72f1SMatthew Knepley a[r] = diagA[r]; 251103bc72f1SMatthew Knepley idx[r] = cstart + diagIdx[r]; 251203bc72f1SMatthew Knepley } else { 251303bc72f1SMatthew Knepley a[r] = offdiagA[r]; 251403bc72f1SMatthew Knepley idx[r] = cmap[offdiagIdx[r]]; 251503bc72f1SMatthew Knepley } 251603bc72f1SMatthew Knepley } 251703bc72f1SMatthew Knepley ierr = VecRestoreArray(v, &a);CHKERRQ(ierr); 251803bc72f1SMatthew Knepley ierr = VecRestoreArray(diagV, &diagA);CHKERRQ(ierr); 251903bc72f1SMatthew Knepley ierr = VecRestoreArray(offdiagV, &offdiagA);CHKERRQ(ierr); 252003bc72f1SMatthew Knepley ierr = VecDestroy(diagV);CHKERRQ(ierr); 252103bc72f1SMatthew Knepley ierr = VecDestroy(offdiagV);CHKERRQ(ierr); 252203bc72f1SMatthew Knepley ierr = PetscFree2(diagIdx, offdiagIdx);CHKERRQ(ierr); 252303bc72f1SMatthew Knepley PetscFunctionReturn(0); 252403bc72f1SMatthew Knepley } 252503bc72f1SMatthew Knepley 25265494a064SHong Zhang #undef __FUNCT__ 2527c87e5d42SMatthew Knepley #define __FUNCT__ "MatGetRowMax_MPIAIJ" 2528c87e5d42SMatthew Knepley PetscErrorCode MatGetRowMax_MPIAIJ(Mat A, Vec v, PetscInt idx[]) 2529c87e5d42SMatthew Knepley { 2530c87e5d42SMatthew Knepley Mat_MPIAIJ *mat = (Mat_MPIAIJ *) A->data; 2531c87e5d42SMatthew Knepley PetscInt n = A->rmap->n; 2532c87e5d42SMatthew Knepley PetscInt cstart = A->cmap->rstart; 2533c87e5d42SMatthew Knepley PetscInt *cmap = mat->garray; 2534c87e5d42SMatthew Knepley PetscInt *diagIdx, *offdiagIdx; 2535c87e5d42SMatthew Knepley Vec diagV, offdiagV; 2536c87e5d42SMatthew Knepley PetscScalar *a, *diagA, *offdiagA; 2537c87e5d42SMatthew Knepley PetscInt r; 2538c87e5d42SMatthew Knepley PetscErrorCode ierr; 2539c87e5d42SMatthew Knepley 2540c87e5d42SMatthew Knepley PetscFunctionBegin; 2541c87e5d42SMatthew Knepley ierr = 
PetscMalloc2(n,PetscInt,&diagIdx,n,PetscInt,&offdiagIdx);CHKERRQ(ierr); 2542c87e5d42SMatthew Knepley ierr = VecCreateSeq(((PetscObject)A)->comm, n, &diagV);CHKERRQ(ierr); 2543c87e5d42SMatthew Knepley ierr = VecCreateSeq(((PetscObject)A)->comm, n, &offdiagV);CHKERRQ(ierr); 2544c87e5d42SMatthew Knepley ierr = MatGetRowMax(mat->A, diagV, diagIdx);CHKERRQ(ierr); 2545c87e5d42SMatthew Knepley ierr = MatGetRowMax(mat->B, offdiagV, offdiagIdx);CHKERRQ(ierr); 2546c87e5d42SMatthew Knepley ierr = VecGetArray(v, &a);CHKERRQ(ierr); 2547c87e5d42SMatthew Knepley ierr = VecGetArray(diagV, &diagA);CHKERRQ(ierr); 2548c87e5d42SMatthew Knepley ierr = VecGetArray(offdiagV, &offdiagA);CHKERRQ(ierr); 2549c87e5d42SMatthew Knepley for(r = 0; r < n; ++r) { 2550c87e5d42SMatthew Knepley if (PetscAbsScalar(diagA[r]) >= PetscAbsScalar(offdiagA[r])) { 2551c87e5d42SMatthew Knepley a[r] = diagA[r]; 2552c87e5d42SMatthew Knepley idx[r] = cstart + diagIdx[r]; 2553c87e5d42SMatthew Knepley } else { 2554c87e5d42SMatthew Knepley a[r] = offdiagA[r]; 2555c87e5d42SMatthew Knepley idx[r] = cmap[offdiagIdx[r]]; 2556c87e5d42SMatthew Knepley } 2557c87e5d42SMatthew Knepley } 2558c87e5d42SMatthew Knepley ierr = VecRestoreArray(v, &a);CHKERRQ(ierr); 2559c87e5d42SMatthew Knepley ierr = VecRestoreArray(diagV, &diagA);CHKERRQ(ierr); 2560c87e5d42SMatthew Knepley ierr = VecRestoreArray(offdiagV, &offdiagA);CHKERRQ(ierr); 2561c87e5d42SMatthew Knepley ierr = VecDestroy(diagV);CHKERRQ(ierr); 2562c87e5d42SMatthew Knepley ierr = VecDestroy(offdiagV);CHKERRQ(ierr); 2563c87e5d42SMatthew Knepley ierr = PetscFree2(diagIdx, offdiagIdx);CHKERRQ(ierr); 2564c87e5d42SMatthew Knepley PetscFunctionReturn(0); 2565c87e5d42SMatthew Knepley } 2566c87e5d42SMatthew Knepley 2567c87e5d42SMatthew Knepley #undef __FUNCT__ 2568829201f2SHong Zhang #define __FUNCT__ "MatGetSeqNonzerostructure_MPIAIJ" 2569f6d58c54SBarry Smith PetscErrorCode MatGetSeqNonzerostructure_MPIAIJ(Mat mat,Mat *newmat) 25705494a064SHong Zhang { 25715494a064SHong Zhang 
PetscErrorCode ierr; 2572f6d58c54SBarry Smith Mat *dummy; 25735494a064SHong Zhang 25745494a064SHong Zhang PetscFunctionBegin; 2575f6d58c54SBarry Smith ierr = MatGetSubMatrix_MPIAIJ_All(mat,MAT_DO_NOT_GET_VALUES,MAT_INITIAL_MATRIX,&dummy);CHKERRQ(ierr); 2576f6d58c54SBarry Smith *newmat = *dummy; 2577f6d58c54SBarry Smith ierr = PetscFree(dummy);CHKERRQ(ierr); 25785494a064SHong Zhang PetscFunctionReturn(0); 25795494a064SHong Zhang } 25805494a064SHong Zhang 25813acb8795SBarry Smith extern PetscErrorCode PETSCMAT_DLLEXPORT MatFDColoringApply_AIJ(Mat,MatFDColoring,Vec,MatStructure*,void*); 25828a729477SBarry Smith /* -------------------------------------------------------------------*/ 2583cda55fadSBarry Smith static struct _MatOps MatOps_Values = {MatSetValues_MPIAIJ, 2584cda55fadSBarry Smith MatGetRow_MPIAIJ, 2585cda55fadSBarry Smith MatRestoreRow_MPIAIJ, 2586cda55fadSBarry Smith MatMult_MPIAIJ, 258797304618SKris Buschelman /* 4*/ MatMultAdd_MPIAIJ, 25887c922b88SBarry Smith MatMultTranspose_MPIAIJ, 25897c922b88SBarry Smith MatMultTransposeAdd_MPIAIJ, 2590103bf8bdSMatthew Knepley #ifdef PETSC_HAVE_PBGL 2591103bf8bdSMatthew Knepley MatSolve_MPIAIJ, 2592103bf8bdSMatthew Knepley #else 2593cda55fadSBarry Smith 0, 2594103bf8bdSMatthew Knepley #endif 2595cda55fadSBarry Smith 0, 2596cda55fadSBarry Smith 0, 259797304618SKris Buschelman /*10*/ 0, 2598cda55fadSBarry Smith 0, 2599cda55fadSBarry Smith 0, 260041f059aeSBarry Smith MatSOR_MPIAIJ, 2601b7c46309SBarry Smith MatTranspose_MPIAIJ, 260297304618SKris Buschelman /*15*/ MatGetInfo_MPIAIJ, 2603cda55fadSBarry Smith MatEqual_MPIAIJ, 2604cda55fadSBarry Smith MatGetDiagonal_MPIAIJ, 2605cda55fadSBarry Smith MatDiagonalScale_MPIAIJ, 2606cda55fadSBarry Smith MatNorm_MPIAIJ, 260797304618SKris Buschelman /*20*/ MatAssemblyBegin_MPIAIJ, 2608cda55fadSBarry Smith MatAssemblyEnd_MPIAIJ, 2609cda55fadSBarry Smith MatSetOption_MPIAIJ, 2610cda55fadSBarry Smith MatZeroEntries_MPIAIJ, 2611d519adbfSMatthew Knepley /*24*/ MatZeroRows_MPIAIJ, 
2612cda55fadSBarry Smith 0, 2613103bf8bdSMatthew Knepley #ifdef PETSC_HAVE_PBGL 2614719d5645SBarry Smith 0, 2615103bf8bdSMatthew Knepley #else 2616cda55fadSBarry Smith 0, 2617103bf8bdSMatthew Knepley #endif 2618cda55fadSBarry Smith 0, 2619cda55fadSBarry Smith 0, 2620d519adbfSMatthew Knepley /*29*/ MatSetUpPreallocation_MPIAIJ, 2621103bf8bdSMatthew Knepley #ifdef PETSC_HAVE_PBGL 2622719d5645SBarry Smith 0, 2623103bf8bdSMatthew Knepley #else 2624cda55fadSBarry Smith 0, 2625103bf8bdSMatthew Knepley #endif 2626cda55fadSBarry Smith 0, 2627cda55fadSBarry Smith 0, 2628cda55fadSBarry Smith 0, 2629d519adbfSMatthew Knepley /*34*/ MatDuplicate_MPIAIJ, 2630cda55fadSBarry Smith 0, 2631cda55fadSBarry Smith 0, 2632cda55fadSBarry Smith 0, 2633cda55fadSBarry Smith 0, 2634d519adbfSMatthew Knepley /*39*/ MatAXPY_MPIAIJ, 2635cda55fadSBarry Smith MatGetSubMatrices_MPIAIJ, 2636cda55fadSBarry Smith MatIncreaseOverlap_MPIAIJ, 2637cda55fadSBarry Smith MatGetValues_MPIAIJ, 2638cb5b572fSBarry Smith MatCopy_MPIAIJ, 2639d519adbfSMatthew Knepley /*44*/ MatGetRowMax_MPIAIJ, 2640cda55fadSBarry Smith MatScale_MPIAIJ, 2641cda55fadSBarry Smith 0, 2642cda55fadSBarry Smith 0, 2643cda55fadSBarry Smith 0, 2644d519adbfSMatthew Knepley /*49*/ MatSetBlockSize_MPIAIJ, 2645cda55fadSBarry Smith 0, 2646cda55fadSBarry Smith 0, 2647cda55fadSBarry Smith 0, 2648cda55fadSBarry Smith 0, 2649d519adbfSMatthew Knepley /*54*/ MatFDColoringCreate_MPIAIJ, 2650cda55fadSBarry Smith 0, 2651cda55fadSBarry Smith MatSetUnfactored_MPIAIJ, 265242e855d1Svictor MatPermute_MPIAIJ, 2653cda55fadSBarry Smith 0, 2654d519adbfSMatthew Knepley /*59*/ MatGetSubMatrix_MPIAIJ, 2655e03a110bSBarry Smith MatDestroy_MPIAIJ, 2656e03a110bSBarry Smith MatView_MPIAIJ, 2657357abbc8SBarry Smith 0, 2658a2243be0SBarry Smith 0, 2659d519adbfSMatthew Knepley /*64*/ 0, 2660a2243be0SBarry Smith 0, 2661a2243be0SBarry Smith 0, 2662a2243be0SBarry Smith 0, 2663a2243be0SBarry Smith 0, 2664d519adbfSMatthew Knepley /*69*/ MatGetRowMaxAbs_MPIAIJ, 2665c87e5d42SMatthew 
Knepley MatGetRowMinAbs_MPIAIJ, 2666a2243be0SBarry Smith 0, 2667a2243be0SBarry Smith MatSetColoring_MPIAIJ, 2668dcf5cc72SBarry Smith #if defined(PETSC_HAVE_ADIC) 2669779c1a83SBarry Smith MatSetValuesAdic_MPIAIJ, 2670dcf5cc72SBarry Smith #else 2671dcf5cc72SBarry Smith 0, 2672dcf5cc72SBarry Smith #endif 267397304618SKris Buschelman MatSetValuesAdifor_MPIAIJ, 26743acb8795SBarry Smith /*75*/ MatFDColoringApply_AIJ, 267597304618SKris Buschelman 0, 267697304618SKris Buschelman 0, 267797304618SKris Buschelman 0, 267897304618SKris Buschelman 0, 267997304618SKris Buschelman /*80*/ 0, 268097304618SKris Buschelman 0, 268197304618SKris Buschelman 0, 2682d519adbfSMatthew Knepley /*83*/ MatLoad_MPIAIJ, 26836284ec50SHong Zhang 0, 26846284ec50SHong Zhang 0, 26856284ec50SHong Zhang 0, 26866284ec50SHong Zhang 0, 2687865e5f61SKris Buschelman 0, 2688d519adbfSMatthew Knepley /*89*/ MatMatMult_MPIAIJ_MPIAIJ, 268926be0446SHong Zhang MatMatMultSymbolic_MPIAIJ_MPIAIJ, 269026be0446SHong Zhang MatMatMultNumeric_MPIAIJ_MPIAIJ, 26917a7894deSKris Buschelman MatPtAP_Basic, 26927a7894deSKris Buschelman MatPtAPSymbolic_MPIAIJ, 2693d519adbfSMatthew Knepley /*94*/ MatPtAPNumeric_MPIAIJ, 26947a7894deSKris Buschelman 0, 26957a7894deSKris Buschelman 0, 26967a7894deSKris Buschelman 0, 26977a7894deSKris Buschelman 0, 2698d519adbfSMatthew Knepley /*99*/ 0, 2699865e5f61SKris Buschelman MatPtAPSymbolic_MPIAIJ_MPIAIJ, 27007a7894deSKris Buschelman MatPtAPNumeric_MPIAIJ_MPIAIJ, 27012fd7e33dSBarry Smith MatConjugate_MPIAIJ, 27022fd7e33dSBarry Smith 0, 2703d519adbfSMatthew Knepley /*104*/MatSetValuesRow_MPIAIJ, 270499cafbc1SBarry Smith MatRealPart_MPIAIJ, 270569db28dcSHong Zhang MatImaginaryPart_MPIAIJ, 270669db28dcSHong Zhang 0, 270769db28dcSHong Zhang 0, 2708d519adbfSMatthew Knepley /*109*/0, 270903bc72f1SMatthew Knepley MatGetRedundantMatrix_MPIAIJ, 27105494a064SHong Zhang MatGetRowMin_MPIAIJ, 27115494a064SHong Zhang 0, 27125494a064SHong Zhang 0, 2713bd0c2dcbSBarry Smith 
/*114*/MatGetSeqNonzerostructure_MPIAIJ, 2714bd0c2dcbSBarry Smith 0, 2715bd0c2dcbSBarry Smith 0, 2716bd0c2dcbSBarry Smith 0, 2717bd0c2dcbSBarry Smith 0, 2718bd0c2dcbSBarry Smith 0 2719bd0c2dcbSBarry Smith }; 272036ce4990SBarry Smith 27212e8a6d31SBarry Smith /* ----------------------------------------------------------------------------------------*/ 27222e8a6d31SBarry Smith 2723fb2e594dSBarry Smith EXTERN_C_BEGIN 27244a2ae208SSatish Balay #undef __FUNCT__ 27254a2ae208SSatish Balay #define __FUNCT__ "MatStoreValues_MPIAIJ" 2726be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatStoreValues_MPIAIJ(Mat mat) 27272e8a6d31SBarry Smith { 27282e8a6d31SBarry Smith Mat_MPIAIJ *aij = (Mat_MPIAIJ *)mat->data; 2729dfbe8321SBarry Smith PetscErrorCode ierr; 27302e8a6d31SBarry Smith 27312e8a6d31SBarry Smith PetscFunctionBegin; 27322e8a6d31SBarry Smith ierr = MatStoreValues(aij->A);CHKERRQ(ierr); 27332e8a6d31SBarry Smith ierr = MatStoreValues(aij->B);CHKERRQ(ierr); 27342e8a6d31SBarry Smith PetscFunctionReturn(0); 27352e8a6d31SBarry Smith } 2736fb2e594dSBarry Smith EXTERN_C_END 27372e8a6d31SBarry Smith 2738fb2e594dSBarry Smith EXTERN_C_BEGIN 27394a2ae208SSatish Balay #undef __FUNCT__ 27404a2ae208SSatish Balay #define __FUNCT__ "MatRetrieveValues_MPIAIJ" 2741be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatRetrieveValues_MPIAIJ(Mat mat) 27422e8a6d31SBarry Smith { 27432e8a6d31SBarry Smith Mat_MPIAIJ *aij = (Mat_MPIAIJ *)mat->data; 2744dfbe8321SBarry Smith PetscErrorCode ierr; 27452e8a6d31SBarry Smith 27462e8a6d31SBarry Smith PetscFunctionBegin; 27472e8a6d31SBarry Smith ierr = MatRetrieveValues(aij->A);CHKERRQ(ierr); 27482e8a6d31SBarry Smith ierr = MatRetrieveValues(aij->B);CHKERRQ(ierr); 27492e8a6d31SBarry Smith PetscFunctionReturn(0); 27502e8a6d31SBarry Smith } 2751fb2e594dSBarry Smith EXTERN_C_END 27528a729477SBarry Smith 2753e090d566SSatish Balay #include "petscpc.h" 275427508adbSBarry Smith EXTERN_C_BEGIN 27554a2ae208SSatish Balay #undef __FUNCT__ 
2756a23d5eceSKris Buschelman #define __FUNCT__ "MatMPIAIJSetPreallocation_MPIAIJ" 2757be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatMPIAIJSetPreallocation_MPIAIJ(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[]) 2758a23d5eceSKris Buschelman { 2759a23d5eceSKris Buschelman Mat_MPIAIJ *b; 2760dfbe8321SBarry Smith PetscErrorCode ierr; 2761b1d57f15SBarry Smith PetscInt i; 2762a23d5eceSKris Buschelman 2763a23d5eceSKris Buschelman PetscFunctionBegin; 2764a23d5eceSKris Buschelman if (d_nz == PETSC_DEFAULT || d_nz == PETSC_DECIDE) d_nz = 5; 2765a23d5eceSKris Buschelman if (o_nz == PETSC_DEFAULT || o_nz == PETSC_DECIDE) o_nz = 2; 276677431f27SBarry Smith if (d_nz < 0) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"d_nz cannot be less than 0: value %D",d_nz); 276777431f27SBarry Smith if (o_nz < 0) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"o_nz cannot be less than 0: value %D",o_nz); 2768899cda47SBarry Smith 276926283091SBarry Smith ierr = PetscLayoutSetBlockSize(B->rmap,1);CHKERRQ(ierr); 277026283091SBarry Smith ierr = PetscLayoutSetBlockSize(B->cmap,1);CHKERRQ(ierr); 277126283091SBarry Smith ierr = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr); 277226283091SBarry Smith ierr = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr); 2773a23d5eceSKris Buschelman if (d_nnz) { 2774d0f46423SBarry Smith for (i=0; i<B->rmap->n; i++) { 277577431f27SBarry Smith if (d_nnz[i] < 0) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"d_nnz cannot be less than 0: local row %D value %D",i,d_nnz[i]); 2776a23d5eceSKris Buschelman } 2777a23d5eceSKris Buschelman } 2778a23d5eceSKris Buschelman if (o_nnz) { 2779d0f46423SBarry Smith for (i=0; i<B->rmap->n; i++) { 278077431f27SBarry Smith if (o_nnz[i] < 0) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"o_nnz cannot be less than 0: local row %D value %D",i,o_nnz[i]); 2781a23d5eceSKris Buschelman } 2782a23d5eceSKris Buschelman } 2783a23d5eceSKris Buschelman b = (Mat_MPIAIJ*)B->data; 2784899cda47SBarry Smith 2785526dfc15SBarry Smith if (!B->preallocated) { 
2786899cda47SBarry Smith /* Explicitly create 2 MATSEQAIJ matrices. */ 2787899cda47SBarry Smith ierr = MatCreate(PETSC_COMM_SELF,&b->A);CHKERRQ(ierr); 2788d0f46423SBarry Smith ierr = MatSetSizes(b->A,B->rmap->n,B->cmap->n,B->rmap->n,B->cmap->n);CHKERRQ(ierr); 2789899cda47SBarry Smith ierr = MatSetType(b->A,MATSEQAIJ);CHKERRQ(ierr); 2790899cda47SBarry Smith ierr = PetscLogObjectParent(B,b->A);CHKERRQ(ierr); 2791899cda47SBarry Smith ierr = MatCreate(PETSC_COMM_SELF,&b->B);CHKERRQ(ierr); 2792d0f46423SBarry Smith ierr = MatSetSizes(b->B,B->rmap->n,B->cmap->N,B->rmap->n,B->cmap->N);CHKERRQ(ierr); 2793899cda47SBarry Smith ierr = MatSetType(b->B,MATSEQAIJ);CHKERRQ(ierr); 2794899cda47SBarry Smith ierr = PetscLogObjectParent(B,b->B);CHKERRQ(ierr); 2795526dfc15SBarry Smith } 2796899cda47SBarry Smith 2797c60e587dSKris Buschelman ierr = MatSeqAIJSetPreallocation(b->A,d_nz,d_nnz);CHKERRQ(ierr); 2798c60e587dSKris Buschelman ierr = MatSeqAIJSetPreallocation(b->B,o_nz,o_nnz);CHKERRQ(ierr); 2799526dfc15SBarry Smith B->preallocated = PETSC_TRUE; 2800a23d5eceSKris Buschelman PetscFunctionReturn(0); 2801a23d5eceSKris Buschelman } 2802a23d5eceSKris Buschelman EXTERN_C_END 2803a23d5eceSKris Buschelman 28044a2ae208SSatish Balay #undef __FUNCT__ 28054a2ae208SSatish Balay #define __FUNCT__ "MatDuplicate_MPIAIJ" 2806dfbe8321SBarry Smith PetscErrorCode MatDuplicate_MPIAIJ(Mat matin,MatDuplicateOption cpvalues,Mat *newmat) 2807d6dfbf8fSBarry Smith { 2808d6dfbf8fSBarry Smith Mat mat; 2809416022c9SBarry Smith Mat_MPIAIJ *a,*oldmat = (Mat_MPIAIJ*)matin->data; 2810dfbe8321SBarry Smith PetscErrorCode ierr; 2811d6dfbf8fSBarry Smith 28123a40ed3dSBarry Smith PetscFunctionBegin; 2813416022c9SBarry Smith *newmat = 0; 28147adad957SLisandro Dalcin ierr = MatCreate(((PetscObject)matin)->comm,&mat);CHKERRQ(ierr); 2815d0f46423SBarry Smith ierr = MatSetSizes(mat,matin->rmap->n,matin->cmap->n,matin->rmap->N,matin->cmap->N);CHKERRQ(ierr); 28167adad957SLisandro Dalcin ierr = 
MatSetType(mat,((PetscObject)matin)->type_name);CHKERRQ(ierr); 28171d5dac46SHong Zhang ierr = PetscMemcpy(mat->ops,matin->ops,sizeof(struct _MatOps));CHKERRQ(ierr); 2818273d9f13SBarry Smith a = (Mat_MPIAIJ*)mat->data; 2819e1b6402fSHong Zhang 2820d6dfbf8fSBarry Smith mat->factor = matin->factor; 2821d0f46423SBarry Smith mat->rmap->bs = matin->rmap->bs; 2822c456f294SBarry Smith mat->assembled = PETSC_TRUE; 2823e7641de0SSatish Balay mat->insertmode = NOT_SET_VALUES; 2824273d9f13SBarry Smith mat->preallocated = PETSC_TRUE; 2825d6dfbf8fSBarry Smith 282617699dbbSLois Curfman McInnes a->size = oldmat->size; 282717699dbbSLois Curfman McInnes a->rank = oldmat->rank; 2828e7641de0SSatish Balay a->donotstash = oldmat->donotstash; 2829e7641de0SSatish Balay a->roworiented = oldmat->roworiented; 2830e7641de0SSatish Balay a->rowindices = 0; 2831bcd2baecSBarry Smith a->rowvalues = 0; 2832bcd2baecSBarry Smith a->getrowactive = PETSC_FALSE; 2833d6dfbf8fSBarry Smith 283426283091SBarry Smith ierr = PetscLayoutCopy(matin->rmap,&mat->rmap);CHKERRQ(ierr); 283526283091SBarry Smith ierr = PetscLayoutCopy(matin->cmap,&mat->cmap);CHKERRQ(ierr); 2836899cda47SBarry Smith 28377adad957SLisandro Dalcin ierr = MatStashCreate_Private(((PetscObject)matin)->comm,1,&mat->stash);CHKERRQ(ierr); 28382ee70a88SLois Curfman McInnes if (oldmat->colmap) { 2839aa482453SBarry Smith #if defined (PETSC_USE_CTABLE) 28400f5bd95cSBarry Smith ierr = PetscTableCreateCopy(oldmat->colmap,&a->colmap);CHKERRQ(ierr); 2841b1fc9764SSatish Balay #else 2842d0f46423SBarry Smith ierr = PetscMalloc((mat->cmap->N)*sizeof(PetscInt),&a->colmap);CHKERRQ(ierr); 2843d0f46423SBarry Smith ierr = PetscLogObjectMemory(mat,(mat->cmap->N)*sizeof(PetscInt));CHKERRQ(ierr); 2844d0f46423SBarry Smith ierr = PetscMemcpy(a->colmap,oldmat->colmap,(mat->cmap->N)*sizeof(PetscInt));CHKERRQ(ierr); 2845b1fc9764SSatish Balay #endif 2846416022c9SBarry Smith } else a->colmap = 0; 28473f41c07dSBarry Smith if (oldmat->garray) { 2848b1d57f15SBarry Smith 
PetscInt len; 2849d0f46423SBarry Smith len = oldmat->B->cmap->n; 2850b1d57f15SBarry Smith ierr = PetscMalloc((len+1)*sizeof(PetscInt),&a->garray);CHKERRQ(ierr); 285152e6d16bSBarry Smith ierr = PetscLogObjectMemory(mat,len*sizeof(PetscInt));CHKERRQ(ierr); 2852b1d57f15SBarry Smith if (len) { ierr = PetscMemcpy(a->garray,oldmat->garray,len*sizeof(PetscInt));CHKERRQ(ierr); } 2853416022c9SBarry Smith } else a->garray = 0; 2854d6dfbf8fSBarry Smith 2855416022c9SBarry Smith ierr = VecDuplicate(oldmat->lvec,&a->lvec);CHKERRQ(ierr); 285652e6d16bSBarry Smith ierr = PetscLogObjectParent(mat,a->lvec);CHKERRQ(ierr); 2857a56f8943SBarry Smith ierr = VecScatterCopy(oldmat->Mvctx,&a->Mvctx);CHKERRQ(ierr); 285852e6d16bSBarry Smith ierr = PetscLogObjectParent(mat,a->Mvctx);CHKERRQ(ierr); 28592e8a6d31SBarry Smith ierr = MatDuplicate(oldmat->A,cpvalues,&a->A);CHKERRQ(ierr); 286052e6d16bSBarry Smith ierr = PetscLogObjectParent(mat,a->A);CHKERRQ(ierr); 28612e8a6d31SBarry Smith ierr = MatDuplicate(oldmat->B,cpvalues,&a->B);CHKERRQ(ierr); 286252e6d16bSBarry Smith ierr = PetscLogObjectParent(mat,a->B);CHKERRQ(ierr); 28637adad957SLisandro Dalcin ierr = PetscFListDuplicate(((PetscObject)matin)->qlist,&((PetscObject)mat)->qlist);CHKERRQ(ierr); 28648a729477SBarry Smith *newmat = mat; 28653a40ed3dSBarry Smith PetscFunctionReturn(0); 28668a729477SBarry Smith } 2867416022c9SBarry Smith 2868e090d566SSatish Balay #include "petscsys.h" 2869416022c9SBarry Smith 28704a2ae208SSatish Balay #undef __FUNCT__ 28714a2ae208SSatish Balay #define __FUNCT__ "MatLoad_MPIAIJ" 2872a313700dSBarry Smith PetscErrorCode MatLoad_MPIAIJ(PetscViewer viewer, const MatType type,Mat *newmat) 2873416022c9SBarry Smith { 2874d65a2f8fSBarry Smith Mat A; 287587828ca2SBarry Smith PetscScalar *vals,*svals; 287619bcc07fSBarry Smith MPI_Comm comm = ((PetscObject)viewer)->comm; 2877416022c9SBarry Smith MPI_Status status; 28786849ba73SBarry Smith PetscErrorCode ierr; 287913980483SBarry Smith PetscMPIInt rank,size,tag = 
((PetscObject)viewer)->tag,mpicnt,mpimaxnz; 28807e042019SMatthew Knepley PetscInt i,nz,j,rstart,rend,mmax,maxnz = 0; 2881b1d57f15SBarry Smith PetscInt header[4],*rowlengths = 0,M,N,m,*cols; 2882910ba992SMatthew Knepley PetscInt *ourlens = PETSC_NULL,*procsnz = PETSC_NULL,*offlens = PETSC_NULL,jj,*mycols,*smycols; 2883dc231df0SBarry Smith PetscInt cend,cstart,n,*rowners; 2884b1d57f15SBarry Smith int fd; 2885416022c9SBarry Smith 28863a40ed3dSBarry Smith PetscFunctionBegin; 28871dab6e02SBarry Smith ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 28881dab6e02SBarry Smith ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 288917699dbbSLois Curfman McInnes if (!rank) { 2890b0a32e0cSBarry Smith ierr = PetscViewerBinaryGetDescriptor(viewer,&fd);CHKERRQ(ierr); 28910752156aSBarry Smith ierr = PetscBinaryRead(fd,(char *)header,4,PETSC_INT);CHKERRQ(ierr); 2892552e946dSBarry Smith if (header[0] != MAT_FILE_COOKIE) SETERRQ(PETSC_ERR_FILE_UNEXPECTED,"not matrix object"); 28936c5fab8fSBarry Smith } 28946c5fab8fSBarry Smith 2895b1d57f15SBarry Smith ierr = MPI_Bcast(header+1,3,MPIU_INT,0,comm);CHKERRQ(ierr); 2896416022c9SBarry Smith M = header[1]; N = header[2]; 2897416022c9SBarry Smith /* determine ownership of all rows */ 289829cdbbc8SSatish Balay m = M/size + ((M % size) > rank); 2899dc231df0SBarry Smith ierr = PetscMalloc((size+1)*sizeof(PetscInt),&rowners);CHKERRQ(ierr); 2900dc231df0SBarry Smith ierr = MPI_Allgather(&m,1,MPIU_INT,rowners+1,1,MPIU_INT,comm);CHKERRQ(ierr); 2901167e7480SBarry Smith 2902167e7480SBarry Smith /* First process needs enough room for process with most rows */ 2903167e7480SBarry Smith if (!rank) { 2904167e7480SBarry Smith mmax = rowners[1]; 2905167e7480SBarry Smith for (i=2; i<size; i++) { 2906167e7480SBarry Smith mmax = PetscMax(mmax,rowners[i]); 2907167e7480SBarry Smith } 2908167e7480SBarry Smith } else mmax = m; 2909167e7480SBarry Smith 2910416022c9SBarry Smith rowners[0] = 0; 291117699dbbSLois Curfman McInnes for (i=2; i<=size; i++) { 2912416022c9SBarry 
Smith rowners[i] += rowners[i-1]; 2913416022c9SBarry Smith } 291417699dbbSLois Curfman McInnes rstart = rowners[rank]; 291517699dbbSLois Curfman McInnes rend = rowners[rank+1]; 2916416022c9SBarry Smith 2917416022c9SBarry Smith /* distribute row lengths to all processors */ 2918167e7480SBarry Smith ierr = PetscMalloc2(mmax,PetscInt,&ourlens,mmax,PetscInt,&offlens);CHKERRQ(ierr); 291917699dbbSLois Curfman McInnes if (!rank) { 2920dc231df0SBarry Smith ierr = PetscBinaryRead(fd,ourlens,m,PETSC_INT);CHKERRQ(ierr); 2921dc231df0SBarry Smith ierr = PetscMalloc(m*sizeof(PetscInt),&rowlengths);CHKERRQ(ierr); 2922b1d57f15SBarry Smith ierr = PetscMalloc(size*sizeof(PetscInt),&procsnz);CHKERRQ(ierr); 2923b1d57f15SBarry Smith ierr = PetscMemzero(procsnz,size*sizeof(PetscInt));CHKERRQ(ierr); 2924dc231df0SBarry Smith for (j=0; j<m; j++) { 2925dc231df0SBarry Smith procsnz[0] += ourlens[j]; 2926dc231df0SBarry Smith } 2927dc231df0SBarry Smith for (i=1; i<size; i++) { 2928dc231df0SBarry Smith ierr = PetscBinaryRead(fd,rowlengths,rowners[i+1]-rowners[i],PETSC_INT);CHKERRQ(ierr); 2929dc231df0SBarry Smith /* calculate the number of nonzeros on each processor */ 2930dc231df0SBarry Smith for (j=0; j<rowners[i+1]-rowners[i]; j++) { 2931416022c9SBarry Smith procsnz[i] += rowlengths[j]; 2932416022c9SBarry Smith } 293313980483SBarry Smith mpicnt = PetscMPIIntCast(rowners[i+1]-rowners[i]); 293413980483SBarry Smith ierr = MPI_Send(rowlengths,mpicnt,MPIU_INT,i,tag,comm);CHKERRQ(ierr); 2935416022c9SBarry Smith } 2936606d414cSSatish Balay ierr = PetscFree(rowlengths);CHKERRQ(ierr); 2937dc231df0SBarry Smith } else { 293813980483SBarry Smith mpicnt = PetscMPIIntCast(m);CHKERRQ(ierr); 293913980483SBarry Smith ierr = MPI_Recv(ourlens,mpicnt,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr); 2940dc231df0SBarry Smith } 2941416022c9SBarry Smith 2942dc231df0SBarry Smith if (!rank) { 2943416022c9SBarry Smith /* determine max buffer needed and allocate it */ 2944416022c9SBarry Smith maxnz = 0; 29458a8e0b3aSBarry 
Smith for (i=0; i<size; i++) { 29460452661fSBarry Smith maxnz = PetscMax(maxnz,procsnz[i]); 2947416022c9SBarry Smith } 2948b1d57f15SBarry Smith ierr = PetscMalloc(maxnz*sizeof(PetscInt),&cols);CHKERRQ(ierr); 2949416022c9SBarry Smith 2950416022c9SBarry Smith /* read in my part of the matrix column indices */ 2951416022c9SBarry Smith nz = procsnz[0]; 2952b1d57f15SBarry Smith ierr = PetscMalloc(nz*sizeof(PetscInt),&mycols);CHKERRQ(ierr); 29530752156aSBarry Smith ierr = PetscBinaryRead(fd,mycols,nz,PETSC_INT);CHKERRQ(ierr); 2954d65a2f8fSBarry Smith 2955d65a2f8fSBarry Smith /* read in every one elses and ship off */ 295617699dbbSLois Curfman McInnes for (i=1; i<size; i++) { 2957d65a2f8fSBarry Smith nz = procsnz[i]; 29580752156aSBarry Smith ierr = PetscBinaryRead(fd,cols,nz,PETSC_INT);CHKERRQ(ierr); 295913980483SBarry Smith mpicnt = PetscMPIIntCast(nz); 296013980483SBarry Smith ierr = MPI_Send(cols,mpicnt,MPIU_INT,i,tag,comm);CHKERRQ(ierr); 2961d65a2f8fSBarry Smith } 2962606d414cSSatish Balay ierr = PetscFree(cols);CHKERRQ(ierr); 29633a40ed3dSBarry Smith } else { 2964416022c9SBarry Smith /* determine buffer space needed for message */ 2965416022c9SBarry Smith nz = 0; 2966416022c9SBarry Smith for (i=0; i<m; i++) { 2967416022c9SBarry Smith nz += ourlens[i]; 2968416022c9SBarry Smith } 2969dc231df0SBarry Smith ierr = PetscMalloc(nz*sizeof(PetscInt),&mycols);CHKERRQ(ierr); 2970416022c9SBarry Smith 2971416022c9SBarry Smith /* receive message of column indices*/ 297213980483SBarry Smith mpicnt = PetscMPIIntCast(nz);CHKERRQ(ierr); 297313980483SBarry Smith ierr = MPI_Recv(mycols,mpicnt,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr); 297413980483SBarry Smith ierr = MPI_Get_count(&status,MPIU_INT,&mpimaxnz);CHKERRQ(ierr); 29757c533972SBarry Smith if (mpimaxnz == MPI_UNDEFINED) {SETERRQ1(PETSC_ERR_LIB,"MPI_Get_count() returned MPI_UNDEFINED, expected %d",mpicnt);} 297613980483SBarry Smith else if (mpimaxnz < 0) {SETERRQ2(PETSC_ERR_LIB,"MPI_Get_count() returned impossible negative value 
%d, expected %d",mpimaxnz,mpicnt);} 297713980483SBarry Smith else if (mpimaxnz != mpicnt) {SETERRQ2(PETSC_ERR_FILE_UNEXPECTED,"something is wrong with file: expected %d received %d",mpicnt,mpimaxnz);} 2978416022c9SBarry Smith } 2979416022c9SBarry Smith 2980b362ba68SBarry Smith /* determine column ownership if matrix is not square */ 2981b362ba68SBarry Smith if (N != M) { 2982b362ba68SBarry Smith n = N/size + ((N % size) > rank); 2983b1d57f15SBarry Smith ierr = MPI_Scan(&n,&cend,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr); 2984b362ba68SBarry Smith cstart = cend - n; 2985b362ba68SBarry Smith } else { 2986b362ba68SBarry Smith cstart = rstart; 2987b362ba68SBarry Smith cend = rend; 2988fb2e594dSBarry Smith n = cend - cstart; 2989b362ba68SBarry Smith } 2990b362ba68SBarry Smith 2991416022c9SBarry Smith /* loop over local rows, determining number of off diagonal entries */ 2992b1d57f15SBarry Smith ierr = PetscMemzero(offlens,m*sizeof(PetscInt));CHKERRQ(ierr); 2993416022c9SBarry Smith jj = 0; 2994416022c9SBarry Smith for (i=0; i<m; i++) { 2995416022c9SBarry Smith for (j=0; j<ourlens[i]; j++) { 2996b362ba68SBarry Smith if (mycols[jj] < cstart || mycols[jj] >= cend) offlens[i]++; 2997416022c9SBarry Smith jj++; 2998416022c9SBarry Smith } 2999416022c9SBarry Smith } 3000d65a2f8fSBarry Smith 3001d65a2f8fSBarry Smith /* create our matrix */ 3002416022c9SBarry Smith for (i=0; i<m; i++) { 3003416022c9SBarry Smith ourlens[i] -= offlens[i]; 3004416022c9SBarry Smith } 3005f69a0ea3SMatthew Knepley ierr = MatCreate(comm,&A);CHKERRQ(ierr); 3006f69a0ea3SMatthew Knepley ierr = MatSetSizes(A,m,n,M,N);CHKERRQ(ierr); 3007d10c748bSKris Buschelman ierr = MatSetType(A,type);CHKERRQ(ierr); 3008d10c748bSKris Buschelman ierr = MatMPIAIJSetPreallocation(A,0,ourlens,0,offlens);CHKERRQ(ierr); 3009d10c748bSKris Buschelman 3010d65a2f8fSBarry Smith for (i=0; i<m; i++) { 3011d65a2f8fSBarry Smith ourlens[i] += offlens[i]; 3012d65a2f8fSBarry Smith } 3013416022c9SBarry Smith 301417699dbbSLois Curfman McInnes if 
(!rank) { 3015906b51c7SHong Zhang ierr = PetscMalloc((maxnz+1)*sizeof(PetscScalar),&vals);CHKERRQ(ierr); 3016416022c9SBarry Smith 3017416022c9SBarry Smith /* read in my part of the matrix numerical values */ 3018416022c9SBarry Smith nz = procsnz[0]; 30190752156aSBarry Smith ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr); 3020d65a2f8fSBarry Smith 3021d65a2f8fSBarry Smith /* insert into matrix */ 3022d65a2f8fSBarry Smith jj = rstart; 3023d65a2f8fSBarry Smith smycols = mycols; 3024d65a2f8fSBarry Smith svals = vals; 3025d65a2f8fSBarry Smith for (i=0; i<m; i++) { 3026dc231df0SBarry Smith ierr = MatSetValues_MPIAIJ(A,1,&jj,ourlens[i],smycols,svals,INSERT_VALUES);CHKERRQ(ierr); 3027d65a2f8fSBarry Smith smycols += ourlens[i]; 3028d65a2f8fSBarry Smith svals += ourlens[i]; 3029d65a2f8fSBarry Smith jj++; 3030416022c9SBarry Smith } 3031416022c9SBarry Smith 3032d65a2f8fSBarry Smith /* read in other processors and ship out */ 303317699dbbSLois Curfman McInnes for (i=1; i<size; i++) { 3034416022c9SBarry Smith nz = procsnz[i]; 30350752156aSBarry Smith ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr); 303613980483SBarry Smith mpicnt = PetscMPIIntCast(nz); 303713980483SBarry Smith ierr = MPI_Send(vals,mpicnt,MPIU_SCALAR,i,((PetscObject)A)->tag,comm);CHKERRQ(ierr); 3038416022c9SBarry Smith } 3039606d414cSSatish Balay ierr = PetscFree(procsnz);CHKERRQ(ierr); 30403a40ed3dSBarry Smith } else { 3041d65a2f8fSBarry Smith /* receive numeric values */ 304287828ca2SBarry Smith ierr = PetscMalloc((nz+1)*sizeof(PetscScalar),&vals);CHKERRQ(ierr); 3043416022c9SBarry Smith 3044d65a2f8fSBarry Smith /* receive message of values*/ 304513980483SBarry Smith mpicnt = PetscMPIIntCast(nz); 304613980483SBarry Smith ierr = MPI_Recv(vals,mpicnt,MPIU_SCALAR,0,((PetscObject)A)->tag,comm,&status);CHKERRQ(ierr); 304713980483SBarry Smith ierr = MPI_Get_count(&status,MPIU_SCALAR,&mpimaxnz);CHKERRQ(ierr); 30487c533972SBarry Smith if (mpimaxnz == MPI_UNDEFINED) 
{SETERRQ1(PETSC_ERR_LIB,"MPI_Get_count() returned MPI_UNDEFINED, expected %d",mpicnt);} 304913980483SBarry Smith else if (mpimaxnz < 0) {SETERRQ2(PETSC_ERR_LIB,"MPI_Get_count() returned impossible negative value %d, expected %d",mpimaxnz,mpicnt);} 305013980483SBarry Smith else if (mpimaxnz != mpicnt) {SETERRQ2(PETSC_ERR_FILE_UNEXPECTED,"something is wrong with file: expected %d received %d",mpicnt,mpimaxnz);} 3051d65a2f8fSBarry Smith 3052d65a2f8fSBarry Smith /* insert into matrix */ 3053d65a2f8fSBarry Smith jj = rstart; 3054d65a2f8fSBarry Smith smycols = mycols; 3055d65a2f8fSBarry Smith svals = vals; 3056d65a2f8fSBarry Smith for (i=0; i<m; i++) { 3057dc231df0SBarry Smith ierr = MatSetValues_MPIAIJ(A,1,&jj,ourlens[i],smycols,svals,INSERT_VALUES);CHKERRQ(ierr); 3058d65a2f8fSBarry Smith smycols += ourlens[i]; 3059d65a2f8fSBarry Smith svals += ourlens[i]; 3060d65a2f8fSBarry Smith jj++; 3061d65a2f8fSBarry Smith } 3062d65a2f8fSBarry Smith } 3063dc231df0SBarry Smith ierr = PetscFree2(ourlens,offlens);CHKERRQ(ierr); 3064606d414cSSatish Balay ierr = PetscFree(vals);CHKERRQ(ierr); 3065606d414cSSatish Balay ierr = PetscFree(mycols);CHKERRQ(ierr); 3066606d414cSSatish Balay ierr = PetscFree(rowners);CHKERRQ(ierr); 3067d65a2f8fSBarry Smith 30686d4a8577SBarry Smith ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 30696d4a8577SBarry Smith ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 3070d10c748bSKris Buschelman *newmat = A; 30713a40ed3dSBarry Smith PetscFunctionReturn(0); 3072416022c9SBarry Smith } 3073a0ff6018SBarry Smith 30744a2ae208SSatish Balay #undef __FUNCT__ 30754a2ae208SSatish Balay #define __FUNCT__ "MatGetSubMatrix_MPIAIJ" 30764aa3045dSJed Brown PetscErrorCode MatGetSubMatrix_MPIAIJ(Mat mat,IS isrow,IS iscol,MatReuse call,Mat *newmat) 30774aa3045dSJed Brown { 30784aa3045dSJed Brown PetscErrorCode ierr; 30794aa3045dSJed Brown IS iscol_local; 30804aa3045dSJed Brown PetscInt csize; 30814aa3045dSJed Brown 30824aa3045dSJed Brown PetscFunctionBegin; 
30834aa3045dSJed Brown ierr = ISGetLocalSize(iscol,&csize);CHKERRQ(ierr); 3084b79d0421SJed Brown if (call == MAT_REUSE_MATRIX) { 3085b79d0421SJed Brown ierr = PetscObjectQuery((PetscObject)*newmat,"ISAllGather",(PetscObject*)&iscol_local);CHKERRQ(ierr); 3086b79d0421SJed Brown if (!iscol_local) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Submatrix passed in was not used before, cannot reuse"); 3087b79d0421SJed Brown } else { 30884aa3045dSJed Brown ierr = ISAllGather(iscol,&iscol_local);CHKERRQ(ierr); 3089b79d0421SJed Brown } 30904aa3045dSJed Brown ierr = MatGetSubMatrix_MPIAIJ_Private(mat,isrow,iscol_local,csize,call,newmat);CHKERRQ(ierr); 3091b79d0421SJed Brown if (call == MAT_INITIAL_MATRIX) { 3092b79d0421SJed Brown ierr = PetscObjectCompose((PetscObject)*newmat,"ISAllGather",(PetscObject)iscol_local);CHKERRQ(ierr); 30934aa3045dSJed Brown ierr = ISDestroy(iscol_local);CHKERRQ(ierr); 3094b79d0421SJed Brown } 30954aa3045dSJed Brown PetscFunctionReturn(0); 30964aa3045dSJed Brown } 30974aa3045dSJed Brown 30984aa3045dSJed Brown #undef __FUNCT__ 30994aa3045dSJed Brown #define __FUNCT__ "MatGetSubMatrix_MPIAIJ_Private" 3100a0ff6018SBarry Smith /* 310129da9460SBarry Smith Not great since it makes two copies of the submatrix, first an SeqAIJ 310229da9460SBarry Smith in local and then by concatenating the local matrices the end result. 310329da9460SBarry Smith Writing it directly would be much like MatGetSubMatrices_MPIAIJ() 31044aa3045dSJed Brown 31054aa3045dSJed Brown Note: This requires a sequential iscol with all indices. 
3106a0ff6018SBarry Smith */ 31074aa3045dSJed Brown PetscErrorCode MatGetSubMatrix_MPIAIJ_Private(Mat mat,IS isrow,IS iscol,PetscInt csize,MatReuse call,Mat *newmat) 3108a0ff6018SBarry Smith { 3109dfbe8321SBarry Smith PetscErrorCode ierr; 311032dcc486SBarry Smith PetscMPIInt rank,size; 3111b1d57f15SBarry Smith PetscInt i,m,n,rstart,row,rend,nz,*cwork,j; 3112b1d57f15SBarry Smith PetscInt *ii,*jj,nlocal,*dlens,*olens,dlen,olen,jend,mglobal; 3113fee21e36SBarry Smith Mat *local,M,Mreuse; 3114a77337e4SBarry Smith MatScalar *vwork,*aa; 31157adad957SLisandro Dalcin MPI_Comm comm = ((PetscObject)mat)->comm; 311600e6dbe6SBarry Smith Mat_SeqAIJ *aij; 31177e2c5f70SBarry Smith 3118a0ff6018SBarry Smith 3119a0ff6018SBarry Smith PetscFunctionBegin; 31201dab6e02SBarry Smith ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 31211dab6e02SBarry Smith ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 312200e6dbe6SBarry Smith 3123fee21e36SBarry Smith if (call == MAT_REUSE_MATRIX) { 3124fee21e36SBarry Smith ierr = PetscObjectQuery((PetscObject)*newmat,"SubMatrix",(PetscObject *)&Mreuse);CHKERRQ(ierr); 3125e005ede5SBarry Smith if (!Mreuse) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Submatrix passed in was not used before, cannot reuse"); 3126fee21e36SBarry Smith local = &Mreuse; 3127fee21e36SBarry Smith ierr = MatGetSubMatrices(mat,1,&isrow,&iscol,MAT_REUSE_MATRIX,&local);CHKERRQ(ierr); 3128fee21e36SBarry Smith } else { 3129a0ff6018SBarry Smith ierr = MatGetSubMatrices(mat,1,&isrow,&iscol,MAT_INITIAL_MATRIX,&local);CHKERRQ(ierr); 3130fee21e36SBarry Smith Mreuse = *local; 3131606d414cSSatish Balay ierr = PetscFree(local);CHKERRQ(ierr); 3132fee21e36SBarry Smith } 3133a0ff6018SBarry Smith 3134a0ff6018SBarry Smith /* 3135a0ff6018SBarry Smith m - number of local rows 3136a0ff6018SBarry Smith n - number of columns (same on all processors) 3137a0ff6018SBarry Smith rstart - first row in new global matrix generated 3138a0ff6018SBarry Smith */ 3139fee21e36SBarry Smith ierr = 
MatGetSize(Mreuse,&m,&n);CHKERRQ(ierr); 3140a0ff6018SBarry Smith if (call == MAT_INITIAL_MATRIX) { 3141fee21e36SBarry Smith aij = (Mat_SeqAIJ*)(Mreuse)->data; 314200e6dbe6SBarry Smith ii = aij->i; 314300e6dbe6SBarry Smith jj = aij->j; 314400e6dbe6SBarry Smith 3145a0ff6018SBarry Smith /* 314600e6dbe6SBarry Smith Determine the number of non-zeros in the diagonal and off-diagonal 314700e6dbe6SBarry Smith portions of the matrix in order to do correct preallocation 3148a0ff6018SBarry Smith */ 314900e6dbe6SBarry Smith 315000e6dbe6SBarry Smith /* first get start and end of "diagonal" columns */ 31516a6a5d1dSBarry Smith if (csize == PETSC_DECIDE) { 3152ab50ec6bSBarry Smith ierr = ISGetSize(isrow,&mglobal);CHKERRQ(ierr); 3153ab50ec6bSBarry Smith if (mglobal == n) { /* square matrix */ 3154e2c4fddaSBarry Smith nlocal = m; 31556a6a5d1dSBarry Smith } else { 3156ab50ec6bSBarry Smith nlocal = n/size + ((n % size) > rank); 3157ab50ec6bSBarry Smith } 3158ab50ec6bSBarry Smith } else { 31596a6a5d1dSBarry Smith nlocal = csize; 31606a6a5d1dSBarry Smith } 3161b1d57f15SBarry Smith ierr = MPI_Scan(&nlocal,&rend,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr); 316200e6dbe6SBarry Smith rstart = rend - nlocal; 31636a6a5d1dSBarry Smith if (rank == size - 1 && rend != n) { 316477431f27SBarry Smith SETERRQ2(PETSC_ERR_ARG_SIZ,"Local column sizes %D do not add up to total number of columns %D",rend,n); 31656a6a5d1dSBarry Smith } 316600e6dbe6SBarry Smith 316700e6dbe6SBarry Smith /* next, compute all the lengths */ 3168b1d57f15SBarry Smith ierr = PetscMalloc((2*m+1)*sizeof(PetscInt),&dlens);CHKERRQ(ierr); 316900e6dbe6SBarry Smith olens = dlens + m; 317000e6dbe6SBarry Smith for (i=0; i<m; i++) { 317100e6dbe6SBarry Smith jend = ii[i+1] - ii[i]; 317200e6dbe6SBarry Smith olen = 0; 317300e6dbe6SBarry Smith dlen = 0; 317400e6dbe6SBarry Smith for (j=0; j<jend; j++) { 317500e6dbe6SBarry Smith if (*jj < rstart || *jj >= rend) olen++; 317600e6dbe6SBarry Smith else dlen++; 317700e6dbe6SBarry Smith jj++; 
317800e6dbe6SBarry Smith } 317900e6dbe6SBarry Smith olens[i] = olen; 318000e6dbe6SBarry Smith dlens[i] = dlen; 318100e6dbe6SBarry Smith } 3182f69a0ea3SMatthew Knepley ierr = MatCreate(comm,&M);CHKERRQ(ierr); 3183f69a0ea3SMatthew Knepley ierr = MatSetSizes(M,m,nlocal,PETSC_DECIDE,n);CHKERRQ(ierr); 31847adad957SLisandro Dalcin ierr = MatSetType(M,((PetscObject)mat)->type_name);CHKERRQ(ierr); 3185e2d9671bSKris Buschelman ierr = MatMPIAIJSetPreallocation(M,0,dlens,0,olens);CHKERRQ(ierr); 3186606d414cSSatish Balay ierr = PetscFree(dlens);CHKERRQ(ierr); 3187a0ff6018SBarry Smith } else { 3188b1d57f15SBarry Smith PetscInt ml,nl; 3189a0ff6018SBarry Smith 3190a0ff6018SBarry Smith M = *newmat; 3191a0ff6018SBarry Smith ierr = MatGetLocalSize(M,&ml,&nl);CHKERRQ(ierr); 319229bbc08cSBarry Smith if (ml != m) SETERRQ(PETSC_ERR_ARG_SIZ,"Previous matrix must be same size/layout as request"); 3193a0ff6018SBarry Smith ierr = MatZeroEntries(M);CHKERRQ(ierr); 3194c48de900SBarry Smith /* 3195c48de900SBarry Smith The next two lines are needed so we may call MatSetValues_MPIAIJ() below directly, 3196c48de900SBarry Smith rather than the slower MatSetValues(). 
3197c48de900SBarry Smith */ 3198c48de900SBarry Smith M->was_assembled = PETSC_TRUE; 3199c48de900SBarry Smith M->assembled = PETSC_FALSE; 3200a0ff6018SBarry Smith } 3201a0ff6018SBarry Smith ierr = MatGetOwnershipRange(M,&rstart,&rend);CHKERRQ(ierr); 3202fee21e36SBarry Smith aij = (Mat_SeqAIJ*)(Mreuse)->data; 320300e6dbe6SBarry Smith ii = aij->i; 320400e6dbe6SBarry Smith jj = aij->j; 320500e6dbe6SBarry Smith aa = aij->a; 3206a0ff6018SBarry Smith for (i=0; i<m; i++) { 3207a0ff6018SBarry Smith row = rstart + i; 320800e6dbe6SBarry Smith nz = ii[i+1] - ii[i]; 320900e6dbe6SBarry Smith cwork = jj; jj += nz; 321000e6dbe6SBarry Smith vwork = aa; aa += nz; 32118c638d02SBarry Smith ierr = MatSetValues_MPIAIJ(M,1,&row,nz,cwork,vwork,INSERT_VALUES);CHKERRQ(ierr); 3212a0ff6018SBarry Smith } 3213a0ff6018SBarry Smith 3214a0ff6018SBarry Smith ierr = MatAssemblyBegin(M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 3215a0ff6018SBarry Smith ierr = MatAssemblyEnd(M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 3216a0ff6018SBarry Smith *newmat = M; 3217fee21e36SBarry Smith 3218fee21e36SBarry Smith /* save submatrix used in processor for next request */ 3219fee21e36SBarry Smith if (call == MAT_INITIAL_MATRIX) { 3220fee21e36SBarry Smith ierr = PetscObjectCompose((PetscObject)M,"SubMatrix",(PetscObject)Mreuse);CHKERRQ(ierr); 3221fee21e36SBarry Smith ierr = PetscObjectDereference((PetscObject)Mreuse);CHKERRQ(ierr); 3222fee21e36SBarry Smith } 3223fee21e36SBarry Smith 3224a0ff6018SBarry Smith PetscFunctionReturn(0); 3225a0ff6018SBarry Smith } 3226273d9f13SBarry Smith 3227e2e86b8fSSatish Balay EXTERN_C_BEGIN 32284a2ae208SSatish Balay #undef __FUNCT__ 3229ccd8e176SBarry Smith #define __FUNCT__ "MatMPIAIJSetPreallocationCSR_MPIAIJ" 3230b7940d39SSatish Balay PetscErrorCode PETSCMAT_DLLEXPORT MatMPIAIJSetPreallocationCSR_MPIAIJ(Mat B,const PetscInt Ii[],const PetscInt J[],const PetscScalar v[]) 3231ccd8e176SBarry Smith { 3232899cda47SBarry Smith PetscInt m,cstart, cend,j,nnz,i,d; 3233899cda47SBarry Smith PetscInt 
*d_nnz,*o_nnz,nnz_max = 0,rstart,ii; 3234ccd8e176SBarry Smith const PetscInt *JJ; 3235ccd8e176SBarry Smith PetscScalar *values; 3236ccd8e176SBarry Smith PetscErrorCode ierr; 3237ccd8e176SBarry Smith 3238ccd8e176SBarry Smith PetscFunctionBegin; 3239b7940d39SSatish Balay if (Ii[0]) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Ii[0] must be 0 it is %D",Ii[0]); 3240899cda47SBarry Smith 324126283091SBarry Smith ierr = PetscLayoutSetBlockSize(B->rmap,1);CHKERRQ(ierr); 324226283091SBarry Smith ierr = PetscLayoutSetBlockSize(B->cmap,1);CHKERRQ(ierr); 324326283091SBarry Smith ierr = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr); 324426283091SBarry Smith ierr = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr); 3245d0f46423SBarry Smith m = B->rmap->n; 3246d0f46423SBarry Smith cstart = B->cmap->rstart; 3247d0f46423SBarry Smith cend = B->cmap->rend; 3248d0f46423SBarry Smith rstart = B->rmap->rstart; 3249899cda47SBarry Smith 3250ccd8e176SBarry Smith ierr = PetscMalloc((2*m+1)*sizeof(PetscInt),&d_nnz);CHKERRQ(ierr); 3251ccd8e176SBarry Smith o_nnz = d_nnz + m; 3252ccd8e176SBarry Smith 3253ecc77c7aSBarry Smith #if defined(PETSC_USE_DEBUGGING) 3254ecc77c7aSBarry Smith for (i=0; i<m; i++) { 3255ecc77c7aSBarry Smith nnz = Ii[i+1]- Ii[i]; 3256ecc77c7aSBarry Smith JJ = J + Ii[i]; 3257ecc77c7aSBarry Smith if (nnz < 0) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Local row %D has a negative %D number of columns",i,nnz); 3258ecc77c7aSBarry Smith if (nnz && (JJ[0] < 0)) SETERRRQ1(PETSC_ERR_ARG_WRONGSTATE,"Row %D starts with negative column index",i,j); 3259d0f46423SBarry Smith if (nnz && (JJ[nnz-1] >= B->cmap->N) SETERRRQ3(PETSC_ERR_ARG_WRONGSTATE,"Row %D ends with too large a column index %D (max allowed %D)",i,JJ[nnz-1],B->cmap->N); 3260ecc77c7aSBarry Smith } 3261ecc77c7aSBarry Smith #endif 3262ecc77c7aSBarry Smith 3263ccd8e176SBarry Smith for (i=0; i<m; i++) { 3264b7940d39SSatish Balay nnz = Ii[i+1]- Ii[i]; 3265b7940d39SSatish Balay JJ = J + Ii[i]; 3266ccd8e176SBarry Smith nnz_max = PetscMax(nnz_max,nnz); 
3267ccd8e176SBarry Smith d = 0; 32680daa03b5SJed Brown for (j=0; j<nnz; j++) { 32690daa03b5SJed Brown if (cstart <= JJ[j] && JJ[j] < cend) d++; 3270ccd8e176SBarry Smith } 3271ccd8e176SBarry Smith d_nnz[i] = d; 3272ccd8e176SBarry Smith o_nnz[i] = nnz - d; 3273ccd8e176SBarry Smith } 3274ccd8e176SBarry Smith ierr = MatMPIAIJSetPreallocation(B,0,d_nnz,0,o_nnz);CHKERRQ(ierr); 3275ccd8e176SBarry Smith ierr = PetscFree(d_nnz);CHKERRQ(ierr); 3276ccd8e176SBarry Smith 3277ccd8e176SBarry Smith if (v) values = (PetscScalar*)v; 3278ccd8e176SBarry Smith else { 3279ccd8e176SBarry Smith ierr = PetscMalloc((nnz_max+1)*sizeof(PetscScalar),&values);CHKERRQ(ierr); 3280ccd8e176SBarry Smith ierr = PetscMemzero(values,nnz_max*sizeof(PetscScalar));CHKERRQ(ierr); 3281ccd8e176SBarry Smith } 3282ccd8e176SBarry Smith 3283ccd8e176SBarry Smith for (i=0; i<m; i++) { 3284ccd8e176SBarry Smith ii = i + rstart; 3285b7940d39SSatish Balay nnz = Ii[i+1]- Ii[i]; 3286b7940d39SSatish Balay ierr = MatSetValues_MPIAIJ(B,1,&ii,nnz,J+Ii[i],values+(v ? Ii[i] : 0),INSERT_VALUES);CHKERRQ(ierr); 3287ccd8e176SBarry Smith } 3288ccd8e176SBarry Smith ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 3289ccd8e176SBarry Smith ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 3290ccd8e176SBarry Smith 3291ccd8e176SBarry Smith if (!v) { 3292ccd8e176SBarry Smith ierr = PetscFree(values);CHKERRQ(ierr); 3293ccd8e176SBarry Smith } 3294ccd8e176SBarry Smith PetscFunctionReturn(0); 3295ccd8e176SBarry Smith } 3296e2e86b8fSSatish Balay EXTERN_C_END 3297ccd8e176SBarry Smith 3298ccd8e176SBarry Smith #undef __FUNCT__ 3299ccd8e176SBarry Smith #define __FUNCT__ "MatMPIAIJSetPreallocationCSR" 33001eea217eSSatish Balay /*@ 3301ccd8e176SBarry Smith MatMPIAIJSetPreallocationCSR - Allocates memory for a sparse parallel matrix in AIJ format 3302ccd8e176SBarry Smith (the default parallel PETSc format). 
3303ccd8e176SBarry Smith 3304ccd8e176SBarry Smith Collective on MPI_Comm 3305ccd8e176SBarry Smith 3306ccd8e176SBarry Smith Input Parameters: 3307a1661176SMatthew Knepley + B - the matrix 3308ccd8e176SBarry Smith . i - the indices into j for the start of each local row (starts with zero) 33090daa03b5SJed Brown . j - the column indices for each local row (starts with zero) 3310ccd8e176SBarry Smith - v - optional values in the matrix 3311ccd8e176SBarry Smith 3312ccd8e176SBarry Smith Level: developer 3313ccd8e176SBarry Smith 331412251496SSatish Balay Notes: 331512251496SSatish Balay The i, j, and a arrays ARE copied by this routine into the internal format used by PETSc; 331612251496SSatish Balay thus you CANNOT change the matrix entries by changing the values of a[] after you have 331712251496SSatish Balay called this routine. Use MatCreateMPIAIJWithSplitArrays() to avoid needing to copy the arrays. 331812251496SSatish Balay 331912251496SSatish Balay The i and j indices are 0 based, and i indices are indices corresponding to the local j array. 332012251496SSatish Balay 332112251496SSatish Balay The format which is used for the sparse matrix input, is equivalent to a 332212251496SSatish Balay row-major ordering.. 
i.e for the following matrix, the input data expected is 332312251496SSatish Balay as shown: 332412251496SSatish Balay 332512251496SSatish Balay 1 0 0 332612251496SSatish Balay 2 0 3 P0 332712251496SSatish Balay ------- 332812251496SSatish Balay 4 5 6 P1 332912251496SSatish Balay 333012251496SSatish Balay Process0 [P0]: rows_owned=[0,1] 333112251496SSatish Balay i = {0,1,3} [size = nrow+1 = 2+1] 333212251496SSatish Balay j = {0,0,2} [size = nz = 6] 333312251496SSatish Balay v = {1,2,3} [size = nz = 6] 333412251496SSatish Balay 333512251496SSatish Balay Process1 [P1]: rows_owned=[2] 333612251496SSatish Balay i = {0,3} [size = nrow+1 = 1+1] 333712251496SSatish Balay j = {0,1,2} [size = nz = 6] 333812251496SSatish Balay v = {4,5,6} [size = nz = 6] 333912251496SSatish Balay 3340ccd8e176SBarry Smith .keywords: matrix, aij, compressed row, sparse, parallel 3341ccd8e176SBarry Smith 33422fb0ec9aSBarry Smith .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIAIJSetPreallocation(), MatCreateMPIAIJ(), MPIAIJ, 33438d7a6e47SBarry Smith MatCreateSeqAIJWithArrays(), MatCreateMPIAIJWithSplitArrays() 3344ccd8e176SBarry Smith @*/ 3345be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatMPIAIJSetPreallocationCSR(Mat B,const PetscInt i[],const PetscInt j[], const PetscScalar v[]) 3346ccd8e176SBarry Smith { 3347ccd8e176SBarry Smith PetscErrorCode ierr,(*f)(Mat,const PetscInt[],const PetscInt[],const PetscScalar[]); 3348ccd8e176SBarry Smith 3349ccd8e176SBarry Smith PetscFunctionBegin; 3350ccd8e176SBarry Smith ierr = PetscObjectQueryFunction((PetscObject)B,"MatMPIAIJSetPreallocationCSR_C",(void (**)(void))&f);CHKERRQ(ierr); 3351ccd8e176SBarry Smith if (f) { 3352ccd8e176SBarry Smith ierr = (*f)(B,i,j,v);CHKERRQ(ierr); 3353ccd8e176SBarry Smith } 3354ccd8e176SBarry Smith PetscFunctionReturn(0); 3355ccd8e176SBarry Smith } 3356ccd8e176SBarry Smith 3357ccd8e176SBarry Smith #undef __FUNCT__ 33584a2ae208SSatish Balay #define __FUNCT__ "MatMPIAIJSetPreallocation" 
/*@C
   MatMPIAIJSetPreallocation - Preallocates memory for a sparse parallel matrix in AIJ format
   (the default parallel PETSc format).  For good matrix assembly performance
   the user should preallocate the matrix storage by setting the parameters
   d_nz (or d_nnz) and o_nz (or o_nnz).  By setting these parameters accurately,
   performance can be increased by more than a factor of 50.

   Collective on MPI_Comm

   Input Parameters:
+  A - the matrix
.  d_nz  - number of nonzeros per row in DIAGONAL portion of local submatrix
           (same value is used for all local rows)
.  d_nnz - array containing the number of nonzeros in the various rows of the
           DIAGONAL portion of the local submatrix (possibly different for each row)
           or PETSC_NULL, if d_nz is used to specify the nonzero structure.
           The size of this array is equal to the number of local rows, i.e 'm'.
           You must leave room for the diagonal entry even if it is zero.
.  o_nz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
           submatrix (same value is used for all local rows).
-  o_nnz - array containing the number of nonzeros in the various rows of the
           OFF-DIAGONAL portion of the local submatrix (possibly different for
           each row) or PETSC_NULL, if o_nz is used to specify the nonzero
           structure. The size of this array is equal to the number
           of local rows, i.e 'm'.

   If the *_nnz parameter is given then the *_nz parameter is ignored

   The AIJ format (also called the Yale sparse matrix format or
   compressed row storage (CSR)), is fully compatible with standard Fortran 77
   storage.  The stored row and column indices begin with zero.  See the users manual for details.

   The parallel matrix is partitioned such that the first m0 rows belong to
   process 0, the next m1 rows belong to process 1, the next m2 rows belong
   to process 2 etc.. where m0,m1,m2... are the input parameter 'm'.

   The DIAGONAL portion of the local submatrix of a processor can be defined
   as the submatrix which is obtained by extracting the part corresponding
   to the rows r1-r2 and columns r1-r2 of the global matrix, where r1 is the
   first row that belongs to the processor, and r2 is the last row belonging
   to this processor. This is a square mxm matrix. The remaining portion
   of the local submatrix (mxN) constitute the OFF-DIAGONAL portion.

   If o_nnz, d_nnz are specified, then o_nz, and d_nz are ignored.

   You can call MatGetInfo() to get information on how effective the preallocation was;
   for example the fields mallocs,nz_allocated,nz_used,nz_unneeded;
   You can also run with the option -info and look for messages with the string
   malloc in them to see if additional memory allocation was needed.

   Example usage:

   Consider the following 8x8 matrix with 34 non-zero values, that is
   assembled across 3 processors. Lets assume that proc0 owns 3 rows,
   proc1 owns 3 rows, proc2 owns 2 rows. This division can be shown
   as follows:

.vb
            1  2  0  |  0  3  0  |  0  4
    Proc0   0  5  6  |  7  0  0  |  8  0
            9  0 10  | 11  0  0  | 12  0
    -------------------------------------
           13  0 14  | 15 16 17  |  0  0
    Proc1   0 18  0  | 19 20 21  |  0  0
            0  0  0  | 22 23  0  | 24  0
    -------------------------------------
    Proc2  25 26 27  |  0  0 28  | 29  0
           30  0  0  | 31 32 33  |  0 34
.ve

   This can be represented as a collection of submatrices as:

.vb
      A B C
      D E F
      G H I
.ve

   Where the submatrices A,B,C are owned by proc0, D,E,F are
   owned by proc1, G,H,I are owned by proc2.

   The 'm' parameters for proc0,proc1,proc2 are 3,3,2 respectively.
   The 'n' parameters for proc0,proc1,proc2 are 3,3,2 respectively.
   The 'M','N' parameters are 8,8, and have the same values on all procs.

   The DIAGONAL submatrices corresponding to proc0,proc1,proc2 are
   submatrices [A], [E], [I] respectively. The OFF-DIAGONAL submatrices
   corresponding to proc0,proc1,proc2 are [BC], [DF], [GH] respectively.
   Internally, each processor stores the DIAGONAL part, and the OFF-DIAGONAL
   part as SeqAIJ matrices. for eg: proc1 will store [E] as a SeqAIJ
   matrix, and [DF] as another SeqAIJ matrix.

   When d_nz, o_nz parameters are specified, d_nz storage elements are
   allocated for every row of the local diagonal submatrix, and o_nz
   storage locations are allocated for every row of the OFF-DIAGONAL submat.
   One way to choose d_nz and o_nz is to use the max nonzeros per local
   rows for each of the local DIAGONAL, and the OFF-DIAGONAL submatrices.
   In this case, the values of d_nz,o_nz are:
.vb
     proc0 : dnz = 2, o_nz = 2
     proc1 : dnz = 3, o_nz = 2
     proc2 : dnz = 1, o_nz = 4
.ve
   We are allocating m*(d_nz+o_nz) storage locations for every proc. This
   translates to 3*(2+2)=12 for proc0, 3*(3+2)=15 for proc1, 2*(1+4)=10
   for proc2. i.e we are using 12+15+10=37 storage locations to store
   34 values.

   When d_nnz, o_nnz parameters are specified, the storage is specified
   for every row, corresponding to both DIAGONAL and OFF-DIAGONAL submatrices.
   In the above case the values for d_nnz,o_nnz are:
.vb
     proc0: d_nnz = [2,2,2] and o_nnz = [2,2,2]
     proc1: d_nnz = [3,3,2] and o_nnz = [2,1,1]
     proc2: d_nnz = [1,1]   and o_nnz = [4,4]
.ve
   Here the space allocated is sum of all the above values i.e 34, and
   hence pre-allocation is perfect.

   Level: intermediate

.keywords: matrix, aij, compressed row, sparse, parallel

.seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatCreateMPIAIJ(), MatMPIAIJSetPreallocationCSR(),
          MPIAIJ, MatGetInfo()
@*/
PetscErrorCode PETSCMAT_DLLEXPORT MatMPIAIJSetPreallocation(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  PetscErrorCode ierr,(*f)(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]);

  PetscFunctionBegin;
  /* Dispatch to the type-specific implementation registered on the matrix,
     if any; a matrix type that registered no "MatMPIAIJSetPreallocation_C"
     function is silently left untouched. */
  ierr = PetscObjectQueryFunction((PetscObject)B,"MatMPIAIJSetPreallocation_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(B,d_nz,d_nnz,o_nz,o_nnz);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatCreateMPIAIJWithArrays"
/*@
   MatCreateMPIAIJWithArrays - creates a MPI AIJ matrix using arrays that contain in standard
   CSR format the local rows.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of local rows (Cannot be PETSC_DECIDE)
.  n - This value should be the same as the local size used in creating the
       x vector for the matrix-vector product y = Ax. (or PETSC_DECIDE to have
       calculated if N is given) For square matrices n is almost always m.
.  M - number of global rows (or PETSC_DETERMINE to have calculated if m is given)
.  N - number of global columns (or PETSC_DETERMINE to have calculated if n is given)
.  i - row indices
.  j - column indices
-  a - matrix values

   Output Parameter:
.  mat - the matrix

   Level: intermediate

   Notes:
       The i, j, and a arrays ARE copied by this routine into the internal format used by PETSc;
     thus you CANNOT change the matrix entries by changing the values of a[] after you have
     called this routine. Use MatCreateMPIAIJWithSplitArrays() to avoid needing to copy the arrays.

     The i and j indices are 0 based, and i indices are indices corresponding to the local j array.

       The format which is used for the sparse matrix input, is equivalent to a
    row-major ordering, i.e. for the following matrix, the input data expected is
    as shown:

        1 0 0
        2 0 3     P0
       -------
        4 5 6     P1

     Process0 [P0]: rows_owned=[0,1]
        i =  {0,1,3}  [size = nrow+1  = 2+1]
        j =  {0,0,2}  [size = nz = 3]
        v =  {1,2,3}  [size = nz = 3]

     Process1 [P1]: rows_owned=[2]
        i =  {0,3}    [size = nrow+1  = 1+1]
        j =  {0,1,2}  [size = nz = 3]
        v =  {4,5,6}  [size = nz = 3]

.keywords: matrix, aij, compressed row, sparse, parallel

.seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIAIJSetPreallocation(), MatMPIAIJSetPreallocationCSR(),
          MPIAIJ, MatCreateMPIAIJ(), MatCreateMPIAIJWithSplitArrays()
@*/
PetscErrorCode PETSCMAT_DLLEXPORT MatCreateMPIAIJWithArrays(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt i[],const PetscInt j[],const PetscScalar a[],Mat *mat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* each process passes its LOCAL rows, so its i[] must be locally indexed (start at 0) */
  if (i[0]) {
    SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"i (row indices) must start with 0");
  }
  if (m < 0) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"local number of rows (m) cannot be PETSC_DECIDE, or negative");
  ierr = MatCreate(comm,mat);CHKERRQ(ierr);
  ierr = MatSetSizes(*mat,m,n,M,N);CHKERRQ(ierr);
  ierr = 
MatSetType(*mat,MATMPIAIJ);CHKERRQ(ierr);
  ierr = MatMPIAIJSetPreallocationCSR(*mat,i,j,a);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatCreateMPIAIJ"
/*@C
   MatCreateMPIAIJ - Creates a sparse parallel matrix in AIJ format
   (the default parallel PETSc format).  For good matrix assembly performance
   the user should preallocate the matrix storage by setting the parameters
   d_nz (or d_nnz) and o_nz (or o_nnz).  By setting these parameters accurately,
   performance can be increased by more than a factor of 50.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of local rows (or PETSC_DECIDE to have calculated if M is given)
       This value should be the same as the local size used in creating the
       y vector for the matrix-vector product y = Ax.
.  n - This value should be the same as the local size used in creating the
       x vector for the matrix-vector product y = Ax. (or PETSC_DECIDE to have
       calculated if N is given) For square matrices n is almost always m.
.  M - number of global rows (or PETSC_DETERMINE to have calculated if m is given)
.  N - number of global columns (or PETSC_DETERMINE to have calculated if n is given)
.  d_nz - number of nonzeros per row in DIAGONAL portion of local submatrix
          (same value is used for all local rows)
.  d_nnz - array containing the number of nonzeros in the various rows of the
           DIAGONAL portion of the local submatrix (possibly different for each row)
           or PETSC_NULL, if d_nz is used to specify the nonzero structure.
           The size of this array is equal to the number of local rows, i.e 'm'.
           You must leave room for the diagonal entry even if it is zero.
.  o_nz - number of nonzeros per row in the OFF-DIAGONAL portion of local
          submatrix (same value is used for all local rows).
-  o_nnz - array containing the number of nonzeros in the various rows of the
           OFF-DIAGONAL portion of the local submatrix (possibly different for
           each row) or PETSC_NULL, if o_nz is used to specify the nonzero
           structure. The size of this array is equal to the number
           of local rows, i.e 'm'.

   Output Parameter:
.  A - the matrix

   It is recommended that one use the MatCreate(), MatSetType() and/or MatSetFromOptions(),
   MatXXXXSetPreallocation() paradigm instead of this routine directly.
   [MatXXXXSetPreallocation() is, for example, MatSeqAIJSetPreallocation]

   Notes:
   If the *_nnz parameter is given then the *_nz parameter is ignored

   m,n,M,N parameters specify the size of the matrix, and its partitioning across
   processors, while d_nz,d_nnz,o_nz,o_nnz parameters specify the approximate
   storage requirements for this matrix.

   If PETSC_DECIDE or PETSC_DETERMINE is used for a particular argument on one
   processor then it must be used on all processors that share the object for
   that argument.

   The user MUST specify either the local or global matrix dimensions
   (possibly both).

   The parallel matrix is partitioned across processors such that the
   first m0 rows belong to process 0, the next m1 rows belong to
   process 1, the next m2 rows belong to process 2 etc.. where
   m0,m1,m2,.. are the input parameter 'm'. i.e each processor stores
   values corresponding to [m x N] submatrix.

   The columns are logically partitioned with the n0 columns belonging
   to 0th partition, the next n1 columns belonging to the next
   partition etc.. where n0,n1,n2... are the input parameter 'n'.

   The DIAGONAL portion of the local submatrix on any given processor
   is the submatrix corresponding to the rows and columns m,n
   corresponding to the given processor. i.e diagonal matrix on
   process 0 is [m0 x n0], diagonal matrix on process 1 is [m1 x n1]
   etc. The remaining portion of the local submatrix [m x (N-n)]
   constitute the OFF-DIAGONAL portion. The example below better
   illustrates this concept.

   For a square global matrix we define each processor's diagonal portion
   to be its local rows and the corresponding columns (a square submatrix);
   each processor's off-diagonal portion encompasses the remainder of the
   local matrix (a rectangular submatrix).

   If o_nnz, d_nnz are specified, then o_nz, and d_nz are ignored.

   When calling this routine with a single process communicator, a matrix of
   type SEQAIJ is returned.  If a matrix of type MPIAIJ is desired for this
   type of communicator, use the construction mechanism:
     MatCreate(...,&A); MatSetType(A,MATMPIAIJ); MatSetSizes(A, m,n,M,N); MatMPIAIJSetPreallocation(A,...);

   By default, this format uses inodes (identical nodes) when possible.
   We search for consecutive rows with the same nonzero structure, thereby
   reusing matrix information to achieve increased efficiency.

   Options Database Keys:
+  -mat_no_inode  - Do not use inodes
.  -mat_inode_limit <limit> - Sets inode limit (max limit=5)
-  -mat_aij_oneindex - Internally use indexing starting at 1
        rather than 0.  Note that when calling MatSetValues(),
        the user still MUST index entries starting at 0!


   Example usage:

   Consider the following 8x8 matrix with 34 non-zero values, that is
   assembled across 3 processors. Lets assume that proc0 owns 3 rows,
   proc1 owns 3 rows, proc2 owns 2 rows. This division can be shown
   as follows:

.vb
            1  2  0  |  0  3  0  |  0  4
    Proc0   0  5  6  |  7  0  0  |  8  0
            9  0 10  | 11  0  0  | 12  0
    -------------------------------------
           13  0 14  | 15 16 17  |  0  0
    Proc1   0 18  0  | 19 20 21  |  0  0
            0  0  0  | 22 23  0  | 24  0
    -------------------------------------
    Proc2  25 26 27  |  0  0 28  | 29  0
           30  0  0  | 31 32 33  |  0 34
.ve

   This can be represented as a collection of submatrices as:

.vb
      A B C
      D E F
      G H I
.ve

   Where the submatrices A,B,C are owned by proc0, D,E,F are
   owned by proc1, G,H,I are owned by proc2.

   The 'm' parameters for proc0,proc1,proc2 are 3,3,2 respectively.
   The 'n' parameters for proc0,proc1,proc2 are 3,3,2 respectively.
   The 'M','N' parameters are 8,8, and have the same values on all procs.

   The DIAGONAL submatrices corresponding to proc0,proc1,proc2 are
   submatrices [A], [E], [I] respectively. The OFF-DIAGONAL submatrices
   corresponding to proc0,proc1,proc2 are [BC], [DF], [GH] respectively.
   Internally, each processor stores the DIAGONAL part, and the OFF-DIAGONAL
   part as SeqAIJ matrices. for eg: proc1 will store [E] as a SeqAIJ
   matrix, and [DF] as another SeqAIJ matrix.

   When d_nz, o_nz parameters are specified, d_nz storage elements are
   allocated for every row of the local diagonal submatrix, and o_nz
   storage locations are allocated for every row of the OFF-DIAGONAL submat.
   One way to choose d_nz and o_nz is to use the max nonzeros per local
   rows for each of the local DIAGONAL, and the OFF-DIAGONAL submatrices.
   In this case, the values of d_nz,o_nz are:
.vb
     proc0 : dnz = 2, o_nz = 2
     proc1 : dnz = 3, o_nz = 2
     proc2 : dnz = 1, o_nz = 4
.ve
   We are allocating m*(d_nz+o_nz) storage locations for every proc. This
   translates to 3*(2+2)=12 for proc0, 3*(3+2)=15 for proc1, 2*(1+4)=10
   for proc2. i.e we are using 12+15+10=37 storage locations to store
   34 values.

   When d_nnz, o_nnz parameters are specified, the storage is specified
   for every row, corresponding to both DIAGONAL and OFF-DIAGONAL submatrices.
   In the above case the values for d_nnz,o_nnz are:
.vb
     proc0: d_nnz = [2,2,2] and o_nnz = [2,2,2]
     proc1: d_nnz = [3,3,2] and o_nnz = [2,1,1]
     proc2: d_nnz = [1,1]   and o_nnz = [4,4]
.ve
   Here the space allocated is sum of all the above values i.e 34, and
   hence pre-allocation is perfect.

   Level: intermediate

.keywords: matrix, aij, compressed row, sparse, parallel

.seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIAIJSetPreallocation(), MatMPIAIJSetPreallocationCSR(),
          MPIAIJ, MatCreateMPIAIJWithArrays()
@*/
PetscErrorCode PETSCMAT_DLLEXPORT MatCreateMPIAIJ(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[],Mat *A)
{
  PetscErrorCode ierr;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = MatCreate(comm,A);CHKERRQ(ierr);
  ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  if (size > 1) {
    ierr = MatSetType(*A,MATMPIAIJ);CHKERRQ(ierr);
    ierr = MatMPIAIJSetPreallocation(*A,d_nz,d_nnz,o_nz,o_nnz);CHKERRQ(ierr);
  } else {
    /* single-process communicator: a SEQAIJ matrix is returned (see Notes above) */
    ierr = MatSetType(*A,MATSEQAIJ);CHKERRQ(ierr);
    ierr = MatSeqAIJSetPreallocation(*A,d_nz,d_nnz);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMPIAIJGetSeqAIJ"
/* Returns the diagonal block (Ad), the off-diagonal block (Ao) and the
   local-to-global column map of the off-diagonal block (colmap = garray)
   of an MPIAIJ matrix.  The caller gets references to internal data and
   must not destroy them. */
PetscErrorCode PETSCMAT_DLLEXPORT MatMPIAIJGetSeqAIJ(Mat A,Mat *Ad,Mat *Ao,PetscInt *colmap[])
{
  Mat_MPIAIJ *a = (Mat_MPIAIJ *)A->data;

  PetscFunctionBegin;
  *Ad     = a->A;
  *Ao     = a->B;
  *colmap = a->garray;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetColoring_MPIAIJ"
/* Applies an ISColoring to an MPIAIJ matrix by forwarding it to the two
   sequential blocks: the coloring is applied directly to the diagonal
   block a->A, and a translated coloring is built for the off-diagonal
   block a->B. */
PetscErrorCode MatSetColoring_MPIAIJ(Mat A,ISColoring coloring)
{
  PetscErrorCode ierr;
  PetscInt       i;
  Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;

  PetscFunctionBegin;
  if (coloring->ctype == IS_COLORING_GLOBAL) {
    ISColoringValue *allcolors,*colors;
    ISColoring      ocoloring;

    /* set coloring for diagonal portion */
    ierr = MatSetColoring_SeqAIJ(a->A,coloring);CHKERRQ(ierr);

    /* set coloring for off-diagonal portion: gather the colors of ALL
       global columns, then pick out those of the off-diagonal columns
       via garray (local off-diagonal column -> global column) */
    ierr = ISAllGatherColors(((PetscObject)A)->comm,coloring->n,coloring->colors,PETSC_NULL,&allcolors);CHKERRQ(ierr);
    ierr = PetscMalloc((a->B->cmap->n+1)*sizeof(ISColoringValue),&colors);CHKERRQ(ierr);
    for (i=0; i<a->B->cmap->n; i++) {
      colors[i] = allcolors[a->garray[i]];
    }
    ierr = PetscFree(allcolors);CHKERRQ(ierr);
    ierr = ISColoringCreate(MPI_COMM_SELF,coloring->n,a->B->cmap->n,colors,&ocoloring);CHKERRQ(ierr);
    ierr = MatSetColoring_SeqAIJ(a->B,ocoloring);CHKERRQ(ierr);
    ierr = ISColoringDestroy(ocoloring);CHKERRQ(ierr);
  } else if (coloring->ctype == IS_COLORING_GHOSTED) {
    /* ghosted coloring is indexed by LOCAL numbering: map columns through
       the matrix's global-to-local mapping before looking up colors */
    ISColoringValue *colors;
    PetscInt        *larray;
    ISColoring      ocoloring;

    /* set coloring for diagonal portion */
    ierr = PetscMalloc((a->A->cmap->n+1)*sizeof(PetscInt),&larray);CHKERRQ(ierr);
    for (i=0; i<a->A->cmap->n; i++) {
      larray[i] = i + A->cmap->rstart;   /* global column index of local diagonal column i */
    }
    ierr = ISGlobalToLocalMappingApply(A->mapping,IS_GTOLM_MASK,a->A->cmap->n,larray,PETSC_NULL,larray);CHKERRQ(ierr);
    ierr = PetscMalloc((a->A->cmap->n+1)*sizeof(ISColoringValue),&colors);CHKERRQ(ierr);
    for (i=0; i<a->A->cmap->n; i++) {
      colors[i] = coloring->colors[larray[i]];
    }
    ierr = PetscFree(larray);CHKERRQ(ierr);
    ierr = ISColoringCreate(PETSC_COMM_SELF,coloring->n,a->A->cmap->n,colors,&ocoloring);CHKERRQ(ierr);
    ierr = MatSetColoring_SeqAIJ(a->A,ocoloring);CHKERRQ(ierr);
    ierr = ISColoringDestroy(ocoloring);CHKERRQ(ierr);

    /* set coloring for off-diagonal portion */
    ierr = PetscMalloc((a->B->cmap->n+1)*sizeof(PetscInt),&larray);CHKERRQ(ierr);
    ierr = ISGlobalToLocalMappingApply(A->mapping,IS_GTOLM_MASK,a->B->cmap->n,a->garray,PETSC_NULL,larray);CHKERRQ(ierr);
    ierr = PetscMalloc((a->B->cmap->n+1)*sizeof(ISColoringValue),&colors);CHKERRQ(ierr);
    for (i=0; i<a->B->cmap->n; i++) {
      colors[i] = coloring->colors[larray[i]];
    }
    ierr = PetscFree(larray);CHKERRQ(ierr);
    ierr = ISColoringCreate(MPI_COMM_SELF,coloring->n,a->B->cmap->n,colors,&ocoloring);CHKERRQ(ierr);
    ierr
 = MatSetColoring_SeqAIJ(a->B,ocoloring);CHKERRQ(ierr);
    ierr = ISColoringDestroy(ocoloring);CHKERRQ(ierr);
  } else {
    SETERRQ1(PETSC_ERR_SUP,"No support ISColoringType %d",(int)coloring->ctype);
  }

  PetscFunctionReturn(0);
}

#if defined(PETSC_HAVE_ADIC)
#undef __FUNCT__
#define __FUNCT__ "MatSetValuesAdic_MPIAIJ"
/* Forwards ADIC-computed values to both sequential blocks (diagonal a->A
   and off-diagonal a->B) of the MPIAIJ matrix. */
PetscErrorCode MatSetValuesAdic_MPIAIJ(Mat A,void *advalues)
{
  Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatSetValuesAdic_SeqAIJ(a->A,advalues);CHKERRQ(ierr);
  ierr = MatSetValuesAdic_SeqAIJ(a->B,advalues);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#endif

#undef __FUNCT__
#define __FUNCT__ "MatSetValuesAdifor_MPIAIJ"
/* Forwards ADIFOR-computed values (nl values per row) to both sequential
   blocks of the MPIAIJ matrix. */
PetscErrorCode MatSetValuesAdifor_MPIAIJ(Mat A,PetscInt nl,void *advalues)
{
  Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatSetValuesAdifor_SeqAIJ(a->A,nl,advalues);CHKERRQ(ierr);
  ierr = MatSetValuesAdifor_SeqAIJ(a->B,nl,advalues);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMerge"
/*@
   MatMerge - Creates a single large PETSc matrix by concatenating sequential
              matrices from each processor

   Collective on MPI_Comm

   Input Parameters:
+  comm - the communicator the parallel matrix will live on
.  inmat - the input sequential matrices
.  n - number of local columns (or PETSC_DECIDE)
-  scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX

   Output Parameter:
.  outmat - the parallel matrix generated

   Level: advanced

   Notes: The number of columns of the matrix in EACH processor MUST be the same.

          The input matrix inmat is destroyed by this routine once its values
          have been copied into outmat.

@*/
PetscErrorCode PETSCMAT_DLLEXPORT MatMerge(MPI_Comm comm,Mat inmat,PetscInt n,MatReuse scall,Mat *outmat)
{
  PetscErrorCode ierr;
  PetscInt       m,N,i,rstart,nnz,Ii,*dnz,*onz;
  PetscInt       *indx;
  PetscScalar    *values;

  PetscFunctionBegin;
  /* inmat is sequential, so its global size is this process's contribution */
  ierr = MatGetSize(inmat,&m,&N);CHKERRQ(ierr);
  if (scall == MAT_INITIAL_MATRIX){
    /* count nonzeros in each row, for diagonal and off diagonal portion of matrix */
    if (n == PETSC_DECIDE){
      ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr);
    }
    /* first global row owned by this process = sum of m over lower ranks */
    ierr = MPI_Scan(&m, &rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
    rstart -= m;

    ierr = MatPreallocateInitialize(comm,m,n,dnz,onz);CHKERRQ(ierr);
    for (i=0;i<m;i++) {
      ierr = MatGetRow_SeqAIJ(inmat,i,&nnz,&indx,PETSC_NULL);CHKERRQ(ierr);
      ierr = MatPreallocateSet(i+rstart,nnz,indx,dnz,onz);CHKERRQ(ierr);
      ierr = MatRestoreRow_SeqAIJ(inmat,i,&nnz,&indx,PETSC_NULL);CHKERRQ(ierr);
    }
    /* This routine will ONLY return MPIAIJ type matrix */
    ierr = MatCreate(comm,outmat);CHKERRQ(ierr);
    ierr = MatSetSizes(*outmat,m,n,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
    ierr = MatSetType(*outmat,MATMPIAIJ);CHKERRQ(ierr);
    ierr = MatMPIAIJSetPreallocation(*outmat,0,dnz,0,onz);CHKERRQ(ierr);
    ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);

  } else if (scall == MAT_REUSE_MATRIX){
    ierr = MatGetOwnershipRange(*outmat,&rstart,PETSC_NULL);CHKERRQ(ierr);
  } else {
    SETERRQ1(PETSC_ERR_ARG_WRONG,"Invalid MatReuse %d",(int)scall);
  }

  /* copy the local rows of inmat into the parallel matrix, shifted by rstart */
  for (i=0;i<m;i++) {
    ierr = MatGetRow_SeqAIJ(inmat,i,&nnz,&indx,&values);CHKERRQ(ierr);
    Ii   = i + rstart;
    ierr = MatSetValues(*outmat,1,&Ii,nnz,indx,values,INSERT_VALUES);CHKERRQ(ierr);
    ierr = MatRestoreRow_SeqAIJ(inmat,i,&nnz,&indx,&values);CHKERRQ(ierr);
  }
  ierr = MatDestroy(inmat);CHKERRQ(ierr);   /* the input sequential matrix is consumed here */
  ierr = MatAssemblyBegin(*outmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*outmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

  PetscFunctionReturn(0);
3937c5d6d63eSBarry Smith } 3938c5d6d63eSBarry Smith 3939c5d6d63eSBarry Smith #undef __FUNCT__ 3940c5d6d63eSBarry Smith #define __FUNCT__ "MatFileSplit" 3941dfbe8321SBarry Smith PetscErrorCode MatFileSplit(Mat A,char *outfile) 3942c5d6d63eSBarry Smith { 3943dfbe8321SBarry Smith PetscErrorCode ierr; 394432dcc486SBarry Smith PetscMPIInt rank; 3945b1d57f15SBarry Smith PetscInt m,N,i,rstart,nnz; 3946de4209c5SBarry Smith size_t len; 3947b1d57f15SBarry Smith const PetscInt *indx; 3948c5d6d63eSBarry Smith PetscViewer out; 3949c5d6d63eSBarry Smith char *name; 3950c5d6d63eSBarry Smith Mat B; 3951b3cc6726SBarry Smith const PetscScalar *values; 3952c5d6d63eSBarry Smith 3953c5d6d63eSBarry Smith PetscFunctionBegin; 3954c5d6d63eSBarry Smith ierr = MatGetLocalSize(A,&m,0);CHKERRQ(ierr); 3955c5d6d63eSBarry Smith ierr = MatGetSize(A,0,&N);CHKERRQ(ierr); 3956f204ca49SKris Buschelman /* Should this be the type of the diagonal block of A? */ 3957f69a0ea3SMatthew Knepley ierr = MatCreate(PETSC_COMM_SELF,&B);CHKERRQ(ierr); 3958f69a0ea3SMatthew Knepley ierr = MatSetSizes(B,m,N,m,N);CHKERRQ(ierr); 3959f204ca49SKris Buschelman ierr = MatSetType(B,MATSEQAIJ);CHKERRQ(ierr); 3960f204ca49SKris Buschelman ierr = MatSeqAIJSetPreallocation(B,0,PETSC_NULL);CHKERRQ(ierr); 3961c5d6d63eSBarry Smith ierr = MatGetOwnershipRange(A,&rstart,0);CHKERRQ(ierr); 3962c5d6d63eSBarry Smith for (i=0;i<m;i++) { 3963c5d6d63eSBarry Smith ierr = MatGetRow(A,i+rstart,&nnz,&indx,&values);CHKERRQ(ierr); 3964c5d6d63eSBarry Smith ierr = MatSetValues(B,1,&i,nnz,indx,values,INSERT_VALUES);CHKERRQ(ierr); 3965c5d6d63eSBarry Smith ierr = MatRestoreRow(A,i+rstart,&nnz,&indx,&values);CHKERRQ(ierr); 3966c5d6d63eSBarry Smith } 3967c5d6d63eSBarry Smith ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 3968c5d6d63eSBarry Smith ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 3969c5d6d63eSBarry Smith 39707adad957SLisandro Dalcin ierr = MPI_Comm_rank(((PetscObject)A)->comm,&rank);CHKERRQ(ierr); 3971c5d6d63eSBarry 
Smith ierr = PetscStrlen(outfile,&len);CHKERRQ(ierr); 3972c5d6d63eSBarry Smith ierr = PetscMalloc((len+5)*sizeof(char),&name);CHKERRQ(ierr); 3973c5d6d63eSBarry Smith sprintf(name,"%s.%d",outfile,rank); 3974852598b0SBarry Smith ierr = PetscViewerBinaryOpen(PETSC_COMM_SELF,name,FILE_MODE_APPEND,&out);CHKERRQ(ierr); 3975c5d6d63eSBarry Smith ierr = PetscFree(name); 3976c5d6d63eSBarry Smith ierr = MatView(B,out);CHKERRQ(ierr); 3977c5d6d63eSBarry Smith ierr = PetscViewerDestroy(out);CHKERRQ(ierr); 3978c5d6d63eSBarry Smith ierr = MatDestroy(B);CHKERRQ(ierr); 3979c5d6d63eSBarry Smith PetscFunctionReturn(0); 3980c5d6d63eSBarry Smith } 3981e5f2cdd8SHong Zhang 398251a7d1a8SHong Zhang EXTERN PetscErrorCode MatDestroy_MPIAIJ(Mat); 398351a7d1a8SHong Zhang #undef __FUNCT__ 398451a7d1a8SHong Zhang #define __FUNCT__ "MatDestroy_MPIAIJ_SeqsToMPI" 3985be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatDestroy_MPIAIJ_SeqsToMPI(Mat A) 398651a7d1a8SHong Zhang { 398751a7d1a8SHong Zhang PetscErrorCode ierr; 3988671beff6SHong Zhang Mat_Merge_SeqsToMPI *merge; 3989776b82aeSLisandro Dalcin PetscContainer container; 399051a7d1a8SHong Zhang 399151a7d1a8SHong Zhang PetscFunctionBegin; 3992671beff6SHong Zhang ierr = PetscObjectQuery((PetscObject)A,"MatMergeSeqsToMPI",(PetscObject *)&container);CHKERRQ(ierr); 3993671beff6SHong Zhang if (container) { 3994776b82aeSLisandro Dalcin ierr = PetscContainerGetPointer(container,(void **)&merge);CHKERRQ(ierr); 399551a7d1a8SHong Zhang ierr = PetscFree(merge->id_r);CHKERRQ(ierr); 39963e06a4e6SHong Zhang ierr = PetscFree(merge->len_s);CHKERRQ(ierr); 39973e06a4e6SHong Zhang ierr = PetscFree(merge->len_r);CHKERRQ(ierr); 399851a7d1a8SHong Zhang ierr = PetscFree(merge->bi);CHKERRQ(ierr); 399951a7d1a8SHong Zhang ierr = PetscFree(merge->bj);CHKERRQ(ierr); 400002c68681SHong Zhang ierr = PetscFree(merge->buf_ri);CHKERRQ(ierr); 400102c68681SHong Zhang ierr = PetscFree(merge->buf_rj);CHKERRQ(ierr); 400205b42c5fSBarry Smith ierr = 
PetscFree(merge->coi);CHKERRQ(ierr); 400305b42c5fSBarry Smith ierr = PetscFree(merge->coj);CHKERRQ(ierr); 400405b42c5fSBarry Smith ierr = PetscFree(merge->owners_co);CHKERRQ(ierr); 400526283091SBarry Smith ierr = PetscLayoutDestroy(merge->rowmap);CHKERRQ(ierr); 4006671beff6SHong Zhang 4007776b82aeSLisandro Dalcin ierr = PetscContainerDestroy(container);CHKERRQ(ierr); 4008671beff6SHong Zhang ierr = PetscObjectCompose((PetscObject)A,"MatMergeSeqsToMPI",0);CHKERRQ(ierr); 4009671beff6SHong Zhang } 401051a7d1a8SHong Zhang ierr = PetscFree(merge);CHKERRQ(ierr); 401151a7d1a8SHong Zhang 401251a7d1a8SHong Zhang ierr = MatDestroy_MPIAIJ(A);CHKERRQ(ierr); 401351a7d1a8SHong Zhang PetscFunctionReturn(0); 401451a7d1a8SHong Zhang } 401551a7d1a8SHong Zhang 40167c4f633dSBarry Smith #include "../src/mat/utils/freespace.h" 4017be0fcf8dSHong Zhang #include "petscbt.h" 40184ebed01fSBarry Smith 4019e5f2cdd8SHong Zhang #undef __FUNCT__ 402038f152feSBarry Smith #define __FUNCT__ "MatMerge_SeqsToMPINumeric" 4021e5f2cdd8SHong Zhang /*@C 4022f08fae4eSHong Zhang MatMerge_SeqsToMPI - Creates a MPIAIJ matrix by adding sequential 4023e5f2cdd8SHong Zhang matrices from each processor 4024e5f2cdd8SHong Zhang 4025e5f2cdd8SHong Zhang Collective on MPI_Comm 4026e5f2cdd8SHong Zhang 4027e5f2cdd8SHong Zhang Input Parameters: 4028e5f2cdd8SHong Zhang + comm - the communicators the parallel matrix will live on 4029f08fae4eSHong Zhang . seqmat - the input sequential matrices 40300e36024fSHong Zhang . m - number of local rows (or PETSC_DECIDE) 40310e36024fSHong Zhang . n - number of local columns (or PETSC_DECIDE) 4032e5f2cdd8SHong Zhang - scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX 4033e5f2cdd8SHong Zhang 4034e5f2cdd8SHong Zhang Output Parameter: 4035f08fae4eSHong Zhang . 
mpimat - the parallel matrix generated 4036e5f2cdd8SHong Zhang 4037e5f2cdd8SHong Zhang Level: advanced 4038e5f2cdd8SHong Zhang 4039affca5deSHong Zhang Notes: 4040affca5deSHong Zhang The dimensions of the sequential matrix in each processor MUST be the same. 4041affca5deSHong Zhang The input seqmat is included into the container "Mat_Merge_SeqsToMPI", and will be 4042affca5deSHong Zhang destroyed when mpimat is destroyed. Call PetscObjectQuery() to access seqmat. 4043e5f2cdd8SHong Zhang @*/ 4044be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatMerge_SeqsToMPINumeric(Mat seqmat,Mat mpimat) 404555d1abb9SHong Zhang { 404655d1abb9SHong Zhang PetscErrorCode ierr; 40477adad957SLisandro Dalcin MPI_Comm comm=((PetscObject)mpimat)->comm; 404855d1abb9SHong Zhang Mat_SeqAIJ *a=(Mat_SeqAIJ*)seqmat->data; 4049b1d57f15SBarry Smith PetscMPIInt size,rank,taga,*len_s; 4050d0f46423SBarry Smith PetscInt N=mpimat->cmap->N,i,j,*owners,*ai=a->i,*aj=a->j; 4051b1d57f15SBarry Smith PetscInt proc,m; 4052b1d57f15SBarry Smith PetscInt **buf_ri,**buf_rj; 4053b1d57f15SBarry Smith PetscInt k,anzi,*bj_i,*bi,*bj,arow,bnzi,nextaj; 4054b1d57f15SBarry Smith PetscInt nrows,**buf_ri_k,**nextrow,**nextai; 405555d1abb9SHong Zhang MPI_Request *s_waits,*r_waits; 405655d1abb9SHong Zhang MPI_Status *status; 4057a77337e4SBarry Smith MatScalar *aa=a->a; 4058dd6ea824SBarry Smith MatScalar **abuf_r,*ba_i; 405955d1abb9SHong Zhang Mat_Merge_SeqsToMPI *merge; 4060776b82aeSLisandro Dalcin PetscContainer container; 406155d1abb9SHong Zhang 406255d1abb9SHong Zhang PetscFunctionBegin; 40634ebed01fSBarry Smith ierr = PetscLogEventBegin(MAT_Seqstompinum,seqmat,0,0,0);CHKERRQ(ierr); 40643c2c1871SHong Zhang 406555d1abb9SHong Zhang ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 406655d1abb9SHong Zhang ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 406755d1abb9SHong Zhang 406855d1abb9SHong Zhang ierr = PetscObjectQuery((PetscObject)mpimat,"MatMergeSeqsToMPI",(PetscObject *)&container);CHKERRQ(ierr); 
406955d1abb9SHong Zhang if (container) { 4070776b82aeSLisandro Dalcin ierr = PetscContainerGetPointer(container,(void **)&merge);CHKERRQ(ierr); 407155d1abb9SHong Zhang } 407255d1abb9SHong Zhang bi = merge->bi; 407355d1abb9SHong Zhang bj = merge->bj; 407455d1abb9SHong Zhang buf_ri = merge->buf_ri; 407555d1abb9SHong Zhang buf_rj = merge->buf_rj; 407655d1abb9SHong Zhang 407755d1abb9SHong Zhang ierr = PetscMalloc(size*sizeof(MPI_Status),&status);CHKERRQ(ierr); 40787a2fc3feSBarry Smith owners = merge->rowmap->range; 407955d1abb9SHong Zhang len_s = merge->len_s; 408055d1abb9SHong Zhang 408155d1abb9SHong Zhang /* send and recv matrix values */ 408255d1abb9SHong Zhang /*-----------------------------*/ 4083357abbc8SBarry Smith ierr = PetscObjectGetNewTag((PetscObject)mpimat,&taga);CHKERRQ(ierr); 408455d1abb9SHong Zhang ierr = PetscPostIrecvScalar(comm,taga,merge->nrecv,merge->id_r,merge->len_r,&abuf_r,&r_waits);CHKERRQ(ierr); 408555d1abb9SHong Zhang 408655d1abb9SHong Zhang ierr = PetscMalloc((merge->nsend+1)*sizeof(MPI_Request),&s_waits);CHKERRQ(ierr); 408755d1abb9SHong Zhang for (proc=0,k=0; proc<size; proc++){ 408855d1abb9SHong Zhang if (!len_s[proc]) continue; 408955d1abb9SHong Zhang i = owners[proc]; 409055d1abb9SHong Zhang ierr = MPI_Isend(aa+ai[i],len_s[proc],MPIU_MATSCALAR,proc,taga,comm,s_waits+k);CHKERRQ(ierr); 409155d1abb9SHong Zhang k++; 409255d1abb9SHong Zhang } 409355d1abb9SHong Zhang 40940c468ba9SBarry Smith if (merge->nrecv) {ierr = MPI_Waitall(merge->nrecv,r_waits,status);CHKERRQ(ierr);} 40950c468ba9SBarry Smith if (merge->nsend) {ierr = MPI_Waitall(merge->nsend,s_waits,status);CHKERRQ(ierr);} 409655d1abb9SHong Zhang ierr = PetscFree(status);CHKERRQ(ierr); 409755d1abb9SHong Zhang 409855d1abb9SHong Zhang ierr = PetscFree(s_waits);CHKERRQ(ierr); 409955d1abb9SHong Zhang ierr = PetscFree(r_waits);CHKERRQ(ierr); 410055d1abb9SHong Zhang 410155d1abb9SHong Zhang /* insert mat values of mpimat */ 410255d1abb9SHong Zhang /*----------------------------*/ 
4103a77337e4SBarry Smith ierr = PetscMalloc(N*sizeof(PetscScalar),&ba_i);CHKERRQ(ierr); 4104b1d57f15SBarry Smith ierr = PetscMalloc((3*merge->nrecv+1)*sizeof(PetscInt**),&buf_ri_k);CHKERRQ(ierr); 410555d1abb9SHong Zhang nextrow = buf_ri_k + merge->nrecv; 410655d1abb9SHong Zhang nextai = nextrow + merge->nrecv; 410755d1abb9SHong Zhang 410855d1abb9SHong Zhang for (k=0; k<merge->nrecv; k++){ 410955d1abb9SHong Zhang buf_ri_k[k] = buf_ri[k]; /* beginning of k-th recved i-structure */ 411055d1abb9SHong Zhang nrows = *(buf_ri_k[k]); 411155d1abb9SHong Zhang nextrow[k] = buf_ri_k[k]+1; /* next row number of k-th recved i-structure */ 411255d1abb9SHong Zhang nextai[k] = buf_ri_k[k] + (nrows + 1);/* poins to the next i-structure of k-th recved i-structure */ 411355d1abb9SHong Zhang } 411455d1abb9SHong Zhang 411555d1abb9SHong Zhang /* set values of ba */ 41167a2fc3feSBarry Smith m = merge->rowmap->n; 411755d1abb9SHong Zhang for (i=0; i<m; i++) { 411855d1abb9SHong Zhang arow = owners[rank] + i; 411955d1abb9SHong Zhang bj_i = bj+bi[i]; /* col indices of the i-th row of mpimat */ 412055d1abb9SHong Zhang bnzi = bi[i+1] - bi[i]; 4121a77337e4SBarry Smith ierr = PetscMemzero(ba_i,bnzi*sizeof(PetscScalar));CHKERRQ(ierr); 412255d1abb9SHong Zhang 412355d1abb9SHong Zhang /* add local non-zero vals of this proc's seqmat into ba */ 412455d1abb9SHong Zhang anzi = ai[arow+1] - ai[arow]; 412555d1abb9SHong Zhang aj = a->j + ai[arow]; 412655d1abb9SHong Zhang aa = a->a + ai[arow]; 412755d1abb9SHong Zhang nextaj = 0; 412855d1abb9SHong Zhang for (j=0; nextaj<anzi; j++){ 412955d1abb9SHong Zhang if (*(bj_i + j) == aj[nextaj]){ /* bcol == acol */ 413055d1abb9SHong Zhang ba_i[j] += aa[nextaj++]; 413155d1abb9SHong Zhang } 413255d1abb9SHong Zhang } 413355d1abb9SHong Zhang 413455d1abb9SHong Zhang /* add received vals into ba */ 413555d1abb9SHong Zhang for (k=0; k<merge->nrecv; k++){ /* k-th received message */ 413655d1abb9SHong Zhang /* i-th row */ 413755d1abb9SHong Zhang if (i == *nextrow[k]) { 
413855d1abb9SHong Zhang anzi = *(nextai[k]+1) - *nextai[k]; 413955d1abb9SHong Zhang aj = buf_rj[k] + *(nextai[k]); 414055d1abb9SHong Zhang aa = abuf_r[k] + *(nextai[k]); 414155d1abb9SHong Zhang nextaj = 0; 414255d1abb9SHong Zhang for (j=0; nextaj<anzi; j++){ 414355d1abb9SHong Zhang if (*(bj_i + j) == aj[nextaj]){ /* bcol == acol */ 414455d1abb9SHong Zhang ba_i[j] += aa[nextaj++]; 414555d1abb9SHong Zhang } 414655d1abb9SHong Zhang } 414755d1abb9SHong Zhang nextrow[k]++; nextai[k]++; 414855d1abb9SHong Zhang } 414955d1abb9SHong Zhang } 415055d1abb9SHong Zhang ierr = MatSetValues(mpimat,1,&arow,bnzi,bj_i,ba_i,INSERT_VALUES);CHKERRQ(ierr); 415155d1abb9SHong Zhang } 415255d1abb9SHong Zhang ierr = MatAssemblyBegin(mpimat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 415355d1abb9SHong Zhang ierr = MatAssemblyEnd(mpimat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 415455d1abb9SHong Zhang 415555d1abb9SHong Zhang ierr = PetscFree(abuf_r);CHKERRQ(ierr); 415655d1abb9SHong Zhang ierr = PetscFree(ba_i);CHKERRQ(ierr); 415755d1abb9SHong Zhang ierr = PetscFree(buf_ri_k);CHKERRQ(ierr); 41584ebed01fSBarry Smith ierr = PetscLogEventEnd(MAT_Seqstompinum,seqmat,0,0,0);CHKERRQ(ierr); 415955d1abb9SHong Zhang PetscFunctionReturn(0); 416055d1abb9SHong Zhang } 416138f152feSBarry Smith 416238f152feSBarry Smith #undef __FUNCT__ 416338f152feSBarry Smith #define __FUNCT__ "MatMerge_SeqsToMPISymbolic" 4164be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatMerge_SeqsToMPISymbolic(MPI_Comm comm,Mat seqmat,PetscInt m,PetscInt n,Mat *mpimat) 4165e5f2cdd8SHong Zhang { 4166f08fae4eSHong Zhang PetscErrorCode ierr; 416755a3bba9SHong Zhang Mat B_mpi; 4168c2234fe3SHong Zhang Mat_SeqAIJ *a=(Mat_SeqAIJ*)seqmat->data; 4169b1d57f15SBarry Smith PetscMPIInt size,rank,tagi,tagj,*len_s,*len_si,*len_ri; 4170b1d57f15SBarry Smith PetscInt **buf_rj,**buf_ri,**buf_ri_k; 4171d0f46423SBarry Smith PetscInt M=seqmat->rmap->n,N=seqmat->cmap->n,i,*owners,*ai=a->i,*aj=a->j; 4172b1d57f15SBarry Smith PetscInt len,proc,*dnz,*onz; 
4173b1d57f15SBarry Smith PetscInt k,anzi,*bi,*bj,*lnk,nlnk,arow,bnzi,nspacedouble=0; 4174b1d57f15SBarry Smith PetscInt nrows,*buf_s,*buf_si,*buf_si_i,**nextrow,**nextai; 417555d1abb9SHong Zhang MPI_Request *si_waits,*sj_waits,*ri_waits,*rj_waits; 417658cb9c82SHong Zhang MPI_Status *status; 4177a1a86e44SBarry Smith PetscFreeSpaceList free_space=PETSC_NULL,current_space=PETSC_NULL; 4178be0fcf8dSHong Zhang PetscBT lnkbt; 417951a7d1a8SHong Zhang Mat_Merge_SeqsToMPI *merge; 4180776b82aeSLisandro Dalcin PetscContainer container; 418102c68681SHong Zhang 4182e5f2cdd8SHong Zhang PetscFunctionBegin; 41834ebed01fSBarry Smith ierr = PetscLogEventBegin(MAT_Seqstompisym,seqmat,0,0,0);CHKERRQ(ierr); 41843c2c1871SHong Zhang 418538f152feSBarry Smith /* make sure it is a PETSc comm */ 418638f152feSBarry Smith ierr = PetscCommDuplicate(comm,&comm,PETSC_NULL);CHKERRQ(ierr); 4187e5f2cdd8SHong Zhang ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 4188e5f2cdd8SHong Zhang ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 418955d1abb9SHong Zhang 419051a7d1a8SHong Zhang ierr = PetscNew(Mat_Merge_SeqsToMPI,&merge);CHKERRQ(ierr); 4191c2234fe3SHong Zhang ierr = PetscMalloc(size*sizeof(MPI_Status),&status);CHKERRQ(ierr); 4192e5f2cdd8SHong Zhang 41936abd8857SHong Zhang /* determine row ownership */ 4194f08fae4eSHong Zhang /*---------------------------------------------------------*/ 419526283091SBarry Smith ierr = PetscLayoutCreate(comm,&merge->rowmap);CHKERRQ(ierr); 419626283091SBarry Smith ierr = PetscLayoutSetLocalSize(merge->rowmap,m);CHKERRQ(ierr); 419726283091SBarry Smith ierr = PetscLayoutSetSize(merge->rowmap,M);CHKERRQ(ierr); 419826283091SBarry Smith ierr = PetscLayoutSetBlockSize(merge->rowmap,1);CHKERRQ(ierr); 419926283091SBarry Smith ierr = PetscLayoutSetUp(merge->rowmap);CHKERRQ(ierr); 4200b1d57f15SBarry Smith ierr = PetscMalloc(size*sizeof(PetscMPIInt),&len_si);CHKERRQ(ierr); 4201b1d57f15SBarry Smith ierr = PetscMalloc(size*sizeof(PetscMPIInt),&merge->len_s);CHKERRQ(ierr); 
420255d1abb9SHong Zhang 42037a2fc3feSBarry Smith m = merge->rowmap->n; 42047a2fc3feSBarry Smith M = merge->rowmap->N; 42057a2fc3feSBarry Smith owners = merge->rowmap->range; 42066abd8857SHong Zhang 42076abd8857SHong Zhang /* determine the number of messages to send, their lengths */ 42086abd8857SHong Zhang /*---------------------------------------------------------*/ 42093e06a4e6SHong Zhang len_s = merge->len_s; 421051a7d1a8SHong Zhang 42112257cef7SHong Zhang len = 0; /* length of buf_si[] */ 4212c2234fe3SHong Zhang merge->nsend = 0; 4213409913e3SHong Zhang for (proc=0; proc<size; proc++){ 42142257cef7SHong Zhang len_si[proc] = 0; 42153e06a4e6SHong Zhang if (proc == rank){ 42166abd8857SHong Zhang len_s[proc] = 0; 42173e06a4e6SHong Zhang } else { 421802c68681SHong Zhang len_si[proc] = owners[proc+1] - owners[proc] + 1; 42193e06a4e6SHong Zhang len_s[proc] = ai[owners[proc+1]] - ai[owners[proc]]; /* num of rows to be sent to [proc] */ 42203e06a4e6SHong Zhang } 42213e06a4e6SHong Zhang if (len_s[proc]) { 4222c2234fe3SHong Zhang merge->nsend++; 42232257cef7SHong Zhang nrows = 0; 42242257cef7SHong Zhang for (i=owners[proc]; i<owners[proc+1]; i++){ 42252257cef7SHong Zhang if (ai[i+1] > ai[i]) nrows++; 42262257cef7SHong Zhang } 42272257cef7SHong Zhang len_si[proc] = 2*(nrows+1); 42282257cef7SHong Zhang len += len_si[proc]; 4229409913e3SHong Zhang } 423058cb9c82SHong Zhang } 4231409913e3SHong Zhang 42322257cef7SHong Zhang /* determine the number and length of messages to receive for ij-structure */ 42332257cef7SHong Zhang /*-------------------------------------------------------------------------*/ 423451a7d1a8SHong Zhang ierr = PetscGatherNumberOfMessages(comm,PETSC_NULL,len_s,&merge->nrecv);CHKERRQ(ierr); 423555d1abb9SHong Zhang ierr = PetscGatherMessageLengths2(comm,merge->nsend,merge->nrecv,len_s,len_si,&merge->id_r,&merge->len_r,&len_ri);CHKERRQ(ierr); 4236671beff6SHong Zhang 42373e06a4e6SHong Zhang /* post the Irecv of j-structure */ 42383e06a4e6SHong Zhang 
/*-------------------------------*/ 42392c72b5baSSatish Balay ierr = PetscCommGetNewTag(comm,&tagj);CHKERRQ(ierr); 42403e06a4e6SHong Zhang ierr = PetscPostIrecvInt(comm,tagj,merge->nrecv,merge->id_r,merge->len_r,&buf_rj,&rj_waits);CHKERRQ(ierr); 424102c68681SHong Zhang 42423e06a4e6SHong Zhang /* post the Isend of j-structure */ 4243affca5deSHong Zhang /*--------------------------------*/ 42442257cef7SHong Zhang ierr = PetscMalloc((2*merge->nsend+1)*sizeof(MPI_Request),&si_waits);CHKERRQ(ierr); 424502c68681SHong Zhang sj_waits = si_waits + merge->nsend; 42463e06a4e6SHong Zhang 42472257cef7SHong Zhang for (proc=0, k=0; proc<size; proc++){ 4248409913e3SHong Zhang if (!len_s[proc]) continue; 424902c68681SHong Zhang i = owners[proc]; 4250b1d57f15SBarry Smith ierr = MPI_Isend(aj+ai[i],len_s[proc],MPIU_INT,proc,tagj,comm,sj_waits+k);CHKERRQ(ierr); 425151a7d1a8SHong Zhang k++; 425251a7d1a8SHong Zhang } 425351a7d1a8SHong Zhang 42543e06a4e6SHong Zhang /* receives and sends of j-structure are complete */ 42553e06a4e6SHong Zhang /*------------------------------------------------*/ 42560c468ba9SBarry Smith if (merge->nrecv) {ierr = MPI_Waitall(merge->nrecv,rj_waits,status);CHKERRQ(ierr);} 42570c468ba9SBarry Smith if (merge->nsend) {ierr = MPI_Waitall(merge->nsend,sj_waits,status);CHKERRQ(ierr);} 425802c68681SHong Zhang 425902c68681SHong Zhang /* send and recv i-structure */ 426002c68681SHong Zhang /*---------------------------*/ 42612c72b5baSSatish Balay ierr = PetscCommGetNewTag(comm,&tagi);CHKERRQ(ierr); 426202c68681SHong Zhang ierr = PetscPostIrecvInt(comm,tagi,merge->nrecv,merge->id_r,len_ri,&buf_ri,&ri_waits);CHKERRQ(ierr); 426302c68681SHong Zhang 4264b1d57f15SBarry Smith ierr = PetscMalloc((len+1)*sizeof(PetscInt),&buf_s);CHKERRQ(ierr); 42653e06a4e6SHong Zhang buf_si = buf_s; /* points to the beginning of k-th msg to be sent */ 42662257cef7SHong Zhang for (proc=0,k=0; proc<size; proc++){ 426702c68681SHong Zhang if (!len_s[proc]) continue; 42683e06a4e6SHong Zhang /* form 
outgoing message for i-structure: 42693e06a4e6SHong Zhang buf_si[0]: nrows to be sent 42703e06a4e6SHong Zhang [1:nrows]: row index (global) 42713e06a4e6SHong Zhang [nrows+1:2*nrows+1]: i-structure index 42723e06a4e6SHong Zhang */ 42733e06a4e6SHong Zhang /*-------------------------------------------*/ 42742257cef7SHong Zhang nrows = len_si[proc]/2 - 1; 42753e06a4e6SHong Zhang buf_si_i = buf_si + nrows+1; 42763e06a4e6SHong Zhang buf_si[0] = nrows; 42773e06a4e6SHong Zhang buf_si_i[0] = 0; 42783e06a4e6SHong Zhang nrows = 0; 42793e06a4e6SHong Zhang for (i=owners[proc]; i<owners[proc+1]; i++){ 42803e06a4e6SHong Zhang anzi = ai[i+1] - ai[i]; 42813e06a4e6SHong Zhang if (anzi) { 42823e06a4e6SHong Zhang buf_si_i[nrows+1] = buf_si_i[nrows] + anzi; /* i-structure */ 42833e06a4e6SHong Zhang buf_si[nrows+1] = i-owners[proc]; /* local row index */ 42843e06a4e6SHong Zhang nrows++; 42853e06a4e6SHong Zhang } 42863e06a4e6SHong Zhang } 4287b1d57f15SBarry Smith ierr = MPI_Isend(buf_si,len_si[proc],MPIU_INT,proc,tagi,comm,si_waits+k);CHKERRQ(ierr); 428802c68681SHong Zhang k++; 42892257cef7SHong Zhang buf_si += len_si[proc]; 429002c68681SHong Zhang } 42912257cef7SHong Zhang 42920c468ba9SBarry Smith if (merge->nrecv) {ierr = MPI_Waitall(merge->nrecv,ri_waits,status);CHKERRQ(ierr);} 42930c468ba9SBarry Smith if (merge->nsend) {ierr = MPI_Waitall(merge->nsend,si_waits,status);CHKERRQ(ierr);} 429402c68681SHong Zhang 4295ae15b995SBarry Smith ierr = PetscInfo2(seqmat,"nsend: %D, nrecv: %D\n",merge->nsend,merge->nrecv);CHKERRQ(ierr); 42963e06a4e6SHong Zhang for (i=0; i<merge->nrecv; i++){ 4297ae15b995SBarry Smith ierr = PetscInfo3(seqmat,"recv len_ri=%D, len_rj=%D from [%D]\n",len_ri[i],merge->len_r[i],merge->id_r[i]);CHKERRQ(ierr); 42983e06a4e6SHong Zhang } 42993e06a4e6SHong Zhang 43003e06a4e6SHong Zhang ierr = PetscFree(len_si);CHKERRQ(ierr); 430102c68681SHong Zhang ierr = PetscFree(len_ri);CHKERRQ(ierr); 430202c68681SHong Zhang ierr = PetscFree(rj_waits);CHKERRQ(ierr); 43033e06a4e6SHong Zhang 
ierr = PetscFree(si_waits);CHKERRQ(ierr); 43042257cef7SHong Zhang ierr = PetscFree(ri_waits);CHKERRQ(ierr); 43053e06a4e6SHong Zhang ierr = PetscFree(buf_s);CHKERRQ(ierr); 4306bcc1bcd5SHong Zhang ierr = PetscFree(status);CHKERRQ(ierr); 430758cb9c82SHong Zhang 4308bcc1bcd5SHong Zhang /* compute a local seq matrix in each processor */ 4309bcc1bcd5SHong Zhang /*----------------------------------------------*/ 431058cb9c82SHong Zhang /* allocate bi array and free space for accumulating nonzero column info */ 4311b1d57f15SBarry Smith ierr = PetscMalloc((m+1)*sizeof(PetscInt),&bi);CHKERRQ(ierr); 431258cb9c82SHong Zhang bi[0] = 0; 431358cb9c82SHong Zhang 4314be0fcf8dSHong Zhang /* create and initialize a linked list */ 4315be0fcf8dSHong Zhang nlnk = N+1; 4316be0fcf8dSHong Zhang ierr = PetscLLCreate(N,N,nlnk,lnk,lnkbt);CHKERRQ(ierr); 431758cb9c82SHong Zhang 4318bcc1bcd5SHong Zhang /* initial FreeSpace size is 2*(num of local nnz(seqmat)) */ 431958cb9c82SHong Zhang len = 0; 4320bcc1bcd5SHong Zhang len = ai[owners[rank+1]] - ai[owners[rank]]; 4321a1a86e44SBarry Smith ierr = PetscFreeSpaceGet((PetscInt)(2*len+1),&free_space);CHKERRQ(ierr); 432258cb9c82SHong Zhang current_space = free_space; 432358cb9c82SHong Zhang 4324bcc1bcd5SHong Zhang /* determine symbolic info for each local row */ 4325b1d57f15SBarry Smith ierr = PetscMalloc((3*merge->nrecv+1)*sizeof(PetscInt**),&buf_ri_k);CHKERRQ(ierr); 43263e06a4e6SHong Zhang nextrow = buf_ri_k + merge->nrecv; 43273e06a4e6SHong Zhang nextai = nextrow + merge->nrecv; 43283e06a4e6SHong Zhang for (k=0; k<merge->nrecv; k++){ 43292257cef7SHong Zhang buf_ri_k[k] = buf_ri[k]; /* beginning of k-th recved i-structure */ 43303e06a4e6SHong Zhang nrows = *buf_ri_k[k]; 43313e06a4e6SHong Zhang nextrow[k] = buf_ri_k[k] + 1; /* next row number of k-th recved i-structure */ 43322257cef7SHong Zhang nextai[k] = buf_ri_k[k] + (nrows + 1);/* poins to the next i-structure of k-th recved i-structure */ 43333e06a4e6SHong Zhang } 43342257cef7SHong Zhang 
4335bcc1bcd5SHong Zhang ierr = MatPreallocateInitialize(comm,m,n,dnz,onz);CHKERRQ(ierr); 4336bcc1bcd5SHong Zhang len = 0; 433758cb9c82SHong Zhang for (i=0;i<m;i++) { 433858cb9c82SHong Zhang bnzi = 0; 433958cb9c82SHong Zhang /* add local non-zero cols of this proc's seqmat into lnk */ 434058cb9c82SHong Zhang arow = owners[rank] + i; 434158cb9c82SHong Zhang anzi = ai[arow+1] - ai[arow]; 434258cb9c82SHong Zhang aj = a->j + ai[arow]; 4343be0fcf8dSHong Zhang ierr = PetscLLAdd(anzi,aj,N,nlnk,lnk,lnkbt);CHKERRQ(ierr); 434458cb9c82SHong Zhang bnzi += nlnk; 434558cb9c82SHong Zhang /* add received col data into lnk */ 434651a7d1a8SHong Zhang for (k=0; k<merge->nrecv; k++){ /* k-th received message */ 434755d1abb9SHong Zhang if (i == *nextrow[k]) { /* i-th row */ 43483e06a4e6SHong Zhang anzi = *(nextai[k]+1) - *nextai[k]; 43493e06a4e6SHong Zhang aj = buf_rj[k] + *nextai[k]; 43503e06a4e6SHong Zhang ierr = PetscLLAdd(anzi,aj,N,nlnk,lnk,lnkbt);CHKERRQ(ierr); 43513e06a4e6SHong Zhang bnzi += nlnk; 43523e06a4e6SHong Zhang nextrow[k]++; nextai[k]++; 43533e06a4e6SHong Zhang } 435458cb9c82SHong Zhang } 4355bcc1bcd5SHong Zhang if (len < bnzi) len = bnzi; /* =max(bnzi) */ 435658cb9c82SHong Zhang 435758cb9c82SHong Zhang /* if free space is not available, make more free space */ 435858cb9c82SHong Zhang if (current_space->local_remaining<bnzi) { 43594238b7adSHong Zhang ierr = PetscFreeSpaceGet(bnzi+current_space->total_array_size,¤t_space);CHKERRQ(ierr); 436058cb9c82SHong Zhang nspacedouble++; 436158cb9c82SHong Zhang } 436258cb9c82SHong Zhang /* copy data into free space, then initialize lnk */ 4363be0fcf8dSHong Zhang ierr = PetscLLClean(N,N,bnzi,lnk,current_space->array,lnkbt);CHKERRQ(ierr); 4364bcc1bcd5SHong Zhang ierr = MatPreallocateSet(i+owners[rank],bnzi,current_space->array,dnz,onz);CHKERRQ(ierr); 4365bcc1bcd5SHong Zhang 436658cb9c82SHong Zhang current_space->array += bnzi; 436758cb9c82SHong Zhang current_space->local_used += bnzi; 436858cb9c82SHong Zhang 
current_space->local_remaining -= bnzi; 436958cb9c82SHong Zhang 437058cb9c82SHong Zhang bi[i+1] = bi[i] + bnzi; 437158cb9c82SHong Zhang } 4372bcc1bcd5SHong Zhang 4373bcc1bcd5SHong Zhang ierr = PetscFree(buf_ri_k);CHKERRQ(ierr); 4374bcc1bcd5SHong Zhang 4375b1d57f15SBarry Smith ierr = PetscMalloc((bi[m]+1)*sizeof(PetscInt),&bj);CHKERRQ(ierr); 4376a1a86e44SBarry Smith ierr = PetscFreeSpaceContiguous(&free_space,bj);CHKERRQ(ierr); 4377be0fcf8dSHong Zhang ierr = PetscLLDestroy(lnk,lnkbt);CHKERRQ(ierr); 4378409913e3SHong Zhang 4379bcc1bcd5SHong Zhang /* create symbolic parallel matrix B_mpi */ 4380bcc1bcd5SHong Zhang /*---------------------------------------*/ 4381f69a0ea3SMatthew Knepley ierr = MatCreate(comm,&B_mpi);CHKERRQ(ierr); 438254b84b50SHong Zhang if (n==PETSC_DECIDE) { 4383f69a0ea3SMatthew Knepley ierr = MatSetSizes(B_mpi,m,n,PETSC_DETERMINE,N);CHKERRQ(ierr); 438454b84b50SHong Zhang } else { 4385f69a0ea3SMatthew Knepley ierr = MatSetSizes(B_mpi,m,n,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr); 438654b84b50SHong Zhang } 4387bcc1bcd5SHong Zhang ierr = MatSetType(B_mpi,MATMPIAIJ);CHKERRQ(ierr); 4388bcc1bcd5SHong Zhang ierr = MatMPIAIJSetPreallocation(B_mpi,0,dnz,0,onz);CHKERRQ(ierr); 4389bcc1bcd5SHong Zhang ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr); 439058cb9c82SHong Zhang 43916abd8857SHong Zhang /* B_mpi is not ready for use - assembly will be done by MatMerge_SeqsToMPINumeric() */ 43926abd8857SHong Zhang B_mpi->assembled = PETSC_FALSE; 4393affca5deSHong Zhang B_mpi->ops->destroy = MatDestroy_MPIAIJ_SeqsToMPI; 4394affca5deSHong Zhang merge->bi = bi; 4395affca5deSHong Zhang merge->bj = bj; 439602c68681SHong Zhang merge->buf_ri = buf_ri; 439702c68681SHong Zhang merge->buf_rj = buf_rj; 4398de0260b3SHong Zhang merge->coi = PETSC_NULL; 4399de0260b3SHong Zhang merge->coj = PETSC_NULL; 4400de0260b3SHong Zhang merge->owners_co = PETSC_NULL; 4401affca5deSHong Zhang 4402affca5deSHong Zhang /* attach the supporting struct to B_mpi for reuse */ 
4403776b82aeSLisandro Dalcin ierr = PetscContainerCreate(PETSC_COMM_SELF,&container);CHKERRQ(ierr); 4404776b82aeSLisandro Dalcin ierr = PetscContainerSetPointer(container,merge);CHKERRQ(ierr); 4405affca5deSHong Zhang ierr = PetscObjectCompose((PetscObject)B_mpi,"MatMergeSeqsToMPI",(PetscObject)container);CHKERRQ(ierr); 4406affca5deSHong Zhang *mpimat = B_mpi; 440738f152feSBarry Smith 440838f152feSBarry Smith ierr = PetscCommDestroy(&comm);CHKERRQ(ierr); 44094ebed01fSBarry Smith ierr = PetscLogEventEnd(MAT_Seqstompisym,seqmat,0,0,0);CHKERRQ(ierr); 4410e5f2cdd8SHong Zhang PetscFunctionReturn(0); 4411e5f2cdd8SHong Zhang } 441225616d81SHong Zhang 441338f152feSBarry Smith #undef __FUNCT__ 441438f152feSBarry Smith #define __FUNCT__ "MatMerge_SeqsToMPI" 4415be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatMerge_SeqsToMPI(MPI_Comm comm,Mat seqmat,PetscInt m,PetscInt n,MatReuse scall,Mat *mpimat) 441655d1abb9SHong Zhang { 441755d1abb9SHong Zhang PetscErrorCode ierr; 441855d1abb9SHong Zhang 441955d1abb9SHong Zhang PetscFunctionBegin; 44204ebed01fSBarry Smith ierr = PetscLogEventBegin(MAT_Seqstompi,seqmat,0,0,0);CHKERRQ(ierr); 442155d1abb9SHong Zhang if (scall == MAT_INITIAL_MATRIX){ 442255d1abb9SHong Zhang ierr = MatMerge_SeqsToMPISymbolic(comm,seqmat,m,n,mpimat);CHKERRQ(ierr); 442355d1abb9SHong Zhang } 442455d1abb9SHong Zhang ierr = MatMerge_SeqsToMPINumeric(seqmat,*mpimat);CHKERRQ(ierr); 44254ebed01fSBarry Smith ierr = PetscLogEventEnd(MAT_Seqstompi,seqmat,0,0,0);CHKERRQ(ierr); 442655d1abb9SHong Zhang PetscFunctionReturn(0); 442755d1abb9SHong Zhang } 44284ebed01fSBarry Smith 442925616d81SHong Zhang #undef __FUNCT__ 443025616d81SHong Zhang #define __FUNCT__ "MatGetLocalMat" 4431bc08b0f1SBarry Smith /*@ 443232fba14fSHong Zhang MatGetLocalMat - Creates a SeqAIJ matrix by taking all its local rows 443325616d81SHong Zhang 443432fba14fSHong Zhang Not Collective 443525616d81SHong Zhang 443625616d81SHong Zhang Input Parameters: 443725616d81SHong Zhang + A - the matrix 
443825616d81SHong Zhang . scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX 443925616d81SHong Zhang 444025616d81SHong Zhang Output Parameter: 444125616d81SHong Zhang . A_loc - the local sequential matrix generated 444225616d81SHong Zhang 444325616d81SHong Zhang Level: developer 444425616d81SHong Zhang 444525616d81SHong Zhang @*/ 4446be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatGetLocalMat(Mat A,MatReuse scall,Mat *A_loc) 444725616d81SHong Zhang { 444825616d81SHong Zhang PetscErrorCode ierr; 444901b7ae99SHong Zhang Mat_MPIAIJ *mpimat=(Mat_MPIAIJ*)A->data; 445001b7ae99SHong Zhang Mat_SeqAIJ *mat,*a=(Mat_SeqAIJ*)(mpimat->A)->data,*b=(Mat_SeqAIJ*)(mpimat->B)->data; 445101b7ae99SHong Zhang PetscInt *ai=a->i,*aj=a->j,*bi=b->i,*bj=b->j,*cmap=mpimat->garray; 4452a77337e4SBarry Smith MatScalar *aa=a->a,*ba=b->a,*cam; 4453a77337e4SBarry Smith PetscScalar *ca; 4454d0f46423SBarry Smith PetscInt am=A->rmap->n,i,j,k,cstart=A->cmap->rstart; 44555a7d977cSHong Zhang PetscInt *ci,*cj,col,ncols_d,ncols_o,jo; 445625616d81SHong Zhang 445725616d81SHong Zhang PetscFunctionBegin; 44584ebed01fSBarry Smith ierr = PetscLogEventBegin(MAT_Getlocalmat,A,0,0,0);CHKERRQ(ierr); 445901b7ae99SHong Zhang if (scall == MAT_INITIAL_MATRIX){ 4460dea91ad1SHong Zhang ierr = PetscMalloc((1+am)*sizeof(PetscInt),&ci);CHKERRQ(ierr); 4461dea91ad1SHong Zhang ci[0] = 0; 446201b7ae99SHong Zhang for (i=0; i<am; i++){ 4463dea91ad1SHong Zhang ci[i+1] = ci[i] + (ai[i+1] - ai[i]) + (bi[i+1] - bi[i]); 446401b7ae99SHong Zhang } 4465dea91ad1SHong Zhang ierr = PetscMalloc((1+ci[am])*sizeof(PetscInt),&cj);CHKERRQ(ierr); 4466dea91ad1SHong Zhang ierr = PetscMalloc((1+ci[am])*sizeof(PetscScalar),&ca);CHKERRQ(ierr); 4467dea91ad1SHong Zhang k = 0; 446801b7ae99SHong Zhang for (i=0; i<am; i++) { 44695a7d977cSHong Zhang ncols_o = bi[i+1] - bi[i]; 44705a7d977cSHong Zhang ncols_d = ai[i+1] - ai[i]; 447101b7ae99SHong Zhang /* off-diagonal portion of A */ 44725a7d977cSHong Zhang for (jo=0; jo<ncols_o; jo++) { 
44735a7d977cSHong Zhang col = cmap[*bj]; 44745a7d977cSHong Zhang if (col >= cstart) break; 44755a7d977cSHong Zhang cj[k] = col; bj++; 44765a7d977cSHong Zhang ca[k++] = *ba++; 44775a7d977cSHong Zhang } 44785a7d977cSHong Zhang /* diagonal portion of A */ 44795a7d977cSHong Zhang for (j=0; j<ncols_d; j++) { 44805a7d977cSHong Zhang cj[k] = cstart + *aj++; 44815a7d977cSHong Zhang ca[k++] = *aa++; 44825a7d977cSHong Zhang } 44835a7d977cSHong Zhang /* off-diagonal portion of A */ 44845a7d977cSHong Zhang for (j=jo; j<ncols_o; j++) { 44855a7d977cSHong Zhang cj[k] = cmap[*bj++]; 44865a7d977cSHong Zhang ca[k++] = *ba++; 44875a7d977cSHong Zhang } 448825616d81SHong Zhang } 4489dea91ad1SHong Zhang /* put together the new matrix */ 4490d0f46423SBarry Smith ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,am,A->cmap->N,ci,cj,ca,A_loc);CHKERRQ(ierr); 4491dea91ad1SHong Zhang /* MatCreateSeqAIJWithArrays flags matrix so PETSc doesn't free the user's arrays. */ 4492dea91ad1SHong Zhang /* Since these are PETSc arrays, change flags to free them as necessary. 
*/ 4493dea91ad1SHong Zhang mat = (Mat_SeqAIJ*)(*A_loc)->data; 4494e6b907acSBarry Smith mat->free_a = PETSC_TRUE; 4495e6b907acSBarry Smith mat->free_ij = PETSC_TRUE; 4496dea91ad1SHong Zhang mat->nonew = 0; 44975a7d977cSHong Zhang } else if (scall == MAT_REUSE_MATRIX){ 44985a7d977cSHong Zhang mat=(Mat_SeqAIJ*)(*A_loc)->data; 4499a77337e4SBarry Smith ci = mat->i; cj = mat->j; cam = mat->a; 45005a7d977cSHong Zhang for (i=0; i<am; i++) { 45015a7d977cSHong Zhang /* off-diagonal portion of A */ 45025a7d977cSHong Zhang ncols_o = bi[i+1] - bi[i]; 45035a7d977cSHong Zhang for (jo=0; jo<ncols_o; jo++) { 45045a7d977cSHong Zhang col = cmap[*bj]; 45055a7d977cSHong Zhang if (col >= cstart) break; 4506a77337e4SBarry Smith *cam++ = *ba++; bj++; 45075a7d977cSHong Zhang } 45085a7d977cSHong Zhang /* diagonal portion of A */ 4509ecc9b87dSHong Zhang ncols_d = ai[i+1] - ai[i]; 4510a77337e4SBarry Smith for (j=0; j<ncols_d; j++) *cam++ = *aa++; 45115a7d977cSHong Zhang /* off-diagonal portion of A */ 4512f33d1a9aSHong Zhang for (j=jo; j<ncols_o; j++) { 4513a77337e4SBarry Smith *cam++ = *ba++; bj++; 4514f33d1a9aSHong Zhang } 45155a7d977cSHong Zhang } 45165a7d977cSHong Zhang } else { 45175a7d977cSHong Zhang SETERRQ1(PETSC_ERR_ARG_WRONG,"Invalid MatReuse %d",(int)scall); 451825616d81SHong Zhang } 451901b7ae99SHong Zhang 45204ebed01fSBarry Smith ierr = PetscLogEventEnd(MAT_Getlocalmat,A,0,0,0);CHKERRQ(ierr); 452125616d81SHong Zhang PetscFunctionReturn(0); 452225616d81SHong Zhang } 452325616d81SHong Zhang 452432fba14fSHong Zhang #undef __FUNCT__ 452532fba14fSHong Zhang #define __FUNCT__ "MatGetLocalMatCondensed" 452632fba14fSHong Zhang /*@C 452732fba14fSHong Zhang MatGetLocalMatCondensed - Creates a SeqAIJ matrix by taking all its local rows and NON-ZERO columns 452832fba14fSHong Zhang 452932fba14fSHong Zhang Not Collective 453032fba14fSHong Zhang 453132fba14fSHong Zhang Input Parameters: 453232fba14fSHong Zhang + A - the matrix 453332fba14fSHong Zhang . 
scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX 453432fba14fSHong Zhang - row, col - index sets of rows and columns to extract (or PETSC_NULL) 453532fba14fSHong Zhang 453632fba14fSHong Zhang Output Parameter: 453732fba14fSHong Zhang . A_loc - the local sequential matrix generated 453832fba14fSHong Zhang 453932fba14fSHong Zhang Level: developer 454032fba14fSHong Zhang 454132fba14fSHong Zhang @*/ 4542be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatGetLocalMatCondensed(Mat A,MatReuse scall,IS *row,IS *col,Mat *A_loc) 454332fba14fSHong Zhang { 454432fba14fSHong Zhang Mat_MPIAIJ *a=(Mat_MPIAIJ*)A->data; 454532fba14fSHong Zhang PetscErrorCode ierr; 454632fba14fSHong Zhang PetscInt i,start,end,ncols,nzA,nzB,*cmap,imark,*idx; 454732fba14fSHong Zhang IS isrowa,iscola; 454832fba14fSHong Zhang Mat *aloc; 454932fba14fSHong Zhang 455032fba14fSHong Zhang PetscFunctionBegin; 45514ebed01fSBarry Smith ierr = PetscLogEventBegin(MAT_Getlocalmatcondensed,A,0,0,0);CHKERRQ(ierr); 455232fba14fSHong Zhang if (!row){ 4553d0f46423SBarry Smith start = A->rmap->rstart; end = A->rmap->rend; 455432fba14fSHong Zhang ierr = ISCreateStride(PETSC_COMM_SELF,end-start,start,1,&isrowa);CHKERRQ(ierr); 455532fba14fSHong Zhang } else { 455632fba14fSHong Zhang isrowa = *row; 455732fba14fSHong Zhang } 455832fba14fSHong Zhang if (!col){ 4559d0f46423SBarry Smith start = A->cmap->rstart; 456032fba14fSHong Zhang cmap = a->garray; 4561d0f46423SBarry Smith nzA = a->A->cmap->n; 4562d0f46423SBarry Smith nzB = a->B->cmap->n; 456332fba14fSHong Zhang ierr = PetscMalloc((nzA+nzB)*sizeof(PetscInt), &idx);CHKERRQ(ierr); 456432fba14fSHong Zhang ncols = 0; 456532fba14fSHong Zhang for (i=0; i<nzB; i++) { 456632fba14fSHong Zhang if (cmap[i] < start) idx[ncols++] = cmap[i]; 456732fba14fSHong Zhang else break; 456832fba14fSHong Zhang } 456932fba14fSHong Zhang imark = i; 457032fba14fSHong Zhang for (i=0; i<nzA; i++) idx[ncols++] = start + i; 457132fba14fSHong Zhang for (i=imark; i<nzB; i++) idx[ncols++] = 
cmap[i]; 457232fba14fSHong Zhang ierr = ISCreateGeneral(PETSC_COMM_SELF,ncols,idx,&iscola);CHKERRQ(ierr); 457332fba14fSHong Zhang ierr = PetscFree(idx);CHKERRQ(ierr); 457432fba14fSHong Zhang } else { 457532fba14fSHong Zhang iscola = *col; 457632fba14fSHong Zhang } 457732fba14fSHong Zhang if (scall != MAT_INITIAL_MATRIX){ 457832fba14fSHong Zhang ierr = PetscMalloc(sizeof(Mat),&aloc);CHKERRQ(ierr); 457932fba14fSHong Zhang aloc[0] = *A_loc; 458032fba14fSHong Zhang } 458132fba14fSHong Zhang ierr = MatGetSubMatrices(A,1,&isrowa,&iscola,scall,&aloc);CHKERRQ(ierr); 458232fba14fSHong Zhang *A_loc = aloc[0]; 458332fba14fSHong Zhang ierr = PetscFree(aloc);CHKERRQ(ierr); 458432fba14fSHong Zhang if (!row){ 458532fba14fSHong Zhang ierr = ISDestroy(isrowa);CHKERRQ(ierr); 458632fba14fSHong Zhang } 458732fba14fSHong Zhang if (!col){ 458832fba14fSHong Zhang ierr = ISDestroy(iscola);CHKERRQ(ierr); 458932fba14fSHong Zhang } 45904ebed01fSBarry Smith ierr = PetscLogEventEnd(MAT_Getlocalmatcondensed,A,0,0,0);CHKERRQ(ierr); 459132fba14fSHong Zhang PetscFunctionReturn(0); 459232fba14fSHong Zhang } 459332fba14fSHong Zhang 459425616d81SHong Zhang #undef __FUNCT__ 459525616d81SHong Zhang #define __FUNCT__ "MatGetBrowsOfAcols" 459625616d81SHong Zhang /*@C 459732fba14fSHong Zhang MatGetBrowsOfAcols - Creates a SeqAIJ matrix by taking rows of B that equal to nonzero columns of local A 459825616d81SHong Zhang 459925616d81SHong Zhang Collective on Mat 460025616d81SHong Zhang 460125616d81SHong Zhang Input Parameters: 4602e240928fSHong Zhang + A,B - the matrices in mpiaij format 460325616d81SHong Zhang . scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX 460425616d81SHong Zhang - rowb, colb - index sets of rows and columns of B to extract (or PETSC_NULL) 460525616d81SHong Zhang 460625616d81SHong Zhang Output Parameter: 460725616d81SHong Zhang + rowb, colb - index sets of rows and columns of B to extract 4608d0f46423SBarry Smith . 
brstart - row index of B_seq from which next B->rmap->n rows are taken from B's local rows 460925616d81SHong Zhang - B_seq - the sequential matrix generated 461025616d81SHong Zhang 461125616d81SHong Zhang Level: developer 461225616d81SHong Zhang 461325616d81SHong Zhang @*/ 4614be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatGetBrowsOfAcols(Mat A,Mat B,MatReuse scall,IS *rowb,IS *colb,PetscInt *brstart,Mat *B_seq) 461525616d81SHong Zhang { 4616899cda47SBarry Smith Mat_MPIAIJ *a=(Mat_MPIAIJ*)A->data; 461725616d81SHong Zhang PetscErrorCode ierr; 4618b1d57f15SBarry Smith PetscInt *idx,i,start,ncols,nzA,nzB,*cmap,imark; 461925616d81SHong Zhang IS isrowb,iscolb; 462025616d81SHong Zhang Mat *bseq; 462125616d81SHong Zhang 462225616d81SHong Zhang PetscFunctionBegin; 4623d0f46423SBarry Smith if (A->cmap->rstart != B->rmap->rstart || A->cmap->rend != B->rmap->rend){ 4624d0f46423SBarry Smith SETERRQ4(PETSC_ERR_ARG_SIZ,"Matrix local dimensions are incompatible, (%D, %D) != (%D,%D)",A->cmap->rstart,A->cmap->rend,B->rmap->rstart,B->rmap->rend); 462525616d81SHong Zhang } 46264ebed01fSBarry Smith ierr = PetscLogEventBegin(MAT_GetBrowsOfAcols,A,B,0,0);CHKERRQ(ierr); 462725616d81SHong Zhang 462825616d81SHong Zhang if (scall == MAT_INITIAL_MATRIX){ 4629d0f46423SBarry Smith start = A->cmap->rstart; 463025616d81SHong Zhang cmap = a->garray; 4631d0f46423SBarry Smith nzA = a->A->cmap->n; 4632d0f46423SBarry Smith nzB = a->B->cmap->n; 4633b1d57f15SBarry Smith ierr = PetscMalloc((nzA+nzB)*sizeof(PetscInt), &idx);CHKERRQ(ierr); 463425616d81SHong Zhang ncols = 0; 46350390132cSHong Zhang for (i=0; i<nzB; i++) { /* row < local row index */ 463625616d81SHong Zhang if (cmap[i] < start) idx[ncols++] = cmap[i]; 463725616d81SHong Zhang else break; 463825616d81SHong Zhang } 463925616d81SHong Zhang imark = i; 46400390132cSHong Zhang for (i=0; i<nzA; i++) idx[ncols++] = start + i; /* local rows */ 46410390132cSHong Zhang for (i=imark; i<nzB; i++) idx[ncols++] = cmap[i]; /* row > local row 
index */ 464225616d81SHong Zhang ierr = ISCreateGeneral(PETSC_COMM_SELF,ncols,idx,&isrowb);CHKERRQ(ierr); 464325616d81SHong Zhang ierr = PetscFree(idx);CHKERRQ(ierr); 464425616d81SHong Zhang *brstart = imark; 4645d0f46423SBarry Smith ierr = ISCreateStride(PETSC_COMM_SELF,B->cmap->N,0,1,&iscolb);CHKERRQ(ierr); 464625616d81SHong Zhang } else { 464725616d81SHong Zhang if (!rowb || !colb) SETERRQ(PETSC_ERR_SUP,"IS rowb and colb must be provided for MAT_REUSE_MATRIX"); 464825616d81SHong Zhang isrowb = *rowb; iscolb = *colb; 464925616d81SHong Zhang ierr = PetscMalloc(sizeof(Mat),&bseq);CHKERRQ(ierr); 465025616d81SHong Zhang bseq[0] = *B_seq; 465125616d81SHong Zhang } 465225616d81SHong Zhang ierr = MatGetSubMatrices(B,1,&isrowb,&iscolb,scall,&bseq);CHKERRQ(ierr); 465325616d81SHong Zhang *B_seq = bseq[0]; 465425616d81SHong Zhang ierr = PetscFree(bseq);CHKERRQ(ierr); 465525616d81SHong Zhang if (!rowb){ 465625616d81SHong Zhang ierr = ISDestroy(isrowb);CHKERRQ(ierr); 465725616d81SHong Zhang } else { 465825616d81SHong Zhang *rowb = isrowb; 465925616d81SHong Zhang } 466025616d81SHong Zhang if (!colb){ 466125616d81SHong Zhang ierr = ISDestroy(iscolb);CHKERRQ(ierr); 466225616d81SHong Zhang } else { 466325616d81SHong Zhang *colb = iscolb; 466425616d81SHong Zhang } 46654ebed01fSBarry Smith ierr = PetscLogEventEnd(MAT_GetBrowsOfAcols,A,B,0,0);CHKERRQ(ierr); 466625616d81SHong Zhang PetscFunctionReturn(0); 466725616d81SHong Zhang } 4668429d309bSHong Zhang 4669a61c8c0fSHong Zhang #undef __FUNCT__ 4670a61c8c0fSHong Zhang #define __FUNCT__ "MatGetBrowsOfAoCols" 4671429d309bSHong Zhang /*@C 4672429d309bSHong Zhang MatGetBrowsOfAoCols - Creates a SeqAIJ matrix by taking rows of B that equal to nonzero columns 467301b7ae99SHong Zhang of the OFF-DIAGONAL portion of local A 4674429d309bSHong Zhang 4675429d309bSHong Zhang Collective on Mat 4676429d309bSHong Zhang 4677429d309bSHong Zhang Input Parameters: 4678429d309bSHong Zhang + A,B - the matrices in mpiaij format 467987025532SHong Zhang . 
scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX 468087025532SHong Zhang . startsj - starting point in B's sending and receiving j-arrays, saved for MAT_REUSE (or PETSC_NULL) 468187025532SHong Zhang - bufa_ptr - array for sending matrix values, saved for MAT_REUSE (or PETSC_NULL) 4682429d309bSHong Zhang 4683429d309bSHong Zhang Output Parameter: 468487025532SHong Zhang + B_oth - the sequential matrix generated 4685429d309bSHong Zhang 4686429d309bSHong Zhang Level: developer 4687429d309bSHong Zhang 4688429d309bSHong Zhang @*/ 4689dd6ea824SBarry Smith PetscErrorCode PETSCMAT_DLLEXPORT MatGetBrowsOfAoCols(Mat A,Mat B,MatReuse scall,PetscInt **startsj,MatScalar **bufa_ptr,Mat *B_oth) 4690429d309bSHong Zhang { 4691a6b2eed2SHong Zhang VecScatter_MPI_General *gen_to,*gen_from; 4692429d309bSHong Zhang PetscErrorCode ierr; 4693899cda47SBarry Smith Mat_MPIAIJ *a=(Mat_MPIAIJ*)A->data; 469487025532SHong Zhang Mat_SeqAIJ *b_oth; 4695a6b2eed2SHong Zhang VecScatter ctx=a->Mvctx; 46967adad957SLisandro Dalcin MPI_Comm comm=((PetscObject)ctx)->comm; 46977adad957SLisandro Dalcin PetscMPIInt *rprocs,*sprocs,tag=((PetscObject)ctx)->tag,rank; 4698d0f46423SBarry Smith PetscInt *rowlen,*bufj,*bufJ,ncols,aBn=a->B->cmap->n,row,*b_othi,*b_othj; 4699dd6ea824SBarry Smith PetscScalar *rvalues,*svalues; 4700dd6ea824SBarry Smith MatScalar *b_otha,*bufa,*bufA; 4701e42f35eeSHong Zhang PetscInt i,j,k,l,ll,nrecvs,nsends,nrows,*srow,*rstarts,*rstartsj = 0,*sstarts,*sstartsj,len; 4702910ba992SMatthew Knepley MPI_Request *rwaits = PETSC_NULL,*swaits = PETSC_NULL; 470387025532SHong Zhang MPI_Status *sstatus,rstatus; 4704aa5bb8c0SSatish Balay PetscMPIInt jj; 4705e42f35eeSHong Zhang PetscInt *cols,sbs,rbs; 4706ba8c8a56SBarry Smith PetscScalar *vals; 4707429d309bSHong Zhang 4708429d309bSHong Zhang PetscFunctionBegin; 4709d0f46423SBarry Smith if (A->cmap->rstart != B->rmap->rstart || A->cmap->rend != B->rmap->rend){ 4710d0f46423SBarry Smith SETERRQ4(PETSC_ERR_ARG_SIZ,"Matrix local dimensions are 
incompatible, (%d, %d) != (%d,%d)",A->cmap->rstart,A->cmap->rend,B->rmap->rstart,B->rmap->rend); 4711429d309bSHong Zhang } 47124ebed01fSBarry Smith ierr = PetscLogEventBegin(MAT_GetBrowsOfAocols,A,B,0,0);CHKERRQ(ierr); 4713a6b2eed2SHong Zhang ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 4714a6b2eed2SHong Zhang 4715a6b2eed2SHong Zhang gen_to = (VecScatter_MPI_General*)ctx->todata; 4716a6b2eed2SHong Zhang gen_from = (VecScatter_MPI_General*)ctx->fromdata; 4717e42f35eeSHong Zhang rvalues = gen_from->values; /* holds the length of receiving row */ 4718e42f35eeSHong Zhang svalues = gen_to->values; /* holds the length of sending row */ 4719a6b2eed2SHong Zhang nrecvs = gen_from->n; 4720a6b2eed2SHong Zhang nsends = gen_to->n; 4721d7ee0231SBarry Smith 4722d7ee0231SBarry Smith ierr = PetscMalloc2(nrecvs,MPI_Request,&rwaits,nsends,MPI_Request,&swaits);CHKERRQ(ierr); 4723a6b2eed2SHong Zhang srow = gen_to->indices; /* local row index to be sent */ 4724a6b2eed2SHong Zhang sstarts = gen_to->starts; 4725a6b2eed2SHong Zhang sprocs = gen_to->procs; 4726a6b2eed2SHong Zhang sstatus = gen_to->sstatus; 4727e42f35eeSHong Zhang sbs = gen_to->bs; 4728e42f35eeSHong Zhang rstarts = gen_from->starts; 4729e42f35eeSHong Zhang rprocs = gen_from->procs; 4730e42f35eeSHong Zhang rbs = gen_from->bs; 4731429d309bSHong Zhang 4732dea91ad1SHong Zhang if (!startsj || !bufa_ptr) scall = MAT_INITIAL_MATRIX; 4733429d309bSHong Zhang if (scall == MAT_INITIAL_MATRIX){ 4734a6b2eed2SHong Zhang /* i-array */ 4735a6b2eed2SHong Zhang /*---------*/ 4736a6b2eed2SHong Zhang /* post receives */ 4737a6b2eed2SHong Zhang for (i=0; i<nrecvs; i++){ 4738e42f35eeSHong Zhang rowlen = (PetscInt*)rvalues + rstarts[i]*rbs; 4739e42f35eeSHong Zhang nrows = (rstarts[i+1]-rstarts[i])*rbs; /* num of indices to be received */ 474087025532SHong Zhang ierr = MPI_Irecv(rowlen,nrows,MPIU_INT,rprocs[i],tag,comm,rwaits+i);CHKERRQ(ierr); 4741429d309bSHong Zhang } 4742a6b2eed2SHong Zhang 4743a6b2eed2SHong Zhang /* pack the outgoing message 
*/ 474487025532SHong Zhang ierr = PetscMalloc((nsends+nrecvs+3)*sizeof(PetscInt),&sstartsj);CHKERRQ(ierr); 4745a6b2eed2SHong Zhang rstartsj = sstartsj + nsends +1; 4746a6b2eed2SHong Zhang sstartsj[0] = 0; rstartsj[0] = 0; 4747a6b2eed2SHong Zhang len = 0; /* total length of j or a array to be sent */ 4748a6b2eed2SHong Zhang k = 0; 4749a6b2eed2SHong Zhang for (i=0; i<nsends; i++){ 4750e42f35eeSHong Zhang rowlen = (PetscInt*)svalues + sstarts[i]*sbs; 4751e42f35eeSHong Zhang nrows = sstarts[i+1]-sstarts[i]; /* num of block rows */ 475287025532SHong Zhang for (j=0; j<nrows; j++) { 4753d0f46423SBarry Smith row = srow[k] + B->rmap->range[rank]; /* global row idx */ 4754e42f35eeSHong Zhang for (l=0; l<sbs; l++){ 4755e42f35eeSHong Zhang ierr = MatGetRow_MPIAIJ(B,row+l,&ncols,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr); /* rowlength */ 4756e42f35eeSHong Zhang rowlen[j*sbs+l] = ncols; 4757e42f35eeSHong Zhang len += ncols; 4758e42f35eeSHong Zhang ierr = MatRestoreRow_MPIAIJ(B,row+l,&ncols,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr); 4759e42f35eeSHong Zhang } 4760a6b2eed2SHong Zhang k++; 4761429d309bSHong Zhang } 4762e42f35eeSHong Zhang ierr = MPI_Isend(rowlen,nrows*sbs,MPIU_INT,sprocs[i],tag,comm,swaits+i);CHKERRQ(ierr); 4763dea91ad1SHong Zhang sstartsj[i+1] = len; /* starting point of (i+1)-th outgoing msg in bufj and bufa */ 4764429d309bSHong Zhang } 476587025532SHong Zhang /* recvs and sends of i-array are completed */ 476687025532SHong Zhang i = nrecvs; 476787025532SHong Zhang while (i--) { 4768aa5bb8c0SSatish Balay ierr = MPI_Waitany(nrecvs,rwaits,&jj,&rstatus);CHKERRQ(ierr); 476987025532SHong Zhang } 47700c468ba9SBarry Smith if (nsends) {ierr = MPI_Waitall(nsends,swaits,sstatus);CHKERRQ(ierr);} 4771e42f35eeSHong Zhang 4772a6b2eed2SHong Zhang /* allocate buffers for sending j and a arrays */ 4773a6b2eed2SHong Zhang ierr = PetscMalloc((len+1)*sizeof(PetscInt),&bufj);CHKERRQ(ierr); 4774a6b2eed2SHong Zhang ierr = PetscMalloc((len+1)*sizeof(PetscScalar),&bufa);CHKERRQ(ierr); 
4775a6b2eed2SHong Zhang 477687025532SHong Zhang /* create i-array of B_oth */ 477787025532SHong Zhang ierr = PetscMalloc((aBn+2)*sizeof(PetscInt),&b_othi);CHKERRQ(ierr); 477887025532SHong Zhang b_othi[0] = 0; 4779a6b2eed2SHong Zhang len = 0; /* total length of j or a array to be received */ 4780a6b2eed2SHong Zhang k = 0; 4781a6b2eed2SHong Zhang for (i=0; i<nrecvs; i++){ 4782fd0ff01cSHong Zhang rowlen = (PetscInt*)rvalues + rstarts[i]*rbs; 4783e42f35eeSHong Zhang nrows = rbs*(rstarts[i+1]-rstarts[i]); /* num of rows to be recieved */ 478487025532SHong Zhang for (j=0; j<nrows; j++) { 478587025532SHong Zhang b_othi[k+1] = b_othi[k] + rowlen[j]; 4786a6b2eed2SHong Zhang len += rowlen[j]; k++; 4787a6b2eed2SHong Zhang } 4788dea91ad1SHong Zhang rstartsj[i+1] = len; /* starting point of (i+1)-th incoming msg in bufj and bufa */ 4789a6b2eed2SHong Zhang } 4790a6b2eed2SHong Zhang 479187025532SHong Zhang /* allocate space for j and a arrrays of B_oth */ 479287025532SHong Zhang ierr = PetscMalloc((b_othi[aBn]+1)*sizeof(PetscInt),&b_othj);CHKERRQ(ierr); 4793dd6ea824SBarry Smith ierr = PetscMalloc((b_othi[aBn]+1)*sizeof(MatScalar),&b_otha);CHKERRQ(ierr); 4794a6b2eed2SHong Zhang 479587025532SHong Zhang /* j-array */ 479687025532SHong Zhang /*---------*/ 4797a6b2eed2SHong Zhang /* post receives of j-array */ 4798a6b2eed2SHong Zhang for (i=0; i<nrecvs; i++){ 479987025532SHong Zhang nrows = rstartsj[i+1]-rstartsj[i]; /* length of the msg received */ 480087025532SHong Zhang ierr = MPI_Irecv(b_othj+rstartsj[i],nrows,MPIU_INT,rprocs[i],tag,comm,rwaits+i);CHKERRQ(ierr); 4801a6b2eed2SHong Zhang } 4802e42f35eeSHong Zhang 4803e42f35eeSHong Zhang /* pack the outgoing message j-array */ 4804a6b2eed2SHong Zhang k = 0; 4805a6b2eed2SHong Zhang for (i=0; i<nsends; i++){ 4806e42f35eeSHong Zhang nrows = sstarts[i+1]-sstarts[i]; /* num of block rows */ 4807a6b2eed2SHong Zhang bufJ = bufj+sstartsj[i]; 480887025532SHong Zhang for (j=0; j<nrows; j++) { 4809d0f46423SBarry Smith row = srow[k++] + 
B->rmap->range[rank]; /* global row idx */ 4810e42f35eeSHong Zhang for (ll=0; ll<sbs; ll++){ 4811e42f35eeSHong Zhang ierr = MatGetRow_MPIAIJ(B,row+ll,&ncols,&cols,PETSC_NULL);CHKERRQ(ierr); 4812a6b2eed2SHong Zhang for (l=0; l<ncols; l++){ 4813a6b2eed2SHong Zhang *bufJ++ = cols[l]; 481487025532SHong Zhang } 4815e42f35eeSHong Zhang ierr = MatRestoreRow_MPIAIJ(B,row+ll,&ncols,&cols,PETSC_NULL);CHKERRQ(ierr); 4816e42f35eeSHong Zhang } 481787025532SHong Zhang } 481887025532SHong Zhang ierr = MPI_Isend(bufj+sstartsj[i],sstartsj[i+1]-sstartsj[i],MPIU_INT,sprocs[i],tag,comm,swaits+i);CHKERRQ(ierr); 481987025532SHong Zhang } 482087025532SHong Zhang 482187025532SHong Zhang /* recvs and sends of j-array are completed */ 482287025532SHong Zhang i = nrecvs; 482387025532SHong Zhang while (i--) { 4824aa5bb8c0SSatish Balay ierr = MPI_Waitany(nrecvs,rwaits,&jj,&rstatus);CHKERRQ(ierr); 482587025532SHong Zhang } 48260c468ba9SBarry Smith if (nsends) {ierr = MPI_Waitall(nsends,swaits,sstatus);CHKERRQ(ierr);} 482787025532SHong Zhang } else if (scall == MAT_REUSE_MATRIX){ 482887025532SHong Zhang sstartsj = *startsj; 482987025532SHong Zhang rstartsj = sstartsj + nsends +1; 483087025532SHong Zhang bufa = *bufa_ptr; 483187025532SHong Zhang b_oth = (Mat_SeqAIJ*)(*B_oth)->data; 483287025532SHong Zhang b_otha = b_oth->a; 483387025532SHong Zhang } else { 483487025532SHong Zhang SETERRQ(PETSC_ERR_ARG_WRONGSTATE, "Matrix P does not posses an object container"); 483587025532SHong Zhang } 483687025532SHong Zhang 483787025532SHong Zhang /* a-array */ 483887025532SHong Zhang /*---------*/ 483987025532SHong Zhang /* post receives of a-array */ 484087025532SHong Zhang for (i=0; i<nrecvs; i++){ 484187025532SHong Zhang nrows = rstartsj[i+1]-rstartsj[i]; /* length of the msg received */ 484287025532SHong Zhang ierr = MPI_Irecv(b_otha+rstartsj[i],nrows,MPIU_SCALAR,rprocs[i],tag,comm,rwaits+i);CHKERRQ(ierr); 484387025532SHong Zhang } 4844e42f35eeSHong Zhang 4845e42f35eeSHong Zhang /* pack the outgoing 
message a-array */ 484687025532SHong Zhang k = 0; 484787025532SHong Zhang for (i=0; i<nsends; i++){ 4848e42f35eeSHong Zhang nrows = sstarts[i+1]-sstarts[i]; /* num of block rows */ 484987025532SHong Zhang bufA = bufa+sstartsj[i]; 485087025532SHong Zhang for (j=0; j<nrows; j++) { 4851d0f46423SBarry Smith row = srow[k++] + B->rmap->range[rank]; /* global row idx */ 4852e42f35eeSHong Zhang for (ll=0; ll<sbs; ll++){ 4853e42f35eeSHong Zhang ierr = MatGetRow_MPIAIJ(B,row+ll,&ncols,PETSC_NULL,&vals);CHKERRQ(ierr); 485487025532SHong Zhang for (l=0; l<ncols; l++){ 4855a6b2eed2SHong Zhang *bufA++ = vals[l]; 4856a6b2eed2SHong Zhang } 4857e42f35eeSHong Zhang ierr = MatRestoreRow_MPIAIJ(B,row+ll,&ncols,PETSC_NULL,&vals);CHKERRQ(ierr); 4858e42f35eeSHong Zhang } 4859a6b2eed2SHong Zhang } 486087025532SHong Zhang ierr = MPI_Isend(bufa+sstartsj[i],sstartsj[i+1]-sstartsj[i],MPIU_SCALAR,sprocs[i],tag,comm,swaits+i);CHKERRQ(ierr); 4861a6b2eed2SHong Zhang } 486287025532SHong Zhang /* recvs and sends of a-array are completed */ 486387025532SHong Zhang i = nrecvs; 486487025532SHong Zhang while (i--) { 4865aa5bb8c0SSatish Balay ierr = MPI_Waitany(nrecvs,rwaits,&jj,&rstatus);CHKERRQ(ierr); 486687025532SHong Zhang } 48670c468ba9SBarry Smith if (nsends) {ierr = MPI_Waitall(nsends,swaits,sstatus);CHKERRQ(ierr);} 4868d7ee0231SBarry Smith ierr = PetscFree2(rwaits,swaits);CHKERRQ(ierr); 4869a6b2eed2SHong Zhang 487087025532SHong Zhang if (scall == MAT_INITIAL_MATRIX){ 4871a6b2eed2SHong Zhang /* put together the new matrix */ 4872d0f46423SBarry Smith ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,aBn,B->cmap->N,b_othi,b_othj,b_otha,B_oth);CHKERRQ(ierr); 4873a6b2eed2SHong Zhang 4874a6b2eed2SHong Zhang /* MatCreateSeqAIJWithArrays flags matrix so PETSc doesn't free the user's arrays. */ 4875a6b2eed2SHong Zhang /* Since these are PETSc arrays, change flags to free them as necessary. 
*/ 487687025532SHong Zhang b_oth = (Mat_SeqAIJ *)(*B_oth)->data; 4877e6b907acSBarry Smith b_oth->free_a = PETSC_TRUE; 4878e6b907acSBarry Smith b_oth->free_ij = PETSC_TRUE; 487987025532SHong Zhang b_oth->nonew = 0; 4880a6b2eed2SHong Zhang 4881a6b2eed2SHong Zhang ierr = PetscFree(bufj);CHKERRQ(ierr); 4882dea91ad1SHong Zhang if (!startsj || !bufa_ptr){ 4883dea91ad1SHong Zhang ierr = PetscFree(sstartsj);CHKERRQ(ierr); 4884dea91ad1SHong Zhang ierr = PetscFree(bufa_ptr);CHKERRQ(ierr); 4885dea91ad1SHong Zhang } else { 488687025532SHong Zhang *startsj = sstartsj; 488787025532SHong Zhang *bufa_ptr = bufa; 488887025532SHong Zhang } 4889dea91ad1SHong Zhang } 48904ebed01fSBarry Smith ierr = PetscLogEventEnd(MAT_GetBrowsOfAocols,A,B,0,0);CHKERRQ(ierr); 4891429d309bSHong Zhang PetscFunctionReturn(0); 4892429d309bSHong Zhang } 4893ccd8e176SBarry Smith 489443eb5e2fSMatthew Knepley #undef __FUNCT__ 489543eb5e2fSMatthew Knepley #define __FUNCT__ "MatGetCommunicationStructs" 489643eb5e2fSMatthew Knepley /*@C 489743eb5e2fSMatthew Knepley MatGetCommunicationStructs - Provides access to the communication structures used in matrix-vector multiplication. 489843eb5e2fSMatthew Knepley 489943eb5e2fSMatthew Knepley Not Collective 490043eb5e2fSMatthew Knepley 490143eb5e2fSMatthew Knepley Input Parameters: 490243eb5e2fSMatthew Knepley . A - The matrix in mpiaij format 490343eb5e2fSMatthew Knepley 490443eb5e2fSMatthew Knepley Output Parameter: 490543eb5e2fSMatthew Knepley + lvec - The local vector holding off-process values from the argument to a matrix-vector product 490643eb5e2fSMatthew Knepley . 
colmap - A map from global column index to local index into lvec 490743eb5e2fSMatthew Knepley - multScatter - A scatter from the argument of a matrix-vector product to lvec 490843eb5e2fSMatthew Knepley 490943eb5e2fSMatthew Knepley Level: developer 491043eb5e2fSMatthew Knepley 491143eb5e2fSMatthew Knepley @*/ 491243eb5e2fSMatthew Knepley #if defined (PETSC_USE_CTABLE) 491343eb5e2fSMatthew Knepley PetscErrorCode PETSCMAT_DLLEXPORT MatGetCommunicationStructs(Mat A, Vec *lvec, PetscTable *colmap, VecScatter *multScatter) 491443eb5e2fSMatthew Knepley #else 491543eb5e2fSMatthew Knepley PetscErrorCode PETSCMAT_DLLEXPORT MatGetCommunicationStructs(Mat A, Vec *lvec, PetscInt *colmap[], VecScatter *multScatter) 491643eb5e2fSMatthew Knepley #endif 491743eb5e2fSMatthew Knepley { 491843eb5e2fSMatthew Knepley Mat_MPIAIJ *a; 491943eb5e2fSMatthew Knepley 492043eb5e2fSMatthew Knepley PetscFunctionBegin; 492143eb5e2fSMatthew Knepley PetscValidHeaderSpecific(A, MAT_COOKIE, 1); 492243eb5e2fSMatthew Knepley PetscValidPointer(lvec, 2) 492343eb5e2fSMatthew Knepley PetscValidPointer(colmap, 3) 492443eb5e2fSMatthew Knepley PetscValidPointer(multScatter, 4) 492543eb5e2fSMatthew Knepley a = (Mat_MPIAIJ *) A->data; 492643eb5e2fSMatthew Knepley if (lvec) *lvec = a->lvec; 492743eb5e2fSMatthew Knepley if (colmap) *colmap = a->colmap; 492843eb5e2fSMatthew Knepley if (multScatter) *multScatter = a->Mvctx; 492943eb5e2fSMatthew Knepley PetscFunctionReturn(0); 493043eb5e2fSMatthew Knepley } 493143eb5e2fSMatthew Knepley 493217667f90SBarry Smith EXTERN_C_BEGIN 49338cf70c4bSSatish Balay extern PetscErrorCode PETSCMAT_DLLEXPORT MatConvert_MPIAIJ_MPICRL(Mat,const MatType,MatReuse,Mat*); 49348cf70c4bSSatish Balay extern PetscErrorCode PETSCMAT_DLLEXPORT MatConvert_MPIAIJ_MPICSRPERM(Mat,const MatType,MatReuse,Mat*); 493517667f90SBarry Smith EXTERN_C_END 493617667f90SBarry Smith 4937fc4dec0aSBarry Smith #undef __FUNCT__ 4938fc4dec0aSBarry Smith #define __FUNCT__ "MatMatMultNumeric_MPIDense_MPIAIJ" 
4939fc4dec0aSBarry Smith /* 4940fc4dec0aSBarry Smith Computes (B'*A')' since computing B*A directly is untenable 4941fc4dec0aSBarry Smith 4942fc4dec0aSBarry Smith n p p 4943fc4dec0aSBarry Smith ( ) ( ) ( ) 4944fc4dec0aSBarry Smith m ( A ) * n ( B ) = m ( C ) 4945fc4dec0aSBarry Smith ( ) ( ) ( ) 4946fc4dec0aSBarry Smith 4947fc4dec0aSBarry Smith */ 4948fc4dec0aSBarry Smith PetscErrorCode MatMatMultNumeric_MPIDense_MPIAIJ(Mat A,Mat B,Mat C) 4949fc4dec0aSBarry Smith { 4950fc4dec0aSBarry Smith PetscErrorCode ierr; 4951fc4dec0aSBarry Smith Mat At,Bt,Ct; 4952fc4dec0aSBarry Smith 4953fc4dec0aSBarry Smith PetscFunctionBegin; 4954fc4dec0aSBarry Smith ierr = MatTranspose(A,MAT_INITIAL_MATRIX,&At);CHKERRQ(ierr); 4955fc4dec0aSBarry Smith ierr = MatTranspose(B,MAT_INITIAL_MATRIX,&Bt);CHKERRQ(ierr); 4956fc4dec0aSBarry Smith ierr = MatMatMult(Bt,At,MAT_INITIAL_MATRIX,1.0,&Ct);CHKERRQ(ierr); 4957fc4dec0aSBarry Smith ierr = MatDestroy(At);CHKERRQ(ierr); 4958fc4dec0aSBarry Smith ierr = MatDestroy(Bt);CHKERRQ(ierr); 4959fc4dec0aSBarry Smith ierr = MatTranspose(Ct,MAT_REUSE_MATRIX,&C);CHKERRQ(ierr); 4960e5e4356aSBarry Smith ierr = MatDestroy(Ct);CHKERRQ(ierr); 4961fc4dec0aSBarry Smith PetscFunctionReturn(0); 4962fc4dec0aSBarry Smith } 4963fc4dec0aSBarry Smith 4964fc4dec0aSBarry Smith #undef __FUNCT__ 4965fc4dec0aSBarry Smith #define __FUNCT__ "MatMatMultSymbolic_MPIDense_MPIAIJ" 4966fc4dec0aSBarry Smith PetscErrorCode MatMatMultSymbolic_MPIDense_MPIAIJ(Mat A,Mat B,PetscReal fill,Mat *C) 4967fc4dec0aSBarry Smith { 4968fc4dec0aSBarry Smith PetscErrorCode ierr; 4969d0f46423SBarry Smith PetscInt m=A->rmap->n,n=B->cmap->n; 4970fc4dec0aSBarry Smith Mat Cmat; 4971fc4dec0aSBarry Smith 4972fc4dec0aSBarry Smith PetscFunctionBegin; 4973d0f46423SBarry Smith if (A->cmap->n != B->rmap->n) SETERRQ2(PETSC_ERR_ARG_SIZ,"A->cmap->n %d != B->rmap->n %d\n",A->cmap->n,B->rmap->n); 497439804f7cSBarry Smith ierr = MatCreate(((PetscObject)A)->comm,&Cmat);CHKERRQ(ierr); 4975fc4dec0aSBarry Smith ierr = 
MatSetSizes(Cmat,m,n,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr); 4976fc4dec0aSBarry Smith ierr = MatSetType(Cmat,MATMPIDENSE);CHKERRQ(ierr); 4977fc4dec0aSBarry Smith ierr = MatMPIDenseSetPreallocation(Cmat,PETSC_NULL);CHKERRQ(ierr); 497838556019SBarry Smith ierr = MatAssemblyBegin(Cmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 497938556019SBarry Smith ierr = MatAssemblyEnd(Cmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 4980fc4dec0aSBarry Smith *C = Cmat; 4981fc4dec0aSBarry Smith PetscFunctionReturn(0); 4982fc4dec0aSBarry Smith } 4983fc4dec0aSBarry Smith 4984fc4dec0aSBarry Smith /* ----------------------------------------------------------------*/ 4985fc4dec0aSBarry Smith #undef __FUNCT__ 4986fc4dec0aSBarry Smith #define __FUNCT__ "MatMatMult_MPIDense_MPIAIJ" 4987fc4dec0aSBarry Smith PetscErrorCode MatMatMult_MPIDense_MPIAIJ(Mat A,Mat B,MatReuse scall,PetscReal fill,Mat *C) 4988fc4dec0aSBarry Smith { 4989fc4dec0aSBarry Smith PetscErrorCode ierr; 4990fc4dec0aSBarry Smith 4991fc4dec0aSBarry Smith PetscFunctionBegin; 4992fc4dec0aSBarry Smith if (scall == MAT_INITIAL_MATRIX){ 4993fc4dec0aSBarry Smith ierr = MatMatMultSymbolic_MPIDense_MPIAIJ(A,B,fill,C);CHKERRQ(ierr); 4994fc4dec0aSBarry Smith } 4995fc4dec0aSBarry Smith ierr = MatMatMultNumeric_MPIDense_MPIAIJ(A,B,*C);CHKERRQ(ierr); 4996fc4dec0aSBarry Smith PetscFunctionReturn(0); 4997fc4dec0aSBarry Smith } 4998fc4dec0aSBarry Smith 49995c9eb25fSBarry Smith EXTERN_C_BEGIN 5000611f576cSBarry Smith #if defined(PETSC_HAVE_MUMPS) 50015c9eb25fSBarry Smith extern PetscErrorCode MatGetFactor_mpiaij_mumps(Mat,MatFactorType,Mat*); 5002611f576cSBarry Smith #endif 50033bf14a46SMatthew Knepley #if defined(PETSC_HAVE_PASTIX) 50043bf14a46SMatthew Knepley extern PetscErrorCode MatGetFactor_mpiaij_pastix(Mat,MatFactorType,Mat*); 50053bf14a46SMatthew Knepley #endif 5006611f576cSBarry Smith #if defined(PETSC_HAVE_SUPERLU_DIST) 50075c9eb25fSBarry Smith extern PetscErrorCode MatGetFactor_mpiaij_superlu_dist(Mat,MatFactorType,Mat*); 5008611f576cSBarry 
Smith #endif 5009611f576cSBarry Smith #if defined(PETSC_HAVE_SPOOLES) 50105c9eb25fSBarry Smith extern PetscErrorCode MatGetFactor_mpiaij_spooles(Mat,MatFactorType,Mat*); 5011611f576cSBarry Smith #endif 50125c9eb25fSBarry Smith EXTERN_C_END 50135c9eb25fSBarry Smith 5014ccd8e176SBarry Smith /*MC 5015ccd8e176SBarry Smith MATMPIAIJ - MATMPIAIJ = "mpiaij" - A matrix type to be used for parallel sparse matrices. 5016ccd8e176SBarry Smith 5017ccd8e176SBarry Smith Options Database Keys: 5018ccd8e176SBarry Smith . -mat_type mpiaij - sets the matrix type to "mpiaij" during a call to MatSetFromOptions() 5019ccd8e176SBarry Smith 5020ccd8e176SBarry Smith Level: beginner 5021ccd8e176SBarry Smith 5022175b88e8SBarry Smith .seealso: MatCreateMPIAIJ() 5023ccd8e176SBarry Smith M*/ 5024ccd8e176SBarry Smith 5025ccd8e176SBarry Smith EXTERN_C_BEGIN 5026ccd8e176SBarry Smith #undef __FUNCT__ 5027ccd8e176SBarry Smith #define __FUNCT__ "MatCreate_MPIAIJ" 5028be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatCreate_MPIAIJ(Mat B) 5029ccd8e176SBarry Smith { 5030ccd8e176SBarry Smith Mat_MPIAIJ *b; 5031ccd8e176SBarry Smith PetscErrorCode ierr; 5032ccd8e176SBarry Smith PetscMPIInt size; 5033ccd8e176SBarry Smith 5034ccd8e176SBarry Smith PetscFunctionBegin; 50357adad957SLisandro Dalcin ierr = MPI_Comm_size(((PetscObject)B)->comm,&size);CHKERRQ(ierr); 5036ccd8e176SBarry Smith 503738f2d2fdSLisandro Dalcin ierr = PetscNewLog(B,Mat_MPIAIJ,&b);CHKERRQ(ierr); 5038ccd8e176SBarry Smith B->data = (void*)b; 5039ccd8e176SBarry Smith ierr = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr); 5040d0f46423SBarry Smith B->rmap->bs = 1; 5041ccd8e176SBarry Smith B->assembled = PETSC_FALSE; 5042ccd8e176SBarry Smith B->mapping = 0; 5043ccd8e176SBarry Smith 5044ccd8e176SBarry Smith B->insertmode = NOT_SET_VALUES; 5045ccd8e176SBarry Smith b->size = size; 50467adad957SLisandro Dalcin ierr = MPI_Comm_rank(((PetscObject)B)->comm,&b->rank);CHKERRQ(ierr); 5047ccd8e176SBarry Smith 
5048ccd8e176SBarry Smith /* build cache for off array entries formed */ 50497adad957SLisandro Dalcin ierr = MatStashCreate_Private(((PetscObject)B)->comm,1,&B->stash);CHKERRQ(ierr); 5050ccd8e176SBarry Smith b->donotstash = PETSC_FALSE; 5051ccd8e176SBarry Smith b->colmap = 0; 5052ccd8e176SBarry Smith b->garray = 0; 5053ccd8e176SBarry Smith b->roworiented = PETSC_TRUE; 5054ccd8e176SBarry Smith 5055ccd8e176SBarry Smith /* stuff used for matrix vector multiply */ 5056ccd8e176SBarry Smith b->lvec = PETSC_NULL; 5057ccd8e176SBarry Smith b->Mvctx = PETSC_NULL; 5058ccd8e176SBarry Smith 5059ccd8e176SBarry Smith /* stuff for MatGetRow() */ 5060ccd8e176SBarry Smith b->rowindices = 0; 5061ccd8e176SBarry Smith b->rowvalues = 0; 5062ccd8e176SBarry Smith b->getrowactive = PETSC_FALSE; 5063ccd8e176SBarry Smith 5064611f576cSBarry Smith #if defined(PETSC_HAVE_SPOOLES) 5065ec1065edSBarry Smith ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatGetFactor_spooles_C", 50665c9eb25fSBarry Smith "MatGetFactor_mpiaij_spooles", 50675c9eb25fSBarry Smith MatGetFactor_mpiaij_spooles);CHKERRQ(ierr); 5068611f576cSBarry Smith #endif 5069611f576cSBarry Smith #if defined(PETSC_HAVE_MUMPS) 5070ec1065edSBarry Smith ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatGetFactor_mumps_C", 50715c9eb25fSBarry Smith "MatGetFactor_mpiaij_mumps", 50725c9eb25fSBarry Smith MatGetFactor_mpiaij_mumps);CHKERRQ(ierr); 5073611f576cSBarry Smith #endif 50743bf14a46SMatthew Knepley #if defined(PETSC_HAVE_PASTIX) 5075ec1065edSBarry Smith ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatGetFactor_pastix_C", 50763bf14a46SMatthew Knepley "MatGetFactor_mpiaij_pastix", 50773bf14a46SMatthew Knepley MatGetFactor_mpiaij_pastix);CHKERRQ(ierr); 50783bf14a46SMatthew Knepley #endif 5079611f576cSBarry Smith #if defined(PETSC_HAVE_SUPERLU_DIST) 5080ec1065edSBarry Smith ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatGetFactor_superlu_dist_C", 50815c9eb25fSBarry Smith 
"MatGetFactor_mpiaij_superlu_dist", 50825c9eb25fSBarry Smith MatGetFactor_mpiaij_superlu_dist);CHKERRQ(ierr); 5083611f576cSBarry Smith #endif 5084ccd8e176SBarry Smith ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatStoreValues_C", 5085ccd8e176SBarry Smith "MatStoreValues_MPIAIJ", 5086ccd8e176SBarry Smith MatStoreValues_MPIAIJ);CHKERRQ(ierr); 5087ccd8e176SBarry Smith ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatRetrieveValues_C", 5088ccd8e176SBarry Smith "MatRetrieveValues_MPIAIJ", 5089ccd8e176SBarry Smith MatRetrieveValues_MPIAIJ);CHKERRQ(ierr); 5090ccd8e176SBarry Smith ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatGetDiagonalBlock_C", 5091ccd8e176SBarry Smith "MatGetDiagonalBlock_MPIAIJ", 5092ccd8e176SBarry Smith MatGetDiagonalBlock_MPIAIJ);CHKERRQ(ierr); 5093ccd8e176SBarry Smith ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatIsTranspose_C", 5094ccd8e176SBarry Smith "MatIsTranspose_MPIAIJ", 5095ccd8e176SBarry Smith MatIsTranspose_MPIAIJ);CHKERRQ(ierr); 5096ccd8e176SBarry Smith ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAIJSetPreallocation_C", 5097ccd8e176SBarry Smith "MatMPIAIJSetPreallocation_MPIAIJ", 5098ccd8e176SBarry Smith MatMPIAIJSetPreallocation_MPIAIJ);CHKERRQ(ierr); 5099ccd8e176SBarry Smith ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAIJSetPreallocationCSR_C", 5100ccd8e176SBarry Smith "MatMPIAIJSetPreallocationCSR_MPIAIJ", 5101ccd8e176SBarry Smith MatMPIAIJSetPreallocationCSR_MPIAIJ);CHKERRQ(ierr); 5102ccd8e176SBarry Smith ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatDiagonalScaleLocal_C", 5103ccd8e176SBarry Smith "MatDiagonalScaleLocal_MPIAIJ", 5104ccd8e176SBarry Smith MatDiagonalScaleLocal_MPIAIJ);CHKERRQ(ierr); 510517667f90SBarry Smith ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatConvert_mpiaij_mpicsrperm_C", 510617667f90SBarry Smith "MatConvert_MPIAIJ_MPICSRPERM", 510717667f90SBarry Smith 
MatConvert_MPIAIJ_MPICSRPERM);CHKERRQ(ierr); 510817667f90SBarry Smith ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatConvert_mpiaij_mpicrl_C", 510917667f90SBarry Smith "MatConvert_MPIAIJ_MPICRL", 511017667f90SBarry Smith MatConvert_MPIAIJ_MPICRL);CHKERRQ(ierr); 5111471cc821SHong Zhang ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatConvert_mpiaij_mpisbaij_C", 5112471cc821SHong Zhang "MatConvert_MPIAIJ_MPISBAIJ", 5113471cc821SHong Zhang MatConvert_MPIAIJ_MPISBAIJ);CHKERRQ(ierr); 5114fc4dec0aSBarry Smith ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMatMult_mpidense_mpiaij_C", 5115fc4dec0aSBarry Smith "MatMatMult_MPIDense_MPIAIJ", 5116fc4dec0aSBarry Smith MatMatMult_MPIDense_MPIAIJ);CHKERRQ(ierr); 5117fc4dec0aSBarry Smith ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMatMultSymbolic_mpidense_mpiaij_C", 5118fc4dec0aSBarry Smith "MatMatMultSymbolic_MPIDense_MPIAIJ", 5119fc4dec0aSBarry Smith MatMatMultSymbolic_MPIDense_MPIAIJ);CHKERRQ(ierr); 5120fc4dec0aSBarry Smith ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMatMultNumeric_mpidense_mpiaij_C", 5121fc4dec0aSBarry Smith "MatMatMultNumeric_MPIDense_MPIAIJ", 5122fc4dec0aSBarry Smith MatMatMultNumeric_MPIDense_MPIAIJ);CHKERRQ(ierr); 512317667f90SBarry Smith ierr = PetscObjectChangeTypeName((PetscObject)B,MATMPIAIJ);CHKERRQ(ierr); 5124ccd8e176SBarry Smith PetscFunctionReturn(0); 5125ccd8e176SBarry Smith } 5126ccd8e176SBarry Smith EXTERN_C_END 512781824310SBarry Smith 512803bfb495SBarry Smith #undef __FUNCT__ 512903bfb495SBarry Smith #define __FUNCT__ "MatCreateMPIAIJWithSplitArrays" 513058d36128SBarry Smith /*@ 513103bfb495SBarry Smith MatCreateMPIAIJWithSplitArrays - creates a MPI AIJ matrix using arrays that contain the "diagonal" 513203bfb495SBarry Smith and "off-diagonal" part of the matrix in CSR format. 
513303bfb495SBarry Smith 513403bfb495SBarry Smith Collective on MPI_Comm 513503bfb495SBarry Smith 513603bfb495SBarry Smith Input Parameters: 513703bfb495SBarry Smith + comm - MPI communicator 513803bfb495SBarry Smith . m - number of local rows (Cannot be PETSC_DECIDE) 513903bfb495SBarry Smith . n - This value should be the same as the local size used in creating the 514003bfb495SBarry Smith x vector for the matrix-vector product y = Ax. (or PETSC_DECIDE to have 514103bfb495SBarry Smith calculated if N is given) For square matrices n is almost always m. 514203bfb495SBarry Smith . M - number of global rows (or PETSC_DETERMINE to have calculated if m is given) 514303bfb495SBarry Smith . N - number of global columns (or PETSC_DETERMINE to have calculated if n is given) 514403bfb495SBarry Smith . i - row indices for "diagonal" portion of matrix 514503bfb495SBarry Smith . j - column indices 514603bfb495SBarry Smith . a - matrix values 514703bfb495SBarry Smith . oi - row indices for "off-diagonal" portion of matrix 514803bfb495SBarry Smith . oj - column indices 514903bfb495SBarry Smith - oa - matrix values 515003bfb495SBarry Smith 515103bfb495SBarry Smith Output Parameter: 515203bfb495SBarry Smith . mat - the matrix 515303bfb495SBarry Smith 515403bfb495SBarry Smith Level: advanced 515503bfb495SBarry Smith 515603bfb495SBarry Smith Notes: 515703bfb495SBarry Smith The i, j, and a arrays ARE NOT copied by this routine into the internal format used by PETSc. 515803bfb495SBarry Smith 515903bfb495SBarry Smith The i and j indices are 0 based 516003bfb495SBarry Smith 516103bfb495SBarry Smith See MatCreateMPIAIJ() for the definition of "diagonal" and "off-diagonal" portion of the matrix 516203bfb495SBarry Smith 51637b55108eSBarry Smith This sets local rows and cannot be used to set off-processor values. 51647b55108eSBarry Smith 51657b55108eSBarry Smith You cannot later use MatSetValues() to change values in this matrix. 
516603bfb495SBarry Smith 516703bfb495SBarry Smith .keywords: matrix, aij, compressed row, sparse, parallel 516803bfb495SBarry Smith 516903bfb495SBarry Smith .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIAIJSetPreallocation(), MatMPIAIJSetPreallocationCSR(), 51708d7a6e47SBarry Smith MPIAIJ, MatCreateMPIAIJ(), MatCreateMPIAIJWithArrays() 517103bfb495SBarry Smith @*/ 51728d7a6e47SBarry Smith PetscErrorCode PETSCMAT_DLLEXPORT MatCreateMPIAIJWithSplitArrays(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt i[],PetscInt j[],PetscScalar a[], 517303bfb495SBarry Smith PetscInt oi[], PetscInt oj[],PetscScalar oa[],Mat *mat) 517403bfb495SBarry Smith { 517503bfb495SBarry Smith PetscErrorCode ierr; 517603bfb495SBarry Smith Mat_MPIAIJ *maij; 517703bfb495SBarry Smith 517803bfb495SBarry Smith PetscFunctionBegin; 517903bfb495SBarry Smith if (m < 0) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"local number of rows (m) cannot be PETSC_DECIDE, or negative"); 518003bfb495SBarry Smith if (i[0]) { 518103bfb495SBarry Smith SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"i (row indices) must start with 0"); 518203bfb495SBarry Smith } 518303bfb495SBarry Smith if (oi[0]) { 518403bfb495SBarry Smith SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"oi (row indices) must start with 0"); 518503bfb495SBarry Smith } 518603bfb495SBarry Smith ierr = MatCreate(comm,mat);CHKERRQ(ierr); 518703bfb495SBarry Smith ierr = MatSetSizes(*mat,m,n,M,N);CHKERRQ(ierr); 518803bfb495SBarry Smith ierr = MatSetType(*mat,MATMPIAIJ);CHKERRQ(ierr); 518903bfb495SBarry Smith maij = (Mat_MPIAIJ*) (*mat)->data; 51908d7a6e47SBarry Smith maij->donotstash = PETSC_TRUE; 51918d7a6e47SBarry Smith (*mat)->preallocated = PETSC_TRUE; 519203bfb495SBarry Smith 519326283091SBarry Smith ierr = PetscLayoutSetBlockSize((*mat)->rmap,1);CHKERRQ(ierr); 519426283091SBarry Smith ierr = PetscLayoutSetBlockSize((*mat)->cmap,1);CHKERRQ(ierr); 519526283091SBarry Smith ierr = PetscLayoutSetUp((*mat)->rmap);CHKERRQ(ierr); 519626283091SBarry Smith ierr = 
PetscLayoutSetUp((*mat)->cmap);CHKERRQ(ierr); 519703bfb495SBarry Smith 519803bfb495SBarry Smith ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,m,n,i,j,a,&maij->A);CHKERRQ(ierr); 5199d0f46423SBarry Smith ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,m,(*mat)->cmap->N,oi,oj,oa,&maij->B);CHKERRQ(ierr); 520003bfb495SBarry Smith 52018d7a6e47SBarry Smith ierr = MatAssemblyBegin(maij->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 52028d7a6e47SBarry Smith ierr = MatAssemblyEnd(maij->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 52038d7a6e47SBarry Smith ierr = MatAssemblyBegin(maij->B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 52048d7a6e47SBarry Smith ierr = MatAssemblyEnd(maij->B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 52058d7a6e47SBarry Smith 520603bfb495SBarry Smith ierr = MatAssemblyBegin(*mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 520703bfb495SBarry Smith ierr = MatAssemblyEnd(*mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 520803bfb495SBarry Smith PetscFunctionReturn(0); 520903bfb495SBarry Smith } 521003bfb495SBarry Smith 521181824310SBarry Smith /* 521281824310SBarry Smith Special version for direct calls from Fortran 521381824310SBarry Smith */ 521481824310SBarry Smith #if defined(PETSC_HAVE_FORTRAN_CAPS) 521581824310SBarry Smith #define matsetvaluesmpiaij_ MATSETVALUESMPIAIJ 521681824310SBarry Smith #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) 521781824310SBarry Smith #define matsetvaluesmpiaij_ matsetvaluesmpiaij 521881824310SBarry Smith #endif 521981824310SBarry Smith 522081824310SBarry Smith /* Change these macros so can be used in void function */ 522181824310SBarry Smith #undef CHKERRQ 52227adad957SLisandro Dalcin #define CHKERRQ(ierr) CHKERRABORT(((PetscObject)mat)->comm,ierr) 522381824310SBarry Smith #undef SETERRQ2 52247adad957SLisandro Dalcin #define SETERRQ2(ierr,b,c,d) CHKERRABORT(((PetscObject)mat)->comm,ierr) 522581824310SBarry Smith #undef SETERRQ 52267adad957SLisandro Dalcin #define SETERRQ(ierr,b) CHKERRABORT(((PetscObject)mat)->comm,ierr) 522781824310SBarry Smith 522881824310SBarry 
Smith EXTERN_C_BEGIN 522981824310SBarry Smith #undef __FUNCT__ 523081824310SBarry Smith #define __FUNCT__ "matsetvaluesmpiaij_" 52311f6cc5b2SSatish Balay void PETSC_STDCALL matsetvaluesmpiaij_(Mat *mmat,PetscInt *mm,const PetscInt im[],PetscInt *mn,const PetscInt in[],const PetscScalar v[],InsertMode *maddv,PetscErrorCode *_ierr) 523281824310SBarry Smith { 523381824310SBarry Smith Mat mat = *mmat; 523481824310SBarry Smith PetscInt m = *mm, n = *mn; 523581824310SBarry Smith InsertMode addv = *maddv; 523681824310SBarry Smith Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 523781824310SBarry Smith PetscScalar value; 523881824310SBarry Smith PetscErrorCode ierr; 5239899cda47SBarry Smith 5240d9e2c085SLisandro Dalcin ierr = MatPreallocated(mat);CHKERRQ(ierr); 524181824310SBarry Smith if (mat->insertmode == NOT_SET_VALUES) { 524281824310SBarry Smith mat->insertmode = addv; 524381824310SBarry Smith } 524481824310SBarry Smith #if defined(PETSC_USE_DEBUG) 524581824310SBarry Smith else if (mat->insertmode != addv) { 524681824310SBarry Smith SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Cannot mix add values and insert values"); 524781824310SBarry Smith } 524881824310SBarry Smith #endif 524981824310SBarry Smith { 5250d0f46423SBarry Smith PetscInt i,j,rstart = mat->rmap->rstart,rend = mat->rmap->rend; 5251d0f46423SBarry Smith PetscInt cstart = mat->cmap->rstart,cend = mat->cmap->rend,row,col; 525281824310SBarry Smith PetscTruth roworiented = aij->roworiented; 525381824310SBarry Smith 525481824310SBarry Smith /* Some Variables required in the macro */ 525581824310SBarry Smith Mat A = aij->A; 525681824310SBarry Smith Mat_SeqAIJ *a = (Mat_SeqAIJ*)A->data; 525781824310SBarry Smith PetscInt *aimax = a->imax,*ai = a->i,*ailen = a->ilen,*aj = a->j; 5258dd6ea824SBarry Smith MatScalar *aa = a->a; 525981824310SBarry Smith PetscTruth ignorezeroentries = (((a->ignorezeroentries)&&(addv==ADD_VALUES))?PETSC_TRUE:PETSC_FALSE); 526081824310SBarry Smith Mat B = aij->B; 526181824310SBarry Smith Mat_SeqAIJ *b = 
(Mat_SeqAIJ*)B->data; 5262d0f46423SBarry Smith PetscInt *bimax = b->imax,*bi = b->i,*bilen = b->ilen,*bj = b->j,bm = aij->B->rmap->n,am = aij->A->rmap->n; 5263dd6ea824SBarry Smith MatScalar *ba = b->a; 526481824310SBarry Smith 526581824310SBarry Smith PetscInt *rp1,*rp2,ii,nrow1,nrow2,_i,rmax1,rmax2,N,low1,high1,low2,high2,t,lastcol1,lastcol2; 526681824310SBarry Smith PetscInt nonew = a->nonew; 5267dd6ea824SBarry Smith MatScalar *ap1,*ap2; 526881824310SBarry Smith 526981824310SBarry Smith PetscFunctionBegin; 527081824310SBarry Smith for (i=0; i<m; i++) { 527181824310SBarry Smith if (im[i] < 0) continue; 527281824310SBarry Smith #if defined(PETSC_USE_DEBUG) 5273d0f46423SBarry Smith if (im[i] >= mat->rmap->N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",im[i],mat->rmap->N-1); 527481824310SBarry Smith #endif 527581824310SBarry Smith if (im[i] >= rstart && im[i] < rend) { 527681824310SBarry Smith row = im[i] - rstart; 527781824310SBarry Smith lastcol1 = -1; 527881824310SBarry Smith rp1 = aj + ai[row]; 527981824310SBarry Smith ap1 = aa + ai[row]; 528081824310SBarry Smith rmax1 = aimax[row]; 528181824310SBarry Smith nrow1 = ailen[row]; 528281824310SBarry Smith low1 = 0; 528381824310SBarry Smith high1 = nrow1; 528481824310SBarry Smith lastcol2 = -1; 528581824310SBarry Smith rp2 = bj + bi[row]; 528681824310SBarry Smith ap2 = ba + bi[row]; 528781824310SBarry Smith rmax2 = bimax[row]; 528881824310SBarry Smith nrow2 = bilen[row]; 528981824310SBarry Smith low2 = 0; 529081824310SBarry Smith high2 = nrow2; 529181824310SBarry Smith 529281824310SBarry Smith for (j=0; j<n; j++) { 529381824310SBarry Smith if (roworiented) value = v[i*n+j]; else value = v[i+j*m]; 529481824310SBarry Smith if (ignorezeroentries && value == 0.0 && (addv == ADD_VALUES)) continue; 529581824310SBarry Smith if (in[j] >= cstart && in[j] < cend){ 529681824310SBarry Smith col = in[j] - cstart; 529781824310SBarry Smith MatSetValues_SeqAIJ_A_Private(row,col,value,addv); 529881824310SBarry 
Smith } else if (in[j] < 0) continue; 529981824310SBarry Smith #if defined(PETSC_USE_DEBUG) 5300d0f46423SBarry Smith else if (in[j] >= mat->cmap->N) {SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",in[j],mat->cmap->N-1);} 530181824310SBarry Smith #endif 530281824310SBarry Smith else { 530381824310SBarry Smith if (mat->was_assembled) { 530481824310SBarry Smith if (!aij->colmap) { 530581824310SBarry Smith ierr = CreateColmap_MPIAIJ_Private(mat);CHKERRQ(ierr); 530681824310SBarry Smith } 530781824310SBarry Smith #if defined (PETSC_USE_CTABLE) 530881824310SBarry Smith ierr = PetscTableFind(aij->colmap,in[j]+1,&col);CHKERRQ(ierr); 530981824310SBarry Smith col--; 531081824310SBarry Smith #else 531181824310SBarry Smith col = aij->colmap[in[j]] - 1; 531281824310SBarry Smith #endif 531381824310SBarry Smith if (col < 0 && !((Mat_SeqAIJ*)(aij->A->data))->nonew) { 531481824310SBarry Smith ierr = DisAssemble_MPIAIJ(mat);CHKERRQ(ierr); 531581824310SBarry Smith col = in[j]; 531681824310SBarry Smith /* Reinitialize the variables required by MatSetValues_SeqAIJ_B_Private() */ 531781824310SBarry Smith B = aij->B; 531881824310SBarry Smith b = (Mat_SeqAIJ*)B->data; 531981824310SBarry Smith bimax = b->imax; bi = b->i; bilen = b->ilen; bj = b->j; 532081824310SBarry Smith rp2 = bj + bi[row]; 532181824310SBarry Smith ap2 = ba + bi[row]; 532281824310SBarry Smith rmax2 = bimax[row]; 532381824310SBarry Smith nrow2 = bilen[row]; 532481824310SBarry Smith low2 = 0; 532581824310SBarry Smith high2 = nrow2; 5326d0f46423SBarry Smith bm = aij->B->rmap->n; 532781824310SBarry Smith ba = b->a; 532881824310SBarry Smith } 532981824310SBarry Smith } else col = in[j]; 533081824310SBarry Smith MatSetValues_SeqAIJ_B_Private(row,col,value,addv); 533181824310SBarry Smith } 533281824310SBarry Smith } 533381824310SBarry Smith } else { 533481824310SBarry Smith if (!aij->donotstash) { 533581824310SBarry Smith if (roworiented) { 53363b024144SHong Zhang ierr = 
MatStashValuesRow_Private(&mat->stash,im[i],n,in,v+i*n,(PetscTruth)(ignorezeroentries && (addv == ADD_VALUES)));CHKERRQ(ierr); 533781824310SBarry Smith } else { 53383b024144SHong Zhang ierr = MatStashValuesCol_Private(&mat->stash,im[i],n,in,v+i,m,(PetscTruth)(ignorezeroentries && (addv == ADD_VALUES)));CHKERRQ(ierr); 533981824310SBarry Smith } 534081824310SBarry Smith } 534181824310SBarry Smith } 534281824310SBarry Smith }} 534381824310SBarry Smith PetscFunctionReturnVoid(); 534481824310SBarry Smith } 534581824310SBarry Smith EXTERN_C_END 534603bfb495SBarry Smith 5347