#define PETSCMAT_DLL

#include "../src/mat/impls/aij/mpi/mpiaij.h"   /*I "petscmat.h" I*/
#include "../src/inline/spops.h"

#undef __FUNCT__
#define __FUNCT__ "MatDistribute_MPIAIJ"
/*
  Distributes a SeqAIJ matrix across a set of processes. Code stolen from
  MatLoad_MPIAIJ(). Horrible lack of reuse. Should be a routine for each matrix type.

  Only for square matrices

  Collective on comm. On rank 0, gmat is the global SeqAIJ matrix to scatter;
  on other ranks gmat is not referenced. m is the number of local rows this
  process is to own. With reuse == MAT_INITIAL_MATRIX a new MPIAIJ matrix is
  created in *inmat (structure and values are shipped from rank 0); with
  MAT_REUSE_MATRIX only the numerical values are moved over into the already
  structured *inmat.
*/
PetscErrorCode MatDistribute_MPIAIJ(MPI_Comm comm,Mat gmat,PetscInt m,MatReuse reuse,Mat *inmat)
{
  PetscMPIInt    rank,size;
  PetscInt       *rowners,*dlens,*olens,i,rstart,rend,j,jj,nz,*gmataj,cnt,row,*ld;
  PetscErrorCode ierr;
  Mat            mat;
  Mat_SeqAIJ     *gmata;
  PetscMPIInt    tag;
  MPI_Status     status;
  PetscTruth     aij;
  MatScalar      *gmataa,*ao,*ad,*gmataarestore=0;

  PetscFunctionBegin;
  CHKMEMQ;
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  if (!rank) {
    /* only rank 0 holds the global matrix, so only it can check the type */
    ierr = PetscTypeCompare((PetscObject)gmat,MATSEQAIJ,&aij);CHKERRQ(ierr);
    if (!aij) SETERRQ1(PETSC_ERR_SUP,"Currently no support for input matrix of type %s\n",((PetscObject)gmat)->type_name);
  }
  if (reuse == MAT_INITIAL_MATRIX) {
    ierr = MatCreate(comm,&mat);CHKERRQ(ierr);
    ierr = MatSetSizes(mat,m,m,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
    ierr = MatSetType(mat,MATAIJ);CHKERRQ(ierr);
    ierr = PetscMalloc((size+1)*sizeof(PetscInt),&rowners);CHKERRQ(ierr);
    ierr = PetscMalloc2(m,PetscInt,&dlens,m,PetscInt,&olens);CHKERRQ(ierr);
    /* gather everyone's local row count, then prefix-sum to get the
       ownership ranges: rowners[r] is the first global row of rank r */
    ierr = MPI_Allgather(&m,1,MPIU_INT,rowners+1,1,MPIU_INT,comm);CHKERRQ(ierr);
    rowners[0] = 0;
    for (i=2; i<=size; i++) {
      rowners[i] += rowners[i-1];
    }
    rstart = rowners[rank];
    rend   = rowners[rank+1];
    ierr = PetscObjectGetNewTag((PetscObject)mat,&tag);CHKERRQ(ierr);
    if (!rank) {
      gmata = (Mat_SeqAIJ*) gmat->data;
      /* send row lengths to all processors */
      for (i=0; i<m; i++) dlens[i] = gmata->ilen[i];
      for (i=1; i<size; i++) {
        ierr = MPI_Send(gmata->ilen + rowners[i],rowners[i+1]-rowners[i],MPIU_INT,i,tag,comm);CHKERRQ(ierr);
      }
      /* determine number diagonal and off-diagonal counts */
      ierr = PetscMemzero(olens,m*sizeof(PetscInt));CHKERRQ(ierr);
      ierr = PetscMalloc(m*sizeof(PetscInt),&ld);CHKERRQ(ierr);
      ierr = PetscMemzero(ld,m*sizeof(PetscInt));CHKERRQ(ierr);
      /* ld[i] counts entries strictly left of the diagonal block in row i;
         olens[i] counts all entries outside [rstart,rend) (the off-diagonal B part) */
      jj = 0;
      for (i=0; i<m; i++) {
        for (j=0; j<dlens[i]; j++) {
          if (gmata->j[jj] < rstart) ld[i]++;
          if (gmata->j[jj] < rstart || gmata->j[jj] >= rend) olens[i]++;
          jj++;
        }
      }
      /* send column indices to other processes */
      for (i=1; i<size; i++) {
        nz   = gmata->i[rowners[i+1]]-gmata->i[rowners[i]];
        ierr = MPI_Send(&nz,1,MPIU_INT,i,tag,comm);CHKERRQ(ierr);
        ierr = MPI_Send(gmata->j + gmata->i[rowners[i]],nz,MPIU_INT,i,tag,comm);CHKERRQ(ierr);
      }

      /* send numerical values to other processes */
      for (i=1; i<size; i++) {
        nz   = gmata->i[rowners[i+1]]-gmata->i[rowners[i]];
        ierr = MPI_Send(gmata->a + gmata->i[rowners[i]],nz,MPIU_SCALAR,i,tag,comm);CHKERRQ(ierr);
      }
      /* rank 0 reads its own slice directly out of gmat (no self-send) */
      gmataa = gmata->a;
      gmataj = gmata->j;

    } else {
      /* receive row lengths */
      ierr = MPI_Recv(dlens,m,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr);
      /* receive column indices */
      ierr = MPI_Recv(&nz,1,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr);
      ierr = PetscMalloc2(nz,PetscScalar,&gmataa,nz,PetscInt,&gmataj);CHKERRQ(ierr);
      ierr = MPI_Recv(gmataj,nz,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr);
      /* determine number diagonal and off-diagonal counts */
      ierr = PetscMemzero(olens,m*sizeof(PetscInt));CHKERRQ(ierr);
      ierr = PetscMalloc(m*sizeof(PetscInt),&ld);CHKERRQ(ierr);
      ierr = PetscMemzero(ld,m*sizeof(PetscInt));CHKERRQ(ierr);
      jj = 0;
      for (i=0; i<m; i++) {
        for (j=0; j<dlens[i]; j++) {
          if (gmataj[jj] < rstart) ld[i]++;
          if (gmataj[jj] < rstart || gmataj[jj] >= rend) olens[i]++;
          jj++;
        }
      }
      /* receive numerical values */
      ierr = PetscMemzero(gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr);
      ierr = MPI_Recv(gmataa,nz,MPIU_SCALAR,0,tag,comm,&status);CHKERRQ(ierr);
    }
    /* set preallocation: dlens currently holds total row lengths, so
       subtract olens to get the diagonal-block lengths */
    for (i=0; i<m; i++) {
      dlens[i] -= olens[i];
    }
    ierr = MatSeqAIJSetPreallocation(mat,0,dlens);CHKERRQ(ierr);
    ierr = MatMPIAIJSetPreallocation(mat,0,dlens,0,olens);CHKERRQ(ierr);

    /* restore dlens to total row lengths for the MatSetValues() loop below */
    for (i=0; i<m; i++) {
      dlens[i] += olens[i];
    }
    cnt = 0;
    for (i=0; i<m; i++) {
      row  = rstart + i;
      ierr = MatSetValues(mat,1,&row,dlens[i],gmataj+cnt,gmataa+cnt,INSERT_VALUES);CHKERRQ(ierr);
      cnt += dlens[i];
    }
    if (rank) {
      ierr = PetscFree2(gmataa,gmataj);CHKERRQ(ierr);
    }
    ierr = PetscFree2(dlens,olens);CHKERRQ(ierr);
    ierr = PetscFree(rowners);CHKERRQ(ierr);
    /* stash ld in the matrix so the MAT_REUSE_MATRIX path below can split
       incoming row values into B-left / A / B-right segments */
    ((Mat_MPIAIJ*)(mat->data))->ld = ld;
    *inmat = mat;
  } else {   /* column indices are already set; only need to move over numerical values from process 0 */
    Mat_SeqAIJ *Ad = (Mat_SeqAIJ*)((Mat_MPIAIJ*)((*inmat)->data))->A->data;
    Mat_SeqAIJ *Ao = (Mat_SeqAIJ*)((Mat_MPIAIJ*)((*inmat)->data))->B->data;
    mat  = *inmat;
    ierr = PetscObjectGetNewTag((PetscObject)mat,&tag);CHKERRQ(ierr);
    if (!rank) {
      /* send numerical values to other processes */
      gmata = (Mat_SeqAIJ*) gmat->data;
      ierr  = MatGetOwnershipRanges(mat,(const PetscInt**)&rowners);CHKERRQ(ierr);
      gmataa = gmata->a;
      for (i=1; i<size; i++) {
        nz   = gmata->i[rowners[i+1]]-gmata->i[rowners[i]];
        ierr = MPI_Send(gmataa + gmata->i[rowners[i]],nz,MPIU_SCALAR,i,tag,comm);CHKERRQ(ierr);
      }
      /* rank 0's own slice is the leading portion of gmata->a */
      nz = gmata->i[rowners[1]]-gmata->i[rowners[0]];
    } else {
      /* receive numerical values from process 0 */
      nz   = Ad->nz + Ao->nz;
      ierr = PetscMalloc(nz*sizeof(PetscScalar),&gmataa);CHKERRQ(ierr); gmataarestore = gmataa;
      ierr = MPI_Recv(gmataa,nz,MPIU_SCALAR,0,tag,comm,&status);CHKERRQ(ierr);
    }
    /* transfer numerical values into the diagonal A and off diagonal B parts of mat.
       Each incoming row is laid out as: [B entries left of diag block][A entries][B entries right];
       ld[] gives the left-of-diagonal count per row. */
    ld = ((Mat_MPIAIJ*)(mat->data))->ld;
    ad = Ad->a;
    ao = Ao->a;
    if (mat->rmap->n) {
      i  = 0;
      nz = ld[i];                 ierr = PetscMemcpy(ao,gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr); ao += nz; gmataa += nz;
      nz = Ad->i[i+1] - Ad->i[i]; ierr = PetscMemcpy(ad,gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr); ad += nz; gmataa += nz;
    }
    for (i=1; i<mat->rmap->n; i++) {
      /* right-of-diagonal tail of row i-1 plus left-of-diagonal head of row i */
      nz = Ao->i[i] - Ao->i[i-1] - ld[i-1] + ld[i]; ierr = PetscMemcpy(ao,gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr); ao += nz; gmataa += nz;
      nz = Ad->i[i+1] - Ad->i[i];                   ierr = PetscMemcpy(ad,gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr); ad += nz; gmataa += nz;
    }
    i--;
    if (mat->rmap->n) {
      /* right-of-diagonal tail of the last local row */
      nz = Ao->i[i+1] - Ao->i[i] - ld[i]; ierr = PetscMemcpy(ao,gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr); ao += nz; gmataa += nz;
    }
    if (rank) {
      /* gmataa was advanced while copying; free via the saved base pointer */
      ierr = PetscFree(gmataarestore);CHKERRQ(ierr);
    }
  }
  ierr = MatAssemblyBegin(mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  CHKMEMQ;
  PetscFunctionReturn(0);
}

/*
    Local utility routine that creates a mapping from the global column
  number to the local number in the off-diagonal part of the local
  storage of the matrix.  When PETSC_USE_CTABLE is used this is scalable at
  a slightly higher hash table cost; without it it is not scalable (each processor
  has an order N integer array but is fast to access).
*/
#undef __FUNCT__
#define __FUNCT__ "CreateColmap_MPIAIJ_Private"
PetscErrorCode CreateColmap_MPIAIJ_Private(Mat mat)
{
  Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
  PetscErrorCode ierr;
  PetscInt       n = aij->B->cmap->n,i;

  PetscFunctionBegin;
#if defined (PETSC_USE_CTABLE)
  /* hash table maps global column+1 -> local column+1 (the +1 avoids the
     0 key/value, since 0 is the "not found" result) */
  ierr = PetscTableCreate(n,&aij->colmap);CHKERRQ(ierr);
  for (i=0; i<n; i++){
    ierr = PetscTableAdd(aij->colmap,aij->garray[i]+1,i+1);CHKERRQ(ierr);
  }
#else
  /* dense array of global length N; entry is local column+1, 0 means absent */
  ierr = PetscMalloc((mat->cmap->N+1)*sizeof(PetscInt),&aij->colmap);CHKERRQ(ierr);
  ierr = PetscLogObjectMemory(mat,mat->cmap->N*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMemzero(aij->colmap,mat->cmap->N*sizeof(PetscInt));CHKERRQ(ierr);
  for (i=0; i<n; i++) aij->colmap[aij->garray[i]] = i+1;
#endif
  PetscFunctionReturn(0);
}


#define CHUNKSIZE   15

/*
   Inserts (or adds) one value into the diagonal block A of the local matrix.
   Relies on many locals of the caller (rp1/ap1/low1/high1/nrow1/rmax1/lastcol1,
   nonew, ignorezeroentries, a, aa, ai, aj, aimax, ailen, am, N, t, ii, _i).
   Binary-search narrowed by a linear scan locates the column; if missing and
   insertion is allowed, later entries in the row are shifted up to make room.
*/
#define MatSetValues_SeqAIJ_A_Private(row,col,value,addv) \
{ \
    if (col <= lastcol1)  low1 = 0; else high1 = nrow1; \
    lastcol1 = col;\
    while (high1-low1 > 5) { \
      t = (low1+high1)/2; \
      if (rp1[t] > col) high1 = t; \
      else              low1  = t; \
    } \
      for (_i=low1; _i<high1; _i++) { \
        if (rp1[_i] > col) break; \
        if (rp1[_i] == col) { \
          if (addv == ADD_VALUES) ap1[_i] += value;   \
          else                    ap1[_i] = value; \
          goto a_noinsert; \
        } \
      }  \
      if (value == 0.0 && ignorezeroentries) {low1 = 0; high1 = nrow1;goto a_noinsert;} \
      if (nonew == 1) {low1 = 0; high1 = nrow1; goto a_noinsert;} \
      if (nonew == -1) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, %D) into matrix", row, col); \
      MatSeqXAIJReallocateAIJ(A,am,1,nrow1,row,col,rmax1,aa,ai,aj,rp1,ap1,aimax,nonew,MatScalar); \
      N = nrow1++ - 1; a->nz++; high1++; \
      /* shift up all the later entries in this row */ \
      for (ii=N; ii>=_i; ii--) { \
        rp1[ii+1] = rp1[ii]; \
        ap1[ii+1] = ap1[ii]; \
      } \
      rp1[_i] = col;  \
      ap1[_i] = value; \
      a_noinsert: ; \
      ailen[row] = nrow1; \
}


/*
   Same as MatSetValues_SeqAIJ_A_Private() but operates on the off-diagonal
   block B (rp2/ap2/low2/high2/... locals of the caller).
*/
#define MatSetValues_SeqAIJ_B_Private(row,col,value,addv) \
{ \
    if (col <= lastcol2) low2 = 0; else high2 = nrow2; \
    lastcol2 = col;\
    while (high2-low2 > 5) { \
      t = (low2+high2)/2; \
      if (rp2[t] > col) high2 = t; \
      else              low2  = t; \
    } \
       for (_i=low2; _i<high2; _i++) { \
        if (rp2[_i] > col) break; \
        if (rp2[_i] == col) { \
          if (addv == ADD_VALUES) ap2[_i] += value; \
          else                    ap2[_i] = value; \
          goto b_noinsert; \
        } \
      } \
      if (value == 0.0 && ignorezeroentries) {low2 = 0; high2 = nrow2; goto b_noinsert;} \
      if (nonew == 1) {low2 = 0; high2 = nrow2; goto b_noinsert;} \
      if (nonew == -1) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, %D) into matrix", row, col); \
      MatSeqXAIJReallocateAIJ(B,bm,1,nrow2,row,col,rmax2,ba,bi,bj,rp2,ap2,bimax,nonew,MatScalar); \
      N = nrow2++ - 1; b->nz++; high2++; \
      /* shift up all the later entries in this row */ \
      for (ii=N; ii>=_i; ii--) { \
        rp2[ii+1] = rp2[ii]; \
        ap2[ii+1] = ap2[ii]; \
      } \
      rp2[_i] = col; \
      ap2[_i] = value; \
      b_noinsert: ; \
      bilen[row] = nrow2; \
}

#undef __FUNCT__
#define __FUNCT__ "MatSetValuesRow_MPIAIJ"
/*
   Overwrites one entire locally owned row of A with the values in v.
   v holds the row's nonzeros in global column order: the B entries left of
   the diagonal block, then the A (diagonal-block) entries, then the B entries
   to the right. row is a GLOBAL row index; it must be locally owned.
*/
PetscErrorCode MatSetValuesRow_MPIAIJ(Mat A,PetscInt row,const PetscScalar v[])
{
  Mat_MPIAIJ     *mat = (Mat_MPIAIJ*)A->data;
  Mat_SeqAIJ     *a = (Mat_SeqAIJ*)mat->A->data,*b = (Mat_SeqAIJ*)mat->B->data;
  PetscErrorCode ierr;
  PetscInt       l,*garray = mat->garray,diag;

  PetscFunctionBegin;
  /* code only works for square matrices A */

  /* find size of row to the left of the diagonal part */
  ierr = MatGetOwnershipRange(A,&diag,0);CHKERRQ(ierr);
  row  = row - diag;
  for (l=0; l<b->i[row+1]-b->i[row]; l++) {
    if (garray[b->j[b->i[row]+l]] > diag) break;
  }
  ierr = PetscMemcpy(b->a+b->i[row],v,l*sizeof(PetscScalar));CHKERRQ(ierr);

  /* diagonal part */
  ierr = PetscMemcpy(a->a+a->i[row],v+l,(a->i[row+1]-a->i[row])*sizeof(PetscScalar));CHKERRQ(ierr);

  /* right of diagonal part */
  ierr = PetscMemcpy(b->a+b->i[row]+l,v+l+a->i[row+1]-a->i[row],(b->i[row+1]-b->i[row]-l)*sizeof(PetscScalar));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetValues_MPIAIJ"
/*
   MatSetValues() implementation for MPIAIJ: locally owned rows are written
   directly into the A (diagonal) or B (off-diagonal) SeqAIJ block via the
   macros above; rows owned by other processes are stashed and communicated
   at assembly time (unless donotstash is set).
*/
PetscErrorCode MatSetValues_MPIAIJ(Mat mat,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode addv)
{
  Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
  PetscScalar    value;
  PetscErrorCode ierr;
  PetscInt       i,j,rstart = mat->rmap->rstart,rend = mat->rmap->rend;
  PetscInt       cstart = mat->cmap->rstart,cend = mat->cmap->rend,row,col;
  PetscTruth     roworiented = aij->roworiented;

  /* Some Variables required in the macro */
  Mat        A = aij->A;
  Mat_SeqAIJ *a = (Mat_SeqAIJ*)A->data;
  PetscInt   *aimax = a->imax,*ai = a->i,*ailen = a->ilen,*aj = a->j;
  MatScalar  *aa = a->a;
  PetscTruth ignorezeroentries = a->ignorezeroentries;
  Mat        B = aij->B;
  Mat_SeqAIJ *b = (Mat_SeqAIJ*)B->data;
  PetscInt   *bimax = b->imax,*bi = b->i,*bilen = b->ilen,*bj = b->j,bm = aij->B->rmap->n,am = aij->A->rmap->n;
  MatScalar  *ba = b->a;

  PetscInt  *rp1,*rp2,ii,nrow1,nrow2,_i,rmax1,rmax2,N,low1,high1,low2,high2,t,lastcol1,lastcol2;
  PetscInt  nonew = a->nonew;
  MatScalar *ap1,*ap2;

  PetscFunctionBegin;
  for (i=0; i<m; i++) {
    if (im[i] < 0) continue;  /* negative row indices are silently skipped */
#if defined(PETSC_USE_DEBUG)
    if (im[i] >= mat->rmap->N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",im[i],mat->rmap->N-1);
#endif
    if (im[i] >= rstart && im[i] < rend) {
      /* locally owned row: prime the per-row state used by the insertion macros */
      row      = im[i] - rstart;
      lastcol1 = -1;
      rp1      = aj + ai[row];
      ap1      = aa + ai[row];
      rmax1    = aimax[row];
      nrow1    = ailen[row];
      low1     = 0;
      high1    = nrow1;
      lastcol2 = -1;
      rp2      = bj + bi[row];
      ap2      = ba + bi[row];
      rmax2    = bimax[row];
      nrow2    = bilen[row];
      low2     = 0;
      high2    = nrow2;

      for (j=0; j<n; j++) {
        if (v) {if (roworiented) value = v[i*n+j]; else value = v[i+j*m];} else value = 0.0;
        if (ignorezeroentries && value == 0.0 && (addv == ADD_VALUES)) continue;
        if (in[j] >= cstart && in[j] < cend){
          /* column falls in the diagonal block */
          col = in[j] - cstart;
          MatSetValues_SeqAIJ_A_Private(row,col,value,addv);
        } else if (in[j] < 0) continue;
#if defined(PETSC_USE_DEBUG)
        else if (in[j] >= mat->cmap->N) {SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",in[j],mat->cmap->N-1);}
#endif
        else {
          if (mat->was_assembled) {
            if (!aij->colmap) {
              ierr = CreateColmap_MPIAIJ_Private(mat);CHKERRQ(ierr);
            }
#if defined (PETSC_USE_CTABLE)
            ierr = PetscTableFind(aij->colmap,in[j]+1,&col);CHKERRQ(ierr);
            col--;
#else
            col = aij->colmap[in[j]] - 1;
#endif
            if (col < 0 && !((Mat_SeqAIJ*)(aij->A->data))->nonew) {
              /* column not in B's current nonzero pattern: tear the matrix
                 back apart so new off-diagonal structure can be inserted
                 with a global column index */
              ierr = DisAssemble_MPIAIJ(mat);CHKERRQ(ierr);
              col  = in[j];
              /* Reinitialize the variables required by MatSetValues_SeqAIJ_B_Private() */
              B = aij->B;
              b = (Mat_SeqAIJ*)B->data;
              bimax = b->imax; bi = b->i; bilen = b->ilen; bj = b->j; ba = b->a;
              rp2   = bj + bi[row];
              ap2   = ba + bi[row];
              rmax2 = bimax[row];
              nrow2 = bilen[row];
              low2  = 0;
              high2 = nrow2;
              bm    = aij->B->rmap->n;
              ba    = b->a;
            }
          } else col = in[j];  /* before first assembly B uses global column indices */
          MatSetValues_SeqAIJ_B_Private(row,col,value,addv);
        }
      }
    } else {
      /* off-process row: stash for communication during assembly */
      if (!aij->donotstash) {
        if (roworiented) {
          if (ignorezeroentries && v[i*n] == 0.0) continue;
          ierr = MatStashValuesRow_Private(&mat->stash,im[i],n,in,v+i*n);CHKERRQ(ierr);
        } else {
          if (ignorezeroentries && v[i] == 0.0) continue;
          ierr = MatStashValuesCol_Private(&mat->stash,im[i],n,in,v+i,m);CHKERRQ(ierr);
        }
      }
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetValues_MPIAIJ"
/*
   MatGetValues() implementation for MPIAIJ; only locally owned rows may be
   queried. Off-diagonal columns are translated through the colmap; columns
   not present in the local nonzero pattern read back as 0.0.
*/
PetscErrorCode MatGetValues_MPIAIJ(Mat mat,PetscInt m,const PetscInt idxm[],PetscInt n,const PetscInt idxn[],PetscScalar v[])
{
  Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
  PetscErrorCode ierr;
  PetscInt       i,j,rstart = mat->rmap->rstart,rend = mat->rmap->rend;
  PetscInt       cstart = mat->cmap->rstart,cend = mat->cmap->rend,row,col;

  PetscFunctionBegin;
  for (i=0; i<m; i++) {
    if (idxm[i] < 0) continue; /* SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Negative row: %D",idxm[i]);*/
    if (idxm[i] >= mat->rmap->N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",idxm[i],mat->rmap->N-1);
    if (idxm[i] >= rstart && idxm[i] < rend) {
      row = idxm[i] - rstart;
      for (j=0; j<n; j++) {
        if (idxn[j] < 0) continue; /* SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Negative column: %D",idxn[j]); */
        if (idxn[j] >= mat->cmap->N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",idxn[j],mat->cmap->N-1);
        if (idxn[j] >= cstart && idxn[j] < cend){
427b49de8d1SLois Curfman McInnes col = idxn[j] - cstart; 428b49de8d1SLois Curfman McInnes ierr = MatGetValues(aij->A,1,&row,1,&col,v+i*n+j);CHKERRQ(ierr); 429fa852ad4SSatish Balay } else { 430905e6a2fSBarry Smith if (!aij->colmap) { 431905e6a2fSBarry Smith ierr = CreateColmap_MPIAIJ_Private(mat);CHKERRQ(ierr); 432905e6a2fSBarry Smith } 433aa482453SBarry Smith #if defined (PETSC_USE_CTABLE) 4340f5bd95cSBarry Smith ierr = PetscTableFind(aij->colmap,idxn[j]+1,&col);CHKERRQ(ierr); 435fa46199cSSatish Balay col --; 436b1fc9764SSatish Balay #else 437905e6a2fSBarry Smith col = aij->colmap[idxn[j]] - 1; 438b1fc9764SSatish Balay #endif 439e60e1c95SSatish Balay if ((col < 0) || (aij->garray[col] != idxn[j])) *(v+i*n+j) = 0.0; 440d9d09a02SSatish Balay else { 441b49de8d1SLois Curfman McInnes ierr = MatGetValues(aij->B,1,&row,1,&col,v+i*n+j);CHKERRQ(ierr); 442b49de8d1SLois Curfman McInnes } 443b49de8d1SLois Curfman McInnes } 444b49de8d1SLois Curfman McInnes } 445a8c6a408SBarry Smith } else { 44629bbc08cSBarry Smith SETERRQ(PETSC_ERR_SUP,"Only local values currently supported"); 447b49de8d1SLois Curfman McInnes } 448b49de8d1SLois Curfman McInnes } 4493a40ed3dSBarry Smith PetscFunctionReturn(0); 450b49de8d1SLois Curfman McInnes } 451bc5ccf88SSatish Balay 4524a2ae208SSatish Balay #undef __FUNCT__ 4534a2ae208SSatish Balay #define __FUNCT__ "MatAssemblyBegin_MPIAIJ" 454dfbe8321SBarry Smith PetscErrorCode MatAssemblyBegin_MPIAIJ(Mat mat,MatAssemblyType mode) 455bc5ccf88SSatish Balay { 456bc5ccf88SSatish Balay Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 457dfbe8321SBarry Smith PetscErrorCode ierr; 458b1d57f15SBarry Smith PetscInt nstash,reallocs; 459bc5ccf88SSatish Balay InsertMode addv; 460bc5ccf88SSatish Balay 461bc5ccf88SSatish Balay PetscFunctionBegin; 462bc5ccf88SSatish Balay if (aij->donotstash) { 463bc5ccf88SSatish Balay PetscFunctionReturn(0); 464bc5ccf88SSatish Balay } 465bc5ccf88SSatish Balay 466bc5ccf88SSatish Balay /* make sure all processors are either in INSERTMODE or 
ADDMODE */ 4677adad957SLisandro Dalcin ierr = MPI_Allreduce(&mat->insertmode,&addv,1,MPI_INT,MPI_BOR,((PetscObject)mat)->comm);CHKERRQ(ierr); 468bc5ccf88SSatish Balay if (addv == (ADD_VALUES|INSERT_VALUES)) { 46929bbc08cSBarry Smith SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Some processors inserted others added"); 470bc5ccf88SSatish Balay } 471bc5ccf88SSatish Balay mat->insertmode = addv; /* in case this processor had no cache */ 472bc5ccf88SSatish Balay 473d0f46423SBarry Smith ierr = MatStashScatterBegin_Private(mat,&mat->stash,mat->rmap->range);CHKERRQ(ierr); 4748798bf22SSatish Balay ierr = MatStashGetInfo_Private(&mat->stash,&nstash,&reallocs);CHKERRQ(ierr); 475ae15b995SBarry Smith ierr = PetscInfo2(aij->A,"Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr); 476bc5ccf88SSatish Balay PetscFunctionReturn(0); 477bc5ccf88SSatish Balay } 478bc5ccf88SSatish Balay 4794a2ae208SSatish Balay #undef __FUNCT__ 4804a2ae208SSatish Balay #define __FUNCT__ "MatAssemblyEnd_MPIAIJ" 481dfbe8321SBarry Smith PetscErrorCode MatAssemblyEnd_MPIAIJ(Mat mat,MatAssemblyType mode) 482bc5ccf88SSatish Balay { 483bc5ccf88SSatish Balay Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 48491c97fd4SSatish Balay Mat_SeqAIJ *a=(Mat_SeqAIJ *)aij->A->data; 4856849ba73SBarry Smith PetscErrorCode ierr; 486b1d57f15SBarry Smith PetscMPIInt n; 487b1d57f15SBarry Smith PetscInt i,j,rstart,ncols,flg; 488e44c0bd4SBarry Smith PetscInt *row,*col; 489e44c0bd4SBarry Smith PetscTruth other_disassembled; 49087828ca2SBarry Smith PetscScalar *val; 491bc5ccf88SSatish Balay InsertMode addv = mat->insertmode; 492bc5ccf88SSatish Balay 49391c97fd4SSatish Balay /* do not use 'b = (Mat_SeqAIJ *)aij->B->data' as B can be reset in disassembly */ 494bc5ccf88SSatish Balay PetscFunctionBegin; 495bc5ccf88SSatish Balay if (!aij->donotstash) { 496a2d1c673SSatish Balay while (1) { 4978798bf22SSatish Balay ierr = MatStashScatterGetMesg_Private(&mat->stash,&n,&row,&col,&val,&flg);CHKERRQ(ierr); 498a2d1c673SSatish Balay if 
(!flg) break; 499a2d1c673SSatish Balay 500bc5ccf88SSatish Balay for (i=0; i<n;) { 501bc5ccf88SSatish Balay /* Now identify the consecutive vals belonging to the same row */ 502bc5ccf88SSatish Balay for (j=i,rstart=row[j]; j<n; j++) { if (row[j] != rstart) break; } 503bc5ccf88SSatish Balay if (j < n) ncols = j-i; 504bc5ccf88SSatish Balay else ncols = n-i; 505bc5ccf88SSatish Balay /* Now assemble all these values with a single function call */ 506bc5ccf88SSatish Balay ierr = MatSetValues_MPIAIJ(mat,1,row+i,ncols,col+i,val+i,addv);CHKERRQ(ierr); 507bc5ccf88SSatish Balay i = j; 508bc5ccf88SSatish Balay } 509bc5ccf88SSatish Balay } 5108798bf22SSatish Balay ierr = MatStashScatterEnd_Private(&mat->stash);CHKERRQ(ierr); 511bc5ccf88SSatish Balay } 5122f53aa61SHong Zhang a->compressedrow.use = PETSC_FALSE; 513bc5ccf88SSatish Balay ierr = MatAssemblyBegin(aij->A,mode);CHKERRQ(ierr); 514bc5ccf88SSatish Balay ierr = MatAssemblyEnd(aij->A,mode);CHKERRQ(ierr); 515bc5ccf88SSatish Balay 516bc5ccf88SSatish Balay /* determine if any processor has disassembled, if so we must 517bc5ccf88SSatish Balay also disassemble ourselfs, in order that we may reassemble. 
#undef __FUNCT__
#define __FUNCT__ "MatZeroEntries_MPIAIJ"
/*
   MatZeroEntries_MPIAIJ - Sets every stored entry of the parallel AIJ
   matrix to zero while keeping the nonzero pattern intact.

   The MPIAIJ format stores two sequential blocks per process: l->A
   (the "diagonal" block, columns owned by this process) and l->B
   (the off-diagonal block); zeroing both zeros the whole local part.
*/
PetscErrorCode MatZeroEntries_MPIAIJ(Mat A)
{
  Mat_MPIAIJ     *l = (Mat_MPIAIJ*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatZeroEntries(l->A);CHKERRQ(ierr);  /* diagonal block */
  ierr = MatZeroEntries(l->B);CHKERRQ(ierr);  /* off-diagonal block */
  PetscFunctionReturn(0);
}
#undef __FUNCT__
#define __FUNCT__ "MatZeroRows_MPIAIJ"
/*
   MatZeroRows_MPIAIJ - Zeros the (globally numbered) rows listed in rows[],
   optionally placing diag on the diagonal of each zeroed row.

   Any process may list any global row, so the routine first runs a
   hand-rolled all-to-some exchange (Irecv/Isend/Waitany) to deliver each
   row index to the process that owns it, then zaps the local rows.

   Input Parameters:
     A    - the parallel matrix
     N    - number of entries in rows[]
     rows - global row indices to zero (need not be locally owned)
     diag - value to place on the diagonal of each zeroed row (0.0 = none)
*/
PetscErrorCode MatZeroRows_MPIAIJ(Mat A,PetscInt N,const PetscInt rows[],PetscScalar diag)
{
  Mat_MPIAIJ     *l = (Mat_MPIAIJ*)A->data;
  PetscErrorCode ierr;
  PetscMPIInt    size = l->size,imdex,n,rank = l->rank,tag = ((PetscObject)A)->tag,lastidx = -1;
  PetscInt       i,*owners = A->rmap->range;
  PetscInt       *nprocs,j,idx,nsends,row;
  PetscInt       nmax,*svalues,*starts,*owner,nrecvs;
  PetscInt       *rvalues,count,base,slen,*source;
  PetscInt       *lens,*lrows,*values,rstart=A->rmap->rstart;
  MPI_Comm       comm = ((PetscObject)A)->comm;
  MPI_Request    *send_waits,*recv_waits;
  MPI_Status     recv_status,*send_status;
#if defined(PETSC_DEBUG)
  PetscTruth     found = PETSC_FALSE;
#endif

  PetscFunctionBegin;
  /* first count number of contributors to each processor.
     nprocs is packed in pairs: nprocs[2*j] = #rows destined for rank j,
     nprocs[2*j+1] = 1 iff any row goes to rank j (message flag). */
  ierr = PetscMalloc(2*size*sizeof(PetscInt),&nprocs);CHKERRQ(ierr);
  ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMalloc((N+1)*sizeof(PetscInt),&owner);CHKERRQ(ierr); /* see note*/
  j = 0;
  for (i=0; i<N; i++) {
    /* the owner search resumes from the previous j; restart from 0 only
       when the indices go backwards (rows[] is typically sorted) */
    if (lastidx > (idx = rows[i])) j = 0;
    lastidx = idx;
    for (; j<size; j++) {
      if (idx >= owners[j] && idx < owners[j+1]) {
        nprocs[2*j]++;
        nprocs[2*j+1] = 1;
        owner[i] = j;
#if defined(PETSC_DEBUG)
        found = PETSC_TRUE;
#endif
        break;
      }
    }
#if defined(PETSC_DEBUG)
    if (!found) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Index out of range");
    found = PETSC_FALSE;
#endif
  }
  nsends = 0;  for (i=0; i<size; i++) { nsends += nprocs[2*i+1];}

  /* inform other processors of number of messages and max length*/
  ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);

  /* post receives: one buffer slot of nmax ints per expected message */
  ierr = PetscMalloc((nrecvs+1)*(nmax+1)*sizeof(PetscInt),&rvalues);CHKERRQ(ierr);
  ierr = PetscMalloc((nrecvs+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs; i++) {
    ierr = MPI_Irecv(rvalues+nmax*i,nmax,MPIU_INT,MPI_ANY_SOURCE,tag,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* do sends:
     1) starts[i] gives the starting index in svalues for stuff going to
     the ith processor
  */
  ierr = PetscMalloc((N+1)*sizeof(PetscInt),&svalues);CHKERRQ(ierr);
  ierr = PetscMalloc((nsends+1)*sizeof(MPI_Request),&send_waits);CHKERRQ(ierr);
  ierr = PetscMalloc((size+1)*sizeof(PetscInt),&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<size; i++) { starts[i] = starts[i-1] + nprocs[2*i-2];}
  /* bucket the row indices by owner; starts[] is consumed as a cursor */
  for (i=0; i<N; i++) {
    svalues[starts[owner[i]]++] = rows[i];
  }

  /* rebuild starts[] (the pass above advanced it past each bucket) */
  starts[0] = 0;
  for (i=1; i<size+1; i++) { starts[i] = starts[i-1] + nprocs[2*i-2];}
  count = 0;
  for (i=0; i<size; i++) {
    if (nprocs[2*i+1]) {
      ierr = MPI_Isend(svalues+starts[i],nprocs[2*i],MPIU_INT,i,tag,comm,send_waits+count++);CHKERRQ(ierr);
    }
  }
  ierr = PetscFree(starts);CHKERRQ(ierr);

  base = owners[rank];

  /* wait on receives; record per-message source and length */
  ierr = PetscMalloc(2*(nrecvs+1)*sizeof(PetscInt),&lens);CHKERRQ(ierr);
  source = lens + nrecvs;
  count  = nrecvs; slen = 0;
  while (count) {
    ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
    /* unpack receives into our local space */
    ierr = MPI_Get_count(&recv_status,MPIU_INT,&n);CHKERRQ(ierr);
    source[imdex]  = recv_status.MPI_SOURCE;
    lens[imdex]    = n;
    slen += n;
    count--;
  }
  ierr = PetscFree(recv_waits);CHKERRQ(ierr);

  /* move the data into the send scatter: convert received global row
     numbers to local numbering via base = first locally owned row */
  ierr = PetscMalloc((slen+1)*sizeof(PetscInt),&lrows);CHKERRQ(ierr);
  count = 0;
  for (i=0; i<nrecvs; i++) {
    values = rvalues + i*nmax;
    for (j=0; j<lens[i]; j++) {
      lrows[count++] = values[j] - base;
    }
  }
  ierr = PetscFree(rvalues);CHKERRQ(ierr);
  ierr = PetscFree(lens);CHKERRQ(ierr);
  ierr = PetscFree(owner);CHKERRQ(ierr);
  ierr = PetscFree(nprocs);CHKERRQ(ierr);

  /* actually zap the local rows */
  /*
     Zero the required rows. If the "diagonal block" of the matrix
     is square and the user wishes to set the diagonal we use separate
     code so that MatSetValues() is not called for each diagonal allocating
     new memory, thus calling lots of mallocs and slowing things down.

     Contributed by: Matthew Knepley
  */
  /* must zero l->B before l->A because the (diag) case below may put values into l->B*/
  ierr = MatZeroRows(l->B,slen,lrows,0.0);CHKERRQ(ierr);
  if ((diag != 0.0) && (l->A->rmap->N == l->A->cmap->N)) {
    /* square diagonal block: the Seq routine can set diag in place */
    ierr = MatZeroRows(l->A,slen,lrows,diag);CHKERRQ(ierr);
  } else if (diag != 0.0) {
    /* rectangular diagonal block: must insert diagonal entries one by one,
       which requires that new nonzero locations be permitted */
    ierr = MatZeroRows(l->A,slen,lrows,0.0);CHKERRQ(ierr);
    if (((Mat_SeqAIJ*)l->A->data)->nonew) {
      SETERRQ(PETSC_ERR_SUP,"MatZeroRows() on rectangular matrices cannot be used with the Mat options\n\
MAT_NEW_NONZERO_LOCATIONS,MAT_NEW_NONZERO_LOCATION_ERR,MAT_NEW_NONZERO_ALLOCATION_ERR");
    }
    for (i = 0; i < slen; i++) {
      row  = lrows[i] + rstart;  /* back to global numbering for MatSetValues */
      ierr = MatSetValues(A,1,&row,1,&row,&diag,INSERT_VALUES);CHKERRQ(ierr);
    }
    ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
    ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  } else {
    ierr = MatZeroRows(l->A,slen,lrows,0.0);CHKERRQ(ierr);
  }
  ierr = PetscFree(lrows);CHKERRQ(ierr);

  /* wait on sends */
  if (nsends) {
    ierr = PetscMalloc(nsends*sizeof(MPI_Status),&send_status);CHKERRQ(ierr);
    ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
    ierr = PetscFree(send_status);CHKERRQ(ierr);
  }
  ierr = PetscFree(send_waits);CHKERRQ(ierr);
  ierr = PetscFree(svalues);CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
#undef __FUNCT__
#define __FUNCT__ "MatMult_MPIAIJ"
/*
   MatMult_MPIAIJ - Computes yy = A*xx.

   Overlaps communication and computation: the scatter of the needed
   off-process entries of xx into a->lvec is started, the diagonal-block
   product is computed while the messages are in flight, and the
   off-diagonal contribution is added after the scatter completes.
*/
PetscErrorCode MatMult_MPIAIJ(Mat A,Vec xx,Vec yy)
{
  Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
  PetscErrorCode ierr;
  PetscInt       nt;

  PetscFunctionBegin;
  ierr = VecGetLocalSize(xx,&nt);CHKERRQ(ierr);
  if (nt != A->cmap->n) {
    SETERRQ2(PETSC_ERR_ARG_SIZ,"Incompatible partition of A (%D) and xx (%D)",A->cmap->n,nt);
  }
  /* start gathering the ghost values of xx */
  ierr = VecScatterBegin(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  /* diagonal-block product while communication proceeds */
  ierr = (*a->A->ops->mult)(a->A,xx,yy);CHKERRQ(ierr);
  ierr = VecScatterEnd(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  /* add the off-diagonal contribution using the gathered ghost values */
  ierr = (*a->B->ops->multadd)(a->B,a->lvec,yy,yy);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMultAdd_MPIAIJ"
/*
   MatMultAdd_MPIAIJ - Computes zz = yy + A*xx, with the same
   communication/computation overlap as MatMult_MPIAIJ above.
*/
PetscErrorCode MatMultAdd_MPIAIJ(Mat A,Vec xx,Vec yy,Vec zz)
{
  Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = VecScatterBegin(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = (*a->A->ops->multadd)(a->A,xx,yy,zz);CHKERRQ(ierr);
  ierr = VecScatterEnd(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = (*a->B->ops->multadd)(a->B,a->lvec,zz,zz);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#undef __FUNCT__
#define __FUNCT__ "MatMultTranspose_MPIAIJ"
/*
   MatMultTranspose_MPIAIJ - Computes yy = A^T * xx.

   The off-diagonal transpose product lands in the ghost vector a->lvec
   and is scattered back (SCATTER_REVERSE, ADD_VALUES) into yy; the order
   of the local product relative to the scatter depends on whether the
   scatter context uses merged begin/end communication.
*/
PetscErrorCode MatMultTranspose_MPIAIJ(Mat A,Vec xx,Vec yy)
{
  Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
  PetscErrorCode ierr;
  PetscTruth     merged;

  PetscFunctionBegin;
  ierr = VecScatterGetMerged(a->Mvctx,&merged);CHKERRQ(ierr);
  /* do nondiagonal part */
  ierr = (*a->B->ops->multtranspose)(a->B,xx,a->lvec);CHKERRQ(ierr);
  if (!merged) {
    /* send it on its way */
    ierr = VecScatterBegin(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    /* do local part */
    ierr = (*a->A->ops->multtranspose)(a->A,xx,yy);CHKERRQ(ierr);
    /* receive remote parts: note this assumes the values are not actually */
    /* added in yy until the next line, */
    ierr = VecScatterEnd(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  } else {
    /* do local part */
    ierr = (*a->A->ops->multtranspose)(a->A,xx,yy);CHKERRQ(ierr);
    /* send it on its way */
    ierr = VecScatterBegin(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
    /* values actually were received in the Begin() but we need to call this nop */
    ierr = VecScatterEnd(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
(Mat_MPIAIJ *) Bmat->data; Bdia = Bij->A; 7945485867bSBarry Smith ierr = MatIsTranspose(Adia,Bdia,tol,f);CHKERRQ(ierr); 795cd0d46ebSvictorle if (!*f) PetscFunctionReturn(0); 7964f423910Svictorle ierr = PetscObjectGetComm((PetscObject)Amat,&comm);CHKERRQ(ierr); 797b1d57f15SBarry Smith ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 798b1d57f15SBarry Smith if (size == 1) PetscFunctionReturn(0); 79942e5f5b4Svictorle 80042e5f5b4Svictorle /* Hard test: off-diagonal block. This takes a MatGetSubMatrix. */ 801cd0d46ebSvictorle ierr = MatGetSize(Amat,&M,&N);CHKERRQ(ierr); 802cd0d46ebSvictorle ierr = MatGetOwnershipRange(Amat,&first,&last);CHKERRQ(ierr); 803b1d57f15SBarry Smith ierr = PetscMalloc((N-last+first)*sizeof(PetscInt),¬me);CHKERRQ(ierr); 804cd0d46ebSvictorle for (i=0; i<first; i++) notme[i] = i; 805cd0d46ebSvictorle for (i=last; i<M; i++) notme[i-last+first] = i; 806268466fbSBarry Smith ierr = ISCreateGeneral(MPI_COMM_SELF,N-last+first,notme,&Notme);CHKERRQ(ierr); 807268466fbSBarry Smith ierr = ISCreateStride(MPI_COMM_SELF,last-first,first,1,&Me);CHKERRQ(ierr); 808268466fbSBarry Smith ierr = MatGetSubMatrices(Amat,1,&Me,&Notme,MAT_INITIAL_MATRIX,&Aoffs);CHKERRQ(ierr); 80966501d38Svictorle Aoff = Aoffs[0]; 810268466fbSBarry Smith ierr = MatGetSubMatrices(Bmat,1,&Notme,&Me,MAT_INITIAL_MATRIX,&Boffs);CHKERRQ(ierr); 81166501d38Svictorle Boff = Boffs[0]; 8125485867bSBarry Smith ierr = MatIsTranspose(Aoff,Boff,tol,f);CHKERRQ(ierr); 81366501d38Svictorle ierr = MatDestroyMatrices(1,&Aoffs);CHKERRQ(ierr); 81466501d38Svictorle ierr = MatDestroyMatrices(1,&Boffs);CHKERRQ(ierr); 81542e5f5b4Svictorle ierr = ISDestroy(Me);CHKERRQ(ierr); 81642e5f5b4Svictorle ierr = ISDestroy(Notme);CHKERRQ(ierr); 81742e5f5b4Svictorle 818cd0d46ebSvictorle PetscFunctionReturn(0); 819cd0d46ebSvictorle } 820cd0d46ebSvictorle EXTERN_C_END 821cd0d46ebSvictorle 8224a2ae208SSatish Balay #undef __FUNCT__ 8234a2ae208SSatish Balay #define __FUNCT__ "MatMultTransposeAdd_MPIAIJ" 824dfbe8321SBarry Smith 
#undef __FUNCT__
#define __FUNCT__ "MatMultTransposeAdd_MPIAIJ"
/*
   MatMultTransposeAdd_MPIAIJ - Computes zz = yy + A^T * xx.

   As in MatMultTranspose_MPIAIJ, the off-diagonal transpose product is
   computed into the ghost vector a->lvec and scattered back in reverse
   with ADD_VALUES, overlapping the local transpose-add with the
   communication.
*/
PetscErrorCode MatMultTransposeAdd_MPIAIJ(Mat A,Vec xx,Vec yy,Vec zz)
{
  Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* do nondiagonal part */
  ierr = (*a->B->ops->multtranspose)(a->B,xx,a->lvec);CHKERRQ(ierr);
  /* send it on its way */
  ierr = VecScatterBegin(a->Mvctx,a->lvec,zz,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  /* do local part */
  ierr = (*a->A->ops->multtransposeadd)(a->A,xx,yy,zz);CHKERRQ(ierr);
  /* receive remote parts */
  ierr = VecScatterEnd(a->Mvctx,a->lvec,zz,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
PetscFunctionReturn(0); 8591eb62cbbSBarry Smith } 8601eb62cbbSBarry Smith 8614a2ae208SSatish Balay #undef __FUNCT__ 8624a2ae208SSatish Balay #define __FUNCT__ "MatScale_MPIAIJ" 863f4df32b1SMatthew Knepley PetscErrorCode MatScale_MPIAIJ(Mat A,PetscScalar aa) 864052efed2SBarry Smith { 865052efed2SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 866dfbe8321SBarry Smith PetscErrorCode ierr; 8673a40ed3dSBarry Smith 8683a40ed3dSBarry Smith PetscFunctionBegin; 869f4df32b1SMatthew Knepley ierr = MatScale(a->A,aa);CHKERRQ(ierr); 870f4df32b1SMatthew Knepley ierr = MatScale(a->B,aa);CHKERRQ(ierr); 8713a40ed3dSBarry Smith PetscFunctionReturn(0); 872052efed2SBarry Smith } 873052efed2SBarry Smith 8744a2ae208SSatish Balay #undef __FUNCT__ 8754a2ae208SSatish Balay #define __FUNCT__ "MatDestroy_MPIAIJ" 876dfbe8321SBarry Smith PetscErrorCode MatDestroy_MPIAIJ(Mat mat) 8771eb62cbbSBarry Smith { 87844a69424SLois Curfman McInnes Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 879dfbe8321SBarry Smith PetscErrorCode ierr; 88083e2fdc7SBarry Smith 8813a40ed3dSBarry Smith PetscFunctionBegin; 882aa482453SBarry Smith #if defined(PETSC_USE_LOG) 883d0f46423SBarry Smith PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D",mat->rmap->N,mat->cmap->N); 884a5a9c739SBarry Smith #endif 8858798bf22SSatish Balay ierr = MatStashDestroy_Private(&mat->stash);CHKERRQ(ierr); 88678b31e54SBarry Smith ierr = MatDestroy(aij->A);CHKERRQ(ierr); 88778b31e54SBarry Smith ierr = MatDestroy(aij->B);CHKERRQ(ierr); 888aa482453SBarry Smith #if defined (PETSC_USE_CTABLE) 8899c666560SBarry Smith if (aij->colmap) {ierr = PetscTableDestroy(aij->colmap);CHKERRQ(ierr);} 890b1fc9764SSatish Balay #else 89105b42c5fSBarry Smith ierr = PetscFree(aij->colmap);CHKERRQ(ierr); 892b1fc9764SSatish Balay #endif 89305b42c5fSBarry Smith ierr = PetscFree(aij->garray);CHKERRQ(ierr); 8947c922b88SBarry Smith if (aij->lvec) {ierr = VecDestroy(aij->lvec);CHKERRQ(ierr);} 8957c922b88SBarry Smith if (aij->Mvctx) {ierr = 
#undef __FUNCT__
#define __FUNCT__ "MatDestroy_MPIAIJ"
/*
   MatDestroy_MPIAIJ - Frees all storage owned by the parallel AIJ matrix:
   the stash, both sequential blocks, the column map, the ghost-column
   array, the ghost vector and its scatter, and the Mat_MPIAIJ struct
   itself; finally clears the type name and the composed type-specific
   function slots.
*/
PetscErrorCode MatDestroy_MPIAIJ(Mat mat)
{
  Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D",mat->rmap->N,mat->cmap->N);
#endif
  ierr = MatStashDestroy_Private(&mat->stash);CHKERRQ(ierr);
  ierr = MatDestroy(aij->A);CHKERRQ(ierr);
  ierr = MatDestroy(aij->B);CHKERRQ(ierr);
#if defined (PETSC_USE_CTABLE)
  if (aij->colmap) {ierr = PetscTableDestroy(aij->colmap);CHKERRQ(ierr);}
#else
  ierr = PetscFree(aij->colmap);CHKERRQ(ierr);
#endif
  ierr = PetscFree(aij->garray);CHKERRQ(ierr);
  if (aij->lvec) {ierr = VecDestroy(aij->lvec);CHKERRQ(ierr);}
  if (aij->Mvctx) {ierr = VecScatterDestroy(aij->Mvctx);CHKERRQ(ierr);}
  ierr = PetscFree(aij->rowvalues);CHKERRQ(ierr);
  ierr = PetscFree(aij->ld);CHKERRQ(ierr);
  /* aij must be freed only after all of its members above */
  ierr = PetscFree(aij);CHKERRQ(ierr);

  ierr = PetscObjectChangeTypeName((PetscObject)mat,0);CHKERRQ(ierr);
  /* remove the type-specific methods composed onto the object at creation */
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatStoreValues_C","",PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatRetrieveValues_C","",PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatGetDiagonalBlock_C","",PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatIsTranspose_C","",PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAIJSetPreallocation_C","",PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAIJSetPreallocationCSR_C","",PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatDiagonalScaleLocal_C","",PETSC_NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
Balay PetscInt nz,header[4],*row_lengths,*range=0,rlen,i; 922d0f46423SBarry Smith PetscInt nzmax,*column_indices,j,k,col,*garray = aij->garray,cnt,cstart = mat->cmap->rstart,rnz; 9238e2fed03SBarry Smith PetscScalar *column_values; 9248e2fed03SBarry Smith 9258e2fed03SBarry Smith PetscFunctionBegin; 9267adad957SLisandro Dalcin ierr = MPI_Comm_rank(((PetscObject)mat)->comm,&rank);CHKERRQ(ierr); 9277adad957SLisandro Dalcin ierr = MPI_Comm_size(((PetscObject)mat)->comm,&size);CHKERRQ(ierr); 9288e2fed03SBarry Smith nz = A->nz + B->nz; 929958c9bccSBarry Smith if (!rank) { 9308e2fed03SBarry Smith header[0] = MAT_FILE_COOKIE; 931d0f46423SBarry Smith header[1] = mat->rmap->N; 932d0f46423SBarry Smith header[2] = mat->cmap->N; 9337adad957SLisandro Dalcin ierr = MPI_Reduce(&nz,&header[3],1,MPIU_INT,MPI_SUM,0,((PetscObject)mat)->comm);CHKERRQ(ierr); 9348e2fed03SBarry Smith ierr = PetscViewerBinaryGetDescriptor(viewer,&fd);CHKERRQ(ierr); 9356f69ff64SBarry Smith ierr = PetscBinaryWrite(fd,header,4,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr); 9368e2fed03SBarry Smith /* get largest number of rows any processor has */ 937d0f46423SBarry Smith rlen = mat->rmap->n; 938d0f46423SBarry Smith range = mat->rmap->range; 9398e2fed03SBarry Smith for (i=1; i<size; i++) { 9408e2fed03SBarry Smith rlen = PetscMax(rlen,range[i+1] - range[i]); 9418e2fed03SBarry Smith } 9428e2fed03SBarry Smith } else { 9437adad957SLisandro Dalcin ierr = MPI_Reduce(&nz,0,1,MPIU_INT,MPI_SUM,0,((PetscObject)mat)->comm);CHKERRQ(ierr); 944d0f46423SBarry Smith rlen = mat->rmap->n; 9458e2fed03SBarry Smith } 9468e2fed03SBarry Smith 9478e2fed03SBarry Smith /* load up the local row counts */ 948b1d57f15SBarry Smith ierr = PetscMalloc((rlen+1)*sizeof(PetscInt),&row_lengths);CHKERRQ(ierr); 949d0f46423SBarry Smith for (i=0; i<mat->rmap->n; i++) { 9508e2fed03SBarry Smith row_lengths[i] = A->i[i+1] - A->i[i] + B->i[i+1] - B->i[i]; 9518e2fed03SBarry Smith } 9528e2fed03SBarry Smith 9538e2fed03SBarry Smith /* store the row lengths to the file 
*/ 954958c9bccSBarry Smith if (!rank) { 9558e2fed03SBarry Smith MPI_Status status; 956d0f46423SBarry Smith ierr = PetscBinaryWrite(fd,row_lengths,mat->rmap->n,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr); 9578e2fed03SBarry Smith for (i=1; i<size; i++) { 9588e2fed03SBarry Smith rlen = range[i+1] - range[i]; 9597adad957SLisandro Dalcin ierr = MPI_Recv(row_lengths,rlen,MPIU_INT,i,tag,((PetscObject)mat)->comm,&status);CHKERRQ(ierr); 9606f69ff64SBarry Smith ierr = PetscBinaryWrite(fd,row_lengths,rlen,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr); 9618e2fed03SBarry Smith } 9628e2fed03SBarry Smith } else { 963d0f46423SBarry Smith ierr = MPI_Send(row_lengths,mat->rmap->n,MPIU_INT,0,tag,((PetscObject)mat)->comm);CHKERRQ(ierr); 9648e2fed03SBarry Smith } 9658e2fed03SBarry Smith ierr = PetscFree(row_lengths);CHKERRQ(ierr); 9668e2fed03SBarry Smith 9678e2fed03SBarry Smith /* load up the local column indices */ 9688e2fed03SBarry Smith nzmax = nz; /* )th processor needs space a largest processor needs */ 9697adad957SLisandro Dalcin ierr = MPI_Reduce(&nz,&nzmax,1,MPIU_INT,MPI_MAX,0,((PetscObject)mat)->comm);CHKERRQ(ierr); 970b1d57f15SBarry Smith ierr = PetscMalloc((nzmax+1)*sizeof(PetscInt),&column_indices);CHKERRQ(ierr); 9718e2fed03SBarry Smith cnt = 0; 972d0f46423SBarry Smith for (i=0; i<mat->rmap->n; i++) { 9738e2fed03SBarry Smith for (j=B->i[i]; j<B->i[i+1]; j++) { 9748e2fed03SBarry Smith if ( (col = garray[B->j[j]]) > cstart) break; 9758e2fed03SBarry Smith column_indices[cnt++] = col; 9768e2fed03SBarry Smith } 9778e2fed03SBarry Smith for (k=A->i[i]; k<A->i[i+1]; k++) { 9788e2fed03SBarry Smith column_indices[cnt++] = A->j[k] + cstart; 9798e2fed03SBarry Smith } 9808e2fed03SBarry Smith for (; j<B->i[i+1]; j++) { 9818e2fed03SBarry Smith column_indices[cnt++] = garray[B->j[j]]; 9828e2fed03SBarry Smith } 9838e2fed03SBarry Smith } 98477431f27SBarry Smith if (cnt != A->nz + B->nz) SETERRQ2(PETSC_ERR_LIB,"Internal PETSc error: cnt = %D nz = %D",cnt,A->nz+B->nz); 9858e2fed03SBarry Smith 9868e2fed03SBarry 
Smith /* store the column indices to the file */ 987958c9bccSBarry Smith if (!rank) { 9888e2fed03SBarry Smith MPI_Status status; 9896f69ff64SBarry Smith ierr = PetscBinaryWrite(fd,column_indices,nz,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr); 9908e2fed03SBarry Smith for (i=1; i<size; i++) { 9917adad957SLisandro Dalcin ierr = MPI_Recv(&rnz,1,MPIU_INT,i,tag,((PetscObject)mat)->comm,&status);CHKERRQ(ierr); 99277431f27SBarry Smith if (rnz > nzmax) SETERRQ2(PETSC_ERR_LIB,"Internal PETSc error: nz = %D nzmax = %D",nz,nzmax); 9937adad957SLisandro Dalcin ierr = MPI_Recv(column_indices,rnz,MPIU_INT,i,tag,((PetscObject)mat)->comm,&status);CHKERRQ(ierr); 9946f69ff64SBarry Smith ierr = PetscBinaryWrite(fd,column_indices,rnz,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr); 9958e2fed03SBarry Smith } 9968e2fed03SBarry Smith } else { 9977adad957SLisandro Dalcin ierr = MPI_Send(&nz,1,MPIU_INT,0,tag,((PetscObject)mat)->comm);CHKERRQ(ierr); 9987adad957SLisandro Dalcin ierr = MPI_Send(column_indices,nz,MPIU_INT,0,tag,((PetscObject)mat)->comm);CHKERRQ(ierr); 9998e2fed03SBarry Smith } 10008e2fed03SBarry Smith ierr = PetscFree(column_indices);CHKERRQ(ierr); 10018e2fed03SBarry Smith 10028e2fed03SBarry Smith /* load up the local column values */ 10038e2fed03SBarry Smith ierr = PetscMalloc((nzmax+1)*sizeof(PetscScalar),&column_values);CHKERRQ(ierr); 10048e2fed03SBarry Smith cnt = 0; 1005d0f46423SBarry Smith for (i=0; i<mat->rmap->n; i++) { 10068e2fed03SBarry Smith for (j=B->i[i]; j<B->i[i+1]; j++) { 10078e2fed03SBarry Smith if ( garray[B->j[j]] > cstart) break; 10088e2fed03SBarry Smith column_values[cnt++] = B->a[j]; 10098e2fed03SBarry Smith } 10108e2fed03SBarry Smith for (k=A->i[i]; k<A->i[i+1]; k++) { 10118e2fed03SBarry Smith column_values[cnt++] = A->a[k]; 10128e2fed03SBarry Smith } 10138e2fed03SBarry Smith for (; j<B->i[i+1]; j++) { 10148e2fed03SBarry Smith column_values[cnt++] = B->a[j]; 10158e2fed03SBarry Smith } 10168e2fed03SBarry Smith } 101777431f27SBarry Smith if (cnt != A->nz + B->nz) 
SETERRQ2(PETSC_ERR_PLIB,"Internal PETSc error: cnt = %D nz = %D",cnt,A->nz+B->nz); 10188e2fed03SBarry Smith 10198e2fed03SBarry Smith /* store the column values to the file */ 1020958c9bccSBarry Smith if (!rank) { 10218e2fed03SBarry Smith MPI_Status status; 10226f69ff64SBarry Smith ierr = PetscBinaryWrite(fd,column_values,nz,PETSC_SCALAR,PETSC_TRUE);CHKERRQ(ierr); 10238e2fed03SBarry Smith for (i=1; i<size; i++) { 10247adad957SLisandro Dalcin ierr = MPI_Recv(&rnz,1,MPIU_INT,i,tag,((PetscObject)mat)->comm,&status);CHKERRQ(ierr); 102577431f27SBarry Smith if (rnz > nzmax) SETERRQ2(PETSC_ERR_LIB,"Internal PETSc error: nz = %D nzmax = %D",nz,nzmax); 10267adad957SLisandro Dalcin ierr = MPI_Recv(column_values,rnz,MPIU_SCALAR,i,tag,((PetscObject)mat)->comm,&status);CHKERRQ(ierr); 10276f69ff64SBarry Smith ierr = PetscBinaryWrite(fd,column_values,rnz,PETSC_SCALAR,PETSC_TRUE);CHKERRQ(ierr); 10288e2fed03SBarry Smith } 10298e2fed03SBarry Smith } else { 10307adad957SLisandro Dalcin ierr = MPI_Send(&nz,1,MPIU_INT,0,tag,((PetscObject)mat)->comm);CHKERRQ(ierr); 10317adad957SLisandro Dalcin ierr = MPI_Send(column_values,nz,MPIU_SCALAR,0,tag,((PetscObject)mat)->comm);CHKERRQ(ierr); 10328e2fed03SBarry Smith } 10338e2fed03SBarry Smith ierr = PetscFree(column_values);CHKERRQ(ierr); 10348e2fed03SBarry Smith PetscFunctionReturn(0); 10358e2fed03SBarry Smith } 10368e2fed03SBarry Smith 10378e2fed03SBarry Smith #undef __FUNCT__ 10384a2ae208SSatish Balay #define __FUNCT__ "MatView_MPIAIJ_ASCIIorDraworSocket" 1039dfbe8321SBarry Smith PetscErrorCode MatView_MPIAIJ_ASCIIorDraworSocket(Mat mat,PetscViewer viewer) 1040416022c9SBarry Smith { 104144a69424SLois Curfman McInnes Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 1042dfbe8321SBarry Smith PetscErrorCode ierr; 104332dcc486SBarry Smith PetscMPIInt rank = aij->rank,size = aij->size; 1044d38fa0fbSBarry Smith PetscTruth isdraw,iascii,isbinary; 1045b0a32e0cSBarry Smith PetscViewer sviewer; 1046f3ef73ceSBarry Smith PetscViewerFormat format; 
1047416022c9SBarry Smith 10483a40ed3dSBarry Smith PetscFunctionBegin; 1049fb9695e5SSatish Balay ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_DRAW,&isdraw);CHKERRQ(ierr); 105032077d6dSBarry Smith ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr); 10518e2fed03SBarry Smith ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_BINARY,&isbinary);CHKERRQ(ierr); 105232077d6dSBarry Smith if (iascii) { 1053b0a32e0cSBarry Smith ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr); 1054456192e2SBarry Smith if (format == PETSC_VIEWER_ASCII_INFO_DETAIL) { 10554e220ebcSLois Curfman McInnes MatInfo info; 1056923f20ffSKris Buschelman PetscTruth inodes; 1057923f20ffSKris Buschelman 10587adad957SLisandro Dalcin ierr = MPI_Comm_rank(((PetscObject)mat)->comm,&rank);CHKERRQ(ierr); 1059888f2ed8SSatish Balay ierr = MatGetInfo(mat,MAT_LOCAL,&info);CHKERRQ(ierr); 1060923f20ffSKris Buschelman ierr = MatInodeGetInodeSizes(aij->A,PETSC_NULL,(PetscInt **)&inodes,PETSC_NULL);CHKERRQ(ierr); 1061923f20ffSKris Buschelman if (!inodes) { 106277431f27SBarry Smith ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] Local rows %D nz %D nz alloced %D mem %D, not using I-node routines\n", 1063d0f46423SBarry Smith rank,mat->rmap->n,(PetscInt)info.nz_used,(PetscInt)info.nz_allocated,(PetscInt)info.memory);CHKERRQ(ierr); 10646831982aSBarry Smith } else { 106577431f27SBarry Smith ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] Local rows %D nz %D nz alloced %D mem %D, using I-node routines\n", 1066d0f46423SBarry Smith rank,mat->rmap->n,(PetscInt)info.nz_used,(PetscInt)info.nz_allocated,(PetscInt)info.memory);CHKERRQ(ierr); 10676831982aSBarry Smith } 1068888f2ed8SSatish Balay ierr = MatGetInfo(aij->A,MAT_LOCAL,&info);CHKERRQ(ierr); 106977431f27SBarry Smith ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] on-diagonal part: nz %D \n",rank,(PetscInt)info.nz_used);CHKERRQ(ierr); 1070888f2ed8SSatish Balay ierr = 
MatGetInfo(aij->B,MAT_LOCAL,&info);CHKERRQ(ierr); 107177431f27SBarry Smith ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] off-diagonal part: nz %D \n",rank,(PetscInt)info.nz_used);CHKERRQ(ierr); 1072b0a32e0cSBarry Smith ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 107307d81ca4SBarry Smith ierr = PetscViewerASCIIPrintf(viewer,"Information on VecScatter used in matrix-vector product: \n");CHKERRQ(ierr); 1074a40aa06bSLois Curfman McInnes ierr = VecScatterView(aij->Mvctx,viewer);CHKERRQ(ierr); 10753a40ed3dSBarry Smith PetscFunctionReturn(0); 1076fb9695e5SSatish Balay } else if (format == PETSC_VIEWER_ASCII_INFO) { 1077923f20ffSKris Buschelman PetscInt inodecount,inodelimit,*inodes; 1078923f20ffSKris Buschelman ierr = MatInodeGetInodeSizes(aij->A,&inodecount,&inodes,&inodelimit);CHKERRQ(ierr); 1079923f20ffSKris Buschelman if (inodes) { 1080923f20ffSKris Buschelman ierr = PetscViewerASCIIPrintf(viewer,"using I-node (on process 0) routines: found %D nodes, limit used is %D\n",inodecount,inodelimit);CHKERRQ(ierr); 1081d38fa0fbSBarry Smith } else { 1082d38fa0fbSBarry Smith ierr = PetscViewerASCIIPrintf(viewer,"not using I-node (on process 0) routines\n");CHKERRQ(ierr); 1083d38fa0fbSBarry Smith } 10843a40ed3dSBarry Smith PetscFunctionReturn(0); 10854aedb280SBarry Smith } else if (format == PETSC_VIEWER_ASCII_FACTOR_INFO) { 10864aedb280SBarry Smith PetscFunctionReturn(0); 108708480c60SBarry Smith } 10888e2fed03SBarry Smith } else if (isbinary) { 10898e2fed03SBarry Smith if (size == 1) { 10907adad957SLisandro Dalcin ierr = PetscObjectSetName((PetscObject)aij->A,((PetscObject)mat)->name);CHKERRQ(ierr); 10918e2fed03SBarry Smith ierr = MatView(aij->A,viewer);CHKERRQ(ierr); 10928e2fed03SBarry Smith } else { 10938e2fed03SBarry Smith ierr = MatView_MPIAIJ_Binary(mat,viewer);CHKERRQ(ierr); 10948e2fed03SBarry Smith } 10958e2fed03SBarry Smith PetscFunctionReturn(0); 10960f5bd95cSBarry Smith } else if (isdraw) { 1097b0a32e0cSBarry Smith PetscDraw draw; 109819bcc07fSBarry Smith 
PetscTruth isnull; 1099b0a32e0cSBarry Smith ierr = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr); 1100b0a32e0cSBarry Smith ierr = PetscDrawIsNull(draw,&isnull);CHKERRQ(ierr); if (isnull) PetscFunctionReturn(0); 110119bcc07fSBarry Smith } 110219bcc07fSBarry Smith 110317699dbbSLois Curfman McInnes if (size == 1) { 11047adad957SLisandro Dalcin ierr = PetscObjectSetName((PetscObject)aij->A,((PetscObject)mat)->name);CHKERRQ(ierr); 110578b31e54SBarry Smith ierr = MatView(aij->A,viewer);CHKERRQ(ierr); 11063a40ed3dSBarry Smith } else { 110795373324SBarry Smith /* assemble the entire matrix onto first processor. */ 110895373324SBarry Smith Mat A; 1109ec8511deSBarry Smith Mat_SeqAIJ *Aloc; 1110d0f46423SBarry Smith PetscInt M = mat->rmap->N,N = mat->cmap->N,m,*ai,*aj,row,*cols,i,*ct; 1111dd6ea824SBarry Smith MatScalar *a; 11122ee70a88SLois Curfman McInnes 111332a366e4SMatthew Knepley if (mat->rmap->N > 1024) { 111490d69ab7SBarry Smith PetscTruth flg = PETSC_FALSE; 111532a366e4SMatthew Knepley 1116*0c235cafSBarry Smith ierr = PetscOptionsGetTruth(((PetscObject) mat)->prefix, "-mat_ascii_output_large", &flg,PETSC_NULL);CHKERRQ(ierr); 111732a366e4SMatthew Knepley if (!flg) { 111890d69ab7SBarry Smith SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"ASCII matrix output not allowed for matrices with more than 1024 rows, use binary format instead.\nYou can override this restriction using -mat_ascii_output_large."); 111932a366e4SMatthew Knepley } 112032a366e4SMatthew Knepley } 11210805154bSBarry Smith 11227adad957SLisandro Dalcin ierr = MatCreate(((PetscObject)mat)->comm,&A);CHKERRQ(ierr); 112317699dbbSLois Curfman McInnes if (!rank) { 1124f69a0ea3SMatthew Knepley ierr = MatSetSizes(A,M,N,M,N);CHKERRQ(ierr); 11253a40ed3dSBarry Smith } else { 1126f69a0ea3SMatthew Knepley ierr = MatSetSizes(A,0,0,M,N);CHKERRQ(ierr); 112795373324SBarry Smith } 1128f204ca49SKris Buschelman /* This is just a temporary matrix, so explicitly using MATMPIAIJ is probably best */ 1129f204ca49SKris Buschelman ierr = 
MatSetType(A,MATMPIAIJ);CHKERRQ(ierr); 1130f204ca49SKris Buschelman ierr = MatMPIAIJSetPreallocation(A,0,PETSC_NULL,0,PETSC_NULL);CHKERRQ(ierr); 113152e6d16bSBarry Smith ierr = PetscLogObjectParent(mat,A);CHKERRQ(ierr); 1132416022c9SBarry Smith 113395373324SBarry Smith /* copy over the A part */ 1134ec8511deSBarry Smith Aloc = (Mat_SeqAIJ*)aij->A->data; 1135d0f46423SBarry Smith m = aij->A->rmap->n; ai = Aloc->i; aj = Aloc->j; a = Aloc->a; 1136d0f46423SBarry Smith row = mat->rmap->rstart; 1137d0f46423SBarry Smith for (i=0; i<ai[m]; i++) {aj[i] += mat->cmap->rstart ;} 113895373324SBarry Smith for (i=0; i<m; i++) { 1139416022c9SBarry Smith ierr = MatSetValues(A,1,&row,ai[i+1]-ai[i],aj,a,INSERT_VALUES);CHKERRQ(ierr); 114095373324SBarry Smith row++; a += ai[i+1]-ai[i]; aj += ai[i+1]-ai[i]; 114195373324SBarry Smith } 11422ee70a88SLois Curfman McInnes aj = Aloc->j; 1143d0f46423SBarry Smith for (i=0; i<ai[m]; i++) {aj[i] -= mat->cmap->rstart;} 114495373324SBarry Smith 114595373324SBarry Smith /* copy over the B part */ 1146ec8511deSBarry Smith Aloc = (Mat_SeqAIJ*)aij->B->data; 1147d0f46423SBarry Smith m = aij->B->rmap->n; ai = Aloc->i; aj = Aloc->j; a = Aloc->a; 1148d0f46423SBarry Smith row = mat->rmap->rstart; 1149b1d57f15SBarry Smith ierr = PetscMalloc((ai[m]+1)*sizeof(PetscInt),&cols);CHKERRQ(ierr); 1150b0a32e0cSBarry Smith ct = cols; 1151bfec09a0SHong Zhang for (i=0; i<ai[m]; i++) {cols[i] = aij->garray[aj[i]];} 115295373324SBarry Smith for (i=0; i<m; i++) { 1153416022c9SBarry Smith ierr = MatSetValues(A,1,&row,ai[i+1]-ai[i],cols,a,INSERT_VALUES);CHKERRQ(ierr); 115495373324SBarry Smith row++; a += ai[i+1]-ai[i]; cols += ai[i+1]-ai[i]; 115595373324SBarry Smith } 1156606d414cSSatish Balay ierr = PetscFree(ct);CHKERRQ(ierr); 11576d4a8577SBarry Smith ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 11586d4a8577SBarry Smith ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 115955843e3eSBarry Smith /* 116055843e3eSBarry Smith Everyone has to call to draw 
the matrix since the graphics waits are 1161b0a32e0cSBarry Smith synchronized across all processors that share the PetscDraw object 116255843e3eSBarry Smith */ 1163b0a32e0cSBarry Smith ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr); 1164e03a110bSBarry Smith if (!rank) { 11657adad957SLisandro Dalcin ierr = PetscObjectSetName((PetscObject)((Mat_MPIAIJ*)(A->data))->A,((PetscObject)mat)->name);CHKERRQ(ierr); 11666831982aSBarry Smith ierr = MatView(((Mat_MPIAIJ*)(A->data))->A,sviewer);CHKERRQ(ierr); 116795373324SBarry Smith } 1168b0a32e0cSBarry Smith ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr); 116978b31e54SBarry Smith ierr = MatDestroy(A);CHKERRQ(ierr); 117095373324SBarry Smith } 11713a40ed3dSBarry Smith PetscFunctionReturn(0); 11721eb62cbbSBarry Smith } 11731eb62cbbSBarry Smith 11744a2ae208SSatish Balay #undef __FUNCT__ 11754a2ae208SSatish Balay #define __FUNCT__ "MatView_MPIAIJ" 1176dfbe8321SBarry Smith PetscErrorCode MatView_MPIAIJ(Mat mat,PetscViewer viewer) 1177416022c9SBarry Smith { 1178dfbe8321SBarry Smith PetscErrorCode ierr; 117932077d6dSBarry Smith PetscTruth iascii,isdraw,issocket,isbinary; 1180416022c9SBarry Smith 11813a40ed3dSBarry Smith PetscFunctionBegin; 118232077d6dSBarry Smith ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr); 1183fb9695e5SSatish Balay ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_DRAW,&isdraw);CHKERRQ(ierr); 1184fb9695e5SSatish Balay ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_BINARY,&isbinary);CHKERRQ(ierr); 1185b0a32e0cSBarry Smith ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_SOCKET,&issocket);CHKERRQ(ierr); 118632077d6dSBarry Smith if (iascii || isdraw || isbinary || issocket) { 11877b2a1423SBarry Smith ierr = MatView_MPIAIJ_ASCIIorDraworSocket(mat,viewer);CHKERRQ(ierr); 11885cd90555SBarry Smith } else { 118979a5c55eSBarry Smith SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported by MPIAIJ 
matrices",((PetscObject)viewer)->type_name); 1190416022c9SBarry Smith } 11913a40ed3dSBarry Smith PetscFunctionReturn(0); 1192416022c9SBarry Smith } 1193416022c9SBarry Smith 11944a2ae208SSatish Balay #undef __FUNCT__ 11954a2ae208SSatish Balay #define __FUNCT__ "MatRelax_MPIAIJ" 1196b1d57f15SBarry Smith PetscErrorCode MatRelax_MPIAIJ(Mat matin,Vec bb,PetscReal omega,MatSORType flag,PetscReal fshift,PetscInt its,PetscInt lits,Vec xx) 11978a729477SBarry Smith { 119844a69424SLois Curfman McInnes Mat_MPIAIJ *mat = (Mat_MPIAIJ*)matin->data; 1199dfbe8321SBarry Smith PetscErrorCode ierr; 1200c14dc6b6SHong Zhang Vec bb1; 12018a729477SBarry Smith 12023a40ed3dSBarry Smith PetscFunctionBegin; 1203c14dc6b6SHong Zhang ierr = VecDuplicate(bb,&bb1);CHKERRQ(ierr); 12042798e883SHong Zhang 1205c16cb8f2SBarry Smith if ((flag & SOR_LOCAL_SYMMETRIC_SWEEP) == SOR_LOCAL_SYMMETRIC_SWEEP){ 1206da3a660dSBarry Smith if (flag & SOR_ZERO_INITIAL_GUESS) { 1207bd3bf7d3SHong Zhang ierr = (*mat->A->ops->relax)(mat->A,bb,omega,flag,fshift,lits,lits,xx);CHKERRQ(ierr); 12082798e883SHong Zhang its--; 1209da3a660dSBarry Smith } 12102798e883SHong Zhang 12112798e883SHong Zhang while (its--) { 1212ca9f406cSSatish Balay ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1213ca9f406cSSatish Balay ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 12142798e883SHong Zhang 1215c14dc6b6SHong Zhang /* update rhs: bb1 = bb - B*x */ 1216efb30889SBarry Smith ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr); 1217c14dc6b6SHong Zhang ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr); 12182798e883SHong Zhang 1219c14dc6b6SHong Zhang /* local sweep */ 122071f1c65dSBarry Smith ierr = (*mat->A->ops->relax)(mat->A,bb1,omega,SOR_SYMMETRIC_SWEEP,fshift,lits,lits,xx);CHKERRQ(ierr); 12212798e883SHong Zhang } 12223a40ed3dSBarry Smith } else if (flag & SOR_LOCAL_FORWARD_SWEEP){ 1223da3a660dSBarry Smith if (flag & 
SOR_ZERO_INITIAL_GUESS) { 1224c14dc6b6SHong Zhang ierr = (*mat->A->ops->relax)(mat->A,bb,omega,flag,fshift,lits,PETSC_NULL,xx);CHKERRQ(ierr); 12252798e883SHong Zhang its--; 1226da3a660dSBarry Smith } 12272798e883SHong Zhang while (its--) { 1228ca9f406cSSatish Balay ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1229ca9f406cSSatish Balay ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 12302798e883SHong Zhang 1231c14dc6b6SHong Zhang /* update rhs: bb1 = bb - B*x */ 1232efb30889SBarry Smith ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr); 1233c14dc6b6SHong Zhang ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr); 1234c14dc6b6SHong Zhang 1235c14dc6b6SHong Zhang /* local sweep */ 123671f1c65dSBarry Smith ierr = (*mat->A->ops->relax)(mat->A,bb1,omega,SOR_FORWARD_SWEEP,fshift,lits,PETSC_NULL,xx);CHKERRQ(ierr); 12372798e883SHong Zhang } 12383a40ed3dSBarry Smith } else if (flag & SOR_LOCAL_BACKWARD_SWEEP){ 1239da3a660dSBarry Smith if (flag & SOR_ZERO_INITIAL_GUESS) { 1240c14dc6b6SHong Zhang ierr = (*mat->A->ops->relax)(mat->A,bb,omega,flag,fshift,lits,PETSC_NULL,xx);CHKERRQ(ierr); 12412798e883SHong Zhang its--; 1242da3a660dSBarry Smith } 12432798e883SHong Zhang while (its--) { 1244ca9f406cSSatish Balay ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1245ca9f406cSSatish Balay ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 12462798e883SHong Zhang 1247c14dc6b6SHong Zhang /* update rhs: bb1 = bb - B*x */ 1248efb30889SBarry Smith ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr); 1249c14dc6b6SHong Zhang ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr); 12502798e883SHong Zhang 1251c14dc6b6SHong Zhang /* local sweep */ 125271f1c65dSBarry Smith ierr = (*mat->A->ops->relax)(mat->A,bb1,omega,SOR_BACKWARD_SWEEP,fshift,lits,PETSC_NULL,xx);CHKERRQ(ierr); 12532798e883SHong Zhang 
} 12543a40ed3dSBarry Smith } else { 125529bbc08cSBarry Smith SETERRQ(PETSC_ERR_SUP,"Parallel SOR not supported"); 1256c16cb8f2SBarry Smith } 1257c14dc6b6SHong Zhang 1258c14dc6b6SHong Zhang ierr = VecDestroy(bb1);CHKERRQ(ierr); 12593a40ed3dSBarry Smith PetscFunctionReturn(0); 12608a729477SBarry Smith } 1261a66be287SLois Curfman McInnes 12624a2ae208SSatish Balay #undef __FUNCT__ 126342e855d1Svictor #define __FUNCT__ "MatPermute_MPIAIJ" 126442e855d1Svictor PetscErrorCode MatPermute_MPIAIJ(Mat A,IS rowp,IS colp,Mat *B) 126542e855d1Svictor { 126642e855d1Svictor MPI_Comm comm,pcomm; 12675d0c19d7SBarry Smith PetscInt first,local_size,nrows; 12685d0c19d7SBarry Smith const PetscInt *rows; 1269dbf0e21dSBarry Smith PetscMPIInt size; 127042e855d1Svictor IS crowp,growp,irowp,lrowp,lcolp,icolp; 127142e855d1Svictor PetscErrorCode ierr; 127242e855d1Svictor 127342e855d1Svictor PetscFunctionBegin; 127442e855d1Svictor ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr); 127542e855d1Svictor /* make a collective version of 'rowp' */ 127642e855d1Svictor ierr = PetscObjectGetComm((PetscObject)rowp,&pcomm);CHKERRQ(ierr); 127742e855d1Svictor if (pcomm==comm) { 127842e855d1Svictor crowp = rowp; 127942e855d1Svictor } else { 128042e855d1Svictor ierr = ISGetSize(rowp,&nrows);CHKERRQ(ierr); 128142e855d1Svictor ierr = ISGetIndices(rowp,&rows);CHKERRQ(ierr); 128242e855d1Svictor ierr = ISCreateGeneral(comm,nrows,rows,&crowp);CHKERRQ(ierr); 128342e855d1Svictor ierr = ISRestoreIndices(rowp,&rows);CHKERRQ(ierr); 128442e855d1Svictor } 128542e855d1Svictor /* collect the global row permutation and invert it */ 128642e855d1Svictor ierr = ISAllGather(crowp,&growp);CHKERRQ(ierr); 128742e855d1Svictor ierr = ISSetPermutation(growp);CHKERRQ(ierr); 128842e855d1Svictor if (pcomm!=comm) { 128942e855d1Svictor ierr = ISDestroy(crowp);CHKERRQ(ierr); 129042e855d1Svictor } 129142e855d1Svictor ierr = ISInvertPermutation(growp,PETSC_DECIDE,&irowp);CHKERRQ(ierr); 129242e855d1Svictor /* get the local target 
indices */ 129342e855d1Svictor ierr = MatGetOwnershipRange(A,&first,PETSC_NULL);CHKERRQ(ierr); 129442e855d1Svictor ierr = MatGetLocalSize(A,&local_size,PETSC_NULL);CHKERRQ(ierr); 129542e855d1Svictor ierr = ISGetIndices(irowp,&rows);CHKERRQ(ierr); 129642e855d1Svictor ierr = ISCreateGeneral(MPI_COMM_SELF,local_size,rows+first,&lrowp);CHKERRQ(ierr); 129742e855d1Svictor ierr = ISRestoreIndices(irowp,&rows);CHKERRQ(ierr); 129842e855d1Svictor ierr = ISDestroy(irowp);CHKERRQ(ierr); 129942e855d1Svictor /* the column permutation is so much easier; 130042e855d1Svictor make a local version of 'colp' and invert it */ 130142e855d1Svictor ierr = PetscObjectGetComm((PetscObject)colp,&pcomm);CHKERRQ(ierr); 1302dbf0e21dSBarry Smith ierr = MPI_Comm_size(pcomm,&size);CHKERRQ(ierr); 1303dbf0e21dSBarry Smith if (size==1) { 130442e855d1Svictor lcolp = colp; 130542e855d1Svictor } else { 130642e855d1Svictor ierr = ISGetSize(colp,&nrows);CHKERRQ(ierr); 130742e855d1Svictor ierr = ISGetIndices(colp,&rows);CHKERRQ(ierr); 130842e855d1Svictor ierr = ISCreateGeneral(MPI_COMM_SELF,nrows,rows,&lcolp);CHKERRQ(ierr); 130942e855d1Svictor } 1310dbf0e21dSBarry Smith ierr = ISSetPermutation(lcolp);CHKERRQ(ierr); 131142e855d1Svictor ierr = ISInvertPermutation(lcolp,PETSC_DECIDE,&icolp);CHKERRQ(ierr); 131242e855d1Svictor ierr = ISSetPermutation(lcolp);CHKERRQ(ierr); 1313dbf0e21dSBarry Smith if (size>1) { 131442e855d1Svictor ierr = ISRestoreIndices(colp,&rows);CHKERRQ(ierr); 131542e855d1Svictor ierr = ISDestroy(lcolp);CHKERRQ(ierr); 131642e855d1Svictor } 131742e855d1Svictor /* now we just get the submatrix */ 131842e855d1Svictor ierr = MatGetSubMatrix(A,lrowp,icolp,local_size,MAT_INITIAL_MATRIX,B);CHKERRQ(ierr); 131942e855d1Svictor /* clean up */ 132042e855d1Svictor ierr = ISDestroy(lrowp);CHKERRQ(ierr); 132142e855d1Svictor ierr = ISDestroy(icolp);CHKERRQ(ierr); 132242e855d1Svictor PetscFunctionReturn(0); 132342e855d1Svictor } 132442e855d1Svictor 132542e855d1Svictor #undef __FUNCT__ 13264a2ae208SSatish 
Balay #define __FUNCT__ "MatGetInfo_MPIAIJ" 1327dfbe8321SBarry Smith PetscErrorCode MatGetInfo_MPIAIJ(Mat matin,MatInfoType flag,MatInfo *info) 1328a66be287SLois Curfman McInnes { 1329a66be287SLois Curfman McInnes Mat_MPIAIJ *mat = (Mat_MPIAIJ*)matin->data; 1330a66be287SLois Curfman McInnes Mat A = mat->A,B = mat->B; 1331dfbe8321SBarry Smith PetscErrorCode ierr; 1332329f5518SBarry Smith PetscReal isend[5],irecv[5]; 1333a66be287SLois Curfman McInnes 13343a40ed3dSBarry Smith PetscFunctionBegin; 13354e220ebcSLois Curfman McInnes info->block_size = 1.0; 13364e220ebcSLois Curfman McInnes ierr = MatGetInfo(A,MAT_LOCAL,info);CHKERRQ(ierr); 13374e220ebcSLois Curfman McInnes isend[0] = info->nz_used; isend[1] = info->nz_allocated; isend[2] = info->nz_unneeded; 13384e220ebcSLois Curfman McInnes isend[3] = info->memory; isend[4] = info->mallocs; 13394e220ebcSLois Curfman McInnes ierr = MatGetInfo(B,MAT_LOCAL,info);CHKERRQ(ierr); 13404e220ebcSLois Curfman McInnes isend[0] += info->nz_used; isend[1] += info->nz_allocated; isend[2] += info->nz_unneeded; 13414e220ebcSLois Curfman McInnes isend[3] += info->memory; isend[4] += info->mallocs; 1342a66be287SLois Curfman McInnes if (flag == MAT_LOCAL) { 13434e220ebcSLois Curfman McInnes info->nz_used = isend[0]; 13444e220ebcSLois Curfman McInnes info->nz_allocated = isend[1]; 13454e220ebcSLois Curfman McInnes info->nz_unneeded = isend[2]; 13464e220ebcSLois Curfman McInnes info->memory = isend[3]; 13474e220ebcSLois Curfman McInnes info->mallocs = isend[4]; 1348a66be287SLois Curfman McInnes } else if (flag == MAT_GLOBAL_MAX) { 13497adad957SLisandro Dalcin ierr = MPI_Allreduce(isend,irecv,5,MPIU_REAL,MPI_MAX,((PetscObject)matin)->comm);CHKERRQ(ierr); 13504e220ebcSLois Curfman McInnes info->nz_used = irecv[0]; 13514e220ebcSLois Curfman McInnes info->nz_allocated = irecv[1]; 13524e220ebcSLois Curfman McInnes info->nz_unneeded = irecv[2]; 13534e220ebcSLois Curfman McInnes info->memory = irecv[3]; 13544e220ebcSLois Curfman McInnes 
info->mallocs = irecv[4]; 1355a66be287SLois Curfman McInnes } else if (flag == MAT_GLOBAL_SUM) { 13567adad957SLisandro Dalcin ierr = MPI_Allreduce(isend,irecv,5,MPIU_REAL,MPI_SUM,((PetscObject)matin)->comm);CHKERRQ(ierr); 13574e220ebcSLois Curfman McInnes info->nz_used = irecv[0]; 13584e220ebcSLois Curfman McInnes info->nz_allocated = irecv[1]; 13594e220ebcSLois Curfman McInnes info->nz_unneeded = irecv[2]; 13604e220ebcSLois Curfman McInnes info->memory = irecv[3]; 13614e220ebcSLois Curfman McInnes info->mallocs = irecv[4]; 1362a66be287SLois Curfman McInnes } 13634e220ebcSLois Curfman McInnes info->fill_ratio_given = 0; /* no parallel LU/ILU/Cholesky */ 13644e220ebcSLois Curfman McInnes info->fill_ratio_needed = 0; 13654e220ebcSLois Curfman McInnes info->factor_mallocs = 0; 13664e220ebcSLois Curfman McInnes 13673a40ed3dSBarry Smith PetscFunctionReturn(0); 1368a66be287SLois Curfman McInnes } 1369a66be287SLois Curfman McInnes 13704a2ae208SSatish Balay #undef __FUNCT__ 13714a2ae208SSatish Balay #define __FUNCT__ "MatSetOption_MPIAIJ" 13724e0d8c25SBarry Smith PetscErrorCode MatSetOption_MPIAIJ(Mat A,MatOption op,PetscTruth flg) 1373c74985f6SBarry Smith { 1374c0bbcb79SLois Curfman McInnes Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 1375dfbe8321SBarry Smith PetscErrorCode ierr; 1376c74985f6SBarry Smith 13773a40ed3dSBarry Smith PetscFunctionBegin; 137812c028f9SKris Buschelman switch (op) { 1379512a5fc5SBarry Smith case MAT_NEW_NONZERO_LOCATIONS: 138012c028f9SKris Buschelman case MAT_NEW_NONZERO_ALLOCATION_ERR: 138128b2fa4aSMatthew Knepley case MAT_UNUSED_NONZERO_LOCATION_ERR: 138212c028f9SKris Buschelman case MAT_KEEP_ZEROED_ROWS: 138312c028f9SKris Buschelman case MAT_NEW_NONZERO_LOCATION_ERR: 138412c028f9SKris Buschelman case MAT_USE_INODES: 138512c028f9SKris Buschelman case MAT_IGNORE_ZERO_ENTRIES: 13864e0d8c25SBarry Smith ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr); 13874e0d8c25SBarry Smith ierr = MatSetOption(a->B,op,flg);CHKERRQ(ierr); 138812c028f9SKris Buschelman 
break; 138912c028f9SKris Buschelman case MAT_ROW_ORIENTED: 13904e0d8c25SBarry Smith a->roworiented = flg; 13914e0d8c25SBarry Smith ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr); 13924e0d8c25SBarry Smith ierr = MatSetOption(a->B,op,flg);CHKERRQ(ierr); 139312c028f9SKris Buschelman break; 13944e0d8c25SBarry Smith case MAT_NEW_DIAGONALS: 1395290bbb0aSBarry Smith ierr = PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr); 139612c028f9SKris Buschelman break; 139712c028f9SKris Buschelman case MAT_IGNORE_OFF_PROC_ENTRIES: 13987c922b88SBarry Smith a->donotstash = PETSC_TRUE; 139912c028f9SKris Buschelman break; 140077e54ba9SKris Buschelman case MAT_SYMMETRIC: 14014e0d8c25SBarry Smith ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr); 140225f421beSHong Zhang break; 140377e54ba9SKris Buschelman case MAT_STRUCTURALLY_SYMMETRIC: 1404bf108f30SBarry Smith case MAT_HERMITIAN: 1405bf108f30SBarry Smith case MAT_SYMMETRY_ETERNAL: 14064e0d8c25SBarry Smith ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr); 140777e54ba9SKris Buschelman break; 140812c028f9SKris Buschelman default: 1409ad86a440SBarry Smith SETERRQ1(PETSC_ERR_SUP,"unknown option %d",op); 14103a40ed3dSBarry Smith } 14113a40ed3dSBarry Smith PetscFunctionReturn(0); 1412c74985f6SBarry Smith } 1413c74985f6SBarry Smith 14144a2ae208SSatish Balay #undef __FUNCT__ 14154a2ae208SSatish Balay #define __FUNCT__ "MatGetRow_MPIAIJ" 1416b1d57f15SBarry Smith PetscErrorCode MatGetRow_MPIAIJ(Mat matin,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v) 141739e00950SLois Curfman McInnes { 1418154123eaSLois Curfman McInnes Mat_MPIAIJ *mat = (Mat_MPIAIJ*)matin->data; 141987828ca2SBarry Smith PetscScalar *vworkA,*vworkB,**pvA,**pvB,*v_p; 14206849ba73SBarry Smith PetscErrorCode ierr; 1421d0f46423SBarry Smith PetscInt i,*cworkA,*cworkB,**pcA,**pcB,cstart = matin->cmap->rstart; 1422d0f46423SBarry Smith PetscInt nztot,nzA,nzB,lrow,rstart = matin->rmap->rstart,rend = matin->rmap->rend; 1423b1d57f15SBarry Smith PetscInt *cmap,*idx_p; 
142439e00950SLois Curfman McInnes 14253a40ed3dSBarry Smith PetscFunctionBegin; 1426abc0a331SBarry Smith if (mat->getrowactive) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Already active"); 14277a0afa10SBarry Smith mat->getrowactive = PETSC_TRUE; 14287a0afa10SBarry Smith 142970f0671dSBarry Smith if (!mat->rowvalues && (idx || v)) { 14307a0afa10SBarry Smith /* 14317a0afa10SBarry Smith allocate enough space to hold information from the longest row. 14327a0afa10SBarry Smith */ 14337a0afa10SBarry Smith Mat_SeqAIJ *Aa = (Mat_SeqAIJ*)mat->A->data,*Ba = (Mat_SeqAIJ*)mat->B->data; 1434b1d57f15SBarry Smith PetscInt max = 1,tmp; 1435d0f46423SBarry Smith for (i=0; i<matin->rmap->n; i++) { 14367a0afa10SBarry Smith tmp = Aa->i[i+1] - Aa->i[i] + Ba->i[i+1] - Ba->i[i]; 14377a0afa10SBarry Smith if (max < tmp) { max = tmp; } 14387a0afa10SBarry Smith } 1439b1d57f15SBarry Smith ierr = PetscMalloc(max*(sizeof(PetscInt)+sizeof(PetscScalar)),&mat->rowvalues);CHKERRQ(ierr); 1440b1d57f15SBarry Smith mat->rowindices = (PetscInt*)(mat->rowvalues + max); 14417a0afa10SBarry Smith } 14427a0afa10SBarry Smith 144329bbc08cSBarry Smith if (row < rstart || row >= rend) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Only local rows") 1444abc0e9e4SLois Curfman McInnes lrow = row - rstart; 144539e00950SLois Curfman McInnes 1446154123eaSLois Curfman McInnes pvA = &vworkA; pcA = &cworkA; pvB = &vworkB; pcB = &cworkB; 1447154123eaSLois Curfman McInnes if (!v) {pvA = 0; pvB = 0;} 1448154123eaSLois Curfman McInnes if (!idx) {pcA = 0; if (!v) pcB = 0;} 1449f830108cSBarry Smith ierr = (*mat->A->ops->getrow)(mat->A,lrow,&nzA,pcA,pvA);CHKERRQ(ierr); 1450f830108cSBarry Smith ierr = (*mat->B->ops->getrow)(mat->B,lrow,&nzB,pcB,pvB);CHKERRQ(ierr); 1451154123eaSLois Curfman McInnes nztot = nzA + nzB; 1452154123eaSLois Curfman McInnes 145370f0671dSBarry Smith cmap = mat->garray; 1454154123eaSLois Curfman McInnes if (v || idx) { 1455154123eaSLois Curfman McInnes if (nztot) { 1456154123eaSLois Curfman McInnes /* Sort by increasing column 
numbers, assuming A and B already sorted */ 1457b1d57f15SBarry Smith PetscInt imark = -1; 1458154123eaSLois Curfman McInnes if (v) { 145970f0671dSBarry Smith *v = v_p = mat->rowvalues; 146039e00950SLois Curfman McInnes for (i=0; i<nzB; i++) { 146170f0671dSBarry Smith if (cmap[cworkB[i]] < cstart) v_p[i] = vworkB[i]; 1462154123eaSLois Curfman McInnes else break; 1463154123eaSLois Curfman McInnes } 1464154123eaSLois Curfman McInnes imark = i; 146570f0671dSBarry Smith for (i=0; i<nzA; i++) v_p[imark+i] = vworkA[i]; 146670f0671dSBarry Smith for (i=imark; i<nzB; i++) v_p[nzA+i] = vworkB[i]; 1467154123eaSLois Curfman McInnes } 1468154123eaSLois Curfman McInnes if (idx) { 146970f0671dSBarry Smith *idx = idx_p = mat->rowindices; 147070f0671dSBarry Smith if (imark > -1) { 147170f0671dSBarry Smith for (i=0; i<imark; i++) { 147270f0671dSBarry Smith idx_p[i] = cmap[cworkB[i]]; 147370f0671dSBarry Smith } 147470f0671dSBarry Smith } else { 1475154123eaSLois Curfman McInnes for (i=0; i<nzB; i++) { 147670f0671dSBarry Smith if (cmap[cworkB[i]] < cstart) idx_p[i] = cmap[cworkB[i]]; 1477154123eaSLois Curfman McInnes else break; 1478154123eaSLois Curfman McInnes } 1479154123eaSLois Curfman McInnes imark = i; 148070f0671dSBarry Smith } 148170f0671dSBarry Smith for (i=0; i<nzA; i++) idx_p[imark+i] = cstart + cworkA[i]; 148270f0671dSBarry Smith for (i=imark; i<nzB; i++) idx_p[nzA+i] = cmap[cworkB[i]]; 148339e00950SLois Curfman McInnes } 14843f97c4b0SBarry Smith } else { 14851ca473b0SSatish Balay if (idx) *idx = 0; 14861ca473b0SSatish Balay if (v) *v = 0; 14871ca473b0SSatish Balay } 1488154123eaSLois Curfman McInnes } 148939e00950SLois Curfman McInnes *nz = nztot; 1490f830108cSBarry Smith ierr = (*mat->A->ops->restorerow)(mat->A,lrow,&nzA,pcA,pvA);CHKERRQ(ierr); 1491f830108cSBarry Smith ierr = (*mat->B->ops->restorerow)(mat->B,lrow,&nzB,pcB,pvB);CHKERRQ(ierr); 14923a40ed3dSBarry Smith PetscFunctionReturn(0); 149339e00950SLois Curfman McInnes } 149439e00950SLois Curfman McInnes 
14954a2ae208SSatish Balay #undef __FUNCT__ 14964a2ae208SSatish Balay #define __FUNCT__ "MatRestoreRow_MPIAIJ" 1497b1d57f15SBarry Smith PetscErrorCode MatRestoreRow_MPIAIJ(Mat mat,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v) 149839e00950SLois Curfman McInnes { 14997a0afa10SBarry Smith Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 15003a40ed3dSBarry Smith 15013a40ed3dSBarry Smith PetscFunctionBegin; 1502abc0a331SBarry Smith if (!aij->getrowactive) { 1503abc0a331SBarry Smith SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"MatGetRow() must be called first"); 15047a0afa10SBarry Smith } 15057a0afa10SBarry Smith aij->getrowactive = PETSC_FALSE; 15063a40ed3dSBarry Smith PetscFunctionReturn(0); 150739e00950SLois Curfman McInnes } 150839e00950SLois Curfman McInnes 15094a2ae208SSatish Balay #undef __FUNCT__ 15104a2ae208SSatish Balay #define __FUNCT__ "MatNorm_MPIAIJ" 1511dfbe8321SBarry Smith PetscErrorCode MatNorm_MPIAIJ(Mat mat,NormType type,PetscReal *norm) 1512855ac2c5SLois Curfman McInnes { 1513855ac2c5SLois Curfman McInnes Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 1514ec8511deSBarry Smith Mat_SeqAIJ *amat = (Mat_SeqAIJ*)aij->A->data,*bmat = (Mat_SeqAIJ*)aij->B->data; 1515dfbe8321SBarry Smith PetscErrorCode ierr; 1516d0f46423SBarry Smith PetscInt i,j,cstart = mat->cmap->rstart; 1517329f5518SBarry Smith PetscReal sum = 0.0; 1518a77337e4SBarry Smith MatScalar *v; 151904ca555eSLois Curfman McInnes 15203a40ed3dSBarry Smith PetscFunctionBegin; 152117699dbbSLois Curfman McInnes if (aij->size == 1) { 152214183eadSLois Curfman McInnes ierr = MatNorm(aij->A,type,norm);CHKERRQ(ierr); 152337fa93a5SLois Curfman McInnes } else { 152404ca555eSLois Curfman McInnes if (type == NORM_FROBENIUS) { 152504ca555eSLois Curfman McInnes v = amat->a; 152604ca555eSLois Curfman McInnes for (i=0; i<amat->nz; i++) { 1527aa482453SBarry Smith #if defined(PETSC_USE_COMPLEX) 1528329f5518SBarry Smith sum += PetscRealPart(PetscConj(*v)*(*v)); v++; 152904ca555eSLois Curfman McInnes #else 153004ca555eSLois Curfman 
McInnes sum += (*v)*(*v); v++; 153104ca555eSLois Curfman McInnes #endif 153204ca555eSLois Curfman McInnes } 153304ca555eSLois Curfman McInnes v = bmat->a; 153404ca555eSLois Curfman McInnes for (i=0; i<bmat->nz; i++) { 1535aa482453SBarry Smith #if defined(PETSC_USE_COMPLEX) 1536329f5518SBarry Smith sum += PetscRealPart(PetscConj(*v)*(*v)); v++; 153704ca555eSLois Curfman McInnes #else 153804ca555eSLois Curfman McInnes sum += (*v)*(*v); v++; 153904ca555eSLois Curfman McInnes #endif 154004ca555eSLois Curfman McInnes } 15417adad957SLisandro Dalcin ierr = MPI_Allreduce(&sum,norm,1,MPIU_REAL,MPI_SUM,((PetscObject)mat)->comm);CHKERRQ(ierr); 154204ca555eSLois Curfman McInnes *norm = sqrt(*norm); 15433a40ed3dSBarry Smith } else if (type == NORM_1) { /* max column norm */ 1544329f5518SBarry Smith PetscReal *tmp,*tmp2; 1545b1d57f15SBarry Smith PetscInt *jj,*garray = aij->garray; 1546d0f46423SBarry Smith ierr = PetscMalloc((mat->cmap->N+1)*sizeof(PetscReal),&tmp);CHKERRQ(ierr); 1547d0f46423SBarry Smith ierr = PetscMalloc((mat->cmap->N+1)*sizeof(PetscReal),&tmp2);CHKERRQ(ierr); 1548d0f46423SBarry Smith ierr = PetscMemzero(tmp,mat->cmap->N*sizeof(PetscReal));CHKERRQ(ierr); 154904ca555eSLois Curfman McInnes *norm = 0.0; 155004ca555eSLois Curfman McInnes v = amat->a; jj = amat->j; 155104ca555eSLois Curfman McInnes for (j=0; j<amat->nz; j++) { 1552bfec09a0SHong Zhang tmp[cstart + *jj++ ] += PetscAbsScalar(*v); v++; 155304ca555eSLois Curfman McInnes } 155404ca555eSLois Curfman McInnes v = bmat->a; jj = bmat->j; 155504ca555eSLois Curfman McInnes for (j=0; j<bmat->nz; j++) { 1556bfec09a0SHong Zhang tmp[garray[*jj++]] += PetscAbsScalar(*v); v++; 155704ca555eSLois Curfman McInnes } 1558d0f46423SBarry Smith ierr = MPI_Allreduce(tmp,tmp2,mat->cmap->N,MPIU_REAL,MPI_SUM,((PetscObject)mat)->comm);CHKERRQ(ierr); 1559d0f46423SBarry Smith for (j=0; j<mat->cmap->N; j++) { 156004ca555eSLois Curfman McInnes if (tmp2[j] > *norm) *norm = tmp2[j]; 156104ca555eSLois Curfman McInnes } 
1562606d414cSSatish Balay ierr = PetscFree(tmp);CHKERRQ(ierr); 1563606d414cSSatish Balay ierr = PetscFree(tmp2);CHKERRQ(ierr); 15643a40ed3dSBarry Smith } else if (type == NORM_INFINITY) { /* max row norm */ 1565329f5518SBarry Smith PetscReal ntemp = 0.0; 1566d0f46423SBarry Smith for (j=0; j<aij->A->rmap->n; j++) { 1567bfec09a0SHong Zhang v = amat->a + amat->i[j]; 156804ca555eSLois Curfman McInnes sum = 0.0; 156904ca555eSLois Curfman McInnes for (i=0; i<amat->i[j+1]-amat->i[j]; i++) { 1570cddf8d76SBarry Smith sum += PetscAbsScalar(*v); v++; 157104ca555eSLois Curfman McInnes } 1572bfec09a0SHong Zhang v = bmat->a + bmat->i[j]; 157304ca555eSLois Curfman McInnes for (i=0; i<bmat->i[j+1]-bmat->i[j]; i++) { 1574cddf8d76SBarry Smith sum += PetscAbsScalar(*v); v++; 157504ca555eSLois Curfman McInnes } 1576515d9167SLois Curfman McInnes if (sum > ntemp) ntemp = sum; 157704ca555eSLois Curfman McInnes } 15787adad957SLisandro Dalcin ierr = MPI_Allreduce(&ntemp,norm,1,MPIU_REAL,MPI_MAX,((PetscObject)mat)->comm);CHKERRQ(ierr); 1579ca161407SBarry Smith } else { 158029bbc08cSBarry Smith SETERRQ(PETSC_ERR_SUP,"No support for two norm"); 158104ca555eSLois Curfman McInnes } 158237fa93a5SLois Curfman McInnes } 15833a40ed3dSBarry Smith PetscFunctionReturn(0); 1584855ac2c5SLois Curfman McInnes } 1585855ac2c5SLois Curfman McInnes 15864a2ae208SSatish Balay #undef __FUNCT__ 15874a2ae208SSatish Balay #define __FUNCT__ "MatTranspose_MPIAIJ" 1588fc4dec0aSBarry Smith PetscErrorCode MatTranspose_MPIAIJ(Mat A,MatReuse reuse,Mat *matout) 1589b7c46309SBarry Smith { 1590b7c46309SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 1591da668accSHong Zhang Mat_SeqAIJ *Aloc=(Mat_SeqAIJ*)a->A->data,*Bloc=(Mat_SeqAIJ*)a->B->data; 1592dfbe8321SBarry Smith PetscErrorCode ierr; 1593d0f46423SBarry Smith PetscInt M = A->rmap->N,N = A->cmap->N,ma,na,mb,*ai,*aj,*bi,*bj,row,*cols,*cols_tmp,i,*d_nnz; 1594d0f46423SBarry Smith PetscInt cstart=A->cmap->rstart,ncol; 15953a40ed3dSBarry Smith Mat B; 1596a77337e4SBarry Smith 
MatScalar *array; 1597b7c46309SBarry Smith 15983a40ed3dSBarry Smith PetscFunctionBegin; 1599e9695a30SBarry Smith if (reuse == MAT_REUSE_MATRIX && A == *matout && M != N) SETERRQ(PETSC_ERR_ARG_SIZ,"Square matrix only for in-place"); 1600da668accSHong Zhang 1601d0f46423SBarry Smith ma = A->rmap->n; na = A->cmap->n; mb = a->B->rmap->n; 1602da668accSHong Zhang ai = Aloc->i; aj = Aloc->j; 1603da668accSHong Zhang bi = Bloc->i; bj = Bloc->j; 1604fc73b1b3SBarry Smith if (reuse == MAT_INITIAL_MATRIX || *matout == A) { 1605fc73b1b3SBarry Smith /* compute d_nnz for preallocation; o_nnz is approximated by d_nnz to avoid communication */ 1606fc73b1b3SBarry Smith ierr = PetscMalloc((1+na)*sizeof(PetscInt),&d_nnz);CHKERRQ(ierr); 1607da668accSHong Zhang ierr = PetscMemzero(d_nnz,(1+na)*sizeof(PetscInt));CHKERRQ(ierr); 1608da668accSHong Zhang for (i=0; i<ai[ma]; i++){ 1609da668accSHong Zhang d_nnz[aj[i]] ++; 1610da668accSHong Zhang aj[i] += cstart; /* global col index to be used by MatSetValues() */ 1611d4bb536fSBarry Smith } 1612d4bb536fSBarry Smith 16137adad957SLisandro Dalcin ierr = MatCreate(((PetscObject)A)->comm,&B);CHKERRQ(ierr); 1614d0f46423SBarry Smith ierr = MatSetSizes(B,A->cmap->n,A->rmap->n,N,M);CHKERRQ(ierr); 16157adad957SLisandro Dalcin ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr); 1616da668accSHong Zhang ierr = MatMPIAIJSetPreallocation(B,0,d_nnz,0,d_nnz);CHKERRQ(ierr); 1617fc73b1b3SBarry Smith ierr = PetscFree(d_nnz);CHKERRQ(ierr); 1618fc4dec0aSBarry Smith } else { 1619fc4dec0aSBarry Smith B = *matout; 1620fc4dec0aSBarry Smith } 1621b7c46309SBarry Smith 1622b7c46309SBarry Smith /* copy over the A part */ 1623da668accSHong Zhang array = Aloc->a; 1624d0f46423SBarry Smith row = A->rmap->rstart; 1625da668accSHong Zhang for (i=0; i<ma; i++) { 1626da668accSHong Zhang ncol = ai[i+1]-ai[i]; 1627da668accSHong Zhang ierr = MatSetValues(B,ncol,aj,1,&row,array,INSERT_VALUES);CHKERRQ(ierr); 1628da668accSHong Zhang row++; array += ncol; aj += ncol; 
1629b7c46309SBarry Smith } 1630b7c46309SBarry Smith aj = Aloc->j; 1631da668accSHong Zhang for (i=0; i<ai[ma]; i++) aj[i] -= cstart; /* resume local col index */ 1632b7c46309SBarry Smith 1633b7c46309SBarry Smith /* copy over the B part */ 1634fc73b1b3SBarry Smith ierr = PetscMalloc(bi[mb]*sizeof(PetscInt),&cols);CHKERRQ(ierr); 1635fc73b1b3SBarry Smith ierr = PetscMemzero(cols,bi[mb]*sizeof(PetscInt));CHKERRQ(ierr); 1636da668accSHong Zhang array = Bloc->a; 1637d0f46423SBarry Smith row = A->rmap->rstart; 1638da668accSHong Zhang for (i=0; i<bi[mb]; i++) {cols[i] = a->garray[bj[i]];} 163961a2fbbaSHong Zhang cols_tmp = cols; 1640da668accSHong Zhang for (i=0; i<mb; i++) { 1641da668accSHong Zhang ncol = bi[i+1]-bi[i]; 164261a2fbbaSHong Zhang ierr = MatSetValues(B,ncol,cols_tmp,1,&row,array,INSERT_VALUES);CHKERRQ(ierr); 164361a2fbbaSHong Zhang row++; array += ncol; cols_tmp += ncol; 1644b7c46309SBarry Smith } 1645fc73b1b3SBarry Smith ierr = PetscFree(cols);CHKERRQ(ierr); 1646fc73b1b3SBarry Smith 16476d4a8577SBarry Smith ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 16486d4a8577SBarry Smith ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1649815cbec1SBarry Smith if (reuse == MAT_INITIAL_MATRIX || *matout != A) { 16500de55854SLois Curfman McInnes *matout = B; 16510de55854SLois Curfman McInnes } else { 1652273d9f13SBarry Smith ierr = MatHeaderCopy(A,B);CHKERRQ(ierr); 16530de55854SLois Curfman McInnes } 16543a40ed3dSBarry Smith PetscFunctionReturn(0); 1655b7c46309SBarry Smith } 1656b7c46309SBarry Smith 16574a2ae208SSatish Balay #undef __FUNCT__ 16584a2ae208SSatish Balay #define __FUNCT__ "MatDiagonalScale_MPIAIJ" 1659dfbe8321SBarry Smith PetscErrorCode MatDiagonalScale_MPIAIJ(Mat mat,Vec ll,Vec rr) 1660a008b906SSatish Balay { 16614b967eb1SSatish Balay Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 16624b967eb1SSatish Balay Mat a = aij->A,b = aij->B; 1663dfbe8321SBarry Smith PetscErrorCode ierr; 1664b1d57f15SBarry Smith PetscInt s1,s2,s3; 1665a008b906SSatish 
Balay 16663a40ed3dSBarry Smith PetscFunctionBegin; 16674b967eb1SSatish Balay ierr = MatGetLocalSize(mat,&s2,&s3);CHKERRQ(ierr); 16684b967eb1SSatish Balay if (rr) { 1669e1311b90SBarry Smith ierr = VecGetLocalSize(rr,&s1);CHKERRQ(ierr); 167029bbc08cSBarry Smith if (s1!=s3) SETERRQ(PETSC_ERR_ARG_SIZ,"right vector non-conforming local size"); 16714b967eb1SSatish Balay /* Overlap communication with computation. */ 1672ca9f406cSSatish Balay ierr = VecScatterBegin(aij->Mvctx,rr,aij->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1673a008b906SSatish Balay } 16744b967eb1SSatish Balay if (ll) { 1675e1311b90SBarry Smith ierr = VecGetLocalSize(ll,&s1);CHKERRQ(ierr); 167629bbc08cSBarry Smith if (s1!=s2) SETERRQ(PETSC_ERR_ARG_SIZ,"left vector non-conforming local size"); 1677f830108cSBarry Smith ierr = (*b->ops->diagonalscale)(b,ll,0);CHKERRQ(ierr); 16784b967eb1SSatish Balay } 16794b967eb1SSatish Balay /* scale the diagonal block */ 1680f830108cSBarry Smith ierr = (*a->ops->diagonalscale)(a,ll,rr);CHKERRQ(ierr); 16814b967eb1SSatish Balay 16824b967eb1SSatish Balay if (rr) { 16834b967eb1SSatish Balay /* Do a scatter end and then right scale the off-diagonal block */ 1684ca9f406cSSatish Balay ierr = VecScatterEnd(aij->Mvctx,rr,aij->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1685f830108cSBarry Smith ierr = (*b->ops->diagonalscale)(b,0,aij->lvec);CHKERRQ(ierr); 16864b967eb1SSatish Balay } 16874b967eb1SSatish Balay 16883a40ed3dSBarry Smith PetscFunctionReturn(0); 1689a008b906SSatish Balay } 1690a008b906SSatish Balay 16914a2ae208SSatish Balay #undef __FUNCT__ 1692521d7252SBarry Smith #define __FUNCT__ "MatSetBlockSize_MPIAIJ" 1693521d7252SBarry Smith PetscErrorCode MatSetBlockSize_MPIAIJ(Mat A,PetscInt bs) 16945a838052SSatish Balay { 1695521d7252SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 1696521d7252SBarry Smith PetscErrorCode ierr; 1697521d7252SBarry Smith 16983a40ed3dSBarry Smith PetscFunctionBegin; 1699521d7252SBarry Smith ierr = 
MatSetBlockSize(a->A,bs);CHKERRQ(ierr); 1700521d7252SBarry Smith ierr = MatSetBlockSize(a->B,bs);CHKERRQ(ierr); 17013a40ed3dSBarry Smith PetscFunctionReturn(0); 17025a838052SSatish Balay } 17034a2ae208SSatish Balay #undef __FUNCT__ 17044a2ae208SSatish Balay #define __FUNCT__ "MatSetUnfactored_MPIAIJ" 1705dfbe8321SBarry Smith PetscErrorCode MatSetUnfactored_MPIAIJ(Mat A) 1706bb5a7306SBarry Smith { 1707bb5a7306SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 1708dfbe8321SBarry Smith PetscErrorCode ierr; 17093a40ed3dSBarry Smith 17103a40ed3dSBarry Smith PetscFunctionBegin; 1711bb5a7306SBarry Smith ierr = MatSetUnfactored(a->A);CHKERRQ(ierr); 17123a40ed3dSBarry Smith PetscFunctionReturn(0); 1713bb5a7306SBarry Smith } 1714bb5a7306SBarry Smith 17154a2ae208SSatish Balay #undef __FUNCT__ 17164a2ae208SSatish Balay #define __FUNCT__ "MatEqual_MPIAIJ" 1717dfbe8321SBarry Smith PetscErrorCode MatEqual_MPIAIJ(Mat A,Mat B,PetscTruth *flag) 1718d4bb536fSBarry Smith { 1719d4bb536fSBarry Smith Mat_MPIAIJ *matB = (Mat_MPIAIJ*)B->data,*matA = (Mat_MPIAIJ*)A->data; 1720d4bb536fSBarry Smith Mat a,b,c,d; 1721d4bb536fSBarry Smith PetscTruth flg; 1722dfbe8321SBarry Smith PetscErrorCode ierr; 1723d4bb536fSBarry Smith 17243a40ed3dSBarry Smith PetscFunctionBegin; 1725d4bb536fSBarry Smith a = matA->A; b = matA->B; 1726d4bb536fSBarry Smith c = matB->A; d = matB->B; 1727d4bb536fSBarry Smith 1728d4bb536fSBarry Smith ierr = MatEqual(a,c,&flg);CHKERRQ(ierr); 1729abc0a331SBarry Smith if (flg) { 1730d4bb536fSBarry Smith ierr = MatEqual(b,d,&flg);CHKERRQ(ierr); 1731d4bb536fSBarry Smith } 17327adad957SLisandro Dalcin ierr = MPI_Allreduce(&flg,flag,1,MPI_INT,MPI_LAND,((PetscObject)A)->comm);CHKERRQ(ierr); 17333a40ed3dSBarry Smith PetscFunctionReturn(0); 1734d4bb536fSBarry Smith } 1735d4bb536fSBarry Smith 17364a2ae208SSatish Balay #undef __FUNCT__ 17374a2ae208SSatish Balay #define __FUNCT__ "MatCopy_MPIAIJ" 1738dfbe8321SBarry Smith PetscErrorCode MatCopy_MPIAIJ(Mat A,Mat B,MatStructure str) 
1739cb5b572fSBarry Smith { 1740dfbe8321SBarry Smith PetscErrorCode ierr; 1741cb5b572fSBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ *)A->data; 1742cb5b572fSBarry Smith Mat_MPIAIJ *b = (Mat_MPIAIJ *)B->data; 1743cb5b572fSBarry Smith 1744cb5b572fSBarry Smith PetscFunctionBegin; 174533f4a19fSKris Buschelman /* If the two matrices don't have the same copy implementation, they aren't compatible for fast copy. */ 174633f4a19fSKris Buschelman if ((str != SAME_NONZERO_PATTERN) || (A->ops->copy != B->ops->copy)) { 1747cb5b572fSBarry Smith /* because of the column compression in the off-processor part of the matrix a->B, 1748cb5b572fSBarry Smith the number of columns in a->B and b->B may be different, hence we cannot call 1749cb5b572fSBarry Smith the MatCopy() directly on the two parts. If need be, we can provide a more 1750cb5b572fSBarry Smith efficient copy than the MatCopy_Basic() by first uncompressing the a->B matrices 1751cb5b572fSBarry Smith then copying the submatrices */ 1752cb5b572fSBarry Smith ierr = MatCopy_Basic(A,B,str);CHKERRQ(ierr); 1753cb5b572fSBarry Smith } else { 1754cb5b572fSBarry Smith ierr = MatCopy(a->A,b->A,str);CHKERRQ(ierr); 1755cb5b572fSBarry Smith ierr = MatCopy(a->B,b->B,str);CHKERRQ(ierr); 1756cb5b572fSBarry Smith } 1757cb5b572fSBarry Smith PetscFunctionReturn(0); 1758cb5b572fSBarry Smith } 1759cb5b572fSBarry Smith 17604a2ae208SSatish Balay #undef __FUNCT__ 17614a2ae208SSatish Balay #define __FUNCT__ "MatSetUpPreallocation_MPIAIJ" 1762dfbe8321SBarry Smith PetscErrorCode MatSetUpPreallocation_MPIAIJ(Mat A) 1763273d9f13SBarry Smith { 1764dfbe8321SBarry Smith PetscErrorCode ierr; 1765273d9f13SBarry Smith 1766273d9f13SBarry Smith PetscFunctionBegin; 1767273d9f13SBarry Smith ierr = MatMPIAIJSetPreallocation(A,PETSC_DEFAULT,0,PETSC_DEFAULT,0);CHKERRQ(ierr); 1768273d9f13SBarry Smith PetscFunctionReturn(0); 1769273d9f13SBarry Smith } 1770273d9f13SBarry Smith 1771ac90fabeSBarry Smith #include "petscblaslapack.h" 1772ac90fabeSBarry Smith #undef __FUNCT__ 
#define __FUNCT__ "MatAXPY_MPIAIJ"
/*
   MatAXPY_MPIAIJ - Computes Y = a*X + Y for MPIAIJ matrices.

   Three cases, selected by str:
   - SAME_NONZERO_PATTERN:  the nonzero arrays of X and Y line up exactly,
     so a single BLAS axpy over each block's value array suffices.
   - SUBSET_NONZERO_PATTERN: X's nonzeros are a subset of Y's; a mapping
     xtoy from X's off-diagonal nonzeros to slots in Y's off-diagonal
     block is built once and cached on Y (invalidated when it was built
     against a different X block).
   - otherwise: fall back to the generic MatAXPY_Basic().
*/
PetscErrorCode MatAXPY_MPIAIJ(Mat Y,PetscScalar a,Mat X,MatStructure str)
{
  PetscErrorCode ierr;
  PetscInt       i;
  Mat_MPIAIJ     *xx = (Mat_MPIAIJ *)X->data,*yy = (Mat_MPIAIJ *)Y->data;
  PetscBLASInt   bnz,one=1;
  Mat_SeqAIJ     *x,*y;

  PetscFunctionBegin;
  if (str == SAME_NONZERO_PATTERN) {
    PetscScalar alpha = a;
    /* identical layouts: axpy directly over the diagonal block's values ... */
    x = (Mat_SeqAIJ *)xx->A->data;
    y = (Mat_SeqAIJ *)yy->A->data;
    bnz = PetscBLASIntCast(x->nz);
    BLASaxpy_(&bnz,&alpha,x->a,&one,y->a,&one);
    /* ... and over the off-diagonal block's values */
    x = (Mat_SeqAIJ *)xx->B->data;
    y = (Mat_SeqAIJ *)yy->B->data;
    bnz = PetscBLASIntCast(x->nz);
    BLASaxpy_(&bnz,&alpha,x->a,&one,y->a,&one);
  } else if (str == SUBSET_NONZERO_PATTERN) {
    /* diagonal block handled by the sequential subset-pattern kernel */
    ierr = MatAXPY_SeqAIJ(yy->A,a,xx->A,str);CHKERRQ(ierr);

    x = (Mat_SeqAIJ *)xx->B->data;
    y = (Mat_SeqAIJ *)yy->B->data;
    /* drop a cached mapping that was built against a different X off-diagonal block */
    if (y->xtoy && y->XtoY != xx->B) {
      ierr = PetscFree(y->xtoy);CHKERRQ(ierr);
      ierr = MatDestroy(y->XtoY);CHKERRQ(ierr);
    }
    if (!y->xtoy) { /* get xtoy: map X's off-diag nonzeros to slots in Y's off-diag block */
      ierr = MatAXPYGetxtoy_Private(xx->B->rmap->n,x->i,x->j,xx->garray,y->i,y->j,yy->garray,&y->xtoy);CHKERRQ(ierr);
      y->XtoY = xx->B;
      /* keep X's block alive while the cached mapping references it */
      ierr = PetscObjectReference((PetscObject)xx->B);CHKERRQ(ierr);
    }
    for (i=0; i<x->nz; i++) y->a[y->xtoy[i]] += a*(x->a[i]);
  } else {
    ierr = MatAXPY_Basic(Y,a,X,str);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

EXTERN PetscErrorCode PETSCMAT_DLLEXPORT MatConjugate_SeqAIJ(Mat);

#undef __FUNCT__
#define __FUNCT__ "MatConjugate_MPIAIJ"
/* Complex-conjugates both local blocks; a no-op for real scalars. */
PetscErrorCode PETSCMAT_DLLEXPORT MatConjugate_MPIAIJ(Mat mat)
{
#if defined(PETSC_USE_COMPLEX)
  PetscErrorCode ierr;
  Mat_MPIAIJ     *aij = (Mat_MPIAIJ *)mat->data;

  PetscFunctionBegin;
  ierr = MatConjugate_SeqAIJ(aij->A);CHKERRQ(ierr);
  ierr = MatConjugate_SeqAIJ(aij->B);CHKERRQ(ierr);
#else
  PetscFunctionBegin;
#endif
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatRealPart_MPIAIJ"
/* Replaces each entry of both local blocks by its real part. */
PetscErrorCode MatRealPart_MPIAIJ(Mat A)
{
  Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatRealPart(a->A);CHKERRQ(ierr);
  ierr = MatRealPart(a->B);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatImaginaryPart_MPIAIJ"
PetscErrorCode
MatImaginaryPart_MPIAIJ(Mat A) 184999cafbc1SBarry Smith { 185099cafbc1SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 185199cafbc1SBarry Smith PetscErrorCode ierr; 185299cafbc1SBarry Smith 185399cafbc1SBarry Smith PetscFunctionBegin; 185499cafbc1SBarry Smith ierr = MatImaginaryPart(a->A);CHKERRQ(ierr); 185599cafbc1SBarry Smith ierr = MatImaginaryPart(a->B);CHKERRQ(ierr); 185699cafbc1SBarry Smith PetscFunctionReturn(0); 185799cafbc1SBarry Smith } 185899cafbc1SBarry Smith 1859103bf8bdSMatthew Knepley #ifdef PETSC_HAVE_PBGL 1860103bf8bdSMatthew Knepley 1861103bf8bdSMatthew Knepley #include <boost/parallel/mpi/bsp_process_group.hpp> 1862a2c909beSMatthew Knepley #include <boost/graph/distributed/ilu_default_graph.hpp> 1863a2c909beSMatthew Knepley #include <boost/graph/distributed/ilu_0_block.hpp> 1864a2c909beSMatthew Knepley #include <boost/graph/distributed/ilu_preconditioner.hpp> 1865103bf8bdSMatthew Knepley #include <boost/graph/distributed/petsc/interface.hpp> 1866a2c909beSMatthew Knepley #include <boost/multi_array.hpp> 1867d0f46423SBarry Smith #include <boost/parallel/distributed_property_map->hpp> 1868103bf8bdSMatthew Knepley 1869103bf8bdSMatthew Knepley #undef __FUNCT__ 1870103bf8bdSMatthew Knepley #define __FUNCT__ "MatILUFactorSymbolic_MPIAIJ" 1871103bf8bdSMatthew Knepley /* 1872103bf8bdSMatthew Knepley This uses the parallel ILU factorization of Peter Gottschling <pgottsch@osl.iu.edu> 1873103bf8bdSMatthew Knepley */ 18740481f469SBarry Smith PetscErrorCode MatILUFactorSymbolic_MPIAIJ(Mat fact,Mat A, IS isrow, IS iscol, const MatFactorInfo *info) 1875103bf8bdSMatthew Knepley { 1876a2c909beSMatthew Knepley namespace petsc = boost::distributed::petsc; 1877a2c909beSMatthew Knepley 1878a2c909beSMatthew Knepley namespace graph_dist = boost::graph::distributed; 1879a2c909beSMatthew Knepley using boost::graph::distributed::ilu_default::process_group_type; 1880a2c909beSMatthew Knepley using boost::graph::ilu_permuted; 1881a2c909beSMatthew Knepley 
1882103bf8bdSMatthew Knepley PetscTruth row_identity, col_identity; 1883776b82aeSLisandro Dalcin PetscContainer c; 1884103bf8bdSMatthew Knepley PetscInt m, n, M, N; 1885103bf8bdSMatthew Knepley PetscErrorCode ierr; 1886103bf8bdSMatthew Knepley 1887103bf8bdSMatthew Knepley PetscFunctionBegin; 1888103bf8bdSMatthew Knepley if (info->levels != 0) SETERRQ(PETSC_ERR_SUP,"Only levels = 0 supported for parallel ilu"); 1889103bf8bdSMatthew Knepley ierr = ISIdentity(isrow, &row_identity);CHKERRQ(ierr); 1890103bf8bdSMatthew Knepley ierr = ISIdentity(iscol, &col_identity);CHKERRQ(ierr); 1891103bf8bdSMatthew Knepley if (!row_identity || !col_identity) { 1892103bf8bdSMatthew Knepley SETERRQ(PETSC_ERR_ARG_WRONG,"Row and column permutations must be identity for parallel ILU"); 1893103bf8bdSMatthew Knepley } 1894103bf8bdSMatthew Knepley 1895103bf8bdSMatthew Knepley process_group_type pg; 1896a2c909beSMatthew Knepley typedef graph_dist::ilu_default::ilu_level_graph_type lgraph_type; 1897a2c909beSMatthew Knepley lgraph_type* lgraph_p = new lgraph_type(petsc::num_global_vertices(A), pg, petsc::matrix_distribution(A, pg)); 1898a2c909beSMatthew Knepley lgraph_type& level_graph = *lgraph_p; 1899a2c909beSMatthew Knepley graph_dist::ilu_default::graph_type& graph(level_graph.graph); 1900a2c909beSMatthew Knepley 1901103bf8bdSMatthew Knepley petsc::read_matrix(A, graph, get(boost::edge_weight, graph)); 1902a2c909beSMatthew Knepley ilu_permuted(level_graph); 1903103bf8bdSMatthew Knepley 1904103bf8bdSMatthew Knepley /* put together the new matrix */ 19057adad957SLisandro Dalcin ierr = MatCreate(((PetscObject)A)->comm, fact);CHKERRQ(ierr); 1906103bf8bdSMatthew Knepley ierr = MatGetLocalSize(A, &m, &n);CHKERRQ(ierr); 1907103bf8bdSMatthew Knepley ierr = MatGetSize(A, &M, &N);CHKERRQ(ierr); 1908719d5645SBarry Smith ierr = MatSetSizes(fact, m, n, M, N);CHKERRQ(ierr); 1909719d5645SBarry Smith ierr = MatSetType(fact, ((PetscObject)A)->type_name);CHKERRQ(ierr); 1910719d5645SBarry Smith ierr = 
MatAssemblyBegin(fact, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1911719d5645SBarry Smith ierr = MatAssemblyEnd(fact, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1912103bf8bdSMatthew Knepley 19137adad957SLisandro Dalcin ierr = PetscContainerCreate(((PetscObject)A)->comm, &c); 1914776b82aeSLisandro Dalcin ierr = PetscContainerSetPointer(c, lgraph_p); 1915719d5645SBarry Smith ierr = PetscObjectCompose((PetscObject) (fact), "graph", (PetscObject) c); 1916103bf8bdSMatthew Knepley PetscFunctionReturn(0); 1917103bf8bdSMatthew Knepley } 1918103bf8bdSMatthew Knepley 1919103bf8bdSMatthew Knepley #undef __FUNCT__ 1920103bf8bdSMatthew Knepley #define __FUNCT__ "MatLUFactorNumeric_MPIAIJ" 19210481f469SBarry Smith PetscErrorCode MatLUFactorNumeric_MPIAIJ(Mat B,Mat A, const MatFactorInfo *info) 1922103bf8bdSMatthew Knepley { 1923103bf8bdSMatthew Knepley PetscFunctionBegin; 1924103bf8bdSMatthew Knepley PetscFunctionReturn(0); 1925103bf8bdSMatthew Knepley } 1926103bf8bdSMatthew Knepley 1927103bf8bdSMatthew Knepley #undef __FUNCT__ 1928103bf8bdSMatthew Knepley #define __FUNCT__ "MatSolve_MPIAIJ" 1929103bf8bdSMatthew Knepley /* 1930103bf8bdSMatthew Knepley This uses the parallel ILU factorization of Peter Gottschling <pgottsch@osl.iu.edu> 1931103bf8bdSMatthew Knepley */ 1932103bf8bdSMatthew Knepley PetscErrorCode MatSolve_MPIAIJ(Mat A, Vec b, Vec x) 1933103bf8bdSMatthew Knepley { 1934a2c909beSMatthew Knepley namespace graph_dist = boost::graph::distributed; 1935a2c909beSMatthew Knepley 1936a2c909beSMatthew Knepley typedef graph_dist::ilu_default::ilu_level_graph_type lgraph_type; 1937a2c909beSMatthew Knepley lgraph_type* lgraph_p; 1938776b82aeSLisandro Dalcin PetscContainer c; 1939103bf8bdSMatthew Knepley PetscErrorCode ierr; 1940103bf8bdSMatthew Knepley 1941103bf8bdSMatthew Knepley PetscFunctionBegin; 1942103bf8bdSMatthew Knepley ierr = PetscObjectQuery((PetscObject) A, "graph", (PetscObject *) &c);CHKERRQ(ierr); 1943776b82aeSLisandro Dalcin ierr = PetscContainerGetPointer(c, (void **) 
&lgraph_p);CHKERRQ(ierr); 1944103bf8bdSMatthew Knepley ierr = VecCopy(b, x);CHKERRQ(ierr); 1945a2c909beSMatthew Knepley 1946a2c909beSMatthew Knepley PetscScalar* array_x; 1947a2c909beSMatthew Knepley ierr = VecGetArray(x, &array_x);CHKERRQ(ierr); 1948a2c909beSMatthew Knepley PetscInt sx; 1949a2c909beSMatthew Knepley ierr = VecGetSize(x, &sx);CHKERRQ(ierr); 1950a2c909beSMatthew Knepley 1951a2c909beSMatthew Knepley PetscScalar* array_b; 1952a2c909beSMatthew Knepley ierr = VecGetArray(b, &array_b);CHKERRQ(ierr); 1953a2c909beSMatthew Knepley PetscInt sb; 1954a2c909beSMatthew Knepley ierr = VecGetSize(b, &sb);CHKERRQ(ierr); 1955a2c909beSMatthew Knepley 1956a2c909beSMatthew Knepley lgraph_type& level_graph = *lgraph_p; 1957a2c909beSMatthew Knepley graph_dist::ilu_default::graph_type& graph(level_graph.graph); 1958a2c909beSMatthew Knepley 1959a2c909beSMatthew Knepley typedef boost::multi_array_ref<PetscScalar, 1> array_ref_type; 1960a2c909beSMatthew Knepley array_ref_type ref_b(array_b, boost::extents[num_vertices(graph)]), 1961a2c909beSMatthew Knepley ref_x(array_x, boost::extents[num_vertices(graph)]); 1962a2c909beSMatthew Knepley 1963a2c909beSMatthew Knepley typedef boost::iterator_property_map<array_ref_type::iterator, 1964a2c909beSMatthew Knepley boost::property_map<graph_dist::ilu_default::graph_type, boost::vertex_index_t>::type> gvector_type; 1965a2c909beSMatthew Knepley gvector_type vector_b(ref_b.begin(), get(boost::vertex_index, graph)), 1966a2c909beSMatthew Knepley vector_x(ref_x.begin(), get(boost::vertex_index, graph)); 1967a2c909beSMatthew Knepley 1968a2c909beSMatthew Knepley ilu_set_solve(*lgraph_p, vector_b, vector_x); 1969a2c909beSMatthew Knepley 1970103bf8bdSMatthew Knepley PetscFunctionReturn(0); 1971103bf8bdSMatthew Knepley } 1972103bf8bdSMatthew Knepley #endif 1973103bf8bdSMatthew Knepley 197469db28dcSHong Zhang typedef struct { /* used by MatGetRedundantMatrix() for reusing matredundant */ 197569db28dcSHong Zhang PetscInt nzlocal,nsends,nrecvs; 
1976aa5bb8c0SSatish Balay PetscMPIInt *send_rank; 1977aa5bb8c0SSatish Balay PetscInt *sbuf_nz,*sbuf_j,**rbuf_j; 197869db28dcSHong Zhang PetscScalar *sbuf_a,**rbuf_a; 197969db28dcSHong Zhang PetscErrorCode (*MatDestroy)(Mat); 198069db28dcSHong Zhang } Mat_Redundant; 198169db28dcSHong Zhang 198269db28dcSHong Zhang #undef __FUNCT__ 198369db28dcSHong Zhang #define __FUNCT__ "PetscContainerDestroy_MatRedundant" 198469db28dcSHong Zhang PetscErrorCode PetscContainerDestroy_MatRedundant(void *ptr) 198569db28dcSHong Zhang { 198669db28dcSHong Zhang PetscErrorCode ierr; 198769db28dcSHong Zhang Mat_Redundant *redund=(Mat_Redundant*)ptr; 198869db28dcSHong Zhang PetscInt i; 198969db28dcSHong Zhang 199069db28dcSHong Zhang PetscFunctionBegin; 199169db28dcSHong Zhang ierr = PetscFree(redund->send_rank);CHKERRQ(ierr); 199269db28dcSHong Zhang ierr = PetscFree(redund->sbuf_j);CHKERRQ(ierr); 199369db28dcSHong Zhang ierr = PetscFree(redund->sbuf_a);CHKERRQ(ierr); 199469db28dcSHong Zhang for (i=0; i<redund->nrecvs; i++){ 199569db28dcSHong Zhang ierr = PetscFree(redund->rbuf_j[i]);CHKERRQ(ierr); 199669db28dcSHong Zhang ierr = PetscFree(redund->rbuf_a[i]);CHKERRQ(ierr); 199769db28dcSHong Zhang } 199869db28dcSHong Zhang ierr = PetscFree3(redund->sbuf_nz,redund->rbuf_j,redund->rbuf_a);CHKERRQ(ierr); 199969db28dcSHong Zhang ierr = PetscFree(redund);CHKERRQ(ierr); 200069db28dcSHong Zhang PetscFunctionReturn(0); 200169db28dcSHong Zhang } 200269db28dcSHong Zhang 200369db28dcSHong Zhang #undef __FUNCT__ 200469db28dcSHong Zhang #define __FUNCT__ "MatDestroy_MatRedundant" 200569db28dcSHong Zhang PetscErrorCode MatDestroy_MatRedundant(Mat A) 200669db28dcSHong Zhang { 200769db28dcSHong Zhang PetscErrorCode ierr; 200869db28dcSHong Zhang PetscContainer container; 200969db28dcSHong Zhang Mat_Redundant *redund=PETSC_NULL; 201069db28dcSHong Zhang 201169db28dcSHong Zhang PetscFunctionBegin; 201269db28dcSHong Zhang ierr = PetscObjectQuery((PetscObject)A,"Mat_Redundant",(PetscObject 
*)&container);CHKERRQ(ierr); 201369db28dcSHong Zhang if (container) { 201469db28dcSHong Zhang ierr = PetscContainerGetPointer(container,(void **)&redund);CHKERRQ(ierr); 201569db28dcSHong Zhang } else { 201669db28dcSHong Zhang SETERRQ(PETSC_ERR_PLIB,"Container does not exit"); 201769db28dcSHong Zhang } 201869db28dcSHong Zhang A->ops->destroy = redund->MatDestroy; 201969db28dcSHong Zhang ierr = PetscObjectCompose((PetscObject)A,"Mat_Redundant",0);CHKERRQ(ierr); 202069db28dcSHong Zhang ierr = (*A->ops->destroy)(A);CHKERRQ(ierr); 202169db28dcSHong Zhang ierr = PetscContainerDestroy(container);CHKERRQ(ierr); 202269db28dcSHong Zhang PetscFunctionReturn(0); 202369db28dcSHong Zhang } 202469db28dcSHong Zhang 202569db28dcSHong Zhang #undef __FUNCT__ 202669db28dcSHong Zhang #define __FUNCT__ "MatGetRedundantMatrix_MPIAIJ" 202769db28dcSHong Zhang PetscErrorCode MatGetRedundantMatrix_MPIAIJ(Mat mat,PetscInt nsubcomm,MPI_Comm subcomm,PetscInt mlocal_sub,MatReuse reuse,Mat *matredundant) 202869db28dcSHong Zhang { 202969db28dcSHong Zhang PetscMPIInt rank,size; 20307adad957SLisandro Dalcin MPI_Comm comm=((PetscObject)mat)->comm; 203169db28dcSHong Zhang PetscErrorCode ierr; 203269db28dcSHong Zhang PetscInt nsends=0,nrecvs=0,i,rownz_max=0; 203369db28dcSHong Zhang PetscMPIInt *send_rank=PETSC_NULL,*recv_rank=PETSC_NULL; 2034d0f46423SBarry Smith PetscInt *rowrange=mat->rmap->range; 203569db28dcSHong Zhang Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 203669db28dcSHong Zhang Mat A=aij->A,B=aij->B,C=*matredundant; 203769db28dcSHong Zhang Mat_SeqAIJ *a=(Mat_SeqAIJ*)A->data,*b=(Mat_SeqAIJ*)B->data; 203869db28dcSHong Zhang PetscScalar *sbuf_a; 203969db28dcSHong Zhang PetscInt nzlocal=a->nz+b->nz; 2040d0f46423SBarry Smith PetscInt j,cstart=mat->cmap->rstart,cend=mat->cmap->rend,row,nzA,nzB,ncols,*cworkA,*cworkB; 2041d0f46423SBarry Smith PetscInt rstart=mat->rmap->rstart,rend=mat->rmap->rend,*bmap=aij->garray,M,N; 204269db28dcSHong Zhang PetscInt *cols,ctmp,lwrite,*rptr,l,*sbuf_j; 
2043a77337e4SBarry Smith MatScalar *aworkA,*aworkB; 2044a77337e4SBarry Smith PetscScalar *vals; 204569db28dcSHong Zhang PetscMPIInt tag1,tag2,tag3,imdex; 204669db28dcSHong Zhang MPI_Request *s_waits1=PETSC_NULL,*s_waits2=PETSC_NULL,*s_waits3=PETSC_NULL, 204769db28dcSHong Zhang *r_waits1=PETSC_NULL,*r_waits2=PETSC_NULL,*r_waits3=PETSC_NULL; 204869db28dcSHong Zhang MPI_Status recv_status,*send_status; 204969db28dcSHong Zhang PetscInt *sbuf_nz=PETSC_NULL,*rbuf_nz=PETSC_NULL,count; 205069db28dcSHong Zhang PetscInt **rbuf_j=PETSC_NULL; 205169db28dcSHong Zhang PetscScalar **rbuf_a=PETSC_NULL; 205269db28dcSHong Zhang Mat_Redundant *redund=PETSC_NULL; 205369db28dcSHong Zhang PetscContainer container; 205469db28dcSHong Zhang 205569db28dcSHong Zhang PetscFunctionBegin; 205669db28dcSHong Zhang ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 205769db28dcSHong Zhang ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 205869db28dcSHong Zhang 205969db28dcSHong Zhang if (reuse == MAT_REUSE_MATRIX) { 206069db28dcSHong Zhang ierr = MatGetSize(C,&M,&N);CHKERRQ(ierr); 2061d0f46423SBarry Smith if (M != N || M != mat->rmap->N) SETERRQ(PETSC_ERR_ARG_SIZ,"Cannot reuse matrix. Wrong global size"); 206269db28dcSHong Zhang ierr = MatGetLocalSize(C,&M,&N);CHKERRQ(ierr); 206369db28dcSHong Zhang if (M != N || M != mlocal_sub) SETERRQ(PETSC_ERR_ARG_SIZ,"Cannot reuse matrix. Wrong local size"); 206469db28dcSHong Zhang ierr = PetscObjectQuery((PetscObject)C,"Mat_Redundant",(PetscObject *)&container);CHKERRQ(ierr); 206569db28dcSHong Zhang if (container) { 206669db28dcSHong Zhang ierr = PetscContainerGetPointer(container,(void **)&redund);CHKERRQ(ierr); 206769db28dcSHong Zhang } else { 206869db28dcSHong Zhang SETERRQ(PETSC_ERR_PLIB,"Container does not exit"); 206969db28dcSHong Zhang } 207069db28dcSHong Zhang if (nzlocal != redund->nzlocal) SETERRQ(PETSC_ERR_ARG_SIZ,"Cannot reuse matrix. 
Wrong nzlocal"); 207169db28dcSHong Zhang 207269db28dcSHong Zhang nsends = redund->nsends; 207369db28dcSHong Zhang nrecvs = redund->nrecvs; 207469db28dcSHong Zhang send_rank = redund->send_rank; recv_rank = send_rank + size; 207569db28dcSHong Zhang sbuf_nz = redund->sbuf_nz; rbuf_nz = sbuf_nz + nsends; 207669db28dcSHong Zhang sbuf_j = redund->sbuf_j; 207769db28dcSHong Zhang sbuf_a = redund->sbuf_a; 207869db28dcSHong Zhang rbuf_j = redund->rbuf_j; 207969db28dcSHong Zhang rbuf_a = redund->rbuf_a; 208069db28dcSHong Zhang } 208169db28dcSHong Zhang 208269db28dcSHong Zhang if (reuse == MAT_INITIAL_MATRIX){ 208369db28dcSHong Zhang PetscMPIInt subrank,subsize; 208469db28dcSHong Zhang PetscInt nleftover,np_subcomm; 208569db28dcSHong Zhang /* get the destination processors' id send_rank, nsends and nrecvs */ 208669db28dcSHong Zhang ierr = MPI_Comm_rank(subcomm,&subrank);CHKERRQ(ierr); 208769db28dcSHong Zhang ierr = MPI_Comm_size(subcomm,&subsize);CHKERRQ(ierr); 208869db28dcSHong Zhang ierr = PetscMalloc((2*size+1)*sizeof(PetscMPIInt),&send_rank); 208969db28dcSHong Zhang recv_rank = send_rank + size; 209069db28dcSHong Zhang np_subcomm = size/nsubcomm; 209169db28dcSHong Zhang nleftover = size - nsubcomm*np_subcomm; 209269db28dcSHong Zhang nsends = 0; nrecvs = 0; 209369db28dcSHong Zhang for (i=0; i<size; i++){ /* i=rank*/ 209469db28dcSHong Zhang if (subrank == i/nsubcomm && rank != i){ /* my_subrank == other's subrank */ 209569db28dcSHong Zhang send_rank[nsends] = i; nsends++; 209669db28dcSHong Zhang recv_rank[nrecvs++] = i; 209769db28dcSHong Zhang } 209869db28dcSHong Zhang } 209969db28dcSHong Zhang if (rank >= size - nleftover){/* this proc is a leftover processor */ 210069db28dcSHong Zhang i = size-nleftover-1; 210169db28dcSHong Zhang j = 0; 210269db28dcSHong Zhang while (j < nsubcomm - nleftover){ 210369db28dcSHong Zhang send_rank[nsends++] = i; 210469db28dcSHong Zhang i--; j++; 210569db28dcSHong Zhang } 210669db28dcSHong Zhang } 210769db28dcSHong Zhang 210869db28dcSHong 
Zhang if (nleftover && subsize == size/nsubcomm && subrank==subsize-1){ /* this proc recvs from leftover processors */ 210969db28dcSHong Zhang for (i=0; i<nleftover; i++){ 211069db28dcSHong Zhang recv_rank[nrecvs++] = size-nleftover+i; 211169db28dcSHong Zhang } 211269db28dcSHong Zhang } 211369db28dcSHong Zhang 211469db28dcSHong Zhang /* allocate sbuf_j, sbuf_a */ 211569db28dcSHong Zhang i = nzlocal + rowrange[rank+1] - rowrange[rank] + 2; 211669db28dcSHong Zhang ierr = PetscMalloc(i*sizeof(PetscInt),&sbuf_j);CHKERRQ(ierr); 211769db28dcSHong Zhang ierr = PetscMalloc((nzlocal+1)*sizeof(PetscScalar),&sbuf_a);CHKERRQ(ierr); 211869db28dcSHong Zhang } /* endof if (reuse == MAT_INITIAL_MATRIX) */ 211969db28dcSHong Zhang 212069db28dcSHong Zhang /* copy mat's local entries into the buffers */ 212169db28dcSHong Zhang if (reuse == MAT_INITIAL_MATRIX){ 212269db28dcSHong Zhang rownz_max = 0; 212369db28dcSHong Zhang rptr = sbuf_j; 212469db28dcSHong Zhang cols = sbuf_j + rend-rstart + 1; 212569db28dcSHong Zhang vals = sbuf_a; 212669db28dcSHong Zhang rptr[0] = 0; 212769db28dcSHong Zhang for (i=0; i<rend-rstart; i++){ 212869db28dcSHong Zhang row = i + rstart; 212969db28dcSHong Zhang nzA = a->i[i+1] - a->i[i]; nzB = b->i[i+1] - b->i[i]; 213069db28dcSHong Zhang ncols = nzA + nzB; 213169db28dcSHong Zhang cworkA = a->j + a->i[i]; cworkB = b->j + b->i[i]; 213269db28dcSHong Zhang aworkA = a->a + a->i[i]; aworkB = b->a + b->i[i]; 213369db28dcSHong Zhang /* load the column indices for this row into cols */ 213469db28dcSHong Zhang lwrite = 0; 213569db28dcSHong Zhang for (l=0; l<nzB; l++) { 213669db28dcSHong Zhang if ((ctmp = bmap[cworkB[l]]) < cstart){ 213769db28dcSHong Zhang vals[lwrite] = aworkB[l]; 213869db28dcSHong Zhang cols[lwrite++] = ctmp; 213969db28dcSHong Zhang } 214069db28dcSHong Zhang } 214169db28dcSHong Zhang for (l=0; l<nzA; l++){ 214269db28dcSHong Zhang vals[lwrite] = aworkA[l]; 214369db28dcSHong Zhang cols[lwrite++] = cstart + cworkA[l]; 214469db28dcSHong Zhang } 
214569db28dcSHong Zhang for (l=0; l<nzB; l++) { 214669db28dcSHong Zhang if ((ctmp = bmap[cworkB[l]]) >= cend){ 214769db28dcSHong Zhang vals[lwrite] = aworkB[l]; 214869db28dcSHong Zhang cols[lwrite++] = ctmp; 214969db28dcSHong Zhang } 215069db28dcSHong Zhang } 215169db28dcSHong Zhang vals += ncols; 215269db28dcSHong Zhang cols += ncols; 215369db28dcSHong Zhang rptr[i+1] = rptr[i] + ncols; 215469db28dcSHong Zhang if (rownz_max < ncols) rownz_max = ncols; 215569db28dcSHong Zhang } 215669db28dcSHong Zhang if (rptr[rend-rstart] != a->nz + b->nz) SETERRQ4(1, "rptr[%d] %d != %d + %d",rend-rstart,rptr[rend-rstart+1],a->nz,b->nz); 215769db28dcSHong Zhang } else { /* only copy matrix values into sbuf_a */ 215869db28dcSHong Zhang rptr = sbuf_j; 215969db28dcSHong Zhang vals = sbuf_a; 216069db28dcSHong Zhang rptr[0] = 0; 216169db28dcSHong Zhang for (i=0; i<rend-rstart; i++){ 216269db28dcSHong Zhang row = i + rstart; 216369db28dcSHong Zhang nzA = a->i[i+1] - a->i[i]; nzB = b->i[i+1] - b->i[i]; 216469db28dcSHong Zhang ncols = nzA + nzB; 216569db28dcSHong Zhang cworkA = a->j + a->i[i]; cworkB = b->j + b->i[i]; 216669db28dcSHong Zhang aworkA = a->a + a->i[i]; aworkB = b->a + b->i[i]; 216769db28dcSHong Zhang lwrite = 0; 216869db28dcSHong Zhang for (l=0; l<nzB; l++) { 216969db28dcSHong Zhang if ((ctmp = bmap[cworkB[l]]) < cstart) vals[lwrite++] = aworkB[l]; 217069db28dcSHong Zhang } 217169db28dcSHong Zhang for (l=0; l<nzA; l++) vals[lwrite++] = aworkA[l]; 217269db28dcSHong Zhang for (l=0; l<nzB; l++) { 217369db28dcSHong Zhang if ((ctmp = bmap[cworkB[l]]) >= cend) vals[lwrite++] = aworkB[l]; 217469db28dcSHong Zhang } 217569db28dcSHong Zhang vals += ncols; 217669db28dcSHong Zhang rptr[i+1] = rptr[i] + ncols; 217769db28dcSHong Zhang } 217869db28dcSHong Zhang } /* endof if (reuse == MAT_INITIAL_MATRIX) */ 217969db28dcSHong Zhang 218069db28dcSHong Zhang /* send nzlocal to others, and recv other's nzlocal */ 218169db28dcSHong Zhang /*--------------------------------------------------*/ 
218269db28dcSHong Zhang if (reuse == MAT_INITIAL_MATRIX){ 218369db28dcSHong Zhang ierr = PetscMalloc2(3*(nsends + nrecvs)+1,MPI_Request,&s_waits3,nsends+1,MPI_Status,&send_status);CHKERRQ(ierr); 218469db28dcSHong Zhang s_waits2 = s_waits3 + nsends; 218569db28dcSHong Zhang s_waits1 = s_waits2 + nsends; 218669db28dcSHong Zhang r_waits1 = s_waits1 + nsends; 218769db28dcSHong Zhang r_waits2 = r_waits1 + nrecvs; 218869db28dcSHong Zhang r_waits3 = r_waits2 + nrecvs; 218969db28dcSHong Zhang } else { 219069db28dcSHong Zhang ierr = PetscMalloc2(nsends + nrecvs +1,MPI_Request,&s_waits3,nsends+1,MPI_Status,&send_status);CHKERRQ(ierr); 219169db28dcSHong Zhang r_waits3 = s_waits3 + nsends; 219269db28dcSHong Zhang } 219369db28dcSHong Zhang 219469db28dcSHong Zhang ierr = PetscObjectGetNewTag((PetscObject)mat,&tag3);CHKERRQ(ierr); 219569db28dcSHong Zhang if (reuse == MAT_INITIAL_MATRIX){ 219669db28dcSHong Zhang /* get new tags to keep the communication clean */ 219769db28dcSHong Zhang ierr = PetscObjectGetNewTag((PetscObject)mat,&tag1);CHKERRQ(ierr); 219869db28dcSHong Zhang ierr = PetscObjectGetNewTag((PetscObject)mat,&tag2);CHKERRQ(ierr); 219969db28dcSHong Zhang ierr = PetscMalloc3(nsends+nrecvs+1,PetscInt,&sbuf_nz,nrecvs,PetscInt*,&rbuf_j,nrecvs,PetscScalar*,&rbuf_a);CHKERRQ(ierr); 220069db28dcSHong Zhang rbuf_nz = sbuf_nz + nsends; 220169db28dcSHong Zhang 220269db28dcSHong Zhang /* post receives of other's nzlocal */ 220369db28dcSHong Zhang for (i=0; i<nrecvs; i++){ 220469db28dcSHong Zhang ierr = MPI_Irecv(rbuf_nz+i,1,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,r_waits1+i);CHKERRQ(ierr); 220569db28dcSHong Zhang } 220669db28dcSHong Zhang /* send nzlocal to others */ 220769db28dcSHong Zhang for (i=0; i<nsends; i++){ 220869db28dcSHong Zhang sbuf_nz[i] = nzlocal; 220969db28dcSHong Zhang ierr = MPI_Isend(sbuf_nz+i,1,MPIU_INT,send_rank[i],tag1,comm,s_waits1+i);CHKERRQ(ierr); 221069db28dcSHong Zhang } 221169db28dcSHong Zhang /* wait on receives of nzlocal; allocate space for rbuf_j, rbuf_a */ 
221269db28dcSHong Zhang count = nrecvs; 221369db28dcSHong Zhang while (count) { 221469db28dcSHong Zhang ierr = MPI_Waitany(nrecvs,r_waits1,&imdex,&recv_status);CHKERRQ(ierr); 221569db28dcSHong Zhang recv_rank[imdex] = recv_status.MPI_SOURCE; 221669db28dcSHong Zhang /* allocate rbuf_a and rbuf_j; then post receives of rbuf_j */ 221769db28dcSHong Zhang ierr = PetscMalloc((rbuf_nz[imdex]+1)*sizeof(PetscScalar),&rbuf_a[imdex]);CHKERRQ(ierr); 221869db28dcSHong Zhang 221969db28dcSHong Zhang i = rowrange[recv_status.MPI_SOURCE+1] - rowrange[recv_status.MPI_SOURCE]; /* number of expected mat->i */ 222069db28dcSHong Zhang rbuf_nz[imdex] += i + 2; 222169db28dcSHong Zhang ierr = PetscMalloc(rbuf_nz[imdex]*sizeof(PetscInt),&rbuf_j[imdex]);CHKERRQ(ierr); 222269db28dcSHong Zhang ierr = MPI_Irecv(rbuf_j[imdex],rbuf_nz[imdex],MPIU_INT,recv_status.MPI_SOURCE,tag2,comm,r_waits2+imdex);CHKERRQ(ierr); 222369db28dcSHong Zhang count--; 222469db28dcSHong Zhang } 222569db28dcSHong Zhang /* wait on sends of nzlocal */ 222669db28dcSHong Zhang if (nsends) {ierr = MPI_Waitall(nsends,s_waits1,send_status);CHKERRQ(ierr);} 222769db28dcSHong Zhang /* send mat->i,j to others, and recv from other's */ 222869db28dcSHong Zhang /*------------------------------------------------*/ 222969db28dcSHong Zhang for (i=0; i<nsends; i++){ 223069db28dcSHong Zhang j = nzlocal + rowrange[rank+1] - rowrange[rank] + 1; 223169db28dcSHong Zhang ierr = MPI_Isend(sbuf_j,j,MPIU_INT,send_rank[i],tag2,comm,s_waits2+i);CHKERRQ(ierr); 223269db28dcSHong Zhang } 223369db28dcSHong Zhang /* wait on receives of mat->i,j */ 223469db28dcSHong Zhang /*------------------------------*/ 223569db28dcSHong Zhang count = nrecvs; 223669db28dcSHong Zhang while (count) { 223769db28dcSHong Zhang ierr = MPI_Waitany(nrecvs,r_waits2,&imdex,&recv_status);CHKERRQ(ierr); 223869db28dcSHong Zhang if (recv_rank[imdex] != recv_status.MPI_SOURCE) SETERRQ2(1, "recv_rank %d != MPI_SOURCE %d",recv_rank[imdex],recv_status.MPI_SOURCE); 223969db28dcSHong 
Zhang count--; 224069db28dcSHong Zhang } 224169db28dcSHong Zhang /* wait on sends of mat->i,j */ 224269db28dcSHong Zhang /*---------------------------*/ 224369db28dcSHong Zhang if (nsends) { 224469db28dcSHong Zhang ierr = MPI_Waitall(nsends,s_waits2,send_status);CHKERRQ(ierr); 224569db28dcSHong Zhang } 224669db28dcSHong Zhang } /* endof if (reuse == MAT_INITIAL_MATRIX) */ 224769db28dcSHong Zhang 224869db28dcSHong Zhang /* post receives, send and receive mat->a */ 224969db28dcSHong Zhang /*----------------------------------------*/ 225069db28dcSHong Zhang for (imdex=0; imdex<nrecvs; imdex++) { 225169db28dcSHong Zhang ierr = MPI_Irecv(rbuf_a[imdex],rbuf_nz[imdex],MPIU_SCALAR,recv_rank[imdex],tag3,comm,r_waits3+imdex);CHKERRQ(ierr); 225269db28dcSHong Zhang } 225369db28dcSHong Zhang for (i=0; i<nsends; i++){ 225469db28dcSHong Zhang ierr = MPI_Isend(sbuf_a,nzlocal,MPIU_SCALAR,send_rank[i],tag3,comm,s_waits3+i);CHKERRQ(ierr); 225569db28dcSHong Zhang } 225669db28dcSHong Zhang count = nrecvs; 225769db28dcSHong Zhang while (count) { 225869db28dcSHong Zhang ierr = MPI_Waitany(nrecvs,r_waits3,&imdex,&recv_status);CHKERRQ(ierr); 225969db28dcSHong Zhang if (recv_rank[imdex] != recv_status.MPI_SOURCE) SETERRQ2(1, "recv_rank %d != MPI_SOURCE %d",recv_rank[imdex],recv_status.MPI_SOURCE); 226069db28dcSHong Zhang count--; 226169db28dcSHong Zhang } 226269db28dcSHong Zhang if (nsends) { 226369db28dcSHong Zhang ierr = MPI_Waitall(nsends,s_waits3,send_status);CHKERRQ(ierr); 226469db28dcSHong Zhang } 226569db28dcSHong Zhang 226669db28dcSHong Zhang ierr = PetscFree2(s_waits3,send_status);CHKERRQ(ierr); 226769db28dcSHong Zhang 226869db28dcSHong Zhang /* create redundant matrix */ 226969db28dcSHong Zhang /*-------------------------*/ 227069db28dcSHong Zhang if (reuse == MAT_INITIAL_MATRIX){ 227169db28dcSHong Zhang /* compute rownz_max for preallocation */ 227269db28dcSHong Zhang for (imdex=0; imdex<nrecvs; imdex++){ 227369db28dcSHong Zhang j = rowrange[recv_rank[imdex]+1] - 
rowrange[recv_rank[imdex]]; 227469db28dcSHong Zhang rptr = rbuf_j[imdex]; 227569db28dcSHong Zhang for (i=0; i<j; i++){ 227669db28dcSHong Zhang ncols = rptr[i+1] - rptr[i]; 227769db28dcSHong Zhang if (rownz_max < ncols) rownz_max = ncols; 227869db28dcSHong Zhang } 227969db28dcSHong Zhang } 228069db28dcSHong Zhang 228169db28dcSHong Zhang ierr = MatCreate(subcomm,&C);CHKERRQ(ierr); 228269db28dcSHong Zhang ierr = MatSetSizes(C,mlocal_sub,mlocal_sub,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr); 228369db28dcSHong Zhang ierr = MatSetFromOptions(C);CHKERRQ(ierr); 228469db28dcSHong Zhang ierr = MatSeqAIJSetPreallocation(C,rownz_max,PETSC_NULL);CHKERRQ(ierr); 228569db28dcSHong Zhang ierr = MatMPIAIJSetPreallocation(C,rownz_max,PETSC_NULL,rownz_max,PETSC_NULL);CHKERRQ(ierr); 228669db28dcSHong Zhang } else { 228769db28dcSHong Zhang C = *matredundant; 228869db28dcSHong Zhang } 228969db28dcSHong Zhang 229069db28dcSHong Zhang /* insert local matrix entries */ 229169db28dcSHong Zhang rptr = sbuf_j; 229269db28dcSHong Zhang cols = sbuf_j + rend-rstart + 1; 229369db28dcSHong Zhang vals = sbuf_a; 229469db28dcSHong Zhang for (i=0; i<rend-rstart; i++){ 229569db28dcSHong Zhang row = i + rstart; 229669db28dcSHong Zhang ncols = rptr[i+1] - rptr[i]; 229769db28dcSHong Zhang ierr = MatSetValues(C,1,&row,ncols,cols,vals,INSERT_VALUES);CHKERRQ(ierr); 229869db28dcSHong Zhang vals += ncols; 229969db28dcSHong Zhang cols += ncols; 230069db28dcSHong Zhang } 230169db28dcSHong Zhang /* insert received matrix entries */ 230269db28dcSHong Zhang for (imdex=0; imdex<nrecvs; imdex++){ 230369db28dcSHong Zhang rstart = rowrange[recv_rank[imdex]]; 230469db28dcSHong Zhang rend = rowrange[recv_rank[imdex]+1]; 230569db28dcSHong Zhang rptr = rbuf_j[imdex]; 230669db28dcSHong Zhang cols = rbuf_j[imdex] + rend-rstart + 1; 230769db28dcSHong Zhang vals = rbuf_a[imdex]; 230869db28dcSHong Zhang for (i=0; i<rend-rstart; i++){ 230969db28dcSHong Zhang row = i + rstart; 231069db28dcSHong Zhang ncols = rptr[i+1] - rptr[i]; 
231169db28dcSHong Zhang ierr = MatSetValues(C,1,&row,ncols,cols,vals,INSERT_VALUES);CHKERRQ(ierr); 231269db28dcSHong Zhang vals += ncols; 231369db28dcSHong Zhang cols += ncols; 231469db28dcSHong Zhang } 231569db28dcSHong Zhang } 231669db28dcSHong Zhang ierr = MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 231769db28dcSHong Zhang ierr = MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 231869db28dcSHong Zhang ierr = MatGetSize(C,&M,&N);CHKERRQ(ierr); 2319d0f46423SBarry Smith if (M != mat->rmap->N || N != mat->cmap->N) SETERRQ2(PETSC_ERR_ARG_INCOMP,"redundant mat size %d != input mat size %d",M,mat->rmap->N); 232069db28dcSHong Zhang if (reuse == MAT_INITIAL_MATRIX){ 232169db28dcSHong Zhang PetscContainer container; 232269db28dcSHong Zhang *matredundant = C; 232369db28dcSHong Zhang /* create a supporting struct and attach it to C for reuse */ 232438f2d2fdSLisandro Dalcin ierr = PetscNewLog(C,Mat_Redundant,&redund);CHKERRQ(ierr); 232569db28dcSHong Zhang ierr = PetscContainerCreate(PETSC_COMM_SELF,&container);CHKERRQ(ierr); 232669db28dcSHong Zhang ierr = PetscContainerSetPointer(container,redund);CHKERRQ(ierr); 232769db28dcSHong Zhang ierr = PetscObjectCompose((PetscObject)C,"Mat_Redundant",(PetscObject)container);CHKERRQ(ierr); 232869db28dcSHong Zhang ierr = PetscContainerSetUserDestroy(container,PetscContainerDestroy_MatRedundant);CHKERRQ(ierr); 232969db28dcSHong Zhang 233069db28dcSHong Zhang redund->nzlocal = nzlocal; 233169db28dcSHong Zhang redund->nsends = nsends; 233269db28dcSHong Zhang redund->nrecvs = nrecvs; 233369db28dcSHong Zhang redund->send_rank = send_rank; 233469db28dcSHong Zhang redund->sbuf_nz = sbuf_nz; 233569db28dcSHong Zhang redund->sbuf_j = sbuf_j; 233669db28dcSHong Zhang redund->sbuf_a = sbuf_a; 233769db28dcSHong Zhang redund->rbuf_j = rbuf_j; 233869db28dcSHong Zhang redund->rbuf_a = rbuf_a; 233969db28dcSHong Zhang 234069db28dcSHong Zhang redund->MatDestroy = C->ops->destroy; 234169db28dcSHong Zhang C->ops->destroy = 
MatDestroy_MatRedundant; 234269db28dcSHong Zhang } 234369db28dcSHong Zhang PetscFunctionReturn(0); 234469db28dcSHong Zhang } 234569db28dcSHong Zhang 234603bc72f1SMatthew Knepley #undef __FUNCT__ 2347c91732d9SHong Zhang #define __FUNCT__ "MatGetRowMaxAbs_MPIAIJ" 2348c91732d9SHong Zhang PetscErrorCode MatGetRowMaxAbs_MPIAIJ(Mat A, Vec v, PetscInt idx[]) 2349c91732d9SHong Zhang { 2350c91732d9SHong Zhang Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 2351c91732d9SHong Zhang PetscErrorCode ierr; 2352c91732d9SHong Zhang PetscInt i,*idxb = 0; 2353c91732d9SHong Zhang PetscScalar *va,*vb; 2354c91732d9SHong Zhang Vec vtmp; 2355c91732d9SHong Zhang 2356c91732d9SHong Zhang PetscFunctionBegin; 2357c91732d9SHong Zhang ierr = MatGetRowMaxAbs(a->A,v,idx);CHKERRQ(ierr); 2358c91732d9SHong Zhang ierr = VecGetArray(v,&va);CHKERRQ(ierr); 2359c91732d9SHong Zhang if (idx) { 2360192daf7cSBarry Smith for (i=0; i<A->rmap->n; i++) { 2361d0f46423SBarry Smith if (PetscAbsScalar(va[i])) idx[i] += A->cmap->rstart; 2362c91732d9SHong Zhang } 2363c91732d9SHong Zhang } 2364c91732d9SHong Zhang 2365d0f46423SBarry Smith ierr = VecCreateSeq(PETSC_COMM_SELF,A->rmap->n,&vtmp);CHKERRQ(ierr); 2366c91732d9SHong Zhang if (idx) { 2367d0f46423SBarry Smith ierr = PetscMalloc(A->rmap->n*sizeof(PetscInt),&idxb);CHKERRQ(ierr); 2368c91732d9SHong Zhang } 2369c91732d9SHong Zhang ierr = MatGetRowMaxAbs(a->B,vtmp,idxb);CHKERRQ(ierr); 2370c91732d9SHong Zhang ierr = VecGetArray(vtmp,&vb);CHKERRQ(ierr); 2371c91732d9SHong Zhang 2372d0f46423SBarry Smith for (i=0; i<A->rmap->n; i++){ 2373c91732d9SHong Zhang if (PetscAbsScalar(va[i]) < PetscAbsScalar(vb[i])) { 2374c91732d9SHong Zhang va[i] = vb[i]; 2375c91732d9SHong Zhang if (idx) idx[i] = a->garray[idxb[i]]; 2376c91732d9SHong Zhang } 2377c91732d9SHong Zhang } 2378c91732d9SHong Zhang 2379c91732d9SHong Zhang ierr = VecRestoreArray(v,&va);CHKERRQ(ierr); 2380c91732d9SHong Zhang ierr = VecRestoreArray(vtmp,&vb);CHKERRQ(ierr); 2381c91732d9SHong Zhang if (idxb) { 2382c91732d9SHong Zhang ierr 
= PetscFree(idxb);CHKERRQ(ierr); 2383c91732d9SHong Zhang } 2384c91732d9SHong Zhang ierr = VecDestroy(vtmp);CHKERRQ(ierr); 2385c91732d9SHong Zhang PetscFunctionReturn(0); 2386c91732d9SHong Zhang } 2387c91732d9SHong Zhang 2388c91732d9SHong Zhang #undef __FUNCT__ 2389c87e5d42SMatthew Knepley #define __FUNCT__ "MatGetRowMinAbs_MPIAIJ" 2390c87e5d42SMatthew Knepley PetscErrorCode MatGetRowMinAbs_MPIAIJ(Mat A, Vec v, PetscInt idx[]) 2391c87e5d42SMatthew Knepley { 2392c87e5d42SMatthew Knepley Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 2393c87e5d42SMatthew Knepley PetscErrorCode ierr; 2394c87e5d42SMatthew Knepley PetscInt i,*idxb = 0; 2395c87e5d42SMatthew Knepley PetscScalar *va,*vb; 2396c87e5d42SMatthew Knepley Vec vtmp; 2397c87e5d42SMatthew Knepley 2398c87e5d42SMatthew Knepley PetscFunctionBegin; 2399c87e5d42SMatthew Knepley ierr = MatGetRowMinAbs(a->A,v,idx);CHKERRQ(ierr); 2400c87e5d42SMatthew Knepley ierr = VecGetArray(v,&va);CHKERRQ(ierr); 2401c87e5d42SMatthew Knepley if (idx) { 2402c87e5d42SMatthew Knepley for (i=0; i<A->cmap->n; i++) { 2403c87e5d42SMatthew Knepley if (PetscAbsScalar(va[i])) idx[i] += A->cmap->rstart; 2404c87e5d42SMatthew Knepley } 2405c87e5d42SMatthew Knepley } 2406c87e5d42SMatthew Knepley 2407c87e5d42SMatthew Knepley ierr = VecCreateSeq(PETSC_COMM_SELF,A->rmap->n,&vtmp);CHKERRQ(ierr); 2408c87e5d42SMatthew Knepley if (idx) { 2409c87e5d42SMatthew Knepley ierr = PetscMalloc(A->rmap->n*sizeof(PetscInt),&idxb);CHKERRQ(ierr); 2410c87e5d42SMatthew Knepley } 2411c87e5d42SMatthew Knepley ierr = MatGetRowMinAbs(a->B,vtmp,idxb);CHKERRQ(ierr); 2412c87e5d42SMatthew Knepley ierr = VecGetArray(vtmp,&vb);CHKERRQ(ierr); 2413c87e5d42SMatthew Knepley 2414c87e5d42SMatthew Knepley for (i=0; i<A->rmap->n; i++){ 2415c87e5d42SMatthew Knepley if (PetscAbsScalar(va[i]) > PetscAbsScalar(vb[i])) { 2416c87e5d42SMatthew Knepley va[i] = vb[i]; 2417c87e5d42SMatthew Knepley if (idx) idx[i] = a->garray[idxb[i]]; 2418c87e5d42SMatthew Knepley } 2419c87e5d42SMatthew Knepley } 
2420c87e5d42SMatthew Knepley 2421c87e5d42SMatthew Knepley ierr = VecRestoreArray(v,&va);CHKERRQ(ierr); 2422c87e5d42SMatthew Knepley ierr = VecRestoreArray(vtmp,&vb);CHKERRQ(ierr); 2423c87e5d42SMatthew Knepley if (idxb) { 2424c87e5d42SMatthew Knepley ierr = PetscFree(idxb);CHKERRQ(ierr); 2425c87e5d42SMatthew Knepley } 2426c87e5d42SMatthew Knepley ierr = VecDestroy(vtmp);CHKERRQ(ierr); 2427c87e5d42SMatthew Knepley PetscFunctionReturn(0); 2428c87e5d42SMatthew Knepley } 2429c87e5d42SMatthew Knepley 2430c87e5d42SMatthew Knepley #undef __FUNCT__ 243103bc72f1SMatthew Knepley #define __FUNCT__ "MatGetRowMin_MPIAIJ" 243203bc72f1SMatthew Knepley PetscErrorCode MatGetRowMin_MPIAIJ(Mat A, Vec v, PetscInt idx[]) 243303bc72f1SMatthew Knepley { 243403bc72f1SMatthew Knepley Mat_MPIAIJ *mat = (Mat_MPIAIJ *) A->data; 2435d0f46423SBarry Smith PetscInt n = A->rmap->n; 2436d0f46423SBarry Smith PetscInt cstart = A->cmap->rstart; 243703bc72f1SMatthew Knepley PetscInt *cmap = mat->garray; 243803bc72f1SMatthew Knepley PetscInt *diagIdx, *offdiagIdx; 243903bc72f1SMatthew Knepley Vec diagV, offdiagV; 244003bc72f1SMatthew Knepley PetscScalar *a, *diagA, *offdiagA; 244103bc72f1SMatthew Knepley PetscInt r; 244203bc72f1SMatthew Knepley PetscErrorCode ierr; 244303bc72f1SMatthew Knepley 244403bc72f1SMatthew Knepley PetscFunctionBegin; 244503bc72f1SMatthew Knepley ierr = PetscMalloc2(n,PetscInt,&diagIdx,n,PetscInt,&offdiagIdx);CHKERRQ(ierr); 2446e64afeacSLisandro Dalcin ierr = VecCreateSeq(((PetscObject)A)->comm, n, &diagV);CHKERRQ(ierr); 2447e64afeacSLisandro Dalcin ierr = VecCreateSeq(((PetscObject)A)->comm, n, &offdiagV);CHKERRQ(ierr); 244803bc72f1SMatthew Knepley ierr = MatGetRowMin(mat->A, diagV, diagIdx);CHKERRQ(ierr); 244903bc72f1SMatthew Knepley ierr = MatGetRowMin(mat->B, offdiagV, offdiagIdx);CHKERRQ(ierr); 245003bc72f1SMatthew Knepley ierr = VecGetArray(v, &a);CHKERRQ(ierr); 245103bc72f1SMatthew Knepley ierr = VecGetArray(diagV, &diagA);CHKERRQ(ierr); 245203bc72f1SMatthew Knepley ierr = 
VecGetArray(offdiagV, &offdiagA);CHKERRQ(ierr); 245303bc72f1SMatthew Knepley for(r = 0; r < n; ++r) { 2454028cd4eaSSatish Balay if (PetscAbsScalar(diagA[r]) <= PetscAbsScalar(offdiagA[r])) { 245503bc72f1SMatthew Knepley a[r] = diagA[r]; 245603bc72f1SMatthew Knepley idx[r] = cstart + diagIdx[r]; 245703bc72f1SMatthew Knepley } else { 245803bc72f1SMatthew Knepley a[r] = offdiagA[r]; 245903bc72f1SMatthew Knepley idx[r] = cmap[offdiagIdx[r]]; 246003bc72f1SMatthew Knepley } 246103bc72f1SMatthew Knepley } 246203bc72f1SMatthew Knepley ierr = VecRestoreArray(v, &a);CHKERRQ(ierr); 246303bc72f1SMatthew Knepley ierr = VecRestoreArray(diagV, &diagA);CHKERRQ(ierr); 246403bc72f1SMatthew Knepley ierr = VecRestoreArray(offdiagV, &offdiagA);CHKERRQ(ierr); 246503bc72f1SMatthew Knepley ierr = VecDestroy(diagV);CHKERRQ(ierr); 246603bc72f1SMatthew Knepley ierr = VecDestroy(offdiagV);CHKERRQ(ierr); 246703bc72f1SMatthew Knepley ierr = PetscFree2(diagIdx, offdiagIdx);CHKERRQ(ierr); 246803bc72f1SMatthew Knepley PetscFunctionReturn(0); 246903bc72f1SMatthew Knepley } 247003bc72f1SMatthew Knepley 24715494a064SHong Zhang #undef __FUNCT__ 2472c87e5d42SMatthew Knepley #define __FUNCT__ "MatGetRowMax_MPIAIJ" 2473c87e5d42SMatthew Knepley PetscErrorCode MatGetRowMax_MPIAIJ(Mat A, Vec v, PetscInt idx[]) 2474c87e5d42SMatthew Knepley { 2475c87e5d42SMatthew Knepley Mat_MPIAIJ *mat = (Mat_MPIAIJ *) A->data; 2476c87e5d42SMatthew Knepley PetscInt n = A->rmap->n; 2477c87e5d42SMatthew Knepley PetscInt cstart = A->cmap->rstart; 2478c87e5d42SMatthew Knepley PetscInt *cmap = mat->garray; 2479c87e5d42SMatthew Knepley PetscInt *diagIdx, *offdiagIdx; 2480c87e5d42SMatthew Knepley Vec diagV, offdiagV; 2481c87e5d42SMatthew Knepley PetscScalar *a, *diagA, *offdiagA; 2482c87e5d42SMatthew Knepley PetscInt r; 2483c87e5d42SMatthew Knepley PetscErrorCode ierr; 2484c87e5d42SMatthew Knepley 2485c87e5d42SMatthew Knepley PetscFunctionBegin; 2486c87e5d42SMatthew Knepley ierr = 
PetscMalloc2(n,PetscInt,&diagIdx,n,PetscInt,&offdiagIdx);CHKERRQ(ierr); 2487c87e5d42SMatthew Knepley ierr = VecCreateSeq(((PetscObject)A)->comm, n, &diagV);CHKERRQ(ierr); 2488c87e5d42SMatthew Knepley ierr = VecCreateSeq(((PetscObject)A)->comm, n, &offdiagV);CHKERRQ(ierr); 2489c87e5d42SMatthew Knepley ierr = MatGetRowMax(mat->A, diagV, diagIdx);CHKERRQ(ierr); 2490c87e5d42SMatthew Knepley ierr = MatGetRowMax(mat->B, offdiagV, offdiagIdx);CHKERRQ(ierr); 2491c87e5d42SMatthew Knepley ierr = VecGetArray(v, &a);CHKERRQ(ierr); 2492c87e5d42SMatthew Knepley ierr = VecGetArray(diagV, &diagA);CHKERRQ(ierr); 2493c87e5d42SMatthew Knepley ierr = VecGetArray(offdiagV, &offdiagA);CHKERRQ(ierr); 2494c87e5d42SMatthew Knepley for(r = 0; r < n; ++r) { 2495c87e5d42SMatthew Knepley if (PetscAbsScalar(diagA[r]) >= PetscAbsScalar(offdiagA[r])) { 2496c87e5d42SMatthew Knepley a[r] = diagA[r]; 2497c87e5d42SMatthew Knepley idx[r] = cstart + diagIdx[r]; 2498c87e5d42SMatthew Knepley } else { 2499c87e5d42SMatthew Knepley a[r] = offdiagA[r]; 2500c87e5d42SMatthew Knepley idx[r] = cmap[offdiagIdx[r]]; 2501c87e5d42SMatthew Knepley } 2502c87e5d42SMatthew Knepley } 2503c87e5d42SMatthew Knepley ierr = VecRestoreArray(v, &a);CHKERRQ(ierr); 2504c87e5d42SMatthew Knepley ierr = VecRestoreArray(diagV, &diagA);CHKERRQ(ierr); 2505c87e5d42SMatthew Knepley ierr = VecRestoreArray(offdiagV, &offdiagA);CHKERRQ(ierr); 2506c87e5d42SMatthew Knepley ierr = VecDestroy(diagV);CHKERRQ(ierr); 2507c87e5d42SMatthew Knepley ierr = VecDestroy(offdiagV);CHKERRQ(ierr); 2508c87e5d42SMatthew Knepley ierr = PetscFree2(diagIdx, offdiagIdx);CHKERRQ(ierr); 2509c87e5d42SMatthew Knepley PetscFunctionReturn(0); 2510c87e5d42SMatthew Knepley } 2511c87e5d42SMatthew Knepley 2512c87e5d42SMatthew Knepley #undef __FUNCT__ 2513829201f2SHong Zhang #define __FUNCT__ "MatGetSeqNonzerostructure_MPIAIJ" 2514829201f2SHong Zhang PetscErrorCode MatGetSeqNonzerostructure_MPIAIJ(Mat mat,Mat *newmat[]) 25155494a064SHong Zhang { 25165494a064SHong Zhang 
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* gather the nonzero structure of the entire parallel matrix onto each
     process without copying the numerical values */
  ierr = MatGetSubMatrix_MPIAIJ_All(mat,MAT_DO_NOT_GET_VALUES,MAT_INITIAL_MATRIX,newmat);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------*/
/*
   Operation table for MATMPIAIJ: entry k is the implementation of slot k of
   struct _MatOps (the numbers in the comments are slot indices); a 0 entry
   marks an operation this matrix type does not support.  The table is
   positional -- do not reorder entries.
*/
static struct _MatOps MatOps_Values = {MatSetValues_MPIAIJ,
       MatGetRow_MPIAIJ,
       MatRestoreRow_MPIAIJ,
       MatMult_MPIAIJ,
/* 4*/ MatMultAdd_MPIAIJ,
       MatMultTranspose_MPIAIJ,
       MatMultTransposeAdd_MPIAIJ,
#ifdef PETSC_HAVE_PBGL
       MatSolve_MPIAIJ,
#else
       0,
#endif
       0,
       0,
/*10*/ 0,
       0,
       0,
       MatRelax_MPIAIJ,
       MatTranspose_MPIAIJ,
/*15*/ MatGetInfo_MPIAIJ,
       MatEqual_MPIAIJ,
       MatGetDiagonal_MPIAIJ,
       MatDiagonalScale_MPIAIJ,
       MatNorm_MPIAIJ,
/*20*/ MatAssemblyBegin_MPIAIJ,
       MatAssemblyEnd_MPIAIJ,
       0,
       MatSetOption_MPIAIJ,
       MatZeroEntries_MPIAIJ,
/*25*/ MatZeroRows_MPIAIJ,
       0,
#ifdef PETSC_HAVE_PBGL
       0,
#else
       0,
#endif
       0,
       0,
/*30*/ MatSetUpPreallocation_MPIAIJ,
#ifdef PETSC_HAVE_PBGL
       0,
#else
       0,
#endif
       0,
       0,
       0,
/*35*/ MatDuplicate_MPIAIJ,
       0,
       0,
       0,
       0,
/*40*/ MatAXPY_MPIAIJ,
       MatGetSubMatrices_MPIAIJ,
       MatIncreaseOverlap_MPIAIJ,
       MatGetValues_MPIAIJ,
       MatCopy_MPIAIJ,
/*45*/ MatGetRowMax_MPIAIJ,
       MatScale_MPIAIJ,
       0,
       0,
       0,
/*50*/ MatSetBlockSize_MPIAIJ,
       0,
       0,
       0,
       0,
/*55*/ MatFDColoringCreate_MPIAIJ,
       0,
       MatSetUnfactored_MPIAIJ,
       MatPermute_MPIAIJ,
       0,
/*60*/ MatGetSubMatrix_MPIAIJ,
       MatDestroy_MPIAIJ,
       MatView_MPIAIJ,
       0,
       0,
/*65*/ 0,
       0,
       0,
       0,
       0,
/*70*/ MatGetRowMaxAbs_MPIAIJ,
       MatGetRowMinAbs_MPIAIJ,
       0,
       MatSetColoring_MPIAIJ,
#if defined(PETSC_HAVE_ADIC)
       MatSetValuesAdic_MPIAIJ,
#else
       0,
#endif
       MatSetValuesAdifor_MPIAIJ,
/*75*/ 0,
       0,
       0,
       0,
       0,
/*80*/ 0,
       0,
       0,
/*84*/ MatLoad_MPIAIJ,
       0,
       0,
       0,
       0,
       0,
/*90*/ MatMatMult_MPIAIJ_MPIAIJ,
       MatMatMultSymbolic_MPIAIJ_MPIAIJ,
       MatMatMultNumeric_MPIAIJ_MPIAIJ,
       MatPtAP_Basic,
       MatPtAPSymbolic_MPIAIJ,
/*95*/ MatPtAPNumeric_MPIAIJ,
       0,
       0,
       0,
       0,
/*100*/0,
       MatPtAPSymbolic_MPIAIJ_MPIAIJ,
       MatPtAPNumeric_MPIAIJ_MPIAIJ,
       MatConjugate_MPIAIJ,
       0,
/*105*/MatSetValuesRow_MPIAIJ,
       MatRealPart_MPIAIJ,
       MatImaginaryPart_MPIAIJ,
       0,
       0,
/*110*/0,
       MatGetRedundantMatrix_MPIAIJ,
       MatGetRowMin_MPIAIJ,
       0,
       0,
/*115*/MatGetSeqNonzerostructure_MPIAIJ};

/* ----------------------------------------------------------------------------------------*/

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatStoreValues_MPIAIJ"
/*
   MatStoreValues_MPIAIJ - Saves the numerical values of the matrix by
   forwarding to MatStoreValues() on the diagonal (A) and off-diagonal (B)
   sequential blocks.
*/
PetscErrorCode PETSCMAT_DLLEXPORT MatStoreValues_MPIAIJ(Mat mat)
{
  Mat_MPIAIJ     *aij = (Mat_MPIAIJ *)mat->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatStoreValues(aij->A);CHKERRQ(ierr);
  ierr = MatStoreValues(aij->B);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatRetrieveValues_MPIAIJ"
/*
   MatRetrieveValues_MPIAIJ - Restores values previously saved with
   MatStoreValues_MPIAIJ, again delegating to the two sequential blocks.
*/
PetscErrorCode PETSCMAT_DLLEXPORT MatRetrieveValues_MPIAIJ(Mat mat)
{
  Mat_MPIAIJ     *aij = (Mat_MPIAIJ *)mat->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatRetrieveValues(aij->A);CHKERRQ(ierr);
  ierr = MatRetrieveValues(aij->B);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

#include "petscpc.h"
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatMPIAIJSetPreallocation_MPIAIJ"
/*
   MatMPIAIJSetPreallocation_MPIAIJ - Preallocates the MPIAIJ matrix B by
   creating its two sequential building blocks:
     b->A : local-rows x local-columns   (diagonal block)
     b->B : local-rows x GLOBAL-columns  (off-diagonal block, compressed later)
   d_nz/d_nnz preallocate b->A, o_nz/o_nnz preallocate b->B.
   PETSC_DEFAULT/PETSC_DECIDE fall back to 5 (diagonal) and 2 (off-diagonal)
   nonzeros per row; negative counts are rejected.
*/
PetscErrorCode PETSCMAT_DLLEXPORT MatMPIAIJSetPreallocation_MPIAIJ(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  Mat_MPIAIJ     *b;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  B->preallocated = PETSC_TRUE;
  if (d_nz == PETSC_DEFAULT || d_nz == PETSC_DECIDE) d_nz = 5;
  if (o_nz == PETSC_DEFAULT || o_nz == PETSC_DECIDE) o_nz = 2;
  if (d_nz < 0) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"d_nz cannot be less than 0: value %D",d_nz);
  if (o_nz < 0) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"o_nz cannot be less than 0: value %D",o_nz);

  /* the layouts must be set up before B->rmap->n / B->cmap->n are valid below */
  ierr = PetscMapSetBlockSize(B->rmap,1);CHKERRQ(ierr);
  ierr = PetscMapSetBlockSize(B->cmap,1);CHKERRQ(ierr);
  ierr = PetscMapSetUp(B->rmap);CHKERRQ(ierr);
  ierr = PetscMapSetUp(B->cmap);CHKERRQ(ierr);
  if (d_nnz) {
    for (i=0; i<B->rmap->n; i++) {
      if (d_nnz[i] < 0) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"d_nnz cannot be less than 0: local row %D value %D",i,d_nnz[i]);
    }
  }
  if (o_nnz) {
    for (i=0; i<B->rmap->n; i++) {
      if (o_nnz[i] < 0) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"o_nnz cannot be less than 0: local row %D value %D",i,o_nnz[i]);
    }
  }
  b = (Mat_MPIAIJ*)B->data;

  /* Explicitly create 2 MATSEQAIJ matrices. */
  ierr = MatCreate(PETSC_COMM_SELF,&b->A);CHKERRQ(ierr);
  ierr = MatSetSizes(b->A,B->rmap->n,B->cmap->n,B->rmap->n,B->cmap->n);CHKERRQ(ierr);
  ierr = MatSetType(b->A,MATSEQAIJ);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(B,b->A);CHKERRQ(ierr);
  ierr = MatCreate(PETSC_COMM_SELF,&b->B);CHKERRQ(ierr);
  /* off-diagonal block spans all global columns until assembly compresses it */
  ierr = MatSetSizes(b->B,B->rmap->n,B->cmap->N,B->rmap->n,B->cmap->N);CHKERRQ(ierr);
  ierr = MatSetType(b->B,MATSEQAIJ);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(B,b->B);CHKERRQ(ierr);

  ierr = MatSeqAIJSetPreallocation(b->A,d_nz,d_nnz);CHKERRQ(ierr);
  ierr = MatSeqAIJSetPreallocation(b->B,o_nz,o_nnz);CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "MatDuplicate_MPIAIJ"
/*
   MatDuplicate_MPIAIJ - Duplicates an MPIAIJ matrix; cpvalues controls whether
   numerical values are copied (continues on the following lines).
*/
PetscErrorCode MatDuplicate_MPIAIJ(Mat matin,MatDuplicateOption cpvalues,Mat *newmat)
{
  Mat            mat;
  Mat_MPIAIJ     *a,*oldmat = (Mat_MPIAIJ*)matin->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  *newmat = 0;
  ierr = MatCreate(((PetscObject)matin)->comm,&mat);CHKERRQ(ierr);
  ierr = MatSetSizes(mat,matin->rmap->n,matin->cmap->n,matin->rmap->N,matin->cmap->N);CHKERRQ(ierr);
  ierr = MatSetType(mat,((PetscObject)matin)->type_name);CHKERRQ(ierr);
  ierr = PetscMemcpy(mat->ops,matin->ops,sizeof(struct
_MatOps));CHKERRQ(ierr);
  /* continuation of MatDuplicate_MPIAIJ: copy flags, layouts and the cached
     parallel data structures from the old matrix into the duplicate */
  a    = (Mat_MPIAIJ*)mat->data;

  mat->factor       = matin->factor;
  mat->rmap->bs     = matin->rmap->bs;
  mat->assembled    = PETSC_TRUE;
  mat->insertmode   = NOT_SET_VALUES;
  mat->preallocated = PETSC_TRUE;

  a->size         = oldmat->size;
  a->rank         = oldmat->rank;
  a->donotstash   = oldmat->donotstash;
  a->roworiented  = oldmat->roworiented;
  /* row caches are rebuilt lazily by MatGetRow on the new matrix */
  a->rowindices   = 0;
  a->rowvalues    = 0;
  a->getrowactive = PETSC_FALSE;

  ierr = PetscMapCopy(((PetscObject)mat)->comm,matin->rmap,mat->rmap);CHKERRQ(ierr);
  ierr = PetscMapCopy(((PetscObject)mat)->comm,matin->cmap,mat->cmap);CHKERRQ(ierr);

  ierr = MatStashCreate_Private(((PetscObject)matin)->comm,1,&mat->stash);CHKERRQ(ierr);
  if (oldmat->colmap) {
#if defined (PETSC_USE_CTABLE)
    ierr = PetscTableCreateCopy(oldmat->colmap,&a->colmap);CHKERRQ(ierr);
#else
    /* without ctable the colmap is a dense global-to-local array of length N */
    ierr = PetscMalloc((mat->cmap->N)*sizeof(PetscInt),&a->colmap);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory(mat,(mat->cmap->N)*sizeof(PetscInt));CHKERRQ(ierr);
    ierr = PetscMemcpy(a->colmap,oldmat->colmap,(mat->cmap->N)*sizeof(PetscInt));CHKERRQ(ierr);
#endif
  } else a->colmap = 0;
  if (oldmat->garray) {
    PetscInt len;
    len  = oldmat->B->cmap->n;
    ierr = PetscMalloc((len+1)*sizeof(PetscInt),&a->garray);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory(mat,len*sizeof(PetscInt));CHKERRQ(ierr);
    if (len) { ierr = PetscMemcpy(a->garray,oldmat->garray,len*sizeof(PetscInt));CHKERRQ(ierr); }
  } else a->garray = 0;

  /* duplicate the communication machinery and the two sequential blocks */
  ierr = VecDuplicate(oldmat->lvec,&a->lvec);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(mat,a->lvec);CHKERRQ(ierr);
  ierr = VecScatterCopy(oldmat->Mvctx,&a->Mvctx);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(mat,a->Mvctx);CHKERRQ(ierr);
  ierr = MatDuplicate(oldmat->A,cpvalues,&a->A);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(mat,a->A);CHKERRQ(ierr);
  ierr = MatDuplicate(oldmat->B,cpvalues,&a->B);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(mat,a->B);CHKERRQ(ierr);
  ierr = PetscFListDuplicate(((PetscObject)matin)->qlist,&((PetscObject)mat)->qlist);CHKERRQ(ierr);
  *newmat = mat;
  PetscFunctionReturn(0);
}

#include "petscsys.h"

#undef __FUNCT__
#define __FUNCT__ "MatLoad_MPIAIJ"
/* Loads an MPIAIJ matrix from a binary viewer (function continues below). */
PetscErrorCode MatLoad_MPIAIJ(PetscViewer viewer, const MatType type,Mat *newmat)
{
  Mat            A;
  PetscScalar    *vals,*svals;
  MPI_Comm       comm = ((PetscObject)viewer)->comm;
  MPI_Status     status;
  PetscErrorCode ierr;
  PetscMPIInt    rank,size,tag = ((PetscObject)viewer)->tag,mpicnt,mpimaxnz;
  PetscInt       i,nz,j,rstart,rend,mmax,maxnz;
#undef __FUNCT__
#define __FUNCT__ "MatLoad_MPIAIJ"
/*
   Loads a matrix from a PETSc binary viewer into a parallel AIJ matrix.

   Protocol: rank 0 opens the binary file, reads the header, the row lengths,
   the column indices and the numerical values, keeps its own portion and
   ships each other process its portion with MPI_Send; every other rank posts
   the matching MPI_Recv.  Rows are distributed in contiguous blocks of
   m = M/size + ((M % size) > rank) rows per process.
*/
PetscErrorCode MatLoad_MPIAIJ(PetscViewer viewer, const MatType type,Mat *newmat)
{
  Mat            A;
  PetscScalar    *vals,*svals;                 /* full value buffer / cursor into it */
  MPI_Comm       comm = ((PetscObject)viewer)->comm;
  MPI_Status     status;
  PetscErrorCode ierr;
  PetscMPIInt    rank,size,tag = ((PetscObject)viewer)->tag,mpicnt,mpimaxnz;
  PetscInt       i,nz,j,rstart,rend,mmax,maxnz;
  PetscInt       header[4],*rowlengths = 0,M,N,m,*cols;
  PetscInt       *ourlens = PETSC_NULL,*procsnz = PETSC_NULL,*offlens = PETSC_NULL,jj,*mycols,*smycols;
  PetscInt       cend,cstart,n,*rowners;
  int            fd;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  /* only rank 0 touches the file; it validates the magic cookie */
  if (!rank) {
    ierr = PetscViewerBinaryGetDescriptor(viewer,&fd);CHKERRQ(ierr);
    ierr = PetscBinaryRead(fd,(char *)header,4,PETSC_INT);CHKERRQ(ierr);
    if (header[0] != MAT_FILE_COOKIE) SETERRQ(PETSC_ERR_FILE_UNEXPECTED,"not matrix object");
  }

  /* header[1..3] = M, N, total nnz; broadcast the sizes to everyone */
  ierr = MPI_Bcast(header+1,3,MPIU_INT,0,comm);CHKERRQ(ierr);
  M = header[1]; N = header[2];
  /* determine ownership of all rows */
  m = M/size + ((M % size) > rank);
  ierr = PetscMalloc((size+1)*sizeof(PetscInt),&rowners);CHKERRQ(ierr);
  ierr = MPI_Allgather(&m,1,MPIU_INT,rowners+1,1,MPIU_INT,comm);CHKERRQ(ierr);

  /* First process needs enough room for process with most rows */
  if (!rank) {
    mmax = rowners[1];
    /* NOTE(review): loop bound is i<size, so rowners[size] (the last rank's
       row count) is never examined; this is safe only because low ranks
       always receive the extra rows from M%size — confirm */
    for (i=2; i<size; i++) {
      mmax = PetscMax(mmax,rowners[i]);
    }
  } else mmax = m;

  /* prefix-sum the gathered counts into ownership offsets */
  rowners[0] = 0;
  for (i=2; i<=size; i++) {
    rowners[i] += rowners[i-1];
  }
  rstart = rowners[rank];
  rend   = rowners[rank+1];

  /* distribute row lengths to all processors */
  ierr = PetscMalloc2(mmax,PetscInt,&ourlens,mmax,PetscInt,&offlens);CHKERRQ(ierr);
  if (!rank) {
    ierr = PetscBinaryRead(fd,ourlens,m,PETSC_INT);CHKERRQ(ierr);
    /* rowlengths is a scratch buffer reused for every other rank's lengths;
       assumes no rank owns more rows than rank 0 — TODO confirm (see mmax note) */
    ierr = PetscMalloc(m*sizeof(PetscInt),&rowlengths);CHKERRQ(ierr);
    ierr = PetscMalloc(size*sizeof(PetscInt),&procsnz);CHKERRQ(ierr);
    ierr = PetscMemzero(procsnz,size*sizeof(PetscInt));CHKERRQ(ierr);
    for (j=0; j<m; j++) {
      procsnz[0] += ourlens[j];
    }
    for (i=1; i<size; i++) {
      ierr = PetscBinaryRead(fd,rowlengths,rowners[i+1]-rowners[i],PETSC_INT);CHKERRQ(ierr);
      /* calculate the number of nonzeros on each processor */
      for (j=0; j<rowners[i+1]-rowners[i]; j++) {
        procsnz[i] += rowlengths[j];
      }
      mpicnt = PetscMPIIntCast(rowners[i+1]-rowners[i]);
      ierr   = MPI_Send(rowlengths,mpicnt,MPIU_INT,i,tag,comm);CHKERRQ(ierr);
    }
    ierr = PetscFree(rowlengths);CHKERRQ(ierr);
  } else {
    /* NOTE(review): the CHKERRQ here checks the *previous* ierr, since
       PetscMPIIntCast() returns a value, not an error code — harmless but odd */
    mpicnt = PetscMPIIntCast(m);CHKERRQ(ierr);
    ierr   = MPI_Recv(ourlens,mpicnt,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr);
  }

  if (!rank) {
    /* determine max buffer needed and allocate it */
    maxnz = 0;
    for (i=0; i<size; i++) {
      maxnz = PetscMax(maxnz,procsnz[i]);
    }
    ierr = PetscMalloc(maxnz*sizeof(PetscInt),&cols);CHKERRQ(ierr);

    /* read in my part of the matrix column indices */
    nz   = procsnz[0];
    ierr = PetscMalloc(nz*sizeof(PetscInt),&mycols);CHKERRQ(ierr);
    ierr = PetscBinaryRead(fd,mycols,nz,PETSC_INT);CHKERRQ(ierr);

    /* read in every one elses and ship off */
    for (i=1; i<size; i++) {
      nz     = procsnz[i];
      ierr   = PetscBinaryRead(fd,cols,nz,PETSC_INT);CHKERRQ(ierr);
      mpicnt = PetscMPIIntCast(nz);
      ierr   = MPI_Send(cols,mpicnt,MPIU_INT,i,tag,comm);CHKERRQ(ierr);
    }
    ierr = PetscFree(cols);CHKERRQ(ierr);
  } else {
    /* determine buffer space needed for message */
    nz = 0;
    for (i=0; i<m; i++) {
      nz += ourlens[i];
    }
    ierr = PetscMalloc(nz*sizeof(PetscInt),&mycols);CHKERRQ(ierr);

    /* receive message of column indices*/
    mpicnt = PetscMPIIntCast(nz);CHKERRQ(ierr);
    ierr   = MPI_Recv(mycols,mpicnt,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr);
    /* verify we received exactly the count the row lengths promised */
    ierr   = MPI_Get_count(&status,MPIU_INT,&mpimaxnz);CHKERRQ(ierr);
    if (mpimaxnz == MPI_UNDEFINED) {SETERRQ1(PETSC_ERR_LIB,"MPI_Get_count() returned MPI_UNDEFINED, expected %d",mpicnt);}
    else if (mpimaxnz < 0) {SETERRQ2(PETSC_ERR_LIB,"MPI_Get_count() returned impossible negative value %d, expected %d",mpimaxnz,mpicnt);}
    else if (mpimaxnz != mpicnt) {SETERRQ2(PETSC_ERR_FILE_UNEXPECTED,"something is wrong with file: expected %d received %d",mpicnt,mpimaxnz);}
  }

  /* determine column ownership if matrix is not square */
  if (N != M) {
    n    = N/size + ((N % size) > rank);
    ierr = MPI_Scan(&n,&cend,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
    cstart = cend - n;
  } else {
    cstart = rstart;
    cend   = rend;
    n      = cend - cstart;
  }

  /* loop over local rows, determining number of off diagonal entries */
  ierr = PetscMemzero(offlens,m*sizeof(PetscInt));CHKERRQ(ierr);
  jj   = 0;
  for (i=0; i<m; i++) {
    for (j=0; j<ourlens[i]; j++) {
      if (mycols[jj] < cstart || mycols[jj] >= cend) offlens[i]++;
      jj++;
    }
  }

  /* create our matrix: temporarily split ourlens into diagonal-only counts
     for preallocation, then restore the totals for the insertion loops */
  for (i=0; i<m; i++) {
    ourlens[i] -= offlens[i];
  }
  ierr = MatCreate(comm,&A);CHKERRQ(ierr);
  ierr = MatSetSizes(A,m,n,M,N);CHKERRQ(ierr);
  ierr = MatSetType(A,type);CHKERRQ(ierr);
  ierr = MatMPIAIJSetPreallocation(A,0,ourlens,0,offlens);CHKERRQ(ierr);

  for (i=0; i<m; i++) {
    ourlens[i] += offlens[i];
  }

  if (!rank) {
    ierr = PetscMalloc((maxnz+1)*sizeof(PetscScalar),&vals);CHKERRQ(ierr);

    /* read in my part of the matrix numerical values */
    nz   = procsnz[0];
    ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr);

    /* insert into matrix, one owned row at a time */
    jj      = rstart;
    smycols = mycols;
    svals   = vals;
    for (i=0; i<m; i++) {
      ierr = MatSetValues_MPIAIJ(A,1,&jj,ourlens[i],smycols,svals,INSERT_VALUES);CHKERRQ(ierr);
      smycols += ourlens[i];
      svals   += ourlens[i];
      jj++;
    }

    /* read in other processors and ship out */
    for (i=1; i<size; i++) {
      nz     = procsnz[i];
      ierr   = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr);
      mpicnt = PetscMPIIntCast(nz);
      /* values use the matrix's own tag, distinct from the viewer tag used above */
      ierr   = MPI_Send(vals,mpicnt,MPIU_SCALAR,i,((PetscObject)A)->tag,comm);CHKERRQ(ierr);
    }
    ierr = PetscFree(procsnz);CHKERRQ(ierr);
  } else {
    /* receive numeric values; nz still holds this rank's total nonzero count */
    ierr = PetscMalloc((nz+1)*sizeof(PetscScalar),&vals);CHKERRQ(ierr);

    /* receive message of values*/
    mpicnt = PetscMPIIntCast(nz);
    ierr   = MPI_Recv(vals,mpicnt,MPIU_SCALAR,0,((PetscObject)A)->tag,comm,&status);CHKERRQ(ierr);
    ierr   = MPI_Get_count(&status,MPIU_SCALAR,&mpimaxnz);CHKERRQ(ierr);
    if (mpimaxnz == MPI_UNDEFINED) {SETERRQ1(PETSC_ERR_LIB,"MPI_Get_count() returned MPI_UNDEFINED, expected %d",mpicnt);}
    else if (mpimaxnz < 0) {SETERRQ2(PETSC_ERR_LIB,"MPI_Get_count() returned impossible negative value %d, expected %d",mpimaxnz,mpicnt);}
    else if (mpimaxnz != mpicnt) {SETERRQ2(PETSC_ERR_FILE_UNEXPECTED,"something is wrong with file: expected %d received %d",mpicnt,mpimaxnz);}

    /* insert into matrix */
    jj      = rstart;
    smycols = mycols;
    svals   = vals;
    for (i=0; i<m; i++) {
      ierr = MatSetValues_MPIAIJ(A,1,&jj,ourlens[i],smycols,svals,INSERT_VALUES);CHKERRQ(ierr);
      smycols += ourlens[i];
      svals   += ourlens[i];
      jj++;
    }
  }
  ierr = PetscFree2(ourlens,offlens);CHKERRQ(ierr);
  ierr = PetscFree(vals);CHKERRQ(ierr);
  ierr = PetscFree(mycols);CHKERRQ(ierr);
  ierr = PetscFree(rowners);CHKERRQ(ierr);

  ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  *newmat = A;
  PetscFunctionReturn(0);
}
#undef __FUNCT__
#define __FUNCT__ "MatGetSubMatrix_MPIAIJ"
/*
    Not great since it makes two copies of the submatrix, first an SeqAIJ
  in local and then by concatenating the local matrices the end result.
  Writing it directly would be much like MatGetSubMatrices_MPIAIJ()

    The sequential per-process submatrix is stashed on the result with
  PetscObjectCompose() under the name "SubMatrix" so that a later call with
  MAT_REUSE_MATRIX can retrieve and refill it without reallocating.
*/
PetscErrorCode MatGetSubMatrix_MPIAIJ(Mat mat,IS isrow,IS iscol,PetscInt csize,MatReuse call,Mat *newmat)
{
  PetscErrorCode ierr;
  PetscMPIInt    rank,size;
  PetscInt       i,m,n,rstart,row,rend,nz,*cwork,j;
  PetscInt       *ii,*jj,nlocal,*dlens,*olens,dlen,olen,jend,mglobal;
  Mat            *local,M,Mreuse;
  MatScalar      *vwork,*aa;
  MPI_Comm       comm = ((PetscObject)mat)->comm;
  Mat_SeqAIJ     *aij;


  PetscFunctionBegin;
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);

  if (call == MAT_REUSE_MATRIX) {
    /* recover the sequential submatrix saved by a previous MAT_INITIAL_MATRIX call */
    ierr = PetscObjectQuery((PetscObject)*newmat,"SubMatrix",(PetscObject *)&Mreuse);CHKERRQ(ierr);
    if (!Mreuse) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Submatrix passed in was not used before, cannot reuse");
    local = &Mreuse;
    ierr  = MatGetSubMatrices(mat,1,&isrow,&iscol,MAT_REUSE_MATRIX,&local);CHKERRQ(ierr);
  } else {
    ierr   = MatGetSubMatrices(mat,1,&isrow,&iscol,MAT_INITIAL_MATRIX,&local);CHKERRQ(ierr);
    Mreuse = *local;
    ierr   = PetscFree(local);CHKERRQ(ierr);
  }

  /*
      m - number of local rows
      n - number of columns (same on all processors)
      rstart - first row in new global matrix generated
  */
  ierr = MatGetSize(Mreuse,&m,&n);CHKERRQ(ierr);
  if (call == MAT_INITIAL_MATRIX) {
    aij = (Mat_SeqAIJ*)(Mreuse)->data;
    ii  = aij->i;
    jj  = aij->j;

    /*
        Determine the number of non-zeros in the diagonal and off-diagonal
        portions of the matrix in order to do correct preallocation
    */

    /* first get start and end of "diagonal" columns */
    if (csize == PETSC_DECIDE) {
      ierr = ISGetSize(isrow,&mglobal);CHKERRQ(ierr);
      if (mglobal == n) { /* square matrix */
        nlocal = m;
      } else {
        nlocal = n/size + ((n % size) > rank);
      }
    } else {
      nlocal = csize;
    }
    /* prefix-sum of local column counts gives this rank's [rstart,rend) column range */
    ierr   = MPI_Scan(&nlocal,&rend,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
    rstart = rend - nlocal;
    if (rank == size - 1 && rend != n) {
      SETERRQ2(PETSC_ERR_ARG_SIZ,"Local column sizes %D do not add up to total number of columns %D",rend,n);
    }

    /* next, compute all the lengths: dlens/olens share one allocation */
    ierr  = PetscMalloc((2*m+1)*sizeof(PetscInt),&dlens);CHKERRQ(ierr);
    olens = dlens + m;
    for (i=0; i<m; i++) {
      jend = ii[i+1] - ii[i];
      olen = 0;
      dlen = 0;
      for (j=0; j<jend; j++) {
        if (*jj < rstart || *jj >= rend) olen++;
        else dlen++;
        jj++;
      }
      olens[i] = olen;
      dlens[i] = dlen;
    }
    ierr = MatCreate(comm,&M);CHKERRQ(ierr);
    ierr = MatSetSizes(M,m,nlocal,PETSC_DECIDE,n);CHKERRQ(ierr);
    ierr = MatSetType(M,((PetscObject)mat)->type_name);CHKERRQ(ierr);
    ierr = MatMPIAIJSetPreallocation(M,0,dlens,0,olens);CHKERRQ(ierr);
    ierr = PetscFree(dlens);CHKERRQ(ierr);
  } else {
    PetscInt ml,nl;

    M    = *newmat;
    ierr = MatGetLocalSize(M,&ml,&nl);CHKERRQ(ierr);
    if (ml != m) SETERRQ(PETSC_ERR_ARG_SIZ,"Previous matrix must be same size/layout as request");
    ierr = MatZeroEntries(M);CHKERRQ(ierr);
    /*
         The next two lines are needed so we may call MatSetValues_MPIAIJ() below directly,
       rather than the slower MatSetValues().
    */
    M->was_assembled = PETSC_TRUE;
    M->assembled     = PETSC_FALSE;
  }
  /* copy every row of the sequential submatrix into the parallel result */
  ierr = MatGetOwnershipRange(M,&rstart,&rend);CHKERRQ(ierr);
  aij  = (Mat_SeqAIJ*)(Mreuse)->data;
  ii   = aij->i;
  jj   = aij->j;
  aa   = aij->a;
  for (i=0; i<m; i++) {
    row   = rstart + i;
    nz    = ii[i+1] - ii[i];
    cwork = jj;     jj += nz;
    vwork = aa;     aa += nz;
    ierr  = MatSetValues_MPIAIJ(M,1,&row,nz,cwork,vwork,INSERT_VALUES);CHKERRQ(ierr);
  }

  ierr = MatAssemblyBegin(M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  *newmat = M;

  /* save submatrix used in processor for next request; the dereference hands
     ownership of Mreuse to the composed reference */
  if (call == MAT_INITIAL_MATRIX) {
    ierr = PetscObjectCompose((PetscObject)M,"SubMatrix",(PetscObject)Mreuse);CHKERRQ(ierr);
    ierr = PetscObjectDereference((PetscObject)Mreuse);CHKERRQ(ierr);
  }

  PetscFunctionReturn(0);
}
*d_nnz,*o_nnz,nnz_max = 0,rstart,ii; 3143ccd8e176SBarry Smith const PetscInt *JJ; 3144ccd8e176SBarry Smith PetscScalar *values; 3145ccd8e176SBarry Smith PetscErrorCode ierr; 3146ccd8e176SBarry Smith 3147ccd8e176SBarry Smith PetscFunctionBegin; 3148b7940d39SSatish Balay if (Ii[0]) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Ii[0] must be 0 it is %D",Ii[0]); 3149899cda47SBarry Smith 31507408324eSLisandro Dalcin ierr = PetscMapSetBlockSize(B->rmap,1);CHKERRQ(ierr); 31517408324eSLisandro Dalcin ierr = PetscMapSetBlockSize(B->cmap,1);CHKERRQ(ierr); 3152d0f46423SBarry Smith ierr = PetscMapSetUp(B->rmap);CHKERRQ(ierr); 3153d0f46423SBarry Smith ierr = PetscMapSetUp(B->cmap);CHKERRQ(ierr); 3154d0f46423SBarry Smith m = B->rmap->n; 3155d0f46423SBarry Smith cstart = B->cmap->rstart; 3156d0f46423SBarry Smith cend = B->cmap->rend; 3157d0f46423SBarry Smith rstart = B->rmap->rstart; 3158899cda47SBarry Smith 3159ccd8e176SBarry Smith ierr = PetscMalloc((2*m+1)*sizeof(PetscInt),&d_nnz);CHKERRQ(ierr); 3160ccd8e176SBarry Smith o_nnz = d_nnz + m; 3161ccd8e176SBarry Smith 3162ecc77c7aSBarry Smith #if defined(PETSC_USE_DEBUGGING) 3163ecc77c7aSBarry Smith for (i=0; i<m; i++) { 3164ecc77c7aSBarry Smith nnz = Ii[i+1]- Ii[i]; 3165ecc77c7aSBarry Smith JJ = J + Ii[i]; 3166ecc77c7aSBarry Smith if (nnz < 0) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Local row %D has a negative %D number of columns",i,nnz); 3167ecc77c7aSBarry Smith if (nnz && (JJ[0] < 0)) SETERRRQ1(PETSC_ERR_ARG_WRONGSTATE,"Row %D starts with negative column index",i,j); 3168d0f46423SBarry Smith if (nnz && (JJ[nnz-1] >= B->cmap->N) SETERRRQ3(PETSC_ERR_ARG_WRONGSTATE,"Row %D ends with too large a column index %D (max allowed %D)",i,JJ[nnz-1],B->cmap->N); 3169ecc77c7aSBarry Smith for (j=1; j<nnz; j++) { 3170ecc77c7aSBarry Smith if (JJ[i] <= JJ[i-1]) SETERRRQ(PETSC_ERR_ARG_WRONGSTATE,"Row %D has unsorted column index at %D location in column indices",i,j); 3171ecc77c7aSBarry Smith } 3172ecc77c7aSBarry Smith } 3173ecc77c7aSBarry Smith #endif 
3174ecc77c7aSBarry Smith 3175ccd8e176SBarry Smith for (i=0; i<m; i++) { 3176b7940d39SSatish Balay nnz = Ii[i+1]- Ii[i]; 3177b7940d39SSatish Balay JJ = J + Ii[i]; 3178ccd8e176SBarry Smith nnz_max = PetscMax(nnz_max,nnz); 3179ccd8e176SBarry Smith for (j=0; j<nnz; j++) { 3180ccd8e176SBarry Smith if (*JJ >= cstart) break; 3181ccd8e176SBarry Smith JJ++; 3182ccd8e176SBarry Smith } 3183ccd8e176SBarry Smith d = 0; 3184ccd8e176SBarry Smith for (; j<nnz; j++) { 3185ccd8e176SBarry Smith if (*JJ++ >= cend) break; 3186ccd8e176SBarry Smith d++; 3187ccd8e176SBarry Smith } 3188ccd8e176SBarry Smith d_nnz[i] = d; 3189ccd8e176SBarry Smith o_nnz[i] = nnz - d; 3190ccd8e176SBarry Smith } 3191ccd8e176SBarry Smith ierr = MatMPIAIJSetPreallocation(B,0,d_nnz,0,o_nnz);CHKERRQ(ierr); 3192ccd8e176SBarry Smith ierr = PetscFree(d_nnz);CHKERRQ(ierr); 3193ccd8e176SBarry Smith 3194ccd8e176SBarry Smith if (v) values = (PetscScalar*)v; 3195ccd8e176SBarry Smith else { 3196ccd8e176SBarry Smith ierr = PetscMalloc((nnz_max+1)*sizeof(PetscScalar),&values);CHKERRQ(ierr); 3197ccd8e176SBarry Smith ierr = PetscMemzero(values,nnz_max*sizeof(PetscScalar));CHKERRQ(ierr); 3198ccd8e176SBarry Smith } 3199ccd8e176SBarry Smith 3200ccd8e176SBarry Smith for (i=0; i<m; i++) { 3201ccd8e176SBarry Smith ii = i + rstart; 3202b7940d39SSatish Balay nnz = Ii[i+1]- Ii[i]; 3203b7940d39SSatish Balay ierr = MatSetValues_MPIAIJ(B,1,&ii,nnz,J+Ii[i],values+(v ? 
Ii[i] : 0),INSERT_VALUES);CHKERRQ(ierr); 3204ccd8e176SBarry Smith } 3205ccd8e176SBarry Smith ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 3206ccd8e176SBarry Smith ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 3207ccd8e176SBarry Smith 3208ccd8e176SBarry Smith if (!v) { 3209ccd8e176SBarry Smith ierr = PetscFree(values);CHKERRQ(ierr); 3210ccd8e176SBarry Smith } 3211ccd8e176SBarry Smith PetscFunctionReturn(0); 3212ccd8e176SBarry Smith } 3213e2e86b8fSSatish Balay EXTERN_C_END 3214ccd8e176SBarry Smith 3215ccd8e176SBarry Smith #undef __FUNCT__ 3216ccd8e176SBarry Smith #define __FUNCT__ "MatMPIAIJSetPreallocationCSR" 32171eea217eSSatish Balay /*@ 3218ccd8e176SBarry Smith MatMPIAIJSetPreallocationCSR - Allocates memory for a sparse parallel matrix in AIJ format 3219ccd8e176SBarry Smith (the default parallel PETSc format). 3220ccd8e176SBarry Smith 3221ccd8e176SBarry Smith Collective on MPI_Comm 3222ccd8e176SBarry Smith 3223ccd8e176SBarry Smith Input Parameters: 3224a1661176SMatthew Knepley + B - the matrix 3225ccd8e176SBarry Smith . i - the indices into j for the start of each local row (starts with zero) 3226ccd8e176SBarry Smith . j - the column indices for each local row (starts with zero) these must be sorted for each row 3227ccd8e176SBarry Smith - v - optional values in the matrix 3228ccd8e176SBarry Smith 3229ccd8e176SBarry Smith Level: developer 3230ccd8e176SBarry Smith 323112251496SSatish Balay Notes: 323212251496SSatish Balay The i, j, and a arrays ARE copied by this routine into the internal format used by PETSc; 323312251496SSatish Balay thus you CANNOT change the matrix entries by changing the values of a[] after you have 323412251496SSatish Balay called this routine. Use MatCreateMPIAIJWithSplitArrays() to avoid needing to copy the arrays. 323512251496SSatish Balay 323612251496SSatish Balay The i and j indices are 0 based, and i indices are indices corresponding to the local j array. 
323712251496SSatish Balay 323812251496SSatish Balay The format which is used for the sparse matrix input, is equivalent to a 323912251496SSatish Balay row-major ordering.. i.e for the following matrix, the input data expected is 324012251496SSatish Balay as shown: 324112251496SSatish Balay 324212251496SSatish Balay 1 0 0 324312251496SSatish Balay 2 0 3 P0 324412251496SSatish Balay ------- 324512251496SSatish Balay 4 5 6 P1 324612251496SSatish Balay 324712251496SSatish Balay Process0 [P0]: rows_owned=[0,1] 324812251496SSatish Balay i = {0,1,3} [size = nrow+1 = 2+1] 324912251496SSatish Balay j = {0,0,2} [size = nz = 6] 325012251496SSatish Balay v = {1,2,3} [size = nz = 6] 325112251496SSatish Balay 325212251496SSatish Balay Process1 [P1]: rows_owned=[2] 325312251496SSatish Balay i = {0,3} [size = nrow+1 = 1+1] 325412251496SSatish Balay j = {0,1,2} [size = nz = 6] 325512251496SSatish Balay v = {4,5,6} [size = nz = 6] 325612251496SSatish Balay 3257ecc77c7aSBarry Smith The column indices for each row MUST be sorted. 
32582fb0ec9aSBarry Smith 3259ccd8e176SBarry Smith .keywords: matrix, aij, compressed row, sparse, parallel 3260ccd8e176SBarry Smith 32612fb0ec9aSBarry Smith .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIAIJSetPreallocation(), MatCreateMPIAIJ(), MPIAIJ, 32628d7a6e47SBarry Smith MatCreateSeqAIJWithArrays(), MatCreateMPIAIJWithSplitArrays() 3263ccd8e176SBarry Smith @*/ 3264be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatMPIAIJSetPreallocationCSR(Mat B,const PetscInt i[],const PetscInt j[], const PetscScalar v[]) 3265ccd8e176SBarry Smith { 3266ccd8e176SBarry Smith PetscErrorCode ierr,(*f)(Mat,const PetscInt[],const PetscInt[],const PetscScalar[]); 3267ccd8e176SBarry Smith 3268ccd8e176SBarry Smith PetscFunctionBegin; 3269ccd8e176SBarry Smith ierr = PetscObjectQueryFunction((PetscObject)B,"MatMPIAIJSetPreallocationCSR_C",(void (**)(void))&f);CHKERRQ(ierr); 3270ccd8e176SBarry Smith if (f) { 3271ccd8e176SBarry Smith ierr = (*f)(B,i,j,v);CHKERRQ(ierr); 3272ccd8e176SBarry Smith } 3273ccd8e176SBarry Smith PetscFunctionReturn(0); 3274ccd8e176SBarry Smith } 3275ccd8e176SBarry Smith 3276ccd8e176SBarry Smith #undef __FUNCT__ 32774a2ae208SSatish Balay #define __FUNCT__ "MatMPIAIJSetPreallocation" 3278273d9f13SBarry Smith /*@C 3279ccd8e176SBarry Smith MatMPIAIJSetPreallocation - Preallocates memory for a sparse parallel matrix in AIJ format 3280273d9f13SBarry Smith (the default parallel PETSc format). For good matrix assembly performance 3281273d9f13SBarry Smith the user should preallocate the matrix storage by setting the parameters 3282273d9f13SBarry Smith d_nz (or d_nnz) and o_nz (or o_nnz). By setting these parameters accurately, 3283273d9f13SBarry Smith performance can be increased by more than a factor of 50. 3284273d9f13SBarry Smith 3285273d9f13SBarry Smith Collective on MPI_Comm 3286273d9f13SBarry Smith 3287273d9f13SBarry Smith Input Parameters: 3288273d9f13SBarry Smith + A - the matrix 3289273d9f13SBarry Smith . 
d_nz - number of nonzeros per row in DIAGONAL portion of local submatrix 3290273d9f13SBarry Smith (same value is used for all local rows) 3291273d9f13SBarry Smith . d_nnz - array containing the number of nonzeros in the various rows of the 3292273d9f13SBarry Smith DIAGONAL portion of the local submatrix (possibly different for each row) 3293273d9f13SBarry Smith or PETSC_NULL, if d_nz is used to specify the nonzero structure. 3294273d9f13SBarry Smith The size of this array is equal to the number of local rows, i.e 'm'. 3295273d9f13SBarry Smith You must leave room for the diagonal entry even if it is zero. 3296273d9f13SBarry Smith . o_nz - number of nonzeros per row in the OFF-DIAGONAL portion of local 3297273d9f13SBarry Smith submatrix (same value is used for all local rows). 3298273d9f13SBarry Smith - o_nnz - array containing the number of nonzeros in the various rows of the 3299273d9f13SBarry Smith OFF-DIAGONAL portion of the local submatrix (possibly different for 3300273d9f13SBarry Smith each row) or PETSC_NULL, if o_nz is used to specify the nonzero 3301273d9f13SBarry Smith structure. The size of this array is equal to the number 3302273d9f13SBarry Smith of local rows, i.e 'm'. 3303273d9f13SBarry Smith 330449a6f317SBarry Smith If the *_nnz parameter is given then the *_nz parameter is ignored 330549a6f317SBarry Smith 3306273d9f13SBarry Smith The AIJ format (also called the Yale sparse matrix format or 3307ccd8e176SBarry Smith compressed row storage (CSR)), is fully compatible with standard Fortran 77 3308ccd8e176SBarry Smith storage. The stored row and column indices begin with zero. See the users manual for details. 3309273d9f13SBarry Smith 3310273d9f13SBarry Smith The parallel matrix is partitioned such that the first m0 rows belong to 3311273d9f13SBarry Smith process 0, the next m1 rows belong to process 1, the next m2 rows belong 3312273d9f13SBarry Smith to process 2 etc.. where m0,m1,m2... are the input parameter 'm'. 
3313273d9f13SBarry Smith 3314273d9f13SBarry Smith The DIAGONAL portion of the local submatrix of a processor can be defined 3315273d9f13SBarry Smith as the submatrix which is obtained by extraction the part corresponding 3316273d9f13SBarry Smith to the rows r1-r2 and columns r1-r2 of the global matrix, where r1 is the 3317273d9f13SBarry Smith first row that belongs to the processor, and r2 is the last row belonging 3318273d9f13SBarry Smith to the this processor. This is a square mxm matrix. The remaining portion 3319273d9f13SBarry Smith of the local submatrix (mxN) constitute the OFF-DIAGONAL portion. 3320273d9f13SBarry Smith 3321273d9f13SBarry Smith If o_nnz, d_nnz are specified, then o_nz, and d_nz are ignored. 3322273d9f13SBarry Smith 3323aa95bbe8SBarry Smith You can call MatGetInfo() to get information on how effective the preallocation was; 3324aa95bbe8SBarry Smith for example the fields mallocs,nz_allocated,nz_used,nz_unneeded; 3325aa95bbe8SBarry Smith You can also run with the option -info and look for messages with the string 3326aa95bbe8SBarry Smith malloc in them to see if additional memory allocation was needed. 3327aa95bbe8SBarry Smith 3328273d9f13SBarry Smith Example usage: 3329273d9f13SBarry Smith 3330273d9f13SBarry Smith Consider the following 8x8 matrix with 34 non-zero values, that is 3331273d9f13SBarry Smith assembled across 3 processors. Lets assume that proc0 owns 3 rows, 3332273d9f13SBarry Smith proc1 owns 3 rows, proc2 owns 2 rows. 
This division can be shown 3333273d9f13SBarry Smith as follows: 3334273d9f13SBarry Smith 3335273d9f13SBarry Smith .vb 3336273d9f13SBarry Smith 1 2 0 | 0 3 0 | 0 4 3337273d9f13SBarry Smith Proc0 0 5 6 | 7 0 0 | 8 0 3338273d9f13SBarry Smith 9 0 10 | 11 0 0 | 12 0 3339273d9f13SBarry Smith ------------------------------------- 3340273d9f13SBarry Smith 13 0 14 | 15 16 17 | 0 0 3341273d9f13SBarry Smith Proc1 0 18 0 | 19 20 21 | 0 0 3342273d9f13SBarry Smith 0 0 0 | 22 23 0 | 24 0 3343273d9f13SBarry Smith ------------------------------------- 3344273d9f13SBarry Smith Proc2 25 26 27 | 0 0 28 | 29 0 3345273d9f13SBarry Smith 30 0 0 | 31 32 33 | 0 34 3346273d9f13SBarry Smith .ve 3347273d9f13SBarry Smith 3348273d9f13SBarry Smith This can be represented as a collection of submatrices as: 3349273d9f13SBarry Smith 3350273d9f13SBarry Smith .vb 3351273d9f13SBarry Smith A B C 3352273d9f13SBarry Smith D E F 3353273d9f13SBarry Smith G H I 3354273d9f13SBarry Smith .ve 3355273d9f13SBarry Smith 3356273d9f13SBarry Smith Where the submatrices A,B,C are owned by proc0, D,E,F are 3357273d9f13SBarry Smith owned by proc1, G,H,I are owned by proc2. 3358273d9f13SBarry Smith 3359273d9f13SBarry Smith The 'm' parameters for proc0,proc1,proc2 are 3,3,2 respectively. 3360273d9f13SBarry Smith The 'n' parameters for proc0,proc1,proc2 are 3,3,2 respectively. 3361273d9f13SBarry Smith The 'M','N' parameters are 8,8, and have the same values on all procs. 3362273d9f13SBarry Smith 3363273d9f13SBarry Smith The DIAGONAL submatrices corresponding to proc0,proc1,proc2 are 3364273d9f13SBarry Smith submatrices [A], [E], [I] respectively. The OFF-DIAGONAL submatrices 3365273d9f13SBarry Smith corresponding to proc0,proc1,proc2 are [BC], [DF], [GH] respectively. 3366273d9f13SBarry Smith Internally, each processor stores the DIAGONAL part, and the OFF-DIAGONAL 3367273d9f13SBarry Smith part as SeqAIJ matrices. for eg: proc1 will store [E] as a SeqAIJ 3368273d9f13SBarry Smith matrix, ans [DF] as another SeqAIJ matrix. 
3369273d9f13SBarry Smith 3370273d9f13SBarry Smith When d_nz, o_nz parameters are specified, d_nz storage elements are 3371273d9f13SBarry Smith allocated for every row of the local diagonal submatrix, and o_nz 3372273d9f13SBarry Smith storage locations are allocated for every row of the OFF-DIAGONAL submat. 3373273d9f13SBarry Smith One way to choose d_nz and o_nz is to use the max nonzerors per local 3374273d9f13SBarry Smith rows for each of the local DIAGONAL, and the OFF-DIAGONAL submatrices. 3375273d9f13SBarry Smith In this case, the values of d_nz,o_nz are: 3376273d9f13SBarry Smith .vb 3377273d9f13SBarry Smith proc0 : dnz = 2, o_nz = 2 3378273d9f13SBarry Smith proc1 : dnz = 3, o_nz = 2 3379273d9f13SBarry Smith proc2 : dnz = 1, o_nz = 4 3380273d9f13SBarry Smith .ve 3381273d9f13SBarry Smith We are allocating m*(d_nz+o_nz) storage locations for every proc. This 3382273d9f13SBarry Smith translates to 3*(2+2)=12 for proc0, 3*(3+2)=15 for proc1, 2*(1+4)=10 3383273d9f13SBarry Smith for proc3. i.e we are using 12+15+10=37 storage locations to store 3384273d9f13SBarry Smith 34 values. 3385273d9f13SBarry Smith 3386273d9f13SBarry Smith When d_nnz, o_nnz parameters are specified, the storage is specified 3387273d9f13SBarry Smith for every row, coresponding to both DIAGONAL and OFF-DIAGONAL submatrices. 3388273d9f13SBarry Smith In the above case the values for d_nnz,o_nnz are: 3389273d9f13SBarry Smith .vb 3390273d9f13SBarry Smith proc0: d_nnz = [2,2,2] and o_nnz = [2,2,2] 3391273d9f13SBarry Smith proc1: d_nnz = [3,3,2] and o_nnz = [2,1,1] 3392273d9f13SBarry Smith proc2: d_nnz = [1,1] and o_nnz = [4,4] 3393273d9f13SBarry Smith .ve 3394273d9f13SBarry Smith Here the space allocated is sum of all the above values i.e 34, and 3395273d9f13SBarry Smith hence pre-allocation is perfect. 
3396273d9f13SBarry Smith 3397273d9f13SBarry Smith Level: intermediate 3398273d9f13SBarry Smith 3399273d9f13SBarry Smith .keywords: matrix, aij, compressed row, sparse, parallel 3400273d9f13SBarry Smith 3401ccd8e176SBarry Smith .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatCreateMPIAIJ(), MatMPIAIJSetPreallocationCSR(), 3402aa95bbe8SBarry Smith MPIAIJ, MatGetInfo() 3403273d9f13SBarry Smith @*/ 3404be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatMPIAIJSetPreallocation(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[]) 3405273d9f13SBarry Smith { 3406b1d57f15SBarry Smith PetscErrorCode ierr,(*f)(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]); 3407273d9f13SBarry Smith 3408273d9f13SBarry Smith PetscFunctionBegin; 3409a23d5eceSKris Buschelman ierr = PetscObjectQueryFunction((PetscObject)B,"MatMPIAIJSetPreallocation_C",(void (**)(void))&f);CHKERRQ(ierr); 3410a23d5eceSKris Buschelman if (f) { 3411a23d5eceSKris Buschelman ierr = (*f)(B,d_nz,d_nnz,o_nz,o_nnz);CHKERRQ(ierr); 3412273d9f13SBarry Smith } 3413273d9f13SBarry Smith PetscFunctionReturn(0); 3414273d9f13SBarry Smith } 3415273d9f13SBarry Smith 34164a2ae208SSatish Balay #undef __FUNCT__ 34172fb0ec9aSBarry Smith #define __FUNCT__ "MatCreateMPIAIJWithArrays" 341858d36128SBarry Smith /*@ 34192fb0ec9aSBarry Smith MatCreateMPIAIJWithArrays - creates a MPI AIJ matrix using arrays that contain in standard 34202fb0ec9aSBarry Smith CSR format the local rows. 34212fb0ec9aSBarry Smith 34222fb0ec9aSBarry Smith Collective on MPI_Comm 34232fb0ec9aSBarry Smith 34242fb0ec9aSBarry Smith Input Parameters: 34252fb0ec9aSBarry Smith + comm - MPI communicator 34262fb0ec9aSBarry Smith . m - number of local rows (Cannot be PETSC_DECIDE) 34272fb0ec9aSBarry Smith . n - This value should be the same as the local size used in creating the 34282fb0ec9aSBarry Smith x vector for the matrix-vector product y = Ax. 
(or PETSC_DECIDE to have 34292fb0ec9aSBarry Smith calculated if N is given) For square matrices n is almost always m. 34302fb0ec9aSBarry Smith . M - number of global rows (or PETSC_DETERMINE to have calculated if m is given) 34312fb0ec9aSBarry Smith . N - number of global columns (or PETSC_DETERMINE to have calculated if n is given) 34322fb0ec9aSBarry Smith . i - row indices 34332fb0ec9aSBarry Smith . j - column indices 34342fb0ec9aSBarry Smith - a - matrix values 34352fb0ec9aSBarry Smith 34362fb0ec9aSBarry Smith Output Parameter: 34372fb0ec9aSBarry Smith . mat - the matrix 343803bfb495SBarry Smith 34392fb0ec9aSBarry Smith Level: intermediate 34402fb0ec9aSBarry Smith 34412fb0ec9aSBarry Smith Notes: 34422fb0ec9aSBarry Smith The i, j, and a arrays ARE copied by this routine into the internal format used by PETSc; 34432fb0ec9aSBarry Smith thus you CANNOT change the matrix entries by changing the values of a[] after you have 34448d7a6e47SBarry Smith called this routine. Use MatCreateMPIAIJWithSplitArrays() to avoid needing to copy the arrays. 34452fb0ec9aSBarry Smith 344612251496SSatish Balay The i and j indices are 0 based, and i indices are indices corresponding to the local j array. 344712251496SSatish Balay 344812251496SSatish Balay The format which is used for the sparse matrix input, is equivalent to a 344912251496SSatish Balay row-major ordering.. 
i.e for the following matrix, the input data expected is 345012251496SSatish Balay as shown: 345112251496SSatish Balay 345212251496SSatish Balay 1 0 0 345312251496SSatish Balay 2 0 3 P0 345412251496SSatish Balay ------- 345512251496SSatish Balay 4 5 6 P1 345612251496SSatish Balay 345712251496SSatish Balay Process0 [P0]: rows_owned=[0,1] 345812251496SSatish Balay i = {0,1,3} [size = nrow+1 = 2+1] 345912251496SSatish Balay j = {0,0,2} [size = nz = 6] 346012251496SSatish Balay v = {1,2,3} [size = nz = 6] 346112251496SSatish Balay 346212251496SSatish Balay Process1 [P1]: rows_owned=[2] 346312251496SSatish Balay i = {0,3} [size = nrow+1 = 1+1] 346412251496SSatish Balay j = {0,1,2} [size = nz = 6] 346512251496SSatish Balay v = {4,5,6} [size = nz = 6] 34662fb0ec9aSBarry Smith 34672fb0ec9aSBarry Smith .keywords: matrix, aij, compressed row, sparse, parallel 34682fb0ec9aSBarry Smith 34692fb0ec9aSBarry Smith .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIAIJSetPreallocation(), MatMPIAIJSetPreallocationCSR(), 34708d7a6e47SBarry Smith MPIAIJ, MatCreateMPIAIJ(), MatCreateMPIAIJWithSplitArrays() 34712fb0ec9aSBarry Smith @*/ 347282b90586SSatish Balay PetscErrorCode PETSCMAT_DLLEXPORT MatCreateMPIAIJWithArrays(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt i[],const PetscInt j[],const PetscScalar a[],Mat *mat) 34732fb0ec9aSBarry Smith { 34742fb0ec9aSBarry Smith PetscErrorCode ierr; 34752fb0ec9aSBarry Smith 34762fb0ec9aSBarry Smith PetscFunctionBegin; 34772fb0ec9aSBarry Smith if (i[0]) { 34782fb0ec9aSBarry Smith SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"i (row indices) must start with 0"); 34792fb0ec9aSBarry Smith } 34802fb0ec9aSBarry Smith if (m < 0) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"local number of rows (m) cannot be PETSC_DECIDE, or negative"); 34812fb0ec9aSBarry Smith ierr = MatCreate(comm,mat);CHKERRQ(ierr); 3482d4146a68SBarry Smith ierr = MatSetSizes(*mat,m,n,M,N);CHKERRQ(ierr); 34832fb0ec9aSBarry Smith ierr = 
MatSetType(*mat,MATMPIAIJ);CHKERRQ(ierr); 34842fb0ec9aSBarry Smith ierr = MatMPIAIJSetPreallocationCSR(*mat,i,j,a);CHKERRQ(ierr); 34852fb0ec9aSBarry Smith PetscFunctionReturn(0); 34862fb0ec9aSBarry Smith } 34872fb0ec9aSBarry Smith 34882fb0ec9aSBarry Smith #undef __FUNCT__ 34894a2ae208SSatish Balay #define __FUNCT__ "MatCreateMPIAIJ" 3490273d9f13SBarry Smith /*@C 3491273d9f13SBarry Smith MatCreateMPIAIJ - Creates a sparse parallel matrix in AIJ format 3492273d9f13SBarry Smith (the default parallel PETSc format). For good matrix assembly performance 3493273d9f13SBarry Smith the user should preallocate the matrix storage by setting the parameters 3494273d9f13SBarry Smith d_nz (or d_nnz) and o_nz (or o_nnz). By setting these parameters accurately, 3495273d9f13SBarry Smith performance can be increased by more than a factor of 50. 3496273d9f13SBarry Smith 3497273d9f13SBarry Smith Collective on MPI_Comm 3498273d9f13SBarry Smith 3499273d9f13SBarry Smith Input Parameters: 3500273d9f13SBarry Smith + comm - MPI communicator 3501273d9f13SBarry Smith . m - number of local rows (or PETSC_DECIDE to have calculated if M is given) 3502273d9f13SBarry Smith This value should be the same as the local size used in creating the 3503273d9f13SBarry Smith y vector for the matrix-vector product y = Ax. 3504273d9f13SBarry Smith . n - This value should be the same as the local size used in creating the 3505273d9f13SBarry Smith x vector for the matrix-vector product y = Ax. (or PETSC_DECIDE to have 3506273d9f13SBarry Smith calculated if N is given) For square matrices n is almost always m. 3507273d9f13SBarry Smith . M - number of global rows (or PETSC_DETERMINE to have calculated if m is given) 3508273d9f13SBarry Smith . N - number of global columns (or PETSC_DETERMINE to have calculated if n is given) 3509273d9f13SBarry Smith . d_nz - number of nonzeros per row in DIAGONAL portion of local submatrix 3510273d9f13SBarry Smith (same value is used for all local rows) 3511273d9f13SBarry Smith . 
d_nnz - array containing the number of nonzeros in the various rows of the
           DIAGONAL portion of the local submatrix (possibly different for each row)
           or PETSC_NULL, if d_nz is used to specify the nonzero structure.
           The size of this array is equal to the number of local rows, i.e 'm'.
           You must leave room for the diagonal entry even if it is zero.
.  o_nz - number of nonzeros per row in the OFF-DIAGONAL portion of local
          submatrix (same value is used for all local rows).
-  o_nnz - array containing the number of nonzeros in the various rows of the
           OFF-DIAGONAL portion of the local submatrix (possibly different for
           each row) or PETSC_NULL, if o_nz is used to specify the nonzero
           structure. The size of this array is equal to the number
           of local rows, i.e 'm'.

   Output Parameter:
.  A - the matrix

   It is recommended that one use the MatCreate(), MatSetType() and/or MatSetFromOptions(),
   MatXXXXSetPreallocation() paradigm instead of this routine directly.
   [MatXXXXSetPreallocation() is, for example, MatSeqAIJSetPreallocation]

   Notes:
   If the *_nnz parameter is given then the *_nz parameter is ignored

   m,n,M,N parameters specify the size of the matrix, and its partitioning across
   processors, while d_nz,d_nnz,o_nz,o_nnz parameters specify the approximate
   storage requirements for this matrix.

   If PETSC_DECIDE or PETSC_DETERMINE is used for a particular argument on one
   processor then it must be used on all processors that share the object for
   that argument.

   The user MUST specify either the local or global matrix dimensions
   (possibly both).

   The parallel matrix is partitioned across processors such that the
   first m0 rows belong to process 0, the next m1 rows belong to
   process 1, the next m2 rows belong to process 2 etc.. where
   m0,m1,m2,.. are the input parameter 'm'. i.e each processor stores
   values corresponding to [m x N] submatrix.

   The columns are logically partitioned with the n0 columns belonging
   to 0th partition, the next n1 columns belonging to the next
   partition etc.. where n0,n1,n2... are the input parameter 'n'.

   The DIAGONAL portion of the local submatrix on any given processor
   is the submatrix corresponding to the rows and columns m,n
   corresponding to the given processor. i.e diagonal matrix on
   process 0 is [m0 x n0], diagonal matrix on process 1 is [m1 x n1]
   etc. The remaining portion of the local submatrix [m x (N-n)]
   constitute the OFF-DIAGONAL portion. The example below better
   illustrates this concept.
356233a7c187SSatish Balay 356333a7c187SSatish Balay For a square global matrix we define each processor's diagonal portion 356433a7c187SSatish Balay to be its local rows and the corresponding columns (a square submatrix); 356533a7c187SSatish Balay each processor's off-diagonal portion encompasses the remainder of the 356633a7c187SSatish Balay local matrix (a rectangular submatrix). 3567273d9f13SBarry Smith 3568273d9f13SBarry Smith If o_nnz, d_nnz are specified, then o_nz, and d_nz are ignored. 3569273d9f13SBarry Smith 357097d05335SKris Buschelman When calling this routine with a single process communicator, a matrix of 357197d05335SKris Buschelman type SEQAIJ is returned. If a matrix of type MPIAIJ is desired for this 357297d05335SKris Buschelman type of communicator, use the construction mechanism: 357397d05335SKris Buschelman MatCreate(...,&A); MatSetType(A,MPIAIJ); MatMPIAIJSetPreallocation(A,...); 357497d05335SKris Buschelman 3575273d9f13SBarry Smith By default, this format uses inodes (identical nodes) when possible. 3576273d9f13SBarry Smith We search for consecutive rows with the same nonzero structure, thereby 3577273d9f13SBarry Smith reusing matrix information to achieve increased efficiency. 3578273d9f13SBarry Smith 3579273d9f13SBarry Smith Options Database Keys: 3580923f20ffSKris Buschelman + -mat_no_inode - Do not use inodes 3581923f20ffSKris Buschelman . -mat_inode_limit <limit> - Sets inode limit (max limit=5) 3582273d9f13SBarry Smith - -mat_aij_oneindex - Internally use indexing starting at 1 3583273d9f13SBarry Smith rather than 0. Note that when calling MatSetValues(), 3584273d9f13SBarry Smith the user still MUST index entries starting at 0! 3585273d9f13SBarry Smith 3586273d9f13SBarry Smith 3587273d9f13SBarry Smith Example usage: 3588273d9f13SBarry Smith 3589273d9f13SBarry Smith Consider the following 8x8 matrix with 34 non-zero values, that is 3590273d9f13SBarry Smith assembled across 3 processors. 
Lets assume that proc0 owns 3 rows, 3591273d9f13SBarry Smith proc1 owns 3 rows, proc2 owns 2 rows. This division can be shown 3592273d9f13SBarry Smith as follows: 3593273d9f13SBarry Smith 3594273d9f13SBarry Smith .vb 3595273d9f13SBarry Smith 1 2 0 | 0 3 0 | 0 4 3596273d9f13SBarry Smith Proc0 0 5 6 | 7 0 0 | 8 0 3597273d9f13SBarry Smith 9 0 10 | 11 0 0 | 12 0 3598273d9f13SBarry Smith ------------------------------------- 3599273d9f13SBarry Smith 13 0 14 | 15 16 17 | 0 0 3600273d9f13SBarry Smith Proc1 0 18 0 | 19 20 21 | 0 0 3601273d9f13SBarry Smith 0 0 0 | 22 23 0 | 24 0 3602273d9f13SBarry Smith ------------------------------------- 3603273d9f13SBarry Smith Proc2 25 26 27 | 0 0 28 | 29 0 3604273d9f13SBarry Smith 30 0 0 | 31 32 33 | 0 34 3605273d9f13SBarry Smith .ve 3606273d9f13SBarry Smith 3607273d9f13SBarry Smith This can be represented as a collection of submatrices as: 3608273d9f13SBarry Smith 3609273d9f13SBarry Smith .vb 3610273d9f13SBarry Smith A B C 3611273d9f13SBarry Smith D E F 3612273d9f13SBarry Smith G H I 3613273d9f13SBarry Smith .ve 3614273d9f13SBarry Smith 3615273d9f13SBarry Smith Where the submatrices A,B,C are owned by proc0, D,E,F are 3616273d9f13SBarry Smith owned by proc1, G,H,I are owned by proc2. 3617273d9f13SBarry Smith 3618273d9f13SBarry Smith The 'm' parameters for proc0,proc1,proc2 are 3,3,2 respectively. 3619273d9f13SBarry Smith The 'n' parameters for proc0,proc1,proc2 are 3,3,2 respectively. 3620273d9f13SBarry Smith The 'M','N' parameters are 8,8, and have the same values on all procs. 3621273d9f13SBarry Smith 3622273d9f13SBarry Smith The DIAGONAL submatrices corresponding to proc0,proc1,proc2 are 3623273d9f13SBarry Smith submatrices [A], [E], [I] respectively. The OFF-DIAGONAL submatrices 3624273d9f13SBarry Smith corresponding to proc0,proc1,proc2 are [BC], [DF], [GH] respectively. 3625273d9f13SBarry Smith Internally, each processor stores the DIAGONAL part, and the OFF-DIAGONAL 3626273d9f13SBarry Smith part as SeqAIJ matrices. 
For example, proc1 will store [E] as a SeqAIJ
   matrix, and [DF] as another SeqAIJ matrix.

   When d_nz, o_nz parameters are specified, d_nz storage elements are
   allocated for every row of the local diagonal submatrix, and o_nz
   storage locations are allocated for every row of the OFF-DIAGONAL submat.
   One way to choose d_nz and o_nz is to use the max nonzeros per local
   rows for each of the local DIAGONAL, and the OFF-DIAGONAL submatrices.
   In this case, the values of d_nz,o_nz are:
.vb
     proc0 : d_nz = 2, o_nz = 2
     proc1 : d_nz = 3, o_nz = 2
     proc2 : d_nz = 1, o_nz = 4
.ve
   We are allocating m*(d_nz+o_nz) storage locations for every proc. This
   translates to 3*(2+2)=12 for proc0, 3*(3+2)=15 for proc1, 2*(1+4)=10
   for proc2. i.e we are using 12+15+10=37 storage locations to store
   34 values.

   When d_nnz, o_nnz parameters are specified, the storage is specified
   for every row, corresponding to both DIAGONAL and OFF-DIAGONAL submatrices.
   In the above case the values for d_nnz,o_nnz are:
.vb
     proc0: d_nnz = [2,2,2] and o_nnz = [2,2,2]
     proc1: d_nnz = [3,3,2] and o_nnz = [2,1,1]
     proc2: d_nnz = [1,1] and o_nnz = [4,4]
.ve
   Here the space allocated is sum of all the above values i.e 34, and
   hence pre-allocation is perfect.
3655273d9f13SBarry Smith 3656273d9f13SBarry Smith Level: intermediate 3657273d9f13SBarry Smith 3658273d9f13SBarry Smith .keywords: matrix, aij, compressed row, sparse, parallel 3659273d9f13SBarry Smith 3660ccd8e176SBarry Smith .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIAIJSetPreallocation(), MatMPIAIJSetPreallocationCSR(), 36612fb0ec9aSBarry Smith MPIAIJ, MatCreateMPIAIJWithArrays() 3662273d9f13SBarry Smith @*/ 3663be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatCreateMPIAIJ(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[],Mat *A) 3664273d9f13SBarry Smith { 36656849ba73SBarry Smith PetscErrorCode ierr; 3666b1d57f15SBarry Smith PetscMPIInt size; 3667273d9f13SBarry Smith 3668273d9f13SBarry Smith PetscFunctionBegin; 3669f69a0ea3SMatthew Knepley ierr = MatCreate(comm,A);CHKERRQ(ierr); 3670f69a0ea3SMatthew Knepley ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr); 3671273d9f13SBarry Smith ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 3672273d9f13SBarry Smith if (size > 1) { 3673273d9f13SBarry Smith ierr = MatSetType(*A,MATMPIAIJ);CHKERRQ(ierr); 3674273d9f13SBarry Smith ierr = MatMPIAIJSetPreallocation(*A,d_nz,d_nnz,o_nz,o_nnz);CHKERRQ(ierr); 3675273d9f13SBarry Smith } else { 3676273d9f13SBarry Smith ierr = MatSetType(*A,MATSEQAIJ);CHKERRQ(ierr); 3677273d9f13SBarry Smith ierr = MatSeqAIJSetPreallocation(*A,d_nz,d_nnz);CHKERRQ(ierr); 3678273d9f13SBarry Smith } 3679273d9f13SBarry Smith PetscFunctionReturn(0); 3680273d9f13SBarry Smith } 3681195d93cdSBarry Smith 36824a2ae208SSatish Balay #undef __FUNCT__ 36834a2ae208SSatish Balay #define __FUNCT__ "MatMPIAIJGetSeqAIJ" 3684be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatMPIAIJGetSeqAIJ(Mat A,Mat *Ad,Mat *Ao,PetscInt *colmap[]) 3685195d93cdSBarry Smith { 3686195d93cdSBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ *)A->data; 3687b1d57f15SBarry Smith 3688195d93cdSBarry Smith PetscFunctionBegin; 
3689195d93cdSBarry Smith *Ad = a->A; 3690195d93cdSBarry Smith *Ao = a->B; 3691195d93cdSBarry Smith *colmap = a->garray; 3692195d93cdSBarry Smith PetscFunctionReturn(0); 3693195d93cdSBarry Smith } 3694a2243be0SBarry Smith 3695a2243be0SBarry Smith #undef __FUNCT__ 3696a2243be0SBarry Smith #define __FUNCT__ "MatSetColoring_MPIAIJ" 3697dfbe8321SBarry Smith PetscErrorCode MatSetColoring_MPIAIJ(Mat A,ISColoring coloring) 3698a2243be0SBarry Smith { 3699dfbe8321SBarry Smith PetscErrorCode ierr; 3700b1d57f15SBarry Smith PetscInt i; 3701a2243be0SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 3702a2243be0SBarry Smith 3703a2243be0SBarry Smith PetscFunctionBegin; 37048ee2e534SBarry Smith if (coloring->ctype == IS_COLORING_GLOBAL) { 370508b6dcc0SBarry Smith ISColoringValue *allcolors,*colors; 3706a2243be0SBarry Smith ISColoring ocoloring; 3707a2243be0SBarry Smith 3708a2243be0SBarry Smith /* set coloring for diagonal portion */ 3709a2243be0SBarry Smith ierr = MatSetColoring_SeqAIJ(a->A,coloring);CHKERRQ(ierr); 3710a2243be0SBarry Smith 3711a2243be0SBarry Smith /* set coloring for off-diagonal portion */ 37127adad957SLisandro Dalcin ierr = ISAllGatherColors(((PetscObject)A)->comm,coloring->n,coloring->colors,PETSC_NULL,&allcolors);CHKERRQ(ierr); 3713d0f46423SBarry Smith ierr = PetscMalloc((a->B->cmap->n+1)*sizeof(ISColoringValue),&colors);CHKERRQ(ierr); 3714d0f46423SBarry Smith for (i=0; i<a->B->cmap->n; i++) { 3715a2243be0SBarry Smith colors[i] = allcolors[a->garray[i]]; 3716a2243be0SBarry Smith } 3717a2243be0SBarry Smith ierr = PetscFree(allcolors);CHKERRQ(ierr); 3718d0f46423SBarry Smith ierr = ISColoringCreate(MPI_COMM_SELF,coloring->n,a->B->cmap->n,colors,&ocoloring);CHKERRQ(ierr); 3719a2243be0SBarry Smith ierr = MatSetColoring_SeqAIJ(a->B,ocoloring);CHKERRQ(ierr); 3720a2243be0SBarry Smith ierr = ISColoringDestroy(ocoloring);CHKERRQ(ierr); 3721a2243be0SBarry Smith } else if (coloring->ctype == IS_COLORING_GHOSTED) { 372208b6dcc0SBarry Smith ISColoringValue *colors; 
3723b1d57f15SBarry Smith PetscInt *larray; 3724a2243be0SBarry Smith ISColoring ocoloring; 3725a2243be0SBarry Smith 3726a2243be0SBarry Smith /* set coloring for diagonal portion */ 3727d0f46423SBarry Smith ierr = PetscMalloc((a->A->cmap->n+1)*sizeof(PetscInt),&larray);CHKERRQ(ierr); 3728d0f46423SBarry Smith for (i=0; i<a->A->cmap->n; i++) { 3729d0f46423SBarry Smith larray[i] = i + A->cmap->rstart; 3730a2243be0SBarry Smith } 3731d0f46423SBarry Smith ierr = ISGlobalToLocalMappingApply(A->mapping,IS_GTOLM_MASK,a->A->cmap->n,larray,PETSC_NULL,larray);CHKERRQ(ierr); 3732d0f46423SBarry Smith ierr = PetscMalloc((a->A->cmap->n+1)*sizeof(ISColoringValue),&colors);CHKERRQ(ierr); 3733d0f46423SBarry Smith for (i=0; i<a->A->cmap->n; i++) { 3734a2243be0SBarry Smith colors[i] = coloring->colors[larray[i]]; 3735a2243be0SBarry Smith } 3736a2243be0SBarry Smith ierr = PetscFree(larray);CHKERRQ(ierr); 3737d0f46423SBarry Smith ierr = ISColoringCreate(PETSC_COMM_SELF,coloring->n,a->A->cmap->n,colors,&ocoloring);CHKERRQ(ierr); 3738a2243be0SBarry Smith ierr = MatSetColoring_SeqAIJ(a->A,ocoloring);CHKERRQ(ierr); 3739a2243be0SBarry Smith ierr = ISColoringDestroy(ocoloring);CHKERRQ(ierr); 3740a2243be0SBarry Smith 3741a2243be0SBarry Smith /* set coloring for off-diagonal portion */ 3742d0f46423SBarry Smith ierr = PetscMalloc((a->B->cmap->n+1)*sizeof(PetscInt),&larray);CHKERRQ(ierr); 3743d0f46423SBarry Smith ierr = ISGlobalToLocalMappingApply(A->mapping,IS_GTOLM_MASK,a->B->cmap->n,a->garray,PETSC_NULL,larray);CHKERRQ(ierr); 3744d0f46423SBarry Smith ierr = PetscMalloc((a->B->cmap->n+1)*sizeof(ISColoringValue),&colors);CHKERRQ(ierr); 3745d0f46423SBarry Smith for (i=0; i<a->B->cmap->n; i++) { 3746a2243be0SBarry Smith colors[i] = coloring->colors[larray[i]]; 3747a2243be0SBarry Smith } 3748a2243be0SBarry Smith ierr = PetscFree(larray);CHKERRQ(ierr); 3749d0f46423SBarry Smith ierr = ISColoringCreate(MPI_COMM_SELF,coloring->n,a->B->cmap->n,colors,&ocoloring);CHKERRQ(ierr); 3750a2243be0SBarry Smith ierr 
= MatSetColoring_SeqAIJ(a->B,ocoloring);CHKERRQ(ierr); 3751a2243be0SBarry Smith ierr = ISColoringDestroy(ocoloring);CHKERRQ(ierr); 3752a2243be0SBarry Smith } else { 375377431f27SBarry Smith SETERRQ1(PETSC_ERR_SUP,"No support ISColoringType %d",(int)coloring->ctype); 3754a2243be0SBarry Smith } 3755a2243be0SBarry Smith 3756a2243be0SBarry Smith PetscFunctionReturn(0); 3757a2243be0SBarry Smith } 3758a2243be0SBarry Smith 3759dcf5cc72SBarry Smith #if defined(PETSC_HAVE_ADIC) 3760a2243be0SBarry Smith #undef __FUNCT__ 3761779c1a83SBarry Smith #define __FUNCT__ "MatSetValuesAdic_MPIAIJ" 3762dfbe8321SBarry Smith PetscErrorCode MatSetValuesAdic_MPIAIJ(Mat A,void *advalues) 3763a2243be0SBarry Smith { 3764a2243be0SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 3765dfbe8321SBarry Smith PetscErrorCode ierr; 3766a2243be0SBarry Smith 3767a2243be0SBarry Smith PetscFunctionBegin; 3768779c1a83SBarry Smith ierr = MatSetValuesAdic_SeqAIJ(a->A,advalues);CHKERRQ(ierr); 3769779c1a83SBarry Smith ierr = MatSetValuesAdic_SeqAIJ(a->B,advalues);CHKERRQ(ierr); 3770779c1a83SBarry Smith PetscFunctionReturn(0); 3771779c1a83SBarry Smith } 3772dcf5cc72SBarry Smith #endif 3773779c1a83SBarry Smith 3774779c1a83SBarry Smith #undef __FUNCT__ 3775779c1a83SBarry Smith #define __FUNCT__ "MatSetValuesAdifor_MPIAIJ" 3776b1d57f15SBarry Smith PetscErrorCode MatSetValuesAdifor_MPIAIJ(Mat A,PetscInt nl,void *advalues) 3777779c1a83SBarry Smith { 3778779c1a83SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 3779dfbe8321SBarry Smith PetscErrorCode ierr; 3780779c1a83SBarry Smith 3781779c1a83SBarry Smith PetscFunctionBegin; 3782779c1a83SBarry Smith ierr = MatSetValuesAdifor_SeqAIJ(a->A,nl,advalues);CHKERRQ(ierr); 3783779c1a83SBarry Smith ierr = MatSetValuesAdifor_SeqAIJ(a->B,nl,advalues);CHKERRQ(ierr); 3784a2243be0SBarry Smith PetscFunctionReturn(0); 3785a2243be0SBarry Smith } 3786c5d6d63eSBarry Smith 3787c5d6d63eSBarry Smith #undef __FUNCT__ 378851dd7536SBarry Smith #define __FUNCT__ "MatMerge" 3789bc08b0f1SBarry 
Smith /*@ 379051dd7536SBarry Smith MatMerge - Creates a single large PETSc matrix by concatinating sequential 379151dd7536SBarry Smith matrices from each processor 3792c5d6d63eSBarry Smith 3793c5d6d63eSBarry Smith Collective on MPI_Comm 3794c5d6d63eSBarry Smith 3795c5d6d63eSBarry Smith Input Parameters: 379651dd7536SBarry Smith + comm - the communicators the parallel matrix will live on 3797d6bb3c2dSHong Zhang . inmat - the input sequential matrices 37980e36024fSHong Zhang . n - number of local columns (or PETSC_DECIDE) 3799d6bb3c2dSHong Zhang - scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX 380051dd7536SBarry Smith 380151dd7536SBarry Smith Output Parameter: 380251dd7536SBarry Smith . outmat - the parallel matrix generated 3803c5d6d63eSBarry Smith 38047e25d530SSatish Balay Level: advanced 38057e25d530SSatish Balay 3806f08fae4eSHong Zhang Notes: The number of columns of the matrix in EACH processor MUST be the same. 3807c5d6d63eSBarry Smith 3808c5d6d63eSBarry Smith @*/ 3809be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatMerge(MPI_Comm comm,Mat inmat,PetscInt n,MatReuse scall,Mat *outmat) 3810c5d6d63eSBarry Smith { 3811dfbe8321SBarry Smith PetscErrorCode ierr; 3812b7940d39SSatish Balay PetscInt m,N,i,rstart,nnz,Ii,*dnz,*onz; 3813ba8c8a56SBarry Smith PetscInt *indx; 3814ba8c8a56SBarry Smith PetscScalar *values; 3815c5d6d63eSBarry Smith 3816c5d6d63eSBarry Smith PetscFunctionBegin; 38170e36024fSHong Zhang ierr = MatGetSize(inmat,&m,&N);CHKERRQ(ierr); 3818d6bb3c2dSHong Zhang if (scall == MAT_INITIAL_MATRIX){ 3819d6bb3c2dSHong Zhang /* count nonzeros in each row, for diagonal and off diagonal portion of matrix */ 38200e36024fSHong Zhang if (n == PETSC_DECIDE){ 3821357abbc8SBarry Smith ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr); 38220e36024fSHong Zhang } 3823357abbc8SBarry Smith ierr = MPI_Scan(&m, &rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr); 3824357abbc8SBarry Smith rstart -= m; 3825d6bb3c2dSHong Zhang 3826d6bb3c2dSHong Zhang ierr = 
MatPreallocateInitialize(comm,m,n,dnz,onz);CHKERRQ(ierr); 3827d6bb3c2dSHong Zhang for (i=0;i<m;i++) { 3828ba8c8a56SBarry Smith ierr = MatGetRow_SeqAIJ(inmat,i,&nnz,&indx,PETSC_NULL);CHKERRQ(ierr); 3829d6bb3c2dSHong Zhang ierr = MatPreallocateSet(i+rstart,nnz,indx,dnz,onz);CHKERRQ(ierr); 3830ba8c8a56SBarry Smith ierr = MatRestoreRow_SeqAIJ(inmat,i,&nnz,&indx,PETSC_NULL);CHKERRQ(ierr); 3831d6bb3c2dSHong Zhang } 3832d6bb3c2dSHong Zhang /* This routine will ONLY return MPIAIJ type matrix */ 3833f69a0ea3SMatthew Knepley ierr = MatCreate(comm,outmat);CHKERRQ(ierr); 3834f69a0ea3SMatthew Knepley ierr = MatSetSizes(*outmat,m,n,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr); 3835d6bb3c2dSHong Zhang ierr = MatSetType(*outmat,MATMPIAIJ);CHKERRQ(ierr); 3836d6bb3c2dSHong Zhang ierr = MatMPIAIJSetPreallocation(*outmat,0,dnz,0,onz);CHKERRQ(ierr); 3837d6bb3c2dSHong Zhang ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr); 3838d6bb3c2dSHong Zhang 3839d6bb3c2dSHong Zhang } else if (scall == MAT_REUSE_MATRIX){ 3840d6bb3c2dSHong Zhang ierr = MatGetOwnershipRange(*outmat,&rstart,PETSC_NULL);CHKERRQ(ierr); 3841d6bb3c2dSHong Zhang } else { 384277431f27SBarry Smith SETERRQ1(PETSC_ERR_ARG_WRONG,"Invalid MatReuse %d",(int)scall); 3843d6bb3c2dSHong Zhang } 3844d6bb3c2dSHong Zhang 3845d6bb3c2dSHong Zhang for (i=0;i<m;i++) { 3846ba8c8a56SBarry Smith ierr = MatGetRow_SeqAIJ(inmat,i,&nnz,&indx,&values);CHKERRQ(ierr); 3847b7940d39SSatish Balay Ii = i + rstart; 3848b7940d39SSatish Balay ierr = MatSetValues(*outmat,1,&Ii,nnz,indx,values,INSERT_VALUES);CHKERRQ(ierr); 3849ba8c8a56SBarry Smith ierr = MatRestoreRow_SeqAIJ(inmat,i,&nnz,&indx,&values);CHKERRQ(ierr); 3850d6bb3c2dSHong Zhang } 3851d6bb3c2dSHong Zhang ierr = MatDestroy(inmat);CHKERRQ(ierr); 3852d6bb3c2dSHong Zhang ierr = MatAssemblyBegin(*outmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 3853d6bb3c2dSHong Zhang ierr = MatAssemblyEnd(*outmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 385451dd7536SBarry Smith 3855c5d6d63eSBarry Smith PetscFunctionReturn(0); 
3856c5d6d63eSBarry Smith } 3857c5d6d63eSBarry Smith 3858c5d6d63eSBarry Smith #undef __FUNCT__ 3859c5d6d63eSBarry Smith #define __FUNCT__ "MatFileSplit" 3860dfbe8321SBarry Smith PetscErrorCode MatFileSplit(Mat A,char *outfile) 3861c5d6d63eSBarry Smith { 3862dfbe8321SBarry Smith PetscErrorCode ierr; 386332dcc486SBarry Smith PetscMPIInt rank; 3864b1d57f15SBarry Smith PetscInt m,N,i,rstart,nnz; 3865de4209c5SBarry Smith size_t len; 3866b1d57f15SBarry Smith const PetscInt *indx; 3867c5d6d63eSBarry Smith PetscViewer out; 3868c5d6d63eSBarry Smith char *name; 3869c5d6d63eSBarry Smith Mat B; 3870b3cc6726SBarry Smith const PetscScalar *values; 3871c5d6d63eSBarry Smith 3872c5d6d63eSBarry Smith PetscFunctionBegin; 3873c5d6d63eSBarry Smith ierr = MatGetLocalSize(A,&m,0);CHKERRQ(ierr); 3874c5d6d63eSBarry Smith ierr = MatGetSize(A,0,&N);CHKERRQ(ierr); 3875f204ca49SKris Buschelman /* Should this be the type of the diagonal block of A? */ 3876f69a0ea3SMatthew Knepley ierr = MatCreate(PETSC_COMM_SELF,&B);CHKERRQ(ierr); 3877f69a0ea3SMatthew Knepley ierr = MatSetSizes(B,m,N,m,N);CHKERRQ(ierr); 3878f204ca49SKris Buschelman ierr = MatSetType(B,MATSEQAIJ);CHKERRQ(ierr); 3879f204ca49SKris Buschelman ierr = MatSeqAIJSetPreallocation(B,0,PETSC_NULL);CHKERRQ(ierr); 3880c5d6d63eSBarry Smith ierr = MatGetOwnershipRange(A,&rstart,0);CHKERRQ(ierr); 3881c5d6d63eSBarry Smith for (i=0;i<m;i++) { 3882c5d6d63eSBarry Smith ierr = MatGetRow(A,i+rstart,&nnz,&indx,&values);CHKERRQ(ierr); 3883c5d6d63eSBarry Smith ierr = MatSetValues(B,1,&i,nnz,indx,values,INSERT_VALUES);CHKERRQ(ierr); 3884c5d6d63eSBarry Smith ierr = MatRestoreRow(A,i+rstart,&nnz,&indx,&values);CHKERRQ(ierr); 3885c5d6d63eSBarry Smith } 3886c5d6d63eSBarry Smith ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 3887c5d6d63eSBarry Smith ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 3888c5d6d63eSBarry Smith 38897adad957SLisandro Dalcin ierr = MPI_Comm_rank(((PetscObject)A)->comm,&rank);CHKERRQ(ierr); 3890c5d6d63eSBarry 
Smith ierr = PetscStrlen(outfile,&len);CHKERRQ(ierr); 3891c5d6d63eSBarry Smith ierr = PetscMalloc((len+5)*sizeof(char),&name);CHKERRQ(ierr); 3892c5d6d63eSBarry Smith sprintf(name,"%s.%d",outfile,rank); 3893852598b0SBarry Smith ierr = PetscViewerBinaryOpen(PETSC_COMM_SELF,name,FILE_MODE_APPEND,&out);CHKERRQ(ierr); 3894c5d6d63eSBarry Smith ierr = PetscFree(name); 3895c5d6d63eSBarry Smith ierr = MatView(B,out);CHKERRQ(ierr); 3896c5d6d63eSBarry Smith ierr = PetscViewerDestroy(out);CHKERRQ(ierr); 3897c5d6d63eSBarry Smith ierr = MatDestroy(B);CHKERRQ(ierr); 3898c5d6d63eSBarry Smith PetscFunctionReturn(0); 3899c5d6d63eSBarry Smith } 3900e5f2cdd8SHong Zhang 390151a7d1a8SHong Zhang EXTERN PetscErrorCode MatDestroy_MPIAIJ(Mat); 390251a7d1a8SHong Zhang #undef __FUNCT__ 390351a7d1a8SHong Zhang #define __FUNCT__ "MatDestroy_MPIAIJ_SeqsToMPI" 3904be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatDestroy_MPIAIJ_SeqsToMPI(Mat A) 390551a7d1a8SHong Zhang { 390651a7d1a8SHong Zhang PetscErrorCode ierr; 3907671beff6SHong Zhang Mat_Merge_SeqsToMPI *merge; 3908776b82aeSLisandro Dalcin PetscContainer container; 390951a7d1a8SHong Zhang 391051a7d1a8SHong Zhang PetscFunctionBegin; 3911671beff6SHong Zhang ierr = PetscObjectQuery((PetscObject)A,"MatMergeSeqsToMPI",(PetscObject *)&container);CHKERRQ(ierr); 3912671beff6SHong Zhang if (container) { 3913776b82aeSLisandro Dalcin ierr = PetscContainerGetPointer(container,(void **)&merge);CHKERRQ(ierr); 391451a7d1a8SHong Zhang ierr = PetscFree(merge->id_r);CHKERRQ(ierr); 39153e06a4e6SHong Zhang ierr = PetscFree(merge->len_s);CHKERRQ(ierr); 39163e06a4e6SHong Zhang ierr = PetscFree(merge->len_r);CHKERRQ(ierr); 391751a7d1a8SHong Zhang ierr = PetscFree(merge->bi);CHKERRQ(ierr); 391851a7d1a8SHong Zhang ierr = PetscFree(merge->bj);CHKERRQ(ierr); 391902c68681SHong Zhang ierr = PetscFree(merge->buf_ri);CHKERRQ(ierr); 392002c68681SHong Zhang ierr = PetscFree(merge->buf_rj);CHKERRQ(ierr); 392105b42c5fSBarry Smith ierr = 
PetscFree(merge->coi);CHKERRQ(ierr); 392205b42c5fSBarry Smith ierr = PetscFree(merge->coj);CHKERRQ(ierr); 392305b42c5fSBarry Smith ierr = PetscFree(merge->owners_co);CHKERRQ(ierr); 39242c72b5baSSatish Balay ierr = PetscFree(merge->rowmap.range);CHKERRQ(ierr); 3925671beff6SHong Zhang 3926776b82aeSLisandro Dalcin ierr = PetscContainerDestroy(container);CHKERRQ(ierr); 3927671beff6SHong Zhang ierr = PetscObjectCompose((PetscObject)A,"MatMergeSeqsToMPI",0);CHKERRQ(ierr); 3928671beff6SHong Zhang } 392951a7d1a8SHong Zhang ierr = PetscFree(merge);CHKERRQ(ierr); 393051a7d1a8SHong Zhang 393151a7d1a8SHong Zhang ierr = MatDestroy_MPIAIJ(A);CHKERRQ(ierr); 393251a7d1a8SHong Zhang PetscFunctionReturn(0); 393351a7d1a8SHong Zhang } 393451a7d1a8SHong Zhang 39357c4f633dSBarry Smith #include "../src/mat/utils/freespace.h" 3936be0fcf8dSHong Zhang #include "petscbt.h" 39374ebed01fSBarry Smith 3938e5f2cdd8SHong Zhang #undef __FUNCT__ 393938f152feSBarry Smith #define __FUNCT__ "MatMerge_SeqsToMPINumeric" 3940e5f2cdd8SHong Zhang /*@C 3941f08fae4eSHong Zhang MatMerge_SeqsToMPI - Creates a MPIAIJ matrix by adding sequential 3942e5f2cdd8SHong Zhang matrices from each processor 3943e5f2cdd8SHong Zhang 3944e5f2cdd8SHong Zhang Collective on MPI_Comm 3945e5f2cdd8SHong Zhang 3946e5f2cdd8SHong Zhang Input Parameters: 3947e5f2cdd8SHong Zhang + comm - the communicators the parallel matrix will live on 3948f08fae4eSHong Zhang . seqmat - the input sequential matrices 39490e36024fSHong Zhang . m - number of local rows (or PETSC_DECIDE) 39500e36024fSHong Zhang . n - number of local columns (or PETSC_DECIDE) 3951e5f2cdd8SHong Zhang - scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX 3952e5f2cdd8SHong Zhang 3953e5f2cdd8SHong Zhang Output Parameter: 3954f08fae4eSHong Zhang . 
mpimat - the parallel matrix generated 3955e5f2cdd8SHong Zhang 3956e5f2cdd8SHong Zhang Level: advanced 3957e5f2cdd8SHong Zhang 3958affca5deSHong Zhang Notes: 3959affca5deSHong Zhang The dimensions of the sequential matrix in each processor MUST be the same. 3960affca5deSHong Zhang The input seqmat is included into the container "Mat_Merge_SeqsToMPI", and will be 3961affca5deSHong Zhang destroyed when mpimat is destroyed. Call PetscObjectQuery() to access seqmat. 3962e5f2cdd8SHong Zhang @*/ 3963be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatMerge_SeqsToMPINumeric(Mat seqmat,Mat mpimat) 396455d1abb9SHong Zhang { 396555d1abb9SHong Zhang PetscErrorCode ierr; 39667adad957SLisandro Dalcin MPI_Comm comm=((PetscObject)mpimat)->comm; 396755d1abb9SHong Zhang Mat_SeqAIJ *a=(Mat_SeqAIJ*)seqmat->data; 3968b1d57f15SBarry Smith PetscMPIInt size,rank,taga,*len_s; 3969d0f46423SBarry Smith PetscInt N=mpimat->cmap->N,i,j,*owners,*ai=a->i,*aj=a->j; 3970b1d57f15SBarry Smith PetscInt proc,m; 3971b1d57f15SBarry Smith PetscInt **buf_ri,**buf_rj; 3972b1d57f15SBarry Smith PetscInt k,anzi,*bj_i,*bi,*bj,arow,bnzi,nextaj; 3973b1d57f15SBarry Smith PetscInt nrows,**buf_ri_k,**nextrow,**nextai; 397455d1abb9SHong Zhang MPI_Request *s_waits,*r_waits; 397555d1abb9SHong Zhang MPI_Status *status; 3976a77337e4SBarry Smith MatScalar *aa=a->a; 3977dd6ea824SBarry Smith MatScalar **abuf_r,*ba_i; 397855d1abb9SHong Zhang Mat_Merge_SeqsToMPI *merge; 3979776b82aeSLisandro Dalcin PetscContainer container; 398055d1abb9SHong Zhang 398155d1abb9SHong Zhang PetscFunctionBegin; 39824ebed01fSBarry Smith ierr = PetscLogEventBegin(MAT_Seqstompinum,seqmat,0,0,0);CHKERRQ(ierr); 39833c2c1871SHong Zhang 398455d1abb9SHong Zhang ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 398555d1abb9SHong Zhang ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 398655d1abb9SHong Zhang 398755d1abb9SHong Zhang ierr = PetscObjectQuery((PetscObject)mpimat,"MatMergeSeqsToMPI",(PetscObject *)&container);CHKERRQ(ierr); 
398855d1abb9SHong Zhang if (container) { 3989776b82aeSLisandro Dalcin ierr = PetscContainerGetPointer(container,(void **)&merge);CHKERRQ(ierr); 399055d1abb9SHong Zhang } 399155d1abb9SHong Zhang bi = merge->bi; 399255d1abb9SHong Zhang bj = merge->bj; 399355d1abb9SHong Zhang buf_ri = merge->buf_ri; 399455d1abb9SHong Zhang buf_rj = merge->buf_rj; 399555d1abb9SHong Zhang 399655d1abb9SHong Zhang ierr = PetscMalloc(size*sizeof(MPI_Status),&status);CHKERRQ(ierr); 3997357abbc8SBarry Smith owners = merge->rowmap.range; 399855d1abb9SHong Zhang len_s = merge->len_s; 399955d1abb9SHong Zhang 400055d1abb9SHong Zhang /* send and recv matrix values */ 400155d1abb9SHong Zhang /*-----------------------------*/ 4002357abbc8SBarry Smith ierr = PetscObjectGetNewTag((PetscObject)mpimat,&taga);CHKERRQ(ierr); 400355d1abb9SHong Zhang ierr = PetscPostIrecvScalar(comm,taga,merge->nrecv,merge->id_r,merge->len_r,&abuf_r,&r_waits);CHKERRQ(ierr); 400455d1abb9SHong Zhang 400555d1abb9SHong Zhang ierr = PetscMalloc((merge->nsend+1)*sizeof(MPI_Request),&s_waits);CHKERRQ(ierr); 400655d1abb9SHong Zhang for (proc=0,k=0; proc<size; proc++){ 400755d1abb9SHong Zhang if (!len_s[proc]) continue; 400855d1abb9SHong Zhang i = owners[proc]; 400955d1abb9SHong Zhang ierr = MPI_Isend(aa+ai[i],len_s[proc],MPIU_MATSCALAR,proc,taga,comm,s_waits+k);CHKERRQ(ierr); 401055d1abb9SHong Zhang k++; 401155d1abb9SHong Zhang } 401255d1abb9SHong Zhang 40130c468ba9SBarry Smith if (merge->nrecv) {ierr = MPI_Waitall(merge->nrecv,r_waits,status);CHKERRQ(ierr);} 40140c468ba9SBarry Smith if (merge->nsend) {ierr = MPI_Waitall(merge->nsend,s_waits,status);CHKERRQ(ierr);} 401555d1abb9SHong Zhang ierr = PetscFree(status);CHKERRQ(ierr); 401655d1abb9SHong Zhang 401755d1abb9SHong Zhang ierr = PetscFree(s_waits);CHKERRQ(ierr); 401855d1abb9SHong Zhang ierr = PetscFree(r_waits);CHKERRQ(ierr); 401955d1abb9SHong Zhang 402055d1abb9SHong Zhang /* insert mat values of mpimat */ 402155d1abb9SHong Zhang /*----------------------------*/ 
4022a77337e4SBarry Smith ierr = PetscMalloc(N*sizeof(PetscScalar),&ba_i);CHKERRQ(ierr); 4023b1d57f15SBarry Smith ierr = PetscMalloc((3*merge->nrecv+1)*sizeof(PetscInt**),&buf_ri_k);CHKERRQ(ierr); 402455d1abb9SHong Zhang nextrow = buf_ri_k + merge->nrecv; 402555d1abb9SHong Zhang nextai = nextrow + merge->nrecv; 402655d1abb9SHong Zhang 402755d1abb9SHong Zhang for (k=0; k<merge->nrecv; k++){ 402855d1abb9SHong Zhang buf_ri_k[k] = buf_ri[k]; /* beginning of k-th recved i-structure */ 402955d1abb9SHong Zhang nrows = *(buf_ri_k[k]); 403055d1abb9SHong Zhang nextrow[k] = buf_ri_k[k]+1; /* next row number of k-th recved i-structure */ 403155d1abb9SHong Zhang nextai[k] = buf_ri_k[k] + (nrows + 1);/* poins to the next i-structure of k-th recved i-structure */ 403255d1abb9SHong Zhang } 403355d1abb9SHong Zhang 403455d1abb9SHong Zhang /* set values of ba */ 4035357abbc8SBarry Smith m = merge->rowmap.n; 403655d1abb9SHong Zhang for (i=0; i<m; i++) { 403755d1abb9SHong Zhang arow = owners[rank] + i; 403855d1abb9SHong Zhang bj_i = bj+bi[i]; /* col indices of the i-th row of mpimat */ 403955d1abb9SHong Zhang bnzi = bi[i+1] - bi[i]; 4040a77337e4SBarry Smith ierr = PetscMemzero(ba_i,bnzi*sizeof(PetscScalar));CHKERRQ(ierr); 404155d1abb9SHong Zhang 404255d1abb9SHong Zhang /* add local non-zero vals of this proc's seqmat into ba */ 404355d1abb9SHong Zhang anzi = ai[arow+1] - ai[arow]; 404455d1abb9SHong Zhang aj = a->j + ai[arow]; 404555d1abb9SHong Zhang aa = a->a + ai[arow]; 404655d1abb9SHong Zhang nextaj = 0; 404755d1abb9SHong Zhang for (j=0; nextaj<anzi; j++){ 404855d1abb9SHong Zhang if (*(bj_i + j) == aj[nextaj]){ /* bcol == acol */ 404955d1abb9SHong Zhang ba_i[j] += aa[nextaj++]; 405055d1abb9SHong Zhang } 405155d1abb9SHong Zhang } 405255d1abb9SHong Zhang 405355d1abb9SHong Zhang /* add received vals into ba */ 405455d1abb9SHong Zhang for (k=0; k<merge->nrecv; k++){ /* k-th received message */ 405555d1abb9SHong Zhang /* i-th row */ 405655d1abb9SHong Zhang if (i == *nextrow[k]) { 
405755d1abb9SHong Zhang anzi = *(nextai[k]+1) - *nextai[k]; 405855d1abb9SHong Zhang aj = buf_rj[k] + *(nextai[k]); 405955d1abb9SHong Zhang aa = abuf_r[k] + *(nextai[k]); 406055d1abb9SHong Zhang nextaj = 0; 406155d1abb9SHong Zhang for (j=0; nextaj<anzi; j++){ 406255d1abb9SHong Zhang if (*(bj_i + j) == aj[nextaj]){ /* bcol == acol */ 406355d1abb9SHong Zhang ba_i[j] += aa[nextaj++]; 406455d1abb9SHong Zhang } 406555d1abb9SHong Zhang } 406655d1abb9SHong Zhang nextrow[k]++; nextai[k]++; 406755d1abb9SHong Zhang } 406855d1abb9SHong Zhang } 406955d1abb9SHong Zhang ierr = MatSetValues(mpimat,1,&arow,bnzi,bj_i,ba_i,INSERT_VALUES);CHKERRQ(ierr); 407055d1abb9SHong Zhang } 407155d1abb9SHong Zhang ierr = MatAssemblyBegin(mpimat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 407255d1abb9SHong Zhang ierr = MatAssemblyEnd(mpimat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 407355d1abb9SHong Zhang 407455d1abb9SHong Zhang ierr = PetscFree(abuf_r);CHKERRQ(ierr); 407555d1abb9SHong Zhang ierr = PetscFree(ba_i);CHKERRQ(ierr); 407655d1abb9SHong Zhang ierr = PetscFree(buf_ri_k);CHKERRQ(ierr); 40774ebed01fSBarry Smith ierr = PetscLogEventEnd(MAT_Seqstompinum,seqmat,0,0,0);CHKERRQ(ierr); 407855d1abb9SHong Zhang PetscFunctionReturn(0); 407955d1abb9SHong Zhang } 408038f152feSBarry Smith 408138f152feSBarry Smith #undef __FUNCT__ 408238f152feSBarry Smith #define __FUNCT__ "MatMerge_SeqsToMPISymbolic" 4083be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatMerge_SeqsToMPISymbolic(MPI_Comm comm,Mat seqmat,PetscInt m,PetscInt n,Mat *mpimat) 4084e5f2cdd8SHong Zhang { 4085f08fae4eSHong Zhang PetscErrorCode ierr; 408655a3bba9SHong Zhang Mat B_mpi; 4087c2234fe3SHong Zhang Mat_SeqAIJ *a=(Mat_SeqAIJ*)seqmat->data; 4088b1d57f15SBarry Smith PetscMPIInt size,rank,tagi,tagj,*len_s,*len_si,*len_ri; 4089b1d57f15SBarry Smith PetscInt **buf_rj,**buf_ri,**buf_ri_k; 4090d0f46423SBarry Smith PetscInt M=seqmat->rmap->n,N=seqmat->cmap->n,i,*owners,*ai=a->i,*aj=a->j; 4091b1d57f15SBarry Smith PetscInt len,proc,*dnz,*onz; 
#undef __FUNCT__
#define __FUNCT__ "MatMerge_SeqsToMPISymbolic"
/*
   MatMerge_SeqsToMPISymbolic - Symbolic phase of MatMerge_SeqsToMPI(): builds
   the nonzero structure of the merged MPIAIJ matrix, preallocates it, and
   attaches a Mat_Merge_SeqsToMPI support structure (in the "MatMergeSeqsToMPI"
   container) holding the communication layout that MatMerge_SeqsToMPINumeric()
   reuses. The returned matrix is NOT assembled.
*/
PetscErrorCode PETSCMAT_DLLEXPORT MatMerge_SeqsToMPISymbolic(MPI_Comm comm,Mat seqmat,PetscInt m,PetscInt n,Mat *mpimat)
{
  PetscErrorCode      ierr;
  Mat                 B_mpi;
  Mat_SeqAIJ          *a=(Mat_SeqAIJ*)seqmat->data;
  PetscMPIInt         size,rank,tagi,tagj,*len_s,*len_si,*len_ri;
  PetscInt            **buf_rj,**buf_ri,**buf_ri_k;
  PetscInt            M=seqmat->rmap->n,N=seqmat->cmap->n,i,*owners,*ai=a->i,*aj=a->j;
  PetscInt            len,proc,*dnz,*onz;
  PetscInt            k,anzi,*bi,*bj,*lnk,nlnk,arow,bnzi,nspacedouble=0;
  PetscInt            nrows,*buf_s,*buf_si,*buf_si_i,**nextrow,**nextai;
  MPI_Request         *si_waits,*sj_waits,*ri_waits,*rj_waits;
  MPI_Status          *status;
  PetscFreeSpaceList  free_space=PETSC_NULL,current_space=PETSC_NULL;
  PetscBT             lnkbt;
  Mat_Merge_SeqsToMPI *merge;
  PetscContainer      container;

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(MAT_Seqstompisym,seqmat,0,0,0);CHKERRQ(ierr);

  /* make sure it is a PETSc comm */
  ierr = PetscCommDuplicate(comm,&comm,PETSC_NULL);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);

  ierr = PetscNew(Mat_Merge_SeqsToMPI,&merge);CHKERRQ(ierr);
  ierr = PetscMalloc(size*sizeof(MPI_Status),&status);CHKERRQ(ierr);

  /* determine row ownership */
  /*---------------------------------------------------------*/
  ierr = PetscMapInitialize(comm,&merge->rowmap);CHKERRQ(ierr);
  merge->rowmap.n  = m;
  merge->rowmap.N  = M;
  merge->rowmap.bs = 1;
  ierr = PetscMapSetUp(&merge->rowmap);CHKERRQ(ierr);
  ierr = PetscMalloc(size*sizeof(PetscMPIInt),&len_si);CHKERRQ(ierr);
  ierr = PetscMalloc(size*sizeof(PetscMPIInt),&merge->len_s);CHKERRQ(ierr);

  m      = merge->rowmap.n;
  M      = merge->rowmap.N;
  owners = merge->rowmap.range;

  /* determine the number of messages to send, their lengths */
  /*---------------------------------------------------------*/
  len_s = merge->len_s;

  len = 0;  /* length of buf_si[] */
  merge->nsend = 0;
  for (proc=0; proc<size; proc++){
    len_si[proc] = 0;
    if (proc == rank){
      len_s[proc] = 0;
    } else {
      len_si[proc] = owners[proc+1] - owners[proc] + 1;
      len_s[proc]  = ai[owners[proc+1]] - ai[owners[proc]]; /* num of rows to be sent to [proc] */
    }
    if (len_s[proc]) {
      merge->nsend++;
      nrows = 0;
      /* count only the nonempty rows owned by proc */
      for (i=owners[proc]; i<owners[proc+1]; i++){
        if (ai[i+1] > ai[i]) nrows++;
      }
      len_si[proc] = 2*(nrows+1);
      len += len_si[proc];
    }
  }

  /* determine the number and length of messages to receive for ij-structure */
  /*-------------------------------------------------------------------------*/
  ierr = PetscGatherNumberOfMessages(comm,PETSC_NULL,len_s,&merge->nrecv);CHKERRQ(ierr);
  ierr = PetscGatherMessageLengths2(comm,merge->nsend,merge->nrecv,len_s,len_si,&merge->id_r,&merge->len_r,&len_ri);CHKERRQ(ierr);

  /* post the Irecv of j-structure */
  /*-------------------------------*/
  ierr = PetscCommGetNewTag(comm,&tagj);CHKERRQ(ierr);
  ierr = PetscPostIrecvInt(comm,tagj,merge->nrecv,merge->id_r,merge->len_r,&buf_rj,&rj_waits);CHKERRQ(ierr);

  /* post the Isend of j-structure */
  /*--------------------------------*/
  ierr = PetscMalloc((2*merge->nsend+1)*sizeof(MPI_Request),&si_waits);CHKERRQ(ierr);
  sj_waits = si_waits + merge->nsend;

  for (proc=0, k=0; proc<size; proc++){
    if (!len_s[proc]) continue;
    i = owners[proc];
    ierr = MPI_Isend(aj+ai[i],len_s[proc],MPIU_INT,proc,tagj,comm,sj_waits+k);CHKERRQ(ierr);
    k++;
  }

  /* receives and sends of j-structure are complete */
  /*------------------------------------------------*/
  if (merge->nrecv) {ierr = MPI_Waitall(merge->nrecv,rj_waits,status);CHKERRQ(ierr);}
  if (merge->nsend) {ierr = MPI_Waitall(merge->nsend,sj_waits,status);CHKERRQ(ierr);}

  /* send and recv i-structure */
  /*---------------------------*/
  ierr = PetscCommGetNewTag(comm,&tagi);CHKERRQ(ierr);
  ierr = PetscPostIrecvInt(comm,tagi,merge->nrecv,merge->id_r,len_ri,&buf_ri,&ri_waits);CHKERRQ(ierr);

  ierr = PetscMalloc((len+1)*sizeof(PetscInt),&buf_s);CHKERRQ(ierr);
  buf_si = buf_s;  /* points to the beginning of k-th msg to be sent */
  for (proc=0,k=0; proc<size; proc++){
    if (!len_s[proc]) continue;
    /* form outgoing message for i-structure:
         buf_si[0]:                 nrows to be sent
               [1:nrows]:           row index (global)
               [nrows+1:2*nrows+1]: i-structure index
    */
    /*-------------------------------------------*/
    nrows = len_si[proc]/2 - 1;
    buf_si_i    = buf_si + nrows+1;
    buf_si[0]   = nrows;
    buf_si_i[0] = 0;
    nrows = 0;
    for (i=owners[proc]; i<owners[proc+1]; i++){
      anzi = ai[i+1] - ai[i];
      if (anzi) {
        buf_si_i[nrows+1] = buf_si_i[nrows] + anzi; /* i-structure */
        buf_si[nrows+1]   = i-owners[proc]; /* local row index */
        nrows++;
      }
    }
    ierr = MPI_Isend(buf_si,len_si[proc],MPIU_INT,proc,tagi,comm,si_waits+k);CHKERRQ(ierr);
    k++;
    buf_si += len_si[proc];
  }

  if (merge->nrecv) {ierr = MPI_Waitall(merge->nrecv,ri_waits,status);CHKERRQ(ierr);}
  if (merge->nsend) {ierr = MPI_Waitall(merge->nsend,si_waits,status);CHKERRQ(ierr);}

  ierr = PetscInfo2(seqmat,"nsend: %D, nrecv: %D\n",merge->nsend,merge->nrecv);CHKERRQ(ierr);
  for (i=0; i<merge->nrecv; i++){
    ierr = PetscInfo3(seqmat,"recv len_ri=%D, len_rj=%D from [%D]\n",len_ri[i],merge->len_r[i],merge->id_r[i]);CHKERRQ(ierr);
  }

  ierr = PetscFree(len_si);CHKERRQ(ierr);
  ierr = PetscFree(len_ri);CHKERRQ(ierr);
  ierr = PetscFree(rj_waits);CHKERRQ(ierr);
  ierr = PetscFree(si_waits);CHKERRQ(ierr);
  ierr = PetscFree(ri_waits);CHKERRQ(ierr);
  ierr = PetscFree(buf_s);CHKERRQ(ierr);
  ierr = PetscFree(status);CHKERRQ(ierr);

  /* compute a local seq matrix in each processor */
  /*----------------------------------------------*/
  /* allocate bi array and free space for accumulating nonzero column info */
  ierr = PetscMalloc((m+1)*sizeof(PetscInt),&bi);CHKERRQ(ierr);
  bi[0] = 0;

  /* create and initialize a linked list */
  nlnk = N+1;
  ierr = PetscLLCreate(N,N,nlnk,lnk,lnkbt);CHKERRQ(ierr);

  /* initial FreeSpace size is 2*(num of local nnz(seqmat)) */
  len = 0;
  len = ai[owners[rank+1]] - ai[owners[rank]];
  ierr = PetscFreeSpaceGet((PetscInt)(2*len+1),&free_space);CHKERRQ(ierr);
  current_space = free_space;

  /* determine symbolic info for each local row */
  ierr = PetscMalloc((3*merge->nrecv+1)*sizeof(PetscInt**),&buf_ri_k);CHKERRQ(ierr);
  nextrow = buf_ri_k + merge->nrecv;
  nextai  = nextrow + merge->nrecv;
  for (k=0; k<merge->nrecv; k++){
    buf_ri_k[k] = buf_ri[k]; /* beginning of k-th recved i-structure */
    nrows       = *buf_ri_k[k];
    nextrow[k]  = buf_ri_k[k] + 1;  /* next row number of k-th recved i-structure */
    nextai[k]   = buf_ri_k[k] + (nrows + 1);/* points to the next i-structure of k-th recved i-structure */
  }

  ierr = MatPreallocateInitialize(comm,m,n,dnz,onz);CHKERRQ(ierr);
  len = 0;
  for (i=0;i<m;i++) {
    bnzi = 0;
    /* add local non-zero cols of this proc's seqmat into lnk */
    arow = owners[rank] + i;
    anzi = ai[arow+1] - ai[arow];
    aj   = a->j + ai[arow];
    ierr = PetscLLAdd(anzi,aj,N,nlnk,lnk,lnkbt);CHKERRQ(ierr);
    bnzi += nlnk;
    /* add received col data into lnk */
    for (k=0; k<merge->nrecv; k++){ /* k-th received message */
      if (i == *nextrow[k]) { /* i-th row */
        anzi = *(nextai[k]+1) - *nextai[k];
        aj   = buf_rj[k] + *nextai[k];
        ierr = PetscLLAdd(anzi,aj,N,nlnk,lnk,lnkbt);CHKERRQ(ierr);
        bnzi += nlnk;
        nextrow[k]++; nextai[k]++;
      }
    }
    if (len < bnzi) len = bnzi; /* =max(bnzi) */

    /* if free space is not available, make more free space */
    if (current_space->local_remaining<bnzi) {
      ierr = PetscFreeSpaceGet(bnzi+current_space->total_array_size,&current_space);CHKERRQ(ierr);
      nspacedouble++;
    }
    /* copy data into free space, then initialize lnk */
    ierr = PetscLLClean(N,N,bnzi,lnk,current_space->array,lnkbt);CHKERRQ(ierr);
    ierr = MatPreallocateSet(i+owners[rank],bnzi,current_space->array,dnz,onz);CHKERRQ(ierr);

    current_space->array           += bnzi;
    current_space->local_used      += bnzi;
    current_space->local_remaining -= bnzi;

    bi[i+1] = bi[i] + bnzi;
  }

  ierr = PetscFree(buf_ri_k);CHKERRQ(ierr);

  ierr = PetscMalloc((bi[m]+1)*sizeof(PetscInt),&bj);CHKERRQ(ierr);
  ierr = PetscFreeSpaceContiguous(&free_space,bj);CHKERRQ(ierr);
  ierr = PetscLLDestroy(lnk,lnkbt);CHKERRQ(ierr);

  /* create symbolic parallel matrix B_mpi */
  /*---------------------------------------*/
  ierr = MatCreate(comm,&B_mpi);CHKERRQ(ierr);
  if (n==PETSC_DECIDE) {
    ierr = MatSetSizes(B_mpi,m,n,PETSC_DETERMINE,N);CHKERRQ(ierr);
  } else {
    ierr = MatSetSizes(B_mpi,m,n,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
  }
  ierr = MatSetType(B_mpi,MATMPIAIJ);CHKERRQ(ierr);
  ierr = MatMPIAIJSetPreallocation(B_mpi,0,dnz,0,onz);CHKERRQ(ierr);
  ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);

  /* B_mpi is not ready for use - assembly will be done by MatMerge_SeqsToMPINumeric() */
  B_mpi->assembled    = PETSC_FALSE;
  B_mpi->ops->destroy = MatDestroy_MPIAIJ_SeqsToMPI;
  merge->bi           = bi;
  merge->bj           = bj;
  merge->buf_ri       = buf_ri;
  merge->buf_rj       = buf_rj;
  merge->coi          = PETSC_NULL;
  merge->coj          = PETSC_NULL;
  merge->owners_co    = PETSC_NULL;

  /* attach the supporting struct to B_mpi for reuse */
  ierr = PetscContainerCreate(PETSC_COMM_SELF,&container);CHKERRQ(ierr);
  ierr = PetscContainerSetPointer(container,merge);CHKERRQ(ierr);
  ierr = PetscObjectCompose((PetscObject)B_mpi,"MatMergeSeqsToMPI",(PetscObject)container);CHKERRQ(ierr);
  *mpimat = B_mpi;

  ierr = PetscCommDestroy(&comm);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(MAT_Seqstompisym,seqmat,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#undef __FUNCT__
#define __FUNCT__ "MatMerge_SeqsToMPI"
/*
   MatMerge_SeqsToMPI - Driver: runs the symbolic phase only for
   MAT_INITIAL_MATRIX (MAT_REUSE_MATRIX reuses the structure attached to
   *mpimat), then always performs the numeric assembly.
*/
PetscErrorCode PETSCMAT_DLLEXPORT MatMerge_SeqsToMPI(MPI_Comm comm,Mat seqmat,PetscInt m,PetscInt n,MatReuse scall,Mat *mpimat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(MAT_Seqstompi,seqmat,0,0,0);CHKERRQ(ierr);
  if (scall == MAT_INITIAL_MATRIX){
    ierr = MatMerge_SeqsToMPISymbolic(comm,seqmat,m,n,mpimat);CHKERRQ(ierr);
  }
  ierr = MatMerge_SeqsToMPINumeric(seqmat,*mpimat);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(MAT_Seqstompi,seqmat,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#undef __FUNCT__
#define __FUNCT__ "MatGetLocalMat"
/*@
     MatGetLocalMat - Creates a SeqAIJ matrix by taking all its local rows

    Not Collective

   Input Parameters:
+    A - the matrix
-    scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX

   Output Parameter:
.    A_loc - the local sequential matrix generated

    Level: developer

@*/
PetscErrorCode PETSCMAT_DLLEXPORT MatGetLocalMat(Mat A,MatReuse scall,Mat *A_loc)
{
  PetscErrorCode ierr;
  Mat_MPIAIJ     *mpimat=(Mat_MPIAIJ*)A->data;
  Mat_SeqAIJ     *mat,*a=(Mat_SeqAIJ*)(mpimat->A)->data,*b=(Mat_SeqAIJ*)(mpimat->B)->data;
  PetscInt       *ai=a->i,*aj=a->j,*bi=b->i,*bj=b->j,*cmap=mpimat->garray;
  MatScalar      *aa=a->a,*ba=b->a,*cam;
  PetscScalar    *ca;
  PetscInt       am=A->rmap->n,i,j,k,cstart=A->cmap->rstart;
  PetscInt       *ci,*cj,col,ncols_d,ncols_o,jo;

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(MAT_Getlocalmat,A,0,0,0);CHKERRQ(ierr);
  if (scall == MAT_INITIAL_MATRIX){
    /* row i of the result holds the off-diagonal (B) and diagonal (A) entries
       of row i merged in ascending global column order */
    ierr = PetscMalloc((1+am)*sizeof(PetscInt),&ci);CHKERRQ(ierr);
    ci[0] = 0;
    for (i=0; i<am; i++){
      ci[i+1] = ci[i] + (ai[i+1] - ai[i]) + (bi[i+1] - bi[i]);
    }
    ierr = PetscMalloc((1+ci[am])*sizeof(PetscInt),&cj);CHKERRQ(ierr);
    ierr = PetscMalloc((1+ci[am])*sizeof(PetscScalar),&ca);CHKERRQ(ierr);
    k = 0;
    for (i=0; i<am; i++) {
      ncols_o = bi[i+1] - bi[i];
      ncols_d = ai[i+1] - ai[i];
      /* off-diagonal portion of A (global columns left of the diagonal block) */
      for (jo=0; jo<ncols_o; jo++) {
        col = cmap[*bj];
        if (col >= cstart) break;
        cj[k]   = col; bj++;
        ca[k++] = *ba++;
      }
      /* diagonal portion of A */
      for (j=0; j<ncols_d; j++) {
        cj[k]   = cstart + *aj++;
        ca[k++] = *aa++;
      }
      /* off-diagonal portion of A (global columns right of the diagonal block) */
      for (j=jo; j<ncols_o; j++) {
        cj[k]   = cmap[*bj++];
        ca[k++] = *ba++;
      }
    }
    /* put together the new matrix */
    ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,am,A->cmap->N,ci,cj,ca,A_loc);CHKERRQ(ierr);
    /* MatCreateSeqAIJWithArrays flags matrix so PETSc doesn't free the user's arrays. */
    /* Since these are PETSc arrays, change flags to free them as necessary. */
    mat = (Mat_SeqAIJ*)(*A_loc)->data;
    mat->free_a  = PETSC_TRUE;
    mat->free_ij = PETSC_TRUE;
    mat->nonew   = 0;
  } else if (scall == MAT_REUSE_MATRIX){
    /* structure unchanged: only overwrite the numerical values in place */
    mat=(Mat_SeqAIJ*)(*A_loc)->data;
    ci = mat->i; cj = mat->j; cam = mat->a;
    for (i=0; i<am; i++) {
      /* off-diagonal portion of A */
      ncols_o = bi[i+1] - bi[i];
      for (jo=0; jo<ncols_o; jo++) {
        col = cmap[*bj];
        if (col >= cstart) break;
        *cam++ = *ba++; bj++;
      }
      /* diagonal portion of A */
      ncols_d = ai[i+1] - ai[i];
      for (j=0; j<ncols_d; j++) *cam++ = *aa++;
      /* off-diagonal portion of A */
      for (j=jo; j<ncols_o; j++) {
        *cam++ = *ba++; bj++;
      }
    }
  } else {
    SETERRQ1(PETSC_ERR_ARG_WRONG,"Invalid MatReuse %d",(int)scall);
  }

  ierr = PetscLogEventEnd(MAT_Getlocalmat,A,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
#undef __FUNCT__
#define __FUNCT__ "MatGetLocalMatCondensed"
/*@C
     MatGetLocalMatCondensed - Creates a SeqAIJ matrix by taking all its local rows and NON-ZERO columns

    Not Collective

   Input Parameters:
+    A - the matrix
.    scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
-    row, col - index sets of rows and columns to extract (or PETSC_NULL)

   Output Parameter:
.    A_loc - the local sequential matrix generated

    Level: developer

@*/
PetscErrorCode PETSCMAT_DLLEXPORT MatGetLocalMatCondensed(Mat A,MatReuse scall,IS *row,IS *col,Mat *A_loc)
{
  Mat_MPIAIJ     *a=(Mat_MPIAIJ*)A->data;
  PetscErrorCode ierr;
  PetscInt       i,start,end,ncols,nzA,nzB,*cmap,imark,*idx;
  IS             isrowa,iscola;
  Mat            *aloc;

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(MAT_Getlocalmatcondensed,A,0,0,0);CHKERRQ(ierr);
  if (!row){
    /* default: all locally owned rows */
    start = A->rmap->rstart; end = A->rmap->rend;
    ierr = ISCreateStride(PETSC_COMM_SELF,end-start,start,1,&isrowa);CHKERRQ(ierr);
  } else {
    isrowa = *row;
  }
  if (!col){
    /* default: the nonzero columns of the local part of A, in ascending global
       order: off-diagonal columns left of the diagonal block, then the
       diagonal-block columns, then the remaining off-diagonal columns */
    start = A->cmap->rstart;
    cmap  = a->garray;
    nzA   = a->A->cmap->n;
    nzB   = a->B->cmap->n;
    ierr  = PetscMalloc((nzA+nzB)*sizeof(PetscInt), &idx);CHKERRQ(ierr);
    ncols = 0;
    for (i=0; i<nzB; i++) {
      if (cmap[i] < start) idx[ncols++] = cmap[i];
      else break;
    }
    imark = i;
    for (i=0; i<nzA; i++) idx[ncols++] = start + i;
    for (i=imark; i<nzB; i++) idx[ncols++] = cmap[i];
    ierr = ISCreateGeneral(PETSC_COMM_SELF,ncols,idx,&iscola);CHKERRQ(ierr);
    ierr = PetscFree(idx);CHKERRQ(ierr);
  } else {
    iscola = *col;
  }
  if (scall != MAT_INITIAL_MATRIX){
    /* MatGetSubMatrices expects an array of matrices on reuse */
    ierr = PetscMalloc(sizeof(Mat),&aloc);CHKERRQ(ierr);
    aloc[0] = *A_loc;
  }
  ierr = MatGetSubMatrices(A,1,&isrowa,&iscola,scall,&aloc);CHKERRQ(ierr);
  *A_loc = aloc[0];
  ierr = PetscFree(aloc);CHKERRQ(ierr);
  if (!row){
    ierr = ISDestroy(isrowa);CHKERRQ(ierr);
  }
  if (!col){
    ierr = ISDestroy(iscola);CHKERRQ(ierr);
  }
  ierr = PetscLogEventEnd(MAT_Getlocalmatcondensed,A,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetBrowsOfAcols"
/*@C
     MatGetBrowsOfAcols - Creates a SeqAIJ matrix by taking rows of B that equal to nonzero columns of local A

    Collective on Mat

   Input Parameters:
+    A,B - the matrices in mpiaij format
.    scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
-    rowb, colb - index sets of rows and columns of B to extract (or PETSC_NULL)

   Output Parameter:
+    rowb, colb - index sets of rows and columns of B to extract
.    brstart - row index of B_seq from which next B->rmap->n rows are taken from B's local rows
-    B_seq - the sequential matrix generated

    Level: developer

@*/
PetscErrorCode PETSCMAT_DLLEXPORT MatGetBrowsOfAcols(Mat A,Mat B,MatReuse scall,IS *rowb,IS *colb,PetscInt *brstart,Mat *B_seq)
{
  Mat_MPIAIJ     *a=(Mat_MPIAIJ*)A->data;
  PetscErrorCode ierr;
  PetscInt       *idx,i,start,ncols,nzA,nzB,*cmap,imark;
  IS             isrowb,iscolb;
  Mat            *bseq;

  PetscFunctionBegin;
  /* A's column layout must match B's row layout for A*B-style products */
  if (A->cmap->rstart != B->rmap->rstart || A->cmap->rend != B->rmap->rend){
    SETERRQ4(PETSC_ERR_ARG_SIZ,"Matrix local dimensions are incompatible, (%D, %D) != (%D,%D)",A->cmap->rstart,A->cmap->rend,B->rmap->rstart,B->rmap->rend);
  }
  ierr = PetscLogEventBegin(MAT_GetBrowsOfAcols,A,B,0,0);CHKERRQ(ierr);

  if (scall == MAT_INITIAL_MATRIX){
    start = A->cmap->rstart;
    cmap  = a->garray;
    nzA   = a->A->cmap->n;
    nzB   = a->B->cmap->n;
    ierr  = PetscMalloc((nzA+nzB)*sizeof(PetscInt), &idx);CHKERRQ(ierr);
    ncols = 0;
    for (i=0; i<nzB; i++) {  /* row < local row index */
      if (cmap[i] < start) idx[ncols++] = cmap[i];
      else break;
    }
    imark = i;
    for (i=0; i<nzA; i++) idx[ncols++] = start + i;  /* local rows */
    for (i=imark; i<nzB; i++) idx[ncols++] = cmap[i]; /* row > local row
index */ 456125616d81SHong Zhang ierr = ISCreateGeneral(PETSC_COMM_SELF,ncols,idx,&isrowb);CHKERRQ(ierr); 456225616d81SHong Zhang ierr = PetscFree(idx);CHKERRQ(ierr); 456325616d81SHong Zhang *brstart = imark; 4564d0f46423SBarry Smith ierr = ISCreateStride(PETSC_COMM_SELF,B->cmap->N,0,1,&iscolb);CHKERRQ(ierr); 456525616d81SHong Zhang } else { 456625616d81SHong Zhang if (!rowb || !colb) SETERRQ(PETSC_ERR_SUP,"IS rowb and colb must be provided for MAT_REUSE_MATRIX"); 456725616d81SHong Zhang isrowb = *rowb; iscolb = *colb; 456825616d81SHong Zhang ierr = PetscMalloc(sizeof(Mat),&bseq);CHKERRQ(ierr); 456925616d81SHong Zhang bseq[0] = *B_seq; 457025616d81SHong Zhang } 457125616d81SHong Zhang ierr = MatGetSubMatrices(B,1,&isrowb,&iscolb,scall,&bseq);CHKERRQ(ierr); 457225616d81SHong Zhang *B_seq = bseq[0]; 457325616d81SHong Zhang ierr = PetscFree(bseq);CHKERRQ(ierr); 457425616d81SHong Zhang if (!rowb){ 457525616d81SHong Zhang ierr = ISDestroy(isrowb);CHKERRQ(ierr); 457625616d81SHong Zhang } else { 457725616d81SHong Zhang *rowb = isrowb; 457825616d81SHong Zhang } 457925616d81SHong Zhang if (!colb){ 458025616d81SHong Zhang ierr = ISDestroy(iscolb);CHKERRQ(ierr); 458125616d81SHong Zhang } else { 458225616d81SHong Zhang *colb = iscolb; 458325616d81SHong Zhang } 45844ebed01fSBarry Smith ierr = PetscLogEventEnd(MAT_GetBrowsOfAcols,A,B,0,0);CHKERRQ(ierr); 458525616d81SHong Zhang PetscFunctionReturn(0); 458625616d81SHong Zhang } 4587429d309bSHong Zhang 4588a61c8c0fSHong Zhang #undef __FUNCT__ 4589a61c8c0fSHong Zhang #define __FUNCT__ "MatGetBrowsOfAoCols" 4590429d309bSHong Zhang /*@C 4591429d309bSHong Zhang MatGetBrowsOfAoCols - Creates a SeqAIJ matrix by taking rows of B that equal to nonzero columns 459201b7ae99SHong Zhang of the OFF-DIAGONAL portion of local A 4593429d309bSHong Zhang 4594429d309bSHong Zhang Collective on Mat 4595429d309bSHong Zhang 4596429d309bSHong Zhang Input Parameters: 4597429d309bSHong Zhang + A,B - the matrices in mpiaij format 459887025532SHong Zhang . 
scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX 459987025532SHong Zhang . startsj - starting point in B's sending and receiving j-arrays, saved for MAT_REUSE (or PETSC_NULL) 460087025532SHong Zhang - bufa_ptr - array for sending matrix values, saved for MAT_REUSE (or PETSC_NULL) 4601429d309bSHong Zhang 4602429d309bSHong Zhang Output Parameter: 460387025532SHong Zhang + B_oth - the sequential matrix generated 4604429d309bSHong Zhang 4605429d309bSHong Zhang Level: developer 4606429d309bSHong Zhang 4607429d309bSHong Zhang @*/ 4608dd6ea824SBarry Smith PetscErrorCode PETSCMAT_DLLEXPORT MatGetBrowsOfAoCols(Mat A,Mat B,MatReuse scall,PetscInt **startsj,MatScalar **bufa_ptr,Mat *B_oth) 4609429d309bSHong Zhang { 4610a6b2eed2SHong Zhang VecScatter_MPI_General *gen_to,*gen_from; 4611429d309bSHong Zhang PetscErrorCode ierr; 4612899cda47SBarry Smith Mat_MPIAIJ *a=(Mat_MPIAIJ*)A->data; 461387025532SHong Zhang Mat_SeqAIJ *b_oth; 4614a6b2eed2SHong Zhang VecScatter ctx=a->Mvctx; 46157adad957SLisandro Dalcin MPI_Comm comm=((PetscObject)ctx)->comm; 46167adad957SLisandro Dalcin PetscMPIInt *rprocs,*sprocs,tag=((PetscObject)ctx)->tag,rank; 4617d0f46423SBarry Smith PetscInt *rowlen,*bufj,*bufJ,ncols,aBn=a->B->cmap->n,row,*b_othi,*b_othj; 4618dd6ea824SBarry Smith PetscScalar *rvalues,*svalues; 4619dd6ea824SBarry Smith MatScalar *b_otha,*bufa,*bufA; 4620e42f35eeSHong Zhang PetscInt i,j,k,l,ll,nrecvs,nsends,nrows,*srow,*rstarts,*rstartsj = 0,*sstarts,*sstartsj,len; 4621910ba992SMatthew Knepley MPI_Request *rwaits = PETSC_NULL,*swaits = PETSC_NULL; 462287025532SHong Zhang MPI_Status *sstatus,rstatus; 4623aa5bb8c0SSatish Balay PetscMPIInt jj; 4624e42f35eeSHong Zhang PetscInt *cols,sbs,rbs; 4625ba8c8a56SBarry Smith PetscScalar *vals; 4626429d309bSHong Zhang 4627429d309bSHong Zhang PetscFunctionBegin; 4628d0f46423SBarry Smith if (A->cmap->rstart != B->rmap->rstart || A->cmap->rend != B->rmap->rend){ 4629d0f46423SBarry Smith SETERRQ4(PETSC_ERR_ARG_SIZ,"Matrix local dimensions are 
incompatible, (%d, %d) != (%d,%d)",A->cmap->rstart,A->cmap->rend,B->rmap->rstart,B->rmap->rend); 4630429d309bSHong Zhang } 46314ebed01fSBarry Smith ierr = PetscLogEventBegin(MAT_GetBrowsOfAocols,A,B,0,0);CHKERRQ(ierr); 4632a6b2eed2SHong Zhang ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 4633a6b2eed2SHong Zhang 4634a6b2eed2SHong Zhang gen_to = (VecScatter_MPI_General*)ctx->todata; 4635a6b2eed2SHong Zhang gen_from = (VecScatter_MPI_General*)ctx->fromdata; 4636e42f35eeSHong Zhang rvalues = gen_from->values; /* holds the length of receiving row */ 4637e42f35eeSHong Zhang svalues = gen_to->values; /* holds the length of sending row */ 4638a6b2eed2SHong Zhang nrecvs = gen_from->n; 4639a6b2eed2SHong Zhang nsends = gen_to->n; 4640d7ee0231SBarry Smith 4641d7ee0231SBarry Smith ierr = PetscMalloc2(nrecvs,MPI_Request,&rwaits,nsends,MPI_Request,&swaits);CHKERRQ(ierr); 4642a6b2eed2SHong Zhang srow = gen_to->indices; /* local row index to be sent */ 4643a6b2eed2SHong Zhang sstarts = gen_to->starts; 4644a6b2eed2SHong Zhang sprocs = gen_to->procs; 4645a6b2eed2SHong Zhang sstatus = gen_to->sstatus; 4646e42f35eeSHong Zhang sbs = gen_to->bs; 4647e42f35eeSHong Zhang rstarts = gen_from->starts; 4648e42f35eeSHong Zhang rprocs = gen_from->procs; 4649e42f35eeSHong Zhang rbs = gen_from->bs; 4650429d309bSHong Zhang 4651dea91ad1SHong Zhang if (!startsj || !bufa_ptr) scall = MAT_INITIAL_MATRIX; 4652429d309bSHong Zhang if (scall == MAT_INITIAL_MATRIX){ 4653a6b2eed2SHong Zhang /* i-array */ 4654a6b2eed2SHong Zhang /*---------*/ 4655a6b2eed2SHong Zhang /* post receives */ 4656a6b2eed2SHong Zhang for (i=0; i<nrecvs; i++){ 4657e42f35eeSHong Zhang rowlen = (PetscInt*)rvalues + rstarts[i]*rbs; 4658e42f35eeSHong Zhang nrows = (rstarts[i+1]-rstarts[i])*rbs; /* num of indices to be received */ 465987025532SHong Zhang ierr = MPI_Irecv(rowlen,nrows,MPIU_INT,rprocs[i],tag,comm,rwaits+i);CHKERRQ(ierr); 4660429d309bSHong Zhang } 4661a6b2eed2SHong Zhang 4662a6b2eed2SHong Zhang /* pack the outgoing message 
*/ 466387025532SHong Zhang ierr = PetscMalloc((nsends+nrecvs+3)*sizeof(PetscInt),&sstartsj);CHKERRQ(ierr); 4664a6b2eed2SHong Zhang rstartsj = sstartsj + nsends +1; 4665a6b2eed2SHong Zhang sstartsj[0] = 0; rstartsj[0] = 0; 4666a6b2eed2SHong Zhang len = 0; /* total length of j or a array to be sent */ 4667a6b2eed2SHong Zhang k = 0; 4668a6b2eed2SHong Zhang for (i=0; i<nsends; i++){ 4669e42f35eeSHong Zhang rowlen = (PetscInt*)svalues + sstarts[i]*sbs; 4670e42f35eeSHong Zhang nrows = sstarts[i+1]-sstarts[i]; /* num of block rows */ 467187025532SHong Zhang for (j=0; j<nrows; j++) { 4672d0f46423SBarry Smith row = srow[k] + B->rmap->range[rank]; /* global row idx */ 4673e42f35eeSHong Zhang for (l=0; l<sbs; l++){ 4674e42f35eeSHong Zhang ierr = MatGetRow_MPIAIJ(B,row+l,&ncols,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr); /* rowlength */ 4675e42f35eeSHong Zhang rowlen[j*sbs+l] = ncols; 4676e42f35eeSHong Zhang len += ncols; 4677e42f35eeSHong Zhang ierr = MatRestoreRow_MPIAIJ(B,row+l,&ncols,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr); 4678e42f35eeSHong Zhang } 4679a6b2eed2SHong Zhang k++; 4680429d309bSHong Zhang } 4681e42f35eeSHong Zhang ierr = MPI_Isend(rowlen,nrows*sbs,MPIU_INT,sprocs[i],tag,comm,swaits+i);CHKERRQ(ierr); 4682dea91ad1SHong Zhang sstartsj[i+1] = len; /* starting point of (i+1)-th outgoing msg in bufj and bufa */ 4683429d309bSHong Zhang } 468487025532SHong Zhang /* recvs and sends of i-array are completed */ 468587025532SHong Zhang i = nrecvs; 468687025532SHong Zhang while (i--) { 4687aa5bb8c0SSatish Balay ierr = MPI_Waitany(nrecvs,rwaits,&jj,&rstatus);CHKERRQ(ierr); 468887025532SHong Zhang } 46890c468ba9SBarry Smith if (nsends) {ierr = MPI_Waitall(nsends,swaits,sstatus);CHKERRQ(ierr);} 4690e42f35eeSHong Zhang 4691a6b2eed2SHong Zhang /* allocate buffers for sending j and a arrays */ 4692a6b2eed2SHong Zhang ierr = PetscMalloc((len+1)*sizeof(PetscInt),&bufj);CHKERRQ(ierr); 4693a6b2eed2SHong Zhang ierr = PetscMalloc((len+1)*sizeof(PetscScalar),&bufa);CHKERRQ(ierr); 
4694a6b2eed2SHong Zhang 469587025532SHong Zhang /* create i-array of B_oth */ 469687025532SHong Zhang ierr = PetscMalloc((aBn+2)*sizeof(PetscInt),&b_othi);CHKERRQ(ierr); 469787025532SHong Zhang b_othi[0] = 0; 4698a6b2eed2SHong Zhang len = 0; /* total length of j or a array to be received */ 4699a6b2eed2SHong Zhang k = 0; 4700a6b2eed2SHong Zhang for (i=0; i<nrecvs; i++){ 4701fd0ff01cSHong Zhang rowlen = (PetscInt*)rvalues + rstarts[i]*rbs; 4702e42f35eeSHong Zhang nrows = rbs*(rstarts[i+1]-rstarts[i]); /* num of rows to be recieved */ 470387025532SHong Zhang for (j=0; j<nrows; j++) { 470487025532SHong Zhang b_othi[k+1] = b_othi[k] + rowlen[j]; 4705a6b2eed2SHong Zhang len += rowlen[j]; k++; 4706a6b2eed2SHong Zhang } 4707dea91ad1SHong Zhang rstartsj[i+1] = len; /* starting point of (i+1)-th incoming msg in bufj and bufa */ 4708a6b2eed2SHong Zhang } 4709a6b2eed2SHong Zhang 471087025532SHong Zhang /* allocate space for j and a arrrays of B_oth */ 471187025532SHong Zhang ierr = PetscMalloc((b_othi[aBn]+1)*sizeof(PetscInt),&b_othj);CHKERRQ(ierr); 4712dd6ea824SBarry Smith ierr = PetscMalloc((b_othi[aBn]+1)*sizeof(MatScalar),&b_otha);CHKERRQ(ierr); 4713a6b2eed2SHong Zhang 471487025532SHong Zhang /* j-array */ 471587025532SHong Zhang /*---------*/ 4716a6b2eed2SHong Zhang /* post receives of j-array */ 4717a6b2eed2SHong Zhang for (i=0; i<nrecvs; i++){ 471887025532SHong Zhang nrows = rstartsj[i+1]-rstartsj[i]; /* length of the msg received */ 471987025532SHong Zhang ierr = MPI_Irecv(b_othj+rstartsj[i],nrows,MPIU_INT,rprocs[i],tag,comm,rwaits+i);CHKERRQ(ierr); 4720a6b2eed2SHong Zhang } 4721e42f35eeSHong Zhang 4722e42f35eeSHong Zhang /* pack the outgoing message j-array */ 4723a6b2eed2SHong Zhang k = 0; 4724a6b2eed2SHong Zhang for (i=0; i<nsends; i++){ 4725e42f35eeSHong Zhang nrows = sstarts[i+1]-sstarts[i]; /* num of block rows */ 4726a6b2eed2SHong Zhang bufJ = bufj+sstartsj[i]; 472787025532SHong Zhang for (j=0; j<nrows; j++) { 4728d0f46423SBarry Smith row = srow[k++] + 
B->rmap->range[rank]; /* global row idx */ 4729e42f35eeSHong Zhang for (ll=0; ll<sbs; ll++){ 4730e42f35eeSHong Zhang ierr = MatGetRow_MPIAIJ(B,row+ll,&ncols,&cols,PETSC_NULL);CHKERRQ(ierr); 4731a6b2eed2SHong Zhang for (l=0; l<ncols; l++){ 4732a6b2eed2SHong Zhang *bufJ++ = cols[l]; 473387025532SHong Zhang } 4734e42f35eeSHong Zhang ierr = MatRestoreRow_MPIAIJ(B,row+ll,&ncols,&cols,PETSC_NULL);CHKERRQ(ierr); 4735e42f35eeSHong Zhang } 473687025532SHong Zhang } 473787025532SHong Zhang ierr = MPI_Isend(bufj+sstartsj[i],sstartsj[i+1]-sstartsj[i],MPIU_INT,sprocs[i],tag,comm,swaits+i);CHKERRQ(ierr); 473887025532SHong Zhang } 473987025532SHong Zhang 474087025532SHong Zhang /* recvs and sends of j-array are completed */ 474187025532SHong Zhang i = nrecvs; 474287025532SHong Zhang while (i--) { 4743aa5bb8c0SSatish Balay ierr = MPI_Waitany(nrecvs,rwaits,&jj,&rstatus);CHKERRQ(ierr); 474487025532SHong Zhang } 47450c468ba9SBarry Smith if (nsends) {ierr = MPI_Waitall(nsends,swaits,sstatus);CHKERRQ(ierr);} 474687025532SHong Zhang } else if (scall == MAT_REUSE_MATRIX){ 474787025532SHong Zhang sstartsj = *startsj; 474887025532SHong Zhang rstartsj = sstartsj + nsends +1; 474987025532SHong Zhang bufa = *bufa_ptr; 475087025532SHong Zhang b_oth = (Mat_SeqAIJ*)(*B_oth)->data; 475187025532SHong Zhang b_otha = b_oth->a; 475287025532SHong Zhang } else { 475387025532SHong Zhang SETERRQ(PETSC_ERR_ARG_WRONGSTATE, "Matrix P does not posses an object container"); 475487025532SHong Zhang } 475587025532SHong Zhang 475687025532SHong Zhang /* a-array */ 475787025532SHong Zhang /*---------*/ 475887025532SHong Zhang /* post receives of a-array */ 475987025532SHong Zhang for (i=0; i<nrecvs; i++){ 476087025532SHong Zhang nrows = rstartsj[i+1]-rstartsj[i]; /* length of the msg received */ 476187025532SHong Zhang ierr = MPI_Irecv(b_otha+rstartsj[i],nrows,MPIU_SCALAR,rprocs[i],tag,comm,rwaits+i);CHKERRQ(ierr); 476287025532SHong Zhang } 4763e42f35eeSHong Zhang 4764e42f35eeSHong Zhang /* pack the outgoing 
message a-array */ 476587025532SHong Zhang k = 0; 476687025532SHong Zhang for (i=0; i<nsends; i++){ 4767e42f35eeSHong Zhang nrows = sstarts[i+1]-sstarts[i]; /* num of block rows */ 476887025532SHong Zhang bufA = bufa+sstartsj[i]; 476987025532SHong Zhang for (j=0; j<nrows; j++) { 4770d0f46423SBarry Smith row = srow[k++] + B->rmap->range[rank]; /* global row idx */ 4771e42f35eeSHong Zhang for (ll=0; ll<sbs; ll++){ 4772e42f35eeSHong Zhang ierr = MatGetRow_MPIAIJ(B,row+ll,&ncols,PETSC_NULL,&vals);CHKERRQ(ierr); 477387025532SHong Zhang for (l=0; l<ncols; l++){ 4774a6b2eed2SHong Zhang *bufA++ = vals[l]; 4775a6b2eed2SHong Zhang } 4776e42f35eeSHong Zhang ierr = MatRestoreRow_MPIAIJ(B,row+ll,&ncols,PETSC_NULL,&vals);CHKERRQ(ierr); 4777e42f35eeSHong Zhang } 4778a6b2eed2SHong Zhang } 477987025532SHong Zhang ierr = MPI_Isend(bufa+sstartsj[i],sstartsj[i+1]-sstartsj[i],MPIU_SCALAR,sprocs[i],tag,comm,swaits+i);CHKERRQ(ierr); 4780a6b2eed2SHong Zhang } 478187025532SHong Zhang /* recvs and sends of a-array are completed */ 478287025532SHong Zhang i = nrecvs; 478387025532SHong Zhang while (i--) { 4784aa5bb8c0SSatish Balay ierr = MPI_Waitany(nrecvs,rwaits,&jj,&rstatus);CHKERRQ(ierr); 478587025532SHong Zhang } 47860c468ba9SBarry Smith if (nsends) {ierr = MPI_Waitall(nsends,swaits,sstatus);CHKERRQ(ierr);} 4787d7ee0231SBarry Smith ierr = PetscFree2(rwaits,swaits);CHKERRQ(ierr); 4788a6b2eed2SHong Zhang 478987025532SHong Zhang if (scall == MAT_INITIAL_MATRIX){ 4790a6b2eed2SHong Zhang /* put together the new matrix */ 4791d0f46423SBarry Smith ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,aBn,B->cmap->N,b_othi,b_othj,b_otha,B_oth);CHKERRQ(ierr); 4792a6b2eed2SHong Zhang 4793a6b2eed2SHong Zhang /* MatCreateSeqAIJWithArrays flags matrix so PETSc doesn't free the user's arrays. */ 4794a6b2eed2SHong Zhang /* Since these are PETSc arrays, change flags to free them as necessary. 
*/ 479587025532SHong Zhang b_oth = (Mat_SeqAIJ *)(*B_oth)->data; 4796e6b907acSBarry Smith b_oth->free_a = PETSC_TRUE; 4797e6b907acSBarry Smith b_oth->free_ij = PETSC_TRUE; 479887025532SHong Zhang b_oth->nonew = 0; 4799a6b2eed2SHong Zhang 4800a6b2eed2SHong Zhang ierr = PetscFree(bufj);CHKERRQ(ierr); 4801dea91ad1SHong Zhang if (!startsj || !bufa_ptr){ 4802dea91ad1SHong Zhang ierr = PetscFree(sstartsj);CHKERRQ(ierr); 4803dea91ad1SHong Zhang ierr = PetscFree(bufa_ptr);CHKERRQ(ierr); 4804dea91ad1SHong Zhang } else { 480587025532SHong Zhang *startsj = sstartsj; 480687025532SHong Zhang *bufa_ptr = bufa; 480787025532SHong Zhang } 4808dea91ad1SHong Zhang } 48094ebed01fSBarry Smith ierr = PetscLogEventEnd(MAT_GetBrowsOfAocols,A,B,0,0);CHKERRQ(ierr); 4810429d309bSHong Zhang PetscFunctionReturn(0); 4811429d309bSHong Zhang } 4812ccd8e176SBarry Smith 481343eb5e2fSMatthew Knepley #undef __FUNCT__ 481443eb5e2fSMatthew Knepley #define __FUNCT__ "MatGetCommunicationStructs" 481543eb5e2fSMatthew Knepley /*@C 481643eb5e2fSMatthew Knepley MatGetCommunicationStructs - Provides access to the communication structures used in matrix-vector multiplication. 481743eb5e2fSMatthew Knepley 481843eb5e2fSMatthew Knepley Not Collective 481943eb5e2fSMatthew Knepley 482043eb5e2fSMatthew Knepley Input Parameters: 482143eb5e2fSMatthew Knepley . A - The matrix in mpiaij format 482243eb5e2fSMatthew Knepley 482343eb5e2fSMatthew Knepley Output Parameter: 482443eb5e2fSMatthew Knepley + lvec - The local vector holding off-process values from the argument to a matrix-vector product 482543eb5e2fSMatthew Knepley . 
colmap - A map from global column index to local index into lvec 482643eb5e2fSMatthew Knepley - multScatter - A scatter from the argument of a matrix-vector product to lvec 482743eb5e2fSMatthew Knepley 482843eb5e2fSMatthew Knepley Level: developer 482943eb5e2fSMatthew Knepley 483043eb5e2fSMatthew Knepley @*/ 483143eb5e2fSMatthew Knepley #if defined (PETSC_USE_CTABLE) 483243eb5e2fSMatthew Knepley PetscErrorCode PETSCMAT_DLLEXPORT MatGetCommunicationStructs(Mat A, Vec *lvec, PetscTable *colmap, VecScatter *multScatter) 483343eb5e2fSMatthew Knepley #else 483443eb5e2fSMatthew Knepley PetscErrorCode PETSCMAT_DLLEXPORT MatGetCommunicationStructs(Mat A, Vec *lvec, PetscInt *colmap[], VecScatter *multScatter) 483543eb5e2fSMatthew Knepley #endif 483643eb5e2fSMatthew Knepley { 483743eb5e2fSMatthew Knepley Mat_MPIAIJ *a; 483843eb5e2fSMatthew Knepley 483943eb5e2fSMatthew Knepley PetscFunctionBegin; 484043eb5e2fSMatthew Knepley PetscValidHeaderSpecific(A, MAT_COOKIE, 1); 484143eb5e2fSMatthew Knepley PetscValidPointer(lvec, 2) 484243eb5e2fSMatthew Knepley PetscValidPointer(colmap, 3) 484343eb5e2fSMatthew Knepley PetscValidPointer(multScatter, 4) 484443eb5e2fSMatthew Knepley a = (Mat_MPIAIJ *) A->data; 484543eb5e2fSMatthew Knepley if (lvec) *lvec = a->lvec; 484643eb5e2fSMatthew Knepley if (colmap) *colmap = a->colmap; 484743eb5e2fSMatthew Knepley if (multScatter) *multScatter = a->Mvctx; 484843eb5e2fSMatthew Knepley PetscFunctionReturn(0); 484943eb5e2fSMatthew Knepley } 485043eb5e2fSMatthew Knepley 485117667f90SBarry Smith EXTERN_C_BEGIN 48528cf70c4bSSatish Balay extern PetscErrorCode PETSCMAT_DLLEXPORT MatConvert_MPIAIJ_MPICRL(Mat,const MatType,MatReuse,Mat*); 48538cf70c4bSSatish Balay extern PetscErrorCode PETSCMAT_DLLEXPORT MatConvert_MPIAIJ_MPICSRPERM(Mat,const MatType,MatReuse,Mat*); 485417667f90SBarry Smith EXTERN_C_END 485517667f90SBarry Smith 48567c4f633dSBarry Smith #include "../src/mat/impls/dense/mpi/mpidense.h" 4857fc4dec0aSBarry Smith 4858fc4dec0aSBarry Smith #undef 
__FUNCT__ 4859fc4dec0aSBarry Smith #define __FUNCT__ "MatMatMultNumeric_MPIDense_MPIAIJ" 4860fc4dec0aSBarry Smith /* 4861fc4dec0aSBarry Smith Computes (B'*A')' since computing B*A directly is untenable 4862fc4dec0aSBarry Smith 4863fc4dec0aSBarry Smith n p p 4864fc4dec0aSBarry Smith ( ) ( ) ( ) 4865fc4dec0aSBarry Smith m ( A ) * n ( B ) = m ( C ) 4866fc4dec0aSBarry Smith ( ) ( ) ( ) 4867fc4dec0aSBarry Smith 4868fc4dec0aSBarry Smith */ 4869fc4dec0aSBarry Smith PetscErrorCode MatMatMultNumeric_MPIDense_MPIAIJ(Mat A,Mat B,Mat C) 4870fc4dec0aSBarry Smith { 4871fc4dec0aSBarry Smith PetscErrorCode ierr; 4872fc4dec0aSBarry Smith Mat At,Bt,Ct; 4873fc4dec0aSBarry Smith 4874fc4dec0aSBarry Smith PetscFunctionBegin; 4875fc4dec0aSBarry Smith ierr = MatTranspose(A,MAT_INITIAL_MATRIX,&At);CHKERRQ(ierr); 4876fc4dec0aSBarry Smith ierr = MatTranspose(B,MAT_INITIAL_MATRIX,&Bt);CHKERRQ(ierr); 4877fc4dec0aSBarry Smith ierr = MatMatMult(Bt,At,MAT_INITIAL_MATRIX,1.0,&Ct);CHKERRQ(ierr); 4878fc4dec0aSBarry Smith ierr = MatDestroy(At);CHKERRQ(ierr); 4879fc4dec0aSBarry Smith ierr = MatDestroy(Bt);CHKERRQ(ierr); 4880fc4dec0aSBarry Smith ierr = MatTranspose(Ct,MAT_REUSE_MATRIX,&C);CHKERRQ(ierr); 4881e5e4356aSBarry Smith ierr = MatDestroy(Ct);CHKERRQ(ierr); 4882fc4dec0aSBarry Smith PetscFunctionReturn(0); 4883fc4dec0aSBarry Smith } 4884fc4dec0aSBarry Smith 4885fc4dec0aSBarry Smith #undef __FUNCT__ 4886fc4dec0aSBarry Smith #define __FUNCT__ "MatMatMultSymbolic_MPIDense_MPIAIJ" 4887fc4dec0aSBarry Smith PetscErrorCode MatMatMultSymbolic_MPIDense_MPIAIJ(Mat A,Mat B,PetscReal fill,Mat *C) 4888fc4dec0aSBarry Smith { 4889fc4dec0aSBarry Smith PetscErrorCode ierr; 4890d0f46423SBarry Smith PetscInt m=A->rmap->n,n=B->cmap->n; 4891fc4dec0aSBarry Smith Mat Cmat; 4892fc4dec0aSBarry Smith 4893fc4dec0aSBarry Smith PetscFunctionBegin; 4894d0f46423SBarry Smith if (A->cmap->n != B->rmap->n) SETERRQ2(PETSC_ERR_ARG_SIZ,"A->cmap->n %d != B->rmap->n %d\n",A->cmap->n,B->rmap->n); 489539804f7cSBarry Smith ierr = 
MatCreate(((PetscObject)A)->comm,&Cmat);CHKERRQ(ierr); 4896fc4dec0aSBarry Smith ierr = MatSetSizes(Cmat,m,n,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr); 4897fc4dec0aSBarry Smith ierr = MatSetType(Cmat,MATMPIDENSE);CHKERRQ(ierr); 4898fc4dec0aSBarry Smith ierr = MatMPIDenseSetPreallocation(Cmat,PETSC_NULL);CHKERRQ(ierr); 489938556019SBarry Smith ierr = MatAssemblyBegin(Cmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 490038556019SBarry Smith ierr = MatAssemblyEnd(Cmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 4901fc4dec0aSBarry Smith *C = Cmat; 4902fc4dec0aSBarry Smith PetscFunctionReturn(0); 4903fc4dec0aSBarry Smith } 4904fc4dec0aSBarry Smith 4905fc4dec0aSBarry Smith /* ----------------------------------------------------------------*/ 4906fc4dec0aSBarry Smith #undef __FUNCT__ 4907fc4dec0aSBarry Smith #define __FUNCT__ "MatMatMult_MPIDense_MPIAIJ" 4908fc4dec0aSBarry Smith PetscErrorCode MatMatMult_MPIDense_MPIAIJ(Mat A,Mat B,MatReuse scall,PetscReal fill,Mat *C) 4909fc4dec0aSBarry Smith { 4910fc4dec0aSBarry Smith PetscErrorCode ierr; 4911fc4dec0aSBarry Smith 4912fc4dec0aSBarry Smith PetscFunctionBegin; 4913fc4dec0aSBarry Smith if (scall == MAT_INITIAL_MATRIX){ 4914fc4dec0aSBarry Smith ierr = MatMatMultSymbolic_MPIDense_MPIAIJ(A,B,fill,C);CHKERRQ(ierr); 4915fc4dec0aSBarry Smith } 4916fc4dec0aSBarry Smith ierr = MatMatMultNumeric_MPIDense_MPIAIJ(A,B,*C);CHKERRQ(ierr); 4917fc4dec0aSBarry Smith PetscFunctionReturn(0); 4918fc4dec0aSBarry Smith } 4919fc4dec0aSBarry Smith 49205c9eb25fSBarry Smith EXTERN_C_BEGIN 4921611f576cSBarry Smith #if defined(PETSC_HAVE_MUMPS) 49225c9eb25fSBarry Smith extern PetscErrorCode MatGetFactor_mpiaij_mumps(Mat,MatFactorType,Mat*); 4923611f576cSBarry Smith #endif 49243bf14a46SMatthew Knepley #if defined(PETSC_HAVE_PASTIX) 49253bf14a46SMatthew Knepley extern PetscErrorCode MatGetFactor_mpiaij_pastix(Mat,MatFactorType,Mat*); 49263bf14a46SMatthew Knepley #endif 4927611f576cSBarry Smith #if defined(PETSC_HAVE_SUPERLU_DIST) 49285c9eb25fSBarry Smith extern 
PetscErrorCode MatGetFactor_mpiaij_superlu_dist(Mat,MatFactorType,Mat*); 4929611f576cSBarry Smith #endif 4930611f576cSBarry Smith #if defined(PETSC_HAVE_SPOOLES) 49315c9eb25fSBarry Smith extern PetscErrorCode MatGetFactor_mpiaij_spooles(Mat,MatFactorType,Mat*); 4932611f576cSBarry Smith #endif 49335c9eb25fSBarry Smith EXTERN_C_END 49345c9eb25fSBarry Smith 4935ccd8e176SBarry Smith /*MC 4936ccd8e176SBarry Smith MATMPIAIJ - MATMPIAIJ = "mpiaij" - A matrix type to be used for parallel sparse matrices. 4937ccd8e176SBarry Smith 4938ccd8e176SBarry Smith Options Database Keys: 4939ccd8e176SBarry Smith . -mat_type mpiaij - sets the matrix type to "mpiaij" during a call to MatSetFromOptions() 4940ccd8e176SBarry Smith 4941ccd8e176SBarry Smith Level: beginner 4942ccd8e176SBarry Smith 4943175b88e8SBarry Smith .seealso: MatCreateMPIAIJ() 4944ccd8e176SBarry Smith M*/ 4945ccd8e176SBarry Smith 4946ccd8e176SBarry Smith EXTERN_C_BEGIN 4947ccd8e176SBarry Smith #undef __FUNCT__ 4948ccd8e176SBarry Smith #define __FUNCT__ "MatCreate_MPIAIJ" 4949be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatCreate_MPIAIJ(Mat B) 4950ccd8e176SBarry Smith { 4951ccd8e176SBarry Smith Mat_MPIAIJ *b; 4952ccd8e176SBarry Smith PetscErrorCode ierr; 4953ccd8e176SBarry Smith PetscMPIInt size; 4954ccd8e176SBarry Smith 4955ccd8e176SBarry Smith PetscFunctionBegin; 49567adad957SLisandro Dalcin ierr = MPI_Comm_size(((PetscObject)B)->comm,&size);CHKERRQ(ierr); 4957ccd8e176SBarry Smith 495838f2d2fdSLisandro Dalcin ierr = PetscNewLog(B,Mat_MPIAIJ,&b);CHKERRQ(ierr); 4959ccd8e176SBarry Smith B->data = (void*)b; 4960ccd8e176SBarry Smith ierr = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr); 4961d0f46423SBarry Smith B->rmap->bs = 1; 4962ccd8e176SBarry Smith B->assembled = PETSC_FALSE; 4963ccd8e176SBarry Smith B->mapping = 0; 4964ccd8e176SBarry Smith 4965ccd8e176SBarry Smith B->insertmode = NOT_SET_VALUES; 4966ccd8e176SBarry Smith b->size = size; 49677adad957SLisandro Dalcin ierr = 
MPI_Comm_rank(((PetscObject)B)->comm,&b->rank);CHKERRQ(ierr); 4968ccd8e176SBarry Smith 4969ccd8e176SBarry Smith /* build cache for off array entries formed */ 49707adad957SLisandro Dalcin ierr = MatStashCreate_Private(((PetscObject)B)->comm,1,&B->stash);CHKERRQ(ierr); 4971ccd8e176SBarry Smith b->donotstash = PETSC_FALSE; 4972ccd8e176SBarry Smith b->colmap = 0; 4973ccd8e176SBarry Smith b->garray = 0; 4974ccd8e176SBarry Smith b->roworiented = PETSC_TRUE; 4975ccd8e176SBarry Smith 4976ccd8e176SBarry Smith /* stuff used for matrix vector multiply */ 4977ccd8e176SBarry Smith b->lvec = PETSC_NULL; 4978ccd8e176SBarry Smith b->Mvctx = PETSC_NULL; 4979ccd8e176SBarry Smith 4980ccd8e176SBarry Smith /* stuff for MatGetRow() */ 4981ccd8e176SBarry Smith b->rowindices = 0; 4982ccd8e176SBarry Smith b->rowvalues = 0; 4983ccd8e176SBarry Smith b->getrowactive = PETSC_FALSE; 4984ccd8e176SBarry Smith 4985611f576cSBarry Smith #if defined(PETSC_HAVE_SPOOLES) 49865c9eb25fSBarry Smith ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatGetFactor_mpiaij_spooles_C", 49875c9eb25fSBarry Smith "MatGetFactor_mpiaij_spooles", 49885c9eb25fSBarry Smith MatGetFactor_mpiaij_spooles);CHKERRQ(ierr); 4989611f576cSBarry Smith #endif 4990611f576cSBarry Smith #if defined(PETSC_HAVE_MUMPS) 49915c9eb25fSBarry Smith ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatGetFactor_mpiaij_mumps_C", 49925c9eb25fSBarry Smith "MatGetFactor_mpiaij_mumps", 49935c9eb25fSBarry Smith MatGetFactor_mpiaij_mumps);CHKERRQ(ierr); 4994611f576cSBarry Smith #endif 49953bf14a46SMatthew Knepley #if defined(PETSC_HAVE_PASTIX) 49963bf14a46SMatthew Knepley ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatGetFactor_mpiaij_pastix_C", 49973bf14a46SMatthew Knepley "MatGetFactor_mpiaij_pastix", 49983bf14a46SMatthew Knepley MatGetFactor_mpiaij_pastix);CHKERRQ(ierr); 49993bf14a46SMatthew Knepley #endif 5000611f576cSBarry Smith #if defined(PETSC_HAVE_SUPERLU_DIST) 50015c9eb25fSBarry Smith ierr = 
PetscObjectComposeFunctionDynamic((PetscObject)B,"MatGetFactor_mpiaij_superlu_dist_C", 50025c9eb25fSBarry Smith "MatGetFactor_mpiaij_superlu_dist", 50035c9eb25fSBarry Smith MatGetFactor_mpiaij_superlu_dist);CHKERRQ(ierr); 5004611f576cSBarry Smith #endif 5005ccd8e176SBarry Smith ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatStoreValues_C", 5006ccd8e176SBarry Smith "MatStoreValues_MPIAIJ", 5007ccd8e176SBarry Smith MatStoreValues_MPIAIJ);CHKERRQ(ierr); 5008ccd8e176SBarry Smith ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatRetrieveValues_C", 5009ccd8e176SBarry Smith "MatRetrieveValues_MPIAIJ", 5010ccd8e176SBarry Smith MatRetrieveValues_MPIAIJ);CHKERRQ(ierr); 5011ccd8e176SBarry Smith ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatGetDiagonalBlock_C", 5012ccd8e176SBarry Smith "MatGetDiagonalBlock_MPIAIJ", 5013ccd8e176SBarry Smith MatGetDiagonalBlock_MPIAIJ);CHKERRQ(ierr); 5014ccd8e176SBarry Smith ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatIsTranspose_C", 5015ccd8e176SBarry Smith "MatIsTranspose_MPIAIJ", 5016ccd8e176SBarry Smith MatIsTranspose_MPIAIJ);CHKERRQ(ierr); 5017ccd8e176SBarry Smith ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAIJSetPreallocation_C", 5018ccd8e176SBarry Smith "MatMPIAIJSetPreallocation_MPIAIJ", 5019ccd8e176SBarry Smith MatMPIAIJSetPreallocation_MPIAIJ);CHKERRQ(ierr); 5020ccd8e176SBarry Smith ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAIJSetPreallocationCSR_C", 5021ccd8e176SBarry Smith "MatMPIAIJSetPreallocationCSR_MPIAIJ", 5022ccd8e176SBarry Smith MatMPIAIJSetPreallocationCSR_MPIAIJ);CHKERRQ(ierr); 5023ccd8e176SBarry Smith ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatDiagonalScaleLocal_C", 5024ccd8e176SBarry Smith "MatDiagonalScaleLocal_MPIAIJ", 5025ccd8e176SBarry Smith MatDiagonalScaleLocal_MPIAIJ);CHKERRQ(ierr); 502617667f90SBarry Smith ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatConvert_mpiaij_mpicsrperm_C", 
502717667f90SBarry Smith "MatConvert_MPIAIJ_MPICSRPERM", 502817667f90SBarry Smith MatConvert_MPIAIJ_MPICSRPERM);CHKERRQ(ierr); 502917667f90SBarry Smith ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatConvert_mpiaij_mpicrl_C", 503017667f90SBarry Smith "MatConvert_MPIAIJ_MPICRL", 503117667f90SBarry Smith MatConvert_MPIAIJ_MPICRL);CHKERRQ(ierr); 5032fc4dec0aSBarry Smith ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMatMult_mpidense_mpiaij_C", 5033fc4dec0aSBarry Smith "MatMatMult_MPIDense_MPIAIJ", 5034fc4dec0aSBarry Smith MatMatMult_MPIDense_MPIAIJ);CHKERRQ(ierr); 5035fc4dec0aSBarry Smith ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMatMultSymbolic_mpidense_mpiaij_C", 5036fc4dec0aSBarry Smith "MatMatMultSymbolic_MPIDense_MPIAIJ", 5037fc4dec0aSBarry Smith MatMatMultSymbolic_MPIDense_MPIAIJ);CHKERRQ(ierr); 5038fc4dec0aSBarry Smith ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMatMultNumeric_mpidense_mpiaij_C", 5039fc4dec0aSBarry Smith "MatMatMultNumeric_MPIDense_MPIAIJ", 5040fc4dec0aSBarry Smith MatMatMultNumeric_MPIDense_MPIAIJ);CHKERRQ(ierr); 504117667f90SBarry Smith ierr = PetscObjectChangeTypeName((PetscObject)B,MATMPIAIJ);CHKERRQ(ierr); 5042ccd8e176SBarry Smith PetscFunctionReturn(0); 5043ccd8e176SBarry Smith } 5044ccd8e176SBarry Smith EXTERN_C_END 504581824310SBarry Smith 504603bfb495SBarry Smith #undef __FUNCT__ 504703bfb495SBarry Smith #define __FUNCT__ "MatCreateMPIAIJWithSplitArrays" 504858d36128SBarry Smith /*@ 504903bfb495SBarry Smith MatCreateMPIAIJWithSplitArrays - creates a MPI AIJ matrix using arrays that contain the "diagonal" 505003bfb495SBarry Smith and "off-diagonal" part of the matrix in CSR format. 505103bfb495SBarry Smith 505203bfb495SBarry Smith Collective on MPI_Comm 505303bfb495SBarry Smith 505403bfb495SBarry Smith Input Parameters: 505503bfb495SBarry Smith + comm - MPI communicator 505603bfb495SBarry Smith . m - number of local rows (Cannot be PETSC_DECIDE) 505703bfb495SBarry Smith . 
n - This value should be the same as the local size used in creating the
       x vector for the matrix-vector product y = Ax. (or PETSC_DECIDE to have
       calculated if N is given) For square matrices n is almost always m.
.  M - number of global rows (or PETSC_DETERMINE to have calculated if m is given)
.  N - number of global columns (or PETSC_DETERMINE to have calculated if n is given)
.  i - row indices for "diagonal" portion of matrix
.  j - column indices
.  a - matrix values
.  oi - row indices for "off-diagonal" portion of matrix
.  oj - column indices
-  oa - matrix values

   Output Parameter:
.  mat - the matrix

   Level: advanced

   Notes:
       The i, j, and a arrays ARE NOT copied by this routine into the internal format used by PETSc.
       The caller retains ownership of all six arrays and they must remain valid for the life of the matrix.

       The i and j indices are 0 based

       See MatCreateMPIAIJ() for the definition of "diagonal" and "off-diagonal" portion of the matrix


.keywords: matrix, aij, compressed row, sparse, parallel

.seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIAIJSetPreallocation(), MatMPIAIJSetPreallocationCSR(),
          MPIAIJ, MatCreateMPIAIJ(), MatCreateMPIAIJWithArrays()
@*/
PetscErrorCode PETSCMAT_DLLEXPORT MatCreateMPIAIJWithSplitArrays(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt i[],PetscInt j[],PetscScalar a[],
                                                                 PetscInt oi[], PetscInt oj[],PetscScalar oa[],Mat *mat)
{
  PetscErrorCode ierr;
  Mat_MPIAIJ     *maij;

  PetscFunctionBegin;
  /* m must be the true local row count: it sizes both sequential pieces below */
  if (m < 0) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"local number of rows (m) cannot be PETSC_DECIDE, or negative");
  /* both CSR row arrays are required to be 0-based, hence first entry must be 0 */
  if (i[0]) {
    SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"i (row indices) must start with 0");
  }
  if (oi[0]) {
    SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"oi (row indices) must start with 0");
  }
  ierr = MatCreate(comm,mat);CHKERRQ(ierr);
  ierr = MatSetSizes(*mat,m,n,M,N);CHKERRQ(ierr);
  ierr = MatSetType(*mat,MATMPIAIJ);CHKERRQ(ierr);
  maij = (Mat_MPIAIJ*) (*mat)->data;
  /* values are supplied fully split into A/B parts, so no stashing of off-process
     entries is ever needed and the usual preallocation step is bypassed */
  maij->donotstash     = PETSC_TRUE;
  (*mat)->preallocated = PETSC_TRUE;

  /* set up the row/column layouts now: (*mat)->cmap->N must be valid before it
     is used as the column count of the off-diagonal piece below */
  ierr = PetscMapSetBlockSize((*mat)->rmap,1);CHKERRQ(ierr);
  ierr = PetscMapSetBlockSize((*mat)->cmap,1);CHKERRQ(ierr);
  ierr = PetscMapSetUp((*mat)->rmap);CHKERRQ(ierr);
  ierr = PetscMapSetUp((*mat)->cmap);CHKERRQ(ierr);

  /* wrap the caller's arrays (no copy) as the "diagonal" (A) and
     "off-diagonal" (B) sequential pieces; B uses global column indices here */
  ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,m,n,i,j,a,&maij->A);CHKERRQ(ierr);
  ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,m,(*mat)->cmap->N,oi,oj,oa,&maij->B);CHKERRQ(ierr);

  ierr = MatAssemblyBegin(maij->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(maij->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyBegin(maij->B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(maij->B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);

  /* parallel assembly compacts B's column space (compressed off-diagonal form) */
  ierr = MatAssemblyBegin(*mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(*mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*
     Special version for direct calls from Fortran
*/
#if defined(PETSC_HAVE_FORTRAN_CAPS)
#define matsetvaluesmpiaij_ MATSETVALUESMPIAIJ
#elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE)
#define matsetvaluesmpiaij_ matsetvaluesmpiaij
#endif

/* Change these macros so can be used in void function: abort instead of returning an error code */
#undef CHKERRQ
#define CHKERRQ(ierr) CHKERRABORT(((PetscObject)mat)->comm,ierr)
#undef SETERRQ2
#define SETERRQ2(ierr,b,c,d) CHKERRABORT(((PetscObject)mat)->comm,ierr) 514081824310SBarry Smith #undef SETERRQ 51417adad957SLisandro Dalcin #define SETERRQ(ierr,b) CHKERRABORT(((PetscObject)mat)->comm,ierr) 514281824310SBarry Smith 514381824310SBarry Smith EXTERN_C_BEGIN 514481824310SBarry Smith #undef __FUNCT__ 514581824310SBarry Smith #define __FUNCT__ "matsetvaluesmpiaij_" 51461f6cc5b2SSatish Balay void PETSC_STDCALL matsetvaluesmpiaij_(Mat *mmat,PetscInt *mm,const PetscInt im[],PetscInt *mn,const PetscInt in[],const PetscScalar v[],InsertMode *maddv,PetscErrorCode *_ierr) 514781824310SBarry Smith { 514881824310SBarry Smith Mat mat = *mmat; 514981824310SBarry Smith PetscInt m = *mm, n = *mn; 515081824310SBarry Smith InsertMode addv = *maddv; 515181824310SBarry Smith Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 515281824310SBarry Smith PetscScalar value; 515381824310SBarry Smith PetscErrorCode ierr; 5154899cda47SBarry Smith 5155d9e2c085SLisandro Dalcin ierr = MatPreallocated(mat);CHKERRQ(ierr); 515681824310SBarry Smith if (mat->insertmode == NOT_SET_VALUES) { 515781824310SBarry Smith mat->insertmode = addv; 515881824310SBarry Smith } 515981824310SBarry Smith #if defined(PETSC_USE_DEBUG) 516081824310SBarry Smith else if (mat->insertmode != addv) { 516181824310SBarry Smith SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Cannot mix add values and insert values"); 516281824310SBarry Smith } 516381824310SBarry Smith #endif 516481824310SBarry Smith { 5165d0f46423SBarry Smith PetscInt i,j,rstart = mat->rmap->rstart,rend = mat->rmap->rend; 5166d0f46423SBarry Smith PetscInt cstart = mat->cmap->rstart,cend = mat->cmap->rend,row,col; 516781824310SBarry Smith PetscTruth roworiented = aij->roworiented; 516881824310SBarry Smith 516981824310SBarry Smith /* Some Variables required in the macro */ 517081824310SBarry Smith Mat A = aij->A; 517181824310SBarry Smith Mat_SeqAIJ *a = (Mat_SeqAIJ*)A->data; 517281824310SBarry Smith PetscInt *aimax = a->imax,*ai = a->i,*ailen = a->ilen,*aj = a->j; 
5173dd6ea824SBarry Smith MatScalar *aa = a->a; 517481824310SBarry Smith PetscTruth ignorezeroentries = (((a->ignorezeroentries)&&(addv==ADD_VALUES))?PETSC_TRUE:PETSC_FALSE); 517581824310SBarry Smith Mat B = aij->B; 517681824310SBarry Smith Mat_SeqAIJ *b = (Mat_SeqAIJ*)B->data; 5177d0f46423SBarry Smith PetscInt *bimax = b->imax,*bi = b->i,*bilen = b->ilen,*bj = b->j,bm = aij->B->rmap->n,am = aij->A->rmap->n; 5178dd6ea824SBarry Smith MatScalar *ba = b->a; 517981824310SBarry Smith 518081824310SBarry Smith PetscInt *rp1,*rp2,ii,nrow1,nrow2,_i,rmax1,rmax2,N,low1,high1,low2,high2,t,lastcol1,lastcol2; 518181824310SBarry Smith PetscInt nonew = a->nonew; 5182dd6ea824SBarry Smith MatScalar *ap1,*ap2; 518381824310SBarry Smith 518481824310SBarry Smith PetscFunctionBegin; 518581824310SBarry Smith for (i=0; i<m; i++) { 518681824310SBarry Smith if (im[i] < 0) continue; 518781824310SBarry Smith #if defined(PETSC_USE_DEBUG) 5188d0f46423SBarry Smith if (im[i] >= mat->rmap->N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",im[i],mat->rmap->N-1); 518981824310SBarry Smith #endif 519081824310SBarry Smith if (im[i] >= rstart && im[i] < rend) { 519181824310SBarry Smith row = im[i] - rstart; 519281824310SBarry Smith lastcol1 = -1; 519381824310SBarry Smith rp1 = aj + ai[row]; 519481824310SBarry Smith ap1 = aa + ai[row]; 519581824310SBarry Smith rmax1 = aimax[row]; 519681824310SBarry Smith nrow1 = ailen[row]; 519781824310SBarry Smith low1 = 0; 519881824310SBarry Smith high1 = nrow1; 519981824310SBarry Smith lastcol2 = -1; 520081824310SBarry Smith rp2 = bj + bi[row]; 520181824310SBarry Smith ap2 = ba + bi[row]; 520281824310SBarry Smith rmax2 = bimax[row]; 520381824310SBarry Smith nrow2 = bilen[row]; 520481824310SBarry Smith low2 = 0; 520581824310SBarry Smith high2 = nrow2; 520681824310SBarry Smith 520781824310SBarry Smith for (j=0; j<n; j++) { 520881824310SBarry Smith if (roworiented) value = v[i*n+j]; else value = v[i+j*m]; 520981824310SBarry Smith if (ignorezeroentries && 
value == 0.0 && (addv == ADD_VALUES)) continue; 521081824310SBarry Smith if (in[j] >= cstart && in[j] < cend){ 521181824310SBarry Smith col = in[j] - cstart; 521281824310SBarry Smith MatSetValues_SeqAIJ_A_Private(row,col,value,addv); 521381824310SBarry Smith } else if (in[j] < 0) continue; 521481824310SBarry Smith #if defined(PETSC_USE_DEBUG) 5215d0f46423SBarry Smith else if (in[j] >= mat->cmap->N) {SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",in[j],mat->cmap->N-1);} 521681824310SBarry Smith #endif 521781824310SBarry Smith else { 521881824310SBarry Smith if (mat->was_assembled) { 521981824310SBarry Smith if (!aij->colmap) { 522081824310SBarry Smith ierr = CreateColmap_MPIAIJ_Private(mat);CHKERRQ(ierr); 522181824310SBarry Smith } 522281824310SBarry Smith #if defined (PETSC_USE_CTABLE) 522381824310SBarry Smith ierr = PetscTableFind(aij->colmap,in[j]+1,&col);CHKERRQ(ierr); 522481824310SBarry Smith col--; 522581824310SBarry Smith #else 522681824310SBarry Smith col = aij->colmap[in[j]] - 1; 522781824310SBarry Smith #endif 522881824310SBarry Smith if (col < 0 && !((Mat_SeqAIJ*)(aij->A->data))->nonew) { 522981824310SBarry Smith ierr = DisAssemble_MPIAIJ(mat);CHKERRQ(ierr); 523081824310SBarry Smith col = in[j]; 523181824310SBarry Smith /* Reinitialize the variables required by MatSetValues_SeqAIJ_B_Private() */ 523281824310SBarry Smith B = aij->B; 523381824310SBarry Smith b = (Mat_SeqAIJ*)B->data; 523481824310SBarry Smith bimax = b->imax; bi = b->i; bilen = b->ilen; bj = b->j; 523581824310SBarry Smith rp2 = bj + bi[row]; 523681824310SBarry Smith ap2 = ba + bi[row]; 523781824310SBarry Smith rmax2 = bimax[row]; 523881824310SBarry Smith nrow2 = bilen[row]; 523981824310SBarry Smith low2 = 0; 524081824310SBarry Smith high2 = nrow2; 5241d0f46423SBarry Smith bm = aij->B->rmap->n; 524281824310SBarry Smith ba = b->a; 524381824310SBarry Smith } 524481824310SBarry Smith } else col = in[j]; 524581824310SBarry Smith 
MatSetValues_SeqAIJ_B_Private(row,col,value,addv); 524681824310SBarry Smith } 524781824310SBarry Smith } 524881824310SBarry Smith } else { 524981824310SBarry Smith if (!aij->donotstash) { 525081824310SBarry Smith if (roworiented) { 525181824310SBarry Smith if (ignorezeroentries && v[i*n] == 0.0) continue; 525281824310SBarry Smith ierr = MatStashValuesRow_Private(&mat->stash,im[i],n,in,v+i*n);CHKERRQ(ierr); 525381824310SBarry Smith } else { 525481824310SBarry Smith if (ignorezeroentries && v[i] == 0.0) continue; 525581824310SBarry Smith ierr = MatStashValuesCol_Private(&mat->stash,im[i],n,in,v+i,m);CHKERRQ(ierr); 525681824310SBarry Smith } 525781824310SBarry Smith } 525881824310SBarry Smith } 525981824310SBarry Smith }} 526081824310SBarry Smith PetscFunctionReturnVoid(); 526181824310SBarry Smith } 526281824310SBarry Smith EXTERN_C_END 526303bfb495SBarry Smith 5264