#define PETSCMAT_DLL

#include "../src/mat/impls/aij/mpi/mpiaij.h"   /*I "petscmat.h" I*/
#include "../src/inline/spops.h"

#undef __FUNCT__
#define __FUNCT__ "MatDistribute_MPIAIJ"
/*
    Distributes a SeqAIJ matrix across a set of processes. Code stolen from
    MatLoad_MPIAIJ(). Horrible lack of reuse. Should be a routine for each matrix type.

    Only for square matrices

    Parameters:
      comm  - communicator over which the matrix is distributed
      gmat  - the global SeqAIJ matrix (only significant on rank 0)
      m     - number of local rows this process is to own
      reuse - MAT_INITIAL_MATRIX creates a new parallel matrix (structure and
              values are shipped); otherwise the structure of *inmat is reused
              and only numerical values are moved from process 0
      inmat - the resulting distributed MPIAIJ matrix

    Rank 0 sends row lengths, column indices, and values to each process with
    raw MPI_Send/MPI_Recv pairs on a fresh object tag; each receiver counts
    its diagonal/off-diagonal entries to set preallocation exactly.
*/
PetscErrorCode MatDistribute_MPIAIJ(MPI_Comm comm,Mat gmat,PetscInt m,MatReuse reuse,Mat *inmat)
{
  PetscMPIInt    rank,size;
  PetscInt       *rowners,*dlens,*olens,i,rstart,rend,j,jj,nz,*gmataj,cnt,row,*ld;
  PetscErrorCode ierr;
  Mat            mat;
  Mat_SeqAIJ     *gmata;
  PetscMPIInt    tag;
  MPI_Status     status;
  PetscTruth     aij;
  MatScalar      *gmataa,*ao,*ad,*gmataarestore=0;

  PetscFunctionBegin;
  CHKMEMQ;
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  if (!rank) {
    /* only rank 0 holds the global matrix, so only it can check the type */
    ierr = PetscTypeCompare((PetscObject)gmat,MATSEQAIJ,&aij);CHKERRQ(ierr);
    if (!aij) SETERRQ1(PETSC_ERR_SUP,"Currently no support for input matrix of type %s\n",((PetscObject)gmat)->type_name);
  }
  if (reuse == MAT_INITIAL_MATRIX) {
    ierr = MatCreate(comm,&mat);CHKERRQ(ierr);
    ierr = MatSetSizes(mat,m,m,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
    ierr = MatSetType(mat,MATAIJ);CHKERRQ(ierr);
    ierr = PetscMalloc((size+1)*sizeof(PetscInt),&rowners);CHKERRQ(ierr);
    ierr = PetscMalloc2(m,PetscInt,&dlens,m,PetscInt,&olens);CHKERRQ(ierr);
    /* build the row-ownership ranges: rowners[r] is the first global row of rank r */
    ierr = MPI_Allgather(&m,1,MPIU_INT,rowners+1,1,MPIU_INT,comm);CHKERRQ(ierr);
    rowners[0] = 0;
    for (i=2; i<=size; i++) {
      rowners[i] += rowners[i-1];
    }
    rstart = rowners[rank];
    rend   = rowners[rank+1];
    ierr   = PetscObjectGetNewTag((PetscObject)mat,&tag);CHKERRQ(ierr);
    if (!rank) {
      gmata = (Mat_SeqAIJ*) gmat->data;
      /* send row lengths to all processors */
      for (i=0; i<m; i++) dlens[i] = gmata->ilen[i];
      for (i=1; i<size; i++) {
        ierr = MPI_Send(gmata->ilen + rowners[i],rowners[i+1]-rowners[i],MPIU_INT,i,tag,comm);CHKERRQ(ierr);
      }
      /* determine number diagonal and off-diagonal counts; ld[i] counts the
         entries of row i that lie strictly left of the diagonal block */
      ierr = PetscMemzero(olens,m*sizeof(PetscInt));CHKERRQ(ierr);
      ierr = PetscMalloc(m*sizeof(PetscInt),&ld);CHKERRQ(ierr);
      ierr = PetscMemzero(ld,m*sizeof(PetscInt));CHKERRQ(ierr);
      jj = 0;
      for (i=0; i<m; i++) {
        for (j=0; j<dlens[i]; j++) {
          if (gmata->j[jj] < rstart) ld[i]++;
          if (gmata->j[jj] < rstart || gmata->j[jj] >= rend) olens[i]++;
          jj++;
        }
      }
      /* send column indices to other processes */
      for (i=1; i<size; i++) {
        nz   = gmata->i[rowners[i+1]]-gmata->i[rowners[i]];
        ierr = MPI_Send(&nz,1,MPIU_INT,i,tag,comm);CHKERRQ(ierr);
        ierr = MPI_Send(gmata->j + gmata->i[rowners[i]],nz,MPIU_INT,i,tag,comm);CHKERRQ(ierr);
      }

      /* send numerical values to other processes */
      for (i=1; i<size; i++) {
        nz   = gmata->i[rowners[i+1]]-gmata->i[rowners[i]];
        ierr = MPI_Send(gmata->a + gmata->i[rowners[i]],nz,MPIU_SCALAR,i,tag,comm);CHKERRQ(ierr);
      }
      /* rank 0 inserts its own rows straight from the global storage */
      gmataa = gmata->a;
      gmataj = gmata->j;

    } else {
      /* receive row lengths */
      ierr = MPI_Recv(dlens,m,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr);
      /* receive column indices */
      ierr = MPI_Recv(&nz,1,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr);
      ierr = PetscMalloc2(nz,PetscScalar,&gmataa,nz,PetscInt,&gmataj);CHKERRQ(ierr);
      ierr = MPI_Recv(gmataj,nz,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr);
      /* determine number diagonal and off-diagonal counts (same scan as rank 0,
         but over the received index buffer) */
      ierr = PetscMemzero(olens,m*sizeof(PetscInt));CHKERRQ(ierr);
      ierr = PetscMalloc(m*sizeof(PetscInt),&ld);CHKERRQ(ierr);
      ierr = PetscMemzero(ld,m*sizeof(PetscInt));CHKERRQ(ierr);
      jj = 0;
      for (i=0; i<m; i++) {
        for (j=0; j<dlens[i]; j++) {
          if (gmataj[jj] < rstart) ld[i]++;
          if (gmataj[jj] < rstart || gmataj[jj] >= rend) olens[i]++;
          jj++;
        }
      }
      /* receive numerical values */
      ierr = PetscMemzero(gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr);
      ierr = MPI_Recv(gmataa,nz,MPIU_SCALAR,0,tag,comm,&status);CHKERRQ(ierr);
    }
    /* set preallocation: dlens temporarily becomes the diagonal-only count */
    for (i=0; i<m; i++) {
      dlens[i] -= olens[i];
    }
    ierr = MatSeqAIJSetPreallocation(mat,0,dlens);CHKERRQ(ierr);
    ierr = MatMPIAIJSetPreallocation(mat,0,dlens,0,olens);CHKERRQ(ierr);

    /* restore dlens to the full row lengths for the insertion loop below */
    for (i=0; i<m; i++) {
      dlens[i] += olens[i];
    }
    cnt = 0;
    for (i=0; i<m; i++) {
      row  = rstart + i;
      ierr = MatSetValues(mat,1,&row,dlens[i],gmataj+cnt,gmataa+cnt,INSERT_VALUES);CHKERRQ(ierr);
      cnt += dlens[i];
    }
    if (rank) {
      /* non-root ranks own the receive buffers; rank 0 aliased the global matrix */
      ierr = PetscFree2(gmataa,gmataj);CHKERRQ(ierr);
    }
    ierr = PetscFree2(dlens,olens);CHKERRQ(ierr);
    ierr = PetscFree(rowners);CHKERRQ(ierr);
    /* stash the left-of-diagonal counts; the reuse path below depends on them */
    ((Mat_MPIAIJ*)(mat->data))->ld = ld;
    *inmat = mat;
  } else {   /* column indices are already set; only need to move over numerical values from process 0 */
    Mat_SeqAIJ *Ad = (Mat_SeqAIJ*)((Mat_MPIAIJ*)((*inmat)->data))->A->data;
    Mat_SeqAIJ *Ao = (Mat_SeqAIJ*)((Mat_MPIAIJ*)((*inmat)->data))->B->data;
    mat  = *inmat;
    ierr = PetscObjectGetNewTag((PetscObject)mat,&tag);CHKERRQ(ierr);
    if (!rank) {
      /* send numerical values to other processes */
      gmata  = (Mat_SeqAIJ*) gmat->data;
      ierr   = MatGetOwnershipRanges(mat,(const PetscInt**)&rowners);CHKERRQ(ierr);
      gmataa = gmata->a;
      for (i=1; i<size; i++) {
        nz   = gmata->i[rowners[i+1]]-gmata->i[rowners[i]];
        ierr = MPI_Send(gmataa + gmata->i[rowners[i]],nz,MPIU_SCALAR,i,tag,comm);CHKERRQ(ierr);
      }
      nz = gmata->i[rowners[1]]-gmata->i[rowners[0]];
    } else {
      /* receive numerical values from process 0*/
      nz   = Ad->nz + Ao->nz;
      ierr = PetscMalloc(nz*sizeof(PetscScalar),&gmataa);CHKERRQ(ierr); gmataarestore = gmataa;
      ierr = MPI_Recv(gmataa,nz,MPIU_SCALAR,0,tag,comm,&status);CHKERRQ(ierr);
    }
    /* transfer numerical values into the diagonal A and off diagonal B parts of mat.
       The received buffer stores each row contiguously as
       [off-diagonal-left | diagonal | off-diagonal-right]; ld[] (saved by the
       MAT_INITIAL_MATRIX path) gives the size of the left part of each row. */
    ld = ((Mat_MPIAIJ*)(mat->data))->ld;
    ad = Ad->a;
    ao = Ao->a;
    if (mat->rmap->n) {
      /* first row: copy its left off-diagonal part, then its diagonal part */
      i  = 0;
      nz = ld[i]; ierr = PetscMemcpy(ao,gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr); ao += nz; gmataa += nz;
      nz = Ad->i[i+1] - Ad->i[i]; ierr = PetscMemcpy(ad,gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr); ad += nz; gmataa += nz;
    }
    for (i=1; i<mat->rmap->n; i++) {
      /* previous row's right off-diagonal part plus this row's left part, then the diagonal part */
      nz = Ao->i[i] - Ao->i[i-1] - ld[i-1] + ld[i]; ierr = PetscMemcpy(ao,gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr); ao += nz; gmataa += nz;
      nz = Ad->i[i+1] - Ad->i[i]; ierr = PetscMemcpy(ad,gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr); ad += nz; gmataa += nz;
    }
    i--;
    if (mat->rmap->n) {
      /* finally the last row's right off-diagonal part */
      nz = Ao->i[i+1] - Ao->i[i] - ld[i];
      ierr = PetscMemcpy(ao,gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr); ao += nz; gmataa += nz;
    }
    if (rank) {
      /* gmataa was advanced during the copies; free via the saved base pointer */
      ierr = PetscFree(gmataarestore);CHKERRQ(ierr);
    }
  }
  ierr = MatAssemblyBegin(mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  CHKMEMQ;
  PetscFunctionReturn(0);
}

/*
    Local utility routine that creates a mapping from the global column
  number to the local number in the off-diagonal part of the local
  storage of the matrix.  When PETSC_USE_CTABLE is used this is scalable at
  a slightly higher hash table cost; without it it is not scalable (each processor
  has an order N integer array but is fast to access).
 */
#undef __FUNCT__
#define __FUNCT__ "CreateColmap_MPIAIJ_Private"
PetscErrorCode CreateColmap_MPIAIJ_Private(Mat mat)
{
  Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
  PetscErrorCode ierr;
  PetscInt       n = aij->B->cmap->n,i;

  PetscFunctionBegin;
#if defined (PETSC_USE_CTABLE)
  /* hash-table variant: maps global column+1 -> local column+1 (offset by one
     so that a lookup result of 0 can mean "not present") */
  ierr = PetscTableCreate(n,&aij->colmap);CHKERRQ(ierr);
  for (i=0; i<n; i++){
    ierr = PetscTableAdd(aij->colmap,aij->garray[i]+1,i+1);CHKERRQ(ierr);
  }
#else
  /* dense variant: colmap[global col] = local col + 1, zero means "not present" */
  ierr = PetscMalloc((mat->cmap->N+1)*sizeof(PetscInt),&aij->colmap);CHKERRQ(ierr);
  ierr = PetscLogObjectMemory(mat,mat->cmap->N*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMemzero(aij->colmap,mat->cmap->N*sizeof(PetscInt));CHKERRQ(ierr);
  for (i=0; i<n; i++) aij->colmap[aij->garray[i]] = i+1;
#endif
  PetscFunctionReturn(0);
}


#define CHUNKSIZE   15

/*
   Insert (row,col,value) into the diagonal block A of the MPIAIJ matrix.
   Binary-search + linear-scan insertion into a sorted row; expects the caller
   (MatSetValues_MPIAIJ) to have set up rp1/ap1/nrow1/rmax1/low1/high1/lastcol1
   and the a-matrix aliases (aa,ai,aj,aimax,ailen,nonew,ignorezeroentries).
*/
#define MatSetValues_SeqAIJ_A_Private(row,col,value,addv) \
{ \
    if (col <= lastcol1) low1 = 0; else high1 = nrow1; \
    lastcol1 = col;\
    while (high1-low1 > 5) { \
      t = (low1+high1)/2; \
      if (rp1[t] > col) high1 = t; \
      else low1 = t; \
    } \
      for (_i=low1; _i<high1; _i++) { \
        if (rp1[_i] > col) break; \
        if (rp1[_i] == col) { \
          if (addv == ADD_VALUES) ap1[_i] += value; \
          else ap1[_i] = value; \
          goto a_noinsert; \
        } \
      } \
      if (value == 0.0 && ignorezeroentries) {low1 = 0; high1 = nrow1;goto a_noinsert;} \
      if (nonew == 1) {low1 = 0; high1 = nrow1; goto a_noinsert;} \
      if (nonew == -1) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, %D) into matrix", row, col); \
      MatSeqXAIJReallocateAIJ(A,am,1,nrow1,row,col,rmax1,aa,ai,aj,rp1,ap1,aimax,nonew,MatScalar); \
      N = nrow1++ - 1; a->nz++; high1++; \
      /* shift up all the later entries in this row */ \
      for (ii=N; ii>=_i; ii--) { \
        rp1[ii+1] = rp1[ii]; \
        ap1[ii+1] = ap1[ii]; \
      } \
      rp1[_i] = col; \
      ap1[_i] = value; \
      a_noinsert: ; \
      ailen[row] = nrow1; \
}


/*
   Same as MatSetValues_SeqAIJ_A_Private but for the off-diagonal block B;
   uses the parallel set of caller-provided locals (rp2/ap2/.../bilen).
*/
#define MatSetValues_SeqAIJ_B_Private(row,col,value,addv) \
{ \
    if (col <= lastcol2) low2 = 0; else high2 = nrow2; \
    lastcol2 = col;\
    while (high2-low2 > 5) { \
      t = (low2+high2)/2; \
      if (rp2[t] > col) high2 = t; \
      else low2 = t; \
    } \
      for (_i=low2; _i<high2; _i++) { \
        if (rp2[_i] > col) break; \
        if (rp2[_i] == col) { \
          if (addv == ADD_VALUES) ap2[_i] += value; \
          else ap2[_i] = value; \
          goto b_noinsert; \
        } \
      } \
      if (value == 0.0 && ignorezeroentries) {low2 = 0; high2 = nrow2; goto b_noinsert;} \
      if (nonew == 1) {low2 = 0; high2 = nrow2; goto b_noinsert;} \
      if (nonew == -1) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, %D) into matrix", row, col); \
      MatSeqXAIJReallocateAIJ(B,bm,1,nrow2,row,col,rmax2,ba,bi,bj,rp2,ap2,bimax,nonew,MatScalar); \
      N = nrow2++ - 1; b->nz++; high2++; \
      /* shift up all the later entries in this row */ \
      for (ii=N; ii>=_i; ii--) { \
        rp2[ii+1] = rp2[ii]; \
        ap2[ii+1] = ap2[ii]; \
      } \
      rp2[_i] = col; \
      ap2[_i] = value; \
      b_noinsert: ; \
      bilen[row] = nrow2; \
}

#undef __FUNCT__
#define __FUNCT__ "MatSetValuesRow_MPIAIJ"
/*
   Overwrite one locally owned row (global index) with the values v[], which
   are given in global column order: [left off-diag | diagonal | right off-diag].
   Assumes the row's nonzero pattern already exists; code only works for
   square matrices A.
*/
PetscErrorCode MatSetValuesRow_MPIAIJ(Mat A,PetscInt row,const PetscScalar v[])
{
  Mat_MPIAIJ     *mat = (Mat_MPIAIJ*)A->data;
  Mat_SeqAIJ     *a = (Mat_SeqAIJ*)mat->A->data,*b = (Mat_SeqAIJ*)mat->B->data;
  PetscErrorCode ierr;
  PetscInt       l,*garray = mat->garray,diag;

  PetscFunctionBegin;
  /* code only works for square matrices A */

  /* find size of row to the left of the diagonal part */
  ierr = MatGetOwnershipRange(A,&diag,0);CHKERRQ(ierr);
  row  = row - diag;
  for (l=0; l<b->i[row+1]-b->i[row]; l++) {
    if (garray[b->j[b->i[row]+l]] > diag) break;
  }
  ierr = PetscMemcpy(b->a+b->i[row],v,l*sizeof(PetscScalar));CHKERRQ(ierr);

  /* diagonal part */
  ierr = PetscMemcpy(a->a+a->i[row],v+l,(a->i[row+1]-a->i[row])*sizeof(PetscScalar));CHKERRQ(ierr);

  /* right of diagonal part */
  ierr = PetscMemcpy(b->a+b->i[row]+l,v+l+a->i[row+1]-a->i[row],(b->i[row+1]-b->i[row]-l)*sizeof(PetscScalar));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetValues_MPIAIJ"
/*
   MatSetValues() for MPIAIJ: locally owned rows go directly into the
   diagonal (A) or off-diagonal (B) sequential blocks via the insertion
   macros above; rows owned by other processes are stashed for communication
   during assembly. v may be NULL, meaning all values are 0.0.
*/
PetscErrorCode MatSetValues_MPIAIJ(Mat mat,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode addv)
{
  Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
  PetscScalar    value;
  PetscErrorCode ierr;
  PetscInt       i,j,rstart = mat->rmap->rstart,rend = mat->rmap->rend;
  PetscInt       cstart = mat->cmap->rstart,cend = mat->cmap->rend,row,col;
  PetscTruth     roworiented = aij->roworiented;

  /* Some Variables required in the macro */
  Mat            A = aij->A;
  Mat_SeqAIJ     *a = (Mat_SeqAIJ*)A->data;
  PetscInt       *aimax = a->imax,*ai = a->i,*ailen = a->ilen,*aj = a->j;
  MatScalar      *aa = a->a;
  PetscTruth     ignorezeroentries = a->ignorezeroentries;
  Mat            B = aij->B;
  Mat_SeqAIJ     *b = (Mat_SeqAIJ*)B->data;
  PetscInt       *bimax = b->imax,*bi = b->i,*bilen = b->ilen,*bj = b->j,bm = aij->B->rmap->n,am = aij->A->rmap->n;
  MatScalar      *ba = b->a;

  PetscInt       *rp1,*rp2,ii,nrow1,nrow2,_i,rmax1,rmax2,N,low1,high1,low2,high2,t,lastcol1,lastcol2;
  PetscInt       nonew = a->nonew;
  MatScalar      *ap1,*ap2;

  PetscFunctionBegin;
  for (i=0; i<m; i++) {
    if (im[i] < 0) continue;   /* negative row indices are silently ignored */
#if defined(PETSC_USE_DEBUG)
    if (im[i] >= mat->rmap->N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",im[i],mat->rmap->N-1);
#endif
    if (im[i] >= rstart && im[i] < rend) {
      /* locally owned row: prime the search state used by both insertion macros */
      row      = im[i] - rstart;
      lastcol1 = -1;
      rp1      = aj + ai[row];
      ap1      = aa + ai[row];
      rmax1    = aimax[row];
      nrow1    = ailen[row];
      low1     = 0;
      high1    = nrow1;
      lastcol2 = -1;
      rp2      = bj + bi[row];
      ap2      = ba + bi[row];
      rmax2    = bimax[row];
      nrow2    = bilen[row];
      low2     = 0;
      high2    = nrow2;

      for (j=0; j<n; j++) {
        if (v) {if (roworiented) value = v[i*n+j]; else value = v[i+j*m];} else value = 0.0;
        if (ignorezeroentries && value == 0.0 && (addv == ADD_VALUES)) continue;
        if (in[j] >= cstart && in[j] < cend){
          /* column falls in the diagonal block */
          col = in[j] - cstart;
          MatSetValues_SeqAIJ_A_Private(row,col,value,addv);
        } else if (in[j] < 0) continue;
#if defined(PETSC_USE_DEBUG)
        else if (in[j] >= mat->cmap->N) {SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",in[j],mat->cmap->N-1);}
#endif
        else {
          /* column falls in the off-diagonal block */
          if (mat->was_assembled) {
            /* after assembly B is compacted, so global columns must be mapped
               to local ones through the colmap */
            if (!aij->colmap) {
              ierr = CreateColmap_MPIAIJ_Private(mat);CHKERRQ(ierr);
            }
#if defined (PETSC_USE_CTABLE)
            ierr = PetscTableFind(aij->colmap,in[j]+1,&col);CHKERRQ(ierr);
            col--;
#else
            col = aij->colmap[in[j]] - 1;
#endif
            if (col < 0 && !((Mat_SeqAIJ*)(aij->A->data))->nonew) {
              /* new off-diagonal column: expand B back to global column space */
              ierr = DisAssemble_MPIAIJ(mat);CHKERRQ(ierr);
              col  = in[j];
              /* Reinitialize the variables required by MatSetValues_SeqAIJ_B_Private() */
              B = aij->B;
              b = (Mat_SeqAIJ*)B->data;
              bimax = b->imax; bi = b->i; bilen = b->ilen; bj = b->j; ba = b->a;
              rp2   = bj + bi[row];
              ap2   = ba + bi[row];
              rmax2 = bimax[row];
              nrow2 = bilen[row];
              low2  = 0;
              high2 = nrow2;
              bm    = aij->B->rmap->n;
              ba    = b->a;
            }
          } else col = in[j];
          MatSetValues_SeqAIJ_B_Private(row,col,value,addv);
        }
      }
    } else {
      /* row owned elsewhere: stash for MatAssemblyBegin/End communication */
      if (!aij->donotstash) {
        if (roworiented) {
          if (ignorezeroentries && v[i*n] == 0.0) continue;
          ierr = MatStashValuesRow_Private(&mat->stash,im[i],n,in,v+i*n);CHKERRQ(ierr);
        } else {
          if (ignorezeroentries && v[i] == 0.0) continue;
          ierr = MatStashValuesCol_Private(&mat->stash,im[i],n,in,v+i,m);CHKERRQ(ierr);
        }
      }
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetValues_MPIAIJ"
/*
   MatGetValues() for MPIAIJ. Only locally owned rows may be queried; an
   off-process row is an error. Off-diagonal columns not present in the
   local pattern return 0.0.
*/
PetscErrorCode MatGetValues_MPIAIJ(Mat mat,PetscInt m,const PetscInt idxm[],PetscInt n,const PetscInt idxn[],PetscScalar v[])
{
  Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
  PetscErrorCode ierr;
  PetscInt       i,j,rstart = mat->rmap->rstart,rend = mat->rmap->rend;
  PetscInt       cstart = mat->cmap->rstart,cend = mat->cmap->rend,row,col;

  PetscFunctionBegin;
  for (i=0; i<m; i++) {
    if (idxm[i] < 0) continue; /* SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Negative row: %D",idxm[i]);*/
    if (idxm[i] >= mat->rmap->N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",idxm[i],mat->rmap->N-1);
    if (idxm[i] >= rstart && idxm[i] < rend) {
      row = idxm[i] - rstart;
      for (j=0; j<n; j++) {
        if (idxn[j] < 0) continue; /* SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Negative column: %D",idxn[j]); */
        if (idxn[j] >= mat->cmap->N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",idxn[j],mat->cmap->N-1);
        if (idxn[j] >= cstart && idxn[j] < cend){
          /* diagonal block */
          col  = idxn[j] - cstart;
          ierr = MatGetValues(aij->A,1,&row,1,&col,v+i*n+j);CHKERRQ(ierr);
        } else {
          /* off-diagonal block: translate global column to local via colmap */
          if (!aij->colmap) {
            ierr = CreateColmap_MPIAIJ_Private(mat);CHKERRQ(ierr);
          }
#if defined (PETSC_USE_CTABLE)
          ierr = PetscTableFind(aij->colmap,idxn[j]+1,&col);CHKERRQ(ierr);
          col --;
#else
          col = aij->colmap[idxn[j]] - 1;
#endif
          if ((col < 0) || (aij->garray[col] != idxn[j])) *(v+i*n+j) = 0.0;
          else {
            ierr = MatGetValues(aij->B,1,&row,1,&col,v+i*n+j);CHKERRQ(ierr);
          }
        }
      }
    } else {
      SETERRQ(PETSC_ERR_SUP,"Only local values currently supported");
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatAssemblyBegin_MPIAIJ"
/*
   Begins assembly: verifies all ranks used a compatible InsertMode and starts
   scattering the stashed off-process values to their owners.
*/
PetscErrorCode MatAssemblyBegin_MPIAIJ(Mat mat,MatAssemblyType mode)
{
  Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
  PetscErrorCode ierr;
  PetscInt       nstash,reallocs;
  InsertMode     addv;

  PetscFunctionBegin;
  if (aij->donotstash) {
    PetscFunctionReturn(0);
  }

  /* make sure all processors are either in INSERTMODE or ADDMODE */
  ierr = MPI_Allreduce(&mat->insertmode,&addv,1,MPI_INT,MPI_BOR,((PetscObject)mat)->comm);CHKERRQ(ierr);
  if (addv == (ADD_VALUES|INSERT_VALUES)) {
    SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Some processors inserted others added");
  }
  mat->insertmode = addv; /* in case this processor had no cache */

  ierr = MatStashScatterBegin_Private(mat,&mat->stash,mat->rmap->range);CHKERRQ(ierr);
  ierr = MatStashGetInfo_Private(&mat->stash,&nstash,&reallocs);CHKERRQ(ierr);
  ierr = PetscInfo2(aij->A,"Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatAssemblyEnd_MPIAIJ"
/*
   Ends assembly: drains the stash (inserting the received off-process values
   locally), assembles the A and B blocks, reconciles disassembly state across
   ranks, and sets up the multiply scatter on first final assembly.
*/
PetscErrorCode MatAssemblyEnd_MPIAIJ(Mat mat,MatAssemblyType mode)
{
  Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
  Mat_SeqAIJ     *a=(Mat_SeqAIJ *)aij->A->data;
  PetscErrorCode ierr;
  PetscMPIInt    n;
  PetscInt       i,j,rstart,ncols,flg;
  PetscInt       *row,*col;
  PetscTruth     other_disassembled;
  PetscScalar    *val;
  InsertMode     addv = mat->insertmode;

  /* do not use 'b = (Mat_SeqAIJ *)aij->B->data' as B can be reset in disassembly */
  PetscFunctionBegin;
  if (!aij->donotstash) {
    while (1) {
      ierr = MatStashScatterGetMesg_Private(&mat->stash,&n,&row,&col,&val,&flg);CHKERRQ(ierr);
      if (!flg) break;

      for (i=0; i<n;) {
        /* Now identify the consecutive vals belonging to the same row */
        for (j=i,rstart=row[j]; j<n; j++) { if (row[j] != rstart) break; }
        if (j < n) ncols = j-i;
        else       ncols = n-i;
        /* Now assemble all these values with a single function call */
        ierr = MatSetValues_MPIAIJ(mat,1,row+i,ncols,col+i,val+i,addv);CHKERRQ(ierr);
        i = j;
      }
    }
    ierr = MatStashScatterEnd_Private(&mat->stash);CHKERRQ(ierr);
  }
  a->compressedrow.use = PETSC_FALSE;
  ierr = MatAssemblyBegin(aij->A,mode);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(aij->A,mode);CHKERRQ(ierr);

  /* determine if any processor has disassembled, if so we must
     also disassemble ourselves, in order that we may reassemble. */
  /*
     if nonzero structure of submatrix B cannot change then we know that
     no processor disassembled thus we can skip this stuff
  */
  if (!((Mat_SeqAIJ*)aij->B->data)->nonew) {
    ierr = MPI_Allreduce(&mat->was_assembled,&other_disassembled,1,MPI_INT,MPI_PROD,((PetscObject)mat)->comm);CHKERRQ(ierr);
    if (mat->was_assembled && !other_disassembled) {
      ierr = DisAssemble_MPIAIJ(mat);CHKERRQ(ierr);
    }
  }
  if (!mat->was_assembled && mode == MAT_FINAL_ASSEMBLY) {
    ierr = MatSetUpMultiply_MPIAIJ(mat);CHKERRQ(ierr);
  }
  ierr = MatSetOption(aij->B,MAT_USE_INODES,PETSC_FALSE);CHKERRQ(ierr);
  ((Mat_SeqAIJ *)aij->B->data)->compressedrow.use = PETSC_TRUE; /* b->compressedrow.use */
  ierr = MatAssemblyBegin(aij->B,mode);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(aij->B,mode);CHKERRQ(ierr);

  ierr = PetscFree(aij->rowvalues);CHKERRQ(ierr);
  aij->rowvalues = 0;

  /* used by MatAXPY() */
  a->xtoy = 0; ((Mat_SeqAIJ *)aij->B->data)->xtoy = 0; /* b->xtoy = 0 */
  a->XtoY = 0; ((Mat_SeqAIJ *)aij->B->data)->XtoY = 0; /* b->XtoY = 0 */

  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatZeroEntries_MPIAIJ"
/*
   Zeros all entries of the matrix by zeroing both sequential blocks;
   the nonzero pattern is retained.
*/
PetscErrorCode MatZeroEntries_MPIAIJ(Mat A)
{
  Mat_MPIAIJ     *l = (Mat_MPIAIJ*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatZeroEntries(l->A);CHKERRQ(ierr);
  ierr = MatZeroEntries(l->B);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatZeroRows_MPIAIJ"
/*
   NOTE(review): this function is truncated in the visible portion of the file;
   the remainder of its body lies beyond this chunk.
*/
PetscErrorCode MatZeroRows_MPIAIJ(Mat A,PetscInt N,const PetscInt rows[],PetscScalar diag)
{
  Mat_MPIAIJ     *l = (Mat_MPIAIJ*)A->data;
  PetscErrorCode ierr;
  PetscMPIInt    size = l->size,imdex,n,rank = l->rank,tag = ((PetscObject)A)->tag,lastidx = -1;
  PetscInt       i,*owners = A->rmap->range;
  PetscInt       *nprocs,j,idx,nsends,row;
  PetscInt       nmax,*svalues,*starts,*owner,nrecvs;
  PetscInt       *rvalues,count,base,slen,*source;
  PetscInt       *lens,*lrows,*values,rstart=A->rmap->rstart;
  MPI_Comm       comm = ((PetscObject)A)->comm;
  MPI_Request    *send_waits,*recv_waits;
  MPI_Status     recv_status,*send_status;
#if defined(PETSC_DEBUG)
  PetscTruth     found = PETSC_FALSE;
#endif

  PetscFunctionBegin;
  /* first count number of contributors to each processor */
  ierr = PetscMalloc(2*size*sizeof(PetscInt),&nprocs);CHKERRQ(ierr);
  ierr = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMalloc((N+1)*sizeof(PetscInt),&owner);CHKERRQ(ierr); /* see note*/
  j = 0;
  for (i=0; i<N;
i++) { 5856543fbbaSBarry Smith if (lastidx > (idx = rows[i])) j = 0; 5866543fbbaSBarry Smith lastidx = idx; 5876543fbbaSBarry Smith for (; j<size; j++) { 5881eb62cbbSBarry Smith if (idx >= owners[j] && idx < owners[j+1]) { 5896543fbbaSBarry Smith nprocs[2*j]++; 5906543fbbaSBarry Smith nprocs[2*j+1] = 1; 5916543fbbaSBarry Smith owner[i] = j; 5926543fbbaSBarry Smith #if defined(PETSC_DEBUG) 5936543fbbaSBarry Smith found = PETSC_TRUE; 5946543fbbaSBarry Smith #endif 5956543fbbaSBarry Smith break; 5961eb62cbbSBarry Smith } 5971eb62cbbSBarry Smith } 5986543fbbaSBarry Smith #if defined(PETSC_DEBUG) 59929bbc08cSBarry Smith if (!found) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Index out of range"); 6006543fbbaSBarry Smith found = PETSC_FALSE; 6016543fbbaSBarry Smith #endif 6021eb62cbbSBarry Smith } 603c1dc657dSBarry Smith nsends = 0; for (i=0; i<size; i++) { nsends += nprocs[2*i+1];} 6041eb62cbbSBarry Smith 6051eb62cbbSBarry Smith /* inform other processors of number of messages and max length*/ 606c1dc657dSBarry Smith ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr); 6071eb62cbbSBarry Smith 6081eb62cbbSBarry Smith /* post receives: */ 609b1d57f15SBarry Smith ierr = PetscMalloc((nrecvs+1)*(nmax+1)*sizeof(PetscInt),&rvalues);CHKERRQ(ierr); 610b0a32e0cSBarry Smith ierr = PetscMalloc((nrecvs+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr); 6111eb62cbbSBarry Smith for (i=0; i<nrecvs; i++) { 612b1d57f15SBarry Smith ierr = MPI_Irecv(rvalues+nmax*i,nmax,MPIU_INT,MPI_ANY_SOURCE,tag,comm,recv_waits+i);CHKERRQ(ierr); 6131eb62cbbSBarry Smith } 6141eb62cbbSBarry Smith 6151eb62cbbSBarry Smith /* do sends: 6161eb62cbbSBarry Smith 1) starts[i] gives the starting index in svalues for stuff going to 6171eb62cbbSBarry Smith the ith processor 6181eb62cbbSBarry Smith */ 619b1d57f15SBarry Smith ierr = PetscMalloc((N+1)*sizeof(PetscInt),&svalues);CHKERRQ(ierr); 620b0a32e0cSBarry Smith ierr = PetscMalloc((nsends+1)*sizeof(MPI_Request),&send_waits);CHKERRQ(ierr); 621b1d57f15SBarry Smith ierr 
= PetscMalloc((size+1)*sizeof(PetscInt),&starts);CHKERRQ(ierr); 6221eb62cbbSBarry Smith starts[0] = 0; 623c1dc657dSBarry Smith for (i=1; i<size; i++) { starts[i] = starts[i-1] + nprocs[2*i-2];} 6241eb62cbbSBarry Smith for (i=0; i<N; i++) { 6251eb62cbbSBarry Smith svalues[starts[owner[i]]++] = rows[i]; 6261eb62cbbSBarry Smith } 6271eb62cbbSBarry Smith 6281eb62cbbSBarry Smith starts[0] = 0; 629c1dc657dSBarry Smith for (i=1; i<size+1; i++) { starts[i] = starts[i-1] + nprocs[2*i-2];} 6301eb62cbbSBarry Smith count = 0; 63117699dbbSLois Curfman McInnes for (i=0; i<size; i++) { 632c1dc657dSBarry Smith if (nprocs[2*i+1]) { 633b1d57f15SBarry Smith ierr = MPI_Isend(svalues+starts[i],nprocs[2*i],MPIU_INT,i,tag,comm,send_waits+count++);CHKERRQ(ierr); 6341eb62cbbSBarry Smith } 6351eb62cbbSBarry Smith } 636606d414cSSatish Balay ierr = PetscFree(starts);CHKERRQ(ierr); 6371eb62cbbSBarry Smith 63817699dbbSLois Curfman McInnes base = owners[rank]; 6391eb62cbbSBarry Smith 6401eb62cbbSBarry Smith /* wait on receives */ 641b1d57f15SBarry Smith ierr = PetscMalloc(2*(nrecvs+1)*sizeof(PetscInt),&lens);CHKERRQ(ierr); 6421eb62cbbSBarry Smith source = lens + nrecvs; 6431eb62cbbSBarry Smith count = nrecvs; slen = 0; 6441eb62cbbSBarry Smith while (count) { 645ca161407SBarry Smith ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr); 6461eb62cbbSBarry Smith /* unpack receives into our local space */ 647b1d57f15SBarry Smith ierr = MPI_Get_count(&recv_status,MPIU_INT,&n);CHKERRQ(ierr); 648d6dfbf8fSBarry Smith source[imdex] = recv_status.MPI_SOURCE; 649d6dfbf8fSBarry Smith lens[imdex] = n; 6501eb62cbbSBarry Smith slen += n; 6511eb62cbbSBarry Smith count--; 6521eb62cbbSBarry Smith } 653606d414cSSatish Balay ierr = PetscFree(recv_waits);CHKERRQ(ierr); 6541eb62cbbSBarry Smith 6551eb62cbbSBarry Smith /* move the data into the send scatter */ 656b1d57f15SBarry Smith ierr = PetscMalloc((slen+1)*sizeof(PetscInt),&lrows);CHKERRQ(ierr); 6571eb62cbbSBarry Smith count = 0; 
6581eb62cbbSBarry Smith for (i=0; i<nrecvs; i++) { 6591eb62cbbSBarry Smith values = rvalues + i*nmax; 6601eb62cbbSBarry Smith for (j=0; j<lens[i]; j++) { 6611eb62cbbSBarry Smith lrows[count++] = values[j] - base; 6621eb62cbbSBarry Smith } 6631eb62cbbSBarry Smith } 664606d414cSSatish Balay ierr = PetscFree(rvalues);CHKERRQ(ierr); 665606d414cSSatish Balay ierr = PetscFree(lens);CHKERRQ(ierr); 666606d414cSSatish Balay ierr = PetscFree(owner);CHKERRQ(ierr); 667606d414cSSatish Balay ierr = PetscFree(nprocs);CHKERRQ(ierr); 6681eb62cbbSBarry Smith 6691eb62cbbSBarry Smith /* actually zap the local rows */ 6706eb55b6aSBarry Smith /* 6716eb55b6aSBarry Smith Zero the required rows. If the "diagonal block" of the matrix 672a8c7a070SBarry Smith is square and the user wishes to set the diagonal we use separate 6736eb55b6aSBarry Smith code so that MatSetValues() is not called for each diagonal allocating 6746eb55b6aSBarry Smith new memory, thus calling lots of mallocs and slowing things down. 6756eb55b6aSBarry Smith 676f4df32b1SMatthew Knepley Contributed by: Matthew Knepley 6776eb55b6aSBarry Smith */ 678e2d53e46SBarry Smith /* must zero l->B before l->A because the (diag) case below may put values into l->B*/ 679f4df32b1SMatthew Knepley ierr = MatZeroRows(l->B,slen,lrows,0.0);CHKERRQ(ierr); 680d0f46423SBarry Smith if ((diag != 0.0) && (l->A->rmap->N == l->A->cmap->N)) { 681f4df32b1SMatthew Knepley ierr = MatZeroRows(l->A,slen,lrows,diag);CHKERRQ(ierr); 682f4df32b1SMatthew Knepley } else if (diag != 0.0) { 683f4df32b1SMatthew Knepley ierr = MatZeroRows(l->A,slen,lrows,0.0);CHKERRQ(ierr); 684fa46199cSSatish Balay if (((Mat_SeqAIJ*)l->A->data)->nonew) { 68529bbc08cSBarry Smith SETERRQ(PETSC_ERR_SUP,"MatZeroRows() on rectangular matrices cannot be used with the Mat options\n\ 686512a5fc5SBarry Smith MAT_NEW_NONZERO_LOCATIONS,MAT_NEW_NONZERO_LOCATION_ERR,MAT_NEW_NONZERO_ALLOCATION_ERR"); 6876525c446SSatish Balay } 688e2d53e46SBarry Smith for (i = 0; i < slen; i++) { 689e2d53e46SBarry 
Smith row = lrows[i] + rstart; 690f4df32b1SMatthew Knepley ierr = MatSetValues(A,1,&row,1,&row,&diag,INSERT_VALUES);CHKERRQ(ierr); 691e2d53e46SBarry Smith } 692e2d53e46SBarry Smith ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 693e2d53e46SBarry Smith ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 6946eb55b6aSBarry Smith } else { 695f4df32b1SMatthew Knepley ierr = MatZeroRows(l->A,slen,lrows,0.0);CHKERRQ(ierr); 6966eb55b6aSBarry Smith } 697606d414cSSatish Balay ierr = PetscFree(lrows);CHKERRQ(ierr); 69872dacd9aSBarry Smith 6991eb62cbbSBarry Smith /* wait on sends */ 7001eb62cbbSBarry Smith if (nsends) { 701b0a32e0cSBarry Smith ierr = PetscMalloc(nsends*sizeof(MPI_Status),&send_status);CHKERRQ(ierr); 702ca161407SBarry Smith ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr); 703606d414cSSatish Balay ierr = PetscFree(send_status);CHKERRQ(ierr); 7041eb62cbbSBarry Smith } 705606d414cSSatish Balay ierr = PetscFree(send_waits);CHKERRQ(ierr); 706606d414cSSatish Balay ierr = PetscFree(svalues);CHKERRQ(ierr); 7071eb62cbbSBarry Smith 7083a40ed3dSBarry Smith PetscFunctionReturn(0); 7091eb62cbbSBarry Smith } 7101eb62cbbSBarry Smith 7114a2ae208SSatish Balay #undef __FUNCT__ 7124a2ae208SSatish Balay #define __FUNCT__ "MatMult_MPIAIJ" 713dfbe8321SBarry Smith PetscErrorCode MatMult_MPIAIJ(Mat A,Vec xx,Vec yy) 7141eb62cbbSBarry Smith { 715416022c9SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 716dfbe8321SBarry Smith PetscErrorCode ierr; 717b1d57f15SBarry Smith PetscInt nt; 718416022c9SBarry Smith 7193a40ed3dSBarry Smith PetscFunctionBegin; 720a2ce50c7SBarry Smith ierr = VecGetLocalSize(xx,&nt);CHKERRQ(ierr); 721d0f46423SBarry Smith if (nt != A->cmap->n) { 722d0f46423SBarry Smith SETERRQ2(PETSC_ERR_ARG_SIZ,"Incompatible partition of A (%D) and xx (%D)",A->cmap->n,nt); 723fbd6ef76SBarry Smith } 724ca9f406cSSatish Balay ierr = VecScatterBegin(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 725f830108cSBarry Smith ierr = 
(*a->A->ops->mult)(a->A,xx,yy);CHKERRQ(ierr); 726ca9f406cSSatish Balay ierr = VecScatterEnd(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 727f830108cSBarry Smith ierr = (*a->B->ops->multadd)(a->B,a->lvec,yy,yy);CHKERRQ(ierr); 7283a40ed3dSBarry Smith PetscFunctionReturn(0); 7291eb62cbbSBarry Smith } 7301eb62cbbSBarry Smith 7314a2ae208SSatish Balay #undef __FUNCT__ 7324a2ae208SSatish Balay #define __FUNCT__ "MatMultAdd_MPIAIJ" 733dfbe8321SBarry Smith PetscErrorCode MatMultAdd_MPIAIJ(Mat A,Vec xx,Vec yy,Vec zz) 734da3a660dSBarry Smith { 735416022c9SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 736dfbe8321SBarry Smith PetscErrorCode ierr; 7373a40ed3dSBarry Smith 7383a40ed3dSBarry Smith PetscFunctionBegin; 739ca9f406cSSatish Balay ierr = VecScatterBegin(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 740f830108cSBarry Smith ierr = (*a->A->ops->multadd)(a->A,xx,yy,zz);CHKERRQ(ierr); 741ca9f406cSSatish Balay ierr = VecScatterEnd(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 742f830108cSBarry Smith ierr = (*a->B->ops->multadd)(a->B,a->lvec,zz,zz);CHKERRQ(ierr); 7433a40ed3dSBarry Smith PetscFunctionReturn(0); 744da3a660dSBarry Smith } 745da3a660dSBarry Smith 7464a2ae208SSatish Balay #undef __FUNCT__ 7474a2ae208SSatish Balay #define __FUNCT__ "MatMultTranspose_MPIAIJ" 748dfbe8321SBarry Smith PetscErrorCode MatMultTranspose_MPIAIJ(Mat A,Vec xx,Vec yy) 749da3a660dSBarry Smith { 750416022c9SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 751dfbe8321SBarry Smith PetscErrorCode ierr; 752a5ff213dSBarry Smith PetscTruth merged; 753da3a660dSBarry Smith 7543a40ed3dSBarry Smith PetscFunctionBegin; 755a5ff213dSBarry Smith ierr = VecScatterGetMerged(a->Mvctx,&merged);CHKERRQ(ierr); 756da3a660dSBarry Smith /* do nondiagonal part */ 7577c922b88SBarry Smith ierr = (*a->B->ops->multtranspose)(a->B,xx,a->lvec);CHKERRQ(ierr); 758a5ff213dSBarry Smith if (!merged) { 759da3a660dSBarry Smith /* send it on its way */ 
760ca9f406cSSatish Balay ierr = VecScatterBegin(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 761da3a660dSBarry Smith /* do local part */ 7627c922b88SBarry Smith ierr = (*a->A->ops->multtranspose)(a->A,xx,yy);CHKERRQ(ierr); 763da3a660dSBarry Smith /* receive remote parts: note this assumes the values are not actually */ 764a5ff213dSBarry Smith /* added in yy until the next line, */ 765ca9f406cSSatish Balay ierr = VecScatterEnd(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 766a5ff213dSBarry Smith } else { 767a5ff213dSBarry Smith /* do local part */ 768a5ff213dSBarry Smith ierr = (*a->A->ops->multtranspose)(a->A,xx,yy);CHKERRQ(ierr); 769a5ff213dSBarry Smith /* send it on its way */ 770ca9f406cSSatish Balay ierr = VecScatterBegin(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 771a5ff213dSBarry Smith /* values actually were received in the Begin() but we need to call this nop */ 772ca9f406cSSatish Balay ierr = VecScatterEnd(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 773a5ff213dSBarry Smith } 7743a40ed3dSBarry Smith PetscFunctionReturn(0); 775da3a660dSBarry Smith } 776da3a660dSBarry Smith 777cd0d46ebSvictorle EXTERN_C_BEGIN 778cd0d46ebSvictorle #undef __FUNCT__ 7795fbd3699SBarry Smith #define __FUNCT__ "MatIsTranspose_MPIAIJ" 78013c77408SMatthew Knepley PetscErrorCode PETSCMAT_DLLEXPORT MatIsTranspose_MPIAIJ(Mat Amat,Mat Bmat,PetscReal tol,PetscTruth *f) 781cd0d46ebSvictorle { 7824f423910Svictorle MPI_Comm comm; 783cd0d46ebSvictorle Mat_MPIAIJ *Aij = (Mat_MPIAIJ *) Amat->data, *Bij; 78466501d38Svictorle Mat Adia = Aij->A, Bdia, Aoff,Boff,*Aoffs,*Boffs; 785cd0d46ebSvictorle IS Me,Notme; 7866849ba73SBarry Smith PetscErrorCode ierr; 787b1d57f15SBarry Smith PetscInt M,N,first,last,*notme,i; 788b1d57f15SBarry Smith PetscMPIInt size; 789cd0d46ebSvictorle 790cd0d46ebSvictorle PetscFunctionBegin; 79142e5f5b4Svictorle 79242e5f5b4Svictorle /* Easy test: symmetric diagonal block */ 79366501d38Svictorle Bij = 
(Mat_MPIAIJ *) Bmat->data; Bdia = Bij->A; 7945485867bSBarry Smith ierr = MatIsTranspose(Adia,Bdia,tol,f);CHKERRQ(ierr); 795cd0d46ebSvictorle if (!*f) PetscFunctionReturn(0); 7964f423910Svictorle ierr = PetscObjectGetComm((PetscObject)Amat,&comm);CHKERRQ(ierr); 797b1d57f15SBarry Smith ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 798b1d57f15SBarry Smith if (size == 1) PetscFunctionReturn(0); 79942e5f5b4Svictorle 80042e5f5b4Svictorle /* Hard test: off-diagonal block. This takes a MatGetSubMatrix. */ 801cd0d46ebSvictorle ierr = MatGetSize(Amat,&M,&N);CHKERRQ(ierr); 802cd0d46ebSvictorle ierr = MatGetOwnershipRange(Amat,&first,&last);CHKERRQ(ierr); 803b1d57f15SBarry Smith ierr = PetscMalloc((N-last+first)*sizeof(PetscInt),¬me);CHKERRQ(ierr); 804cd0d46ebSvictorle for (i=0; i<first; i++) notme[i] = i; 805cd0d46ebSvictorle for (i=last; i<M; i++) notme[i-last+first] = i; 806268466fbSBarry Smith ierr = ISCreateGeneral(MPI_COMM_SELF,N-last+first,notme,&Notme);CHKERRQ(ierr); 807268466fbSBarry Smith ierr = ISCreateStride(MPI_COMM_SELF,last-first,first,1,&Me);CHKERRQ(ierr); 808268466fbSBarry Smith ierr = MatGetSubMatrices(Amat,1,&Me,&Notme,MAT_INITIAL_MATRIX,&Aoffs);CHKERRQ(ierr); 80966501d38Svictorle Aoff = Aoffs[0]; 810268466fbSBarry Smith ierr = MatGetSubMatrices(Bmat,1,&Notme,&Me,MAT_INITIAL_MATRIX,&Boffs);CHKERRQ(ierr); 81166501d38Svictorle Boff = Boffs[0]; 8125485867bSBarry Smith ierr = MatIsTranspose(Aoff,Boff,tol,f);CHKERRQ(ierr); 81366501d38Svictorle ierr = MatDestroyMatrices(1,&Aoffs);CHKERRQ(ierr); 81466501d38Svictorle ierr = MatDestroyMatrices(1,&Boffs);CHKERRQ(ierr); 81542e5f5b4Svictorle ierr = ISDestroy(Me);CHKERRQ(ierr); 81642e5f5b4Svictorle ierr = ISDestroy(Notme);CHKERRQ(ierr); 81742e5f5b4Svictorle 818cd0d46ebSvictorle PetscFunctionReturn(0); 819cd0d46ebSvictorle } 820cd0d46ebSvictorle EXTERN_C_END 821cd0d46ebSvictorle 8224a2ae208SSatish Balay #undef __FUNCT__ 8234a2ae208SSatish Balay #define __FUNCT__ "MatMultTransposeAdd_MPIAIJ" 824dfbe8321SBarry Smith 
PetscErrorCode MatMultTransposeAdd_MPIAIJ(Mat A,Vec xx,Vec yy,Vec zz) 825da3a660dSBarry Smith { 826416022c9SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 827dfbe8321SBarry Smith PetscErrorCode ierr; 828da3a660dSBarry Smith 8293a40ed3dSBarry Smith PetscFunctionBegin; 830da3a660dSBarry Smith /* do nondiagonal part */ 8317c922b88SBarry Smith ierr = (*a->B->ops->multtranspose)(a->B,xx,a->lvec);CHKERRQ(ierr); 832da3a660dSBarry Smith /* send it on its way */ 833ca9f406cSSatish Balay ierr = VecScatterBegin(a->Mvctx,a->lvec,zz,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 834da3a660dSBarry Smith /* do local part */ 8357c922b88SBarry Smith ierr = (*a->A->ops->multtransposeadd)(a->A,xx,yy,zz);CHKERRQ(ierr); 836a5ff213dSBarry Smith /* receive remote parts */ 837ca9f406cSSatish Balay ierr = VecScatterEnd(a->Mvctx,a->lvec,zz,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 8383a40ed3dSBarry Smith PetscFunctionReturn(0); 839da3a660dSBarry Smith } 840da3a660dSBarry Smith 8411eb62cbbSBarry Smith /* 8421eb62cbbSBarry Smith This only works correctly for square matrices where the subblock A->A is the 8431eb62cbbSBarry Smith diagonal block 8441eb62cbbSBarry Smith */ 8454a2ae208SSatish Balay #undef __FUNCT__ 8464a2ae208SSatish Balay #define __FUNCT__ "MatGetDiagonal_MPIAIJ" 847dfbe8321SBarry Smith PetscErrorCode MatGetDiagonal_MPIAIJ(Mat A,Vec v) 8481eb62cbbSBarry Smith { 849dfbe8321SBarry Smith PetscErrorCode ierr; 850416022c9SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 8513a40ed3dSBarry Smith 8523a40ed3dSBarry Smith PetscFunctionBegin; 853d0f46423SBarry Smith if (A->rmap->N != A->cmap->N) SETERRQ(PETSC_ERR_SUP,"Supports only square matrix where A->A is diag block"); 854d0f46423SBarry Smith if (A->rmap->rstart != A->cmap->rstart || A->rmap->rend != A->cmap->rend) { 85529bbc08cSBarry Smith SETERRQ(PETSC_ERR_ARG_SIZ,"row partition must equal col partition"); 8563a40ed3dSBarry Smith } 8573a40ed3dSBarry Smith ierr = MatGetDiagonal(a->A,v);CHKERRQ(ierr); 8583a40ed3dSBarry Smith 
PetscFunctionReturn(0); 8591eb62cbbSBarry Smith } 8601eb62cbbSBarry Smith 8614a2ae208SSatish Balay #undef __FUNCT__ 8624a2ae208SSatish Balay #define __FUNCT__ "MatScale_MPIAIJ" 863f4df32b1SMatthew Knepley PetscErrorCode MatScale_MPIAIJ(Mat A,PetscScalar aa) 864052efed2SBarry Smith { 865052efed2SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 866dfbe8321SBarry Smith PetscErrorCode ierr; 8673a40ed3dSBarry Smith 8683a40ed3dSBarry Smith PetscFunctionBegin; 869f4df32b1SMatthew Knepley ierr = MatScale(a->A,aa);CHKERRQ(ierr); 870f4df32b1SMatthew Knepley ierr = MatScale(a->B,aa);CHKERRQ(ierr); 8713a40ed3dSBarry Smith PetscFunctionReturn(0); 872052efed2SBarry Smith } 873052efed2SBarry Smith 8744a2ae208SSatish Balay #undef __FUNCT__ 8754a2ae208SSatish Balay #define __FUNCT__ "MatDestroy_MPIAIJ" 876dfbe8321SBarry Smith PetscErrorCode MatDestroy_MPIAIJ(Mat mat) 8771eb62cbbSBarry Smith { 87844a69424SLois Curfman McInnes Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 879dfbe8321SBarry Smith PetscErrorCode ierr; 88083e2fdc7SBarry Smith 8813a40ed3dSBarry Smith PetscFunctionBegin; 882aa482453SBarry Smith #if defined(PETSC_USE_LOG) 883d0f46423SBarry Smith PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D",mat->rmap->N,mat->cmap->N); 884a5a9c739SBarry Smith #endif 8858798bf22SSatish Balay ierr = MatStashDestroy_Private(&mat->stash);CHKERRQ(ierr); 88678b31e54SBarry Smith ierr = MatDestroy(aij->A);CHKERRQ(ierr); 88778b31e54SBarry Smith ierr = MatDestroy(aij->B);CHKERRQ(ierr); 888aa482453SBarry Smith #if defined (PETSC_USE_CTABLE) 8899c666560SBarry Smith if (aij->colmap) {ierr = PetscTableDestroy(aij->colmap);CHKERRQ(ierr);} 890b1fc9764SSatish Balay #else 89105b42c5fSBarry Smith ierr = PetscFree(aij->colmap);CHKERRQ(ierr); 892b1fc9764SSatish Balay #endif 89305b42c5fSBarry Smith ierr = PetscFree(aij->garray);CHKERRQ(ierr); 8947c922b88SBarry Smith if (aij->lvec) {ierr = VecDestroy(aij->lvec);CHKERRQ(ierr);} 8957c922b88SBarry Smith if (aij->Mvctx) {ierr = 
VecScatterDestroy(aij->Mvctx);CHKERRQ(ierr);} 89605b42c5fSBarry Smith ierr = PetscFree(aij->rowvalues);CHKERRQ(ierr); 8978aa348c1SBarry Smith ierr = PetscFree(aij->ld);CHKERRQ(ierr); 898606d414cSSatish Balay ierr = PetscFree(aij);CHKERRQ(ierr); 899901853e0SKris Buschelman 900dbd8c25aSHong Zhang ierr = PetscObjectChangeTypeName((PetscObject)mat,0);CHKERRQ(ierr); 901901853e0SKris Buschelman ierr = PetscObjectComposeFunction((PetscObject)mat,"MatStoreValues_C","",PETSC_NULL);CHKERRQ(ierr); 902901853e0SKris Buschelman ierr = PetscObjectComposeFunction((PetscObject)mat,"MatRetrieveValues_C","",PETSC_NULL);CHKERRQ(ierr); 903901853e0SKris Buschelman ierr = PetscObjectComposeFunction((PetscObject)mat,"MatGetDiagonalBlock_C","",PETSC_NULL);CHKERRQ(ierr); 904901853e0SKris Buschelman ierr = PetscObjectComposeFunction((PetscObject)mat,"MatIsTranspose_C","",PETSC_NULL);CHKERRQ(ierr); 905901853e0SKris Buschelman ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAIJSetPreallocation_C","",PETSC_NULL);CHKERRQ(ierr); 906ff69c46cSKris Buschelman ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAIJSetPreallocationCSR_C","",PETSC_NULL);CHKERRQ(ierr); 907901853e0SKris Buschelman ierr = PetscObjectComposeFunction((PetscObject)mat,"MatDiagonalScaleLocal_C","",PETSC_NULL);CHKERRQ(ierr); 9083a40ed3dSBarry Smith PetscFunctionReturn(0); 9091eb62cbbSBarry Smith } 910ee50ffe9SBarry Smith 9114a2ae208SSatish Balay #undef __FUNCT__ 9128e2fed03SBarry Smith #define __FUNCT__ "MatView_MPIAIJ_Binary" 913dfbe8321SBarry Smith PetscErrorCode MatView_MPIAIJ_Binary(Mat mat,PetscViewer viewer) 9148e2fed03SBarry Smith { 9158e2fed03SBarry Smith Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 9168e2fed03SBarry Smith Mat_SeqAIJ* A = (Mat_SeqAIJ*)aij->A->data; 9178e2fed03SBarry Smith Mat_SeqAIJ* B = (Mat_SeqAIJ*)aij->B->data; 9186849ba73SBarry Smith PetscErrorCode ierr; 91932dcc486SBarry Smith PetscMPIInt rank,size,tag = ((PetscObject)viewer)->tag; 9206f69ff64SBarry Smith int fd; 921a788621eSSatish 
Balay PetscInt nz,header[4],*row_lengths,*range=0,rlen,i; 922d0f46423SBarry Smith PetscInt nzmax,*column_indices,j,k,col,*garray = aij->garray,cnt,cstart = mat->cmap->rstart,rnz; 9238e2fed03SBarry Smith PetscScalar *column_values; 9248e2fed03SBarry Smith 9258e2fed03SBarry Smith PetscFunctionBegin; 9267adad957SLisandro Dalcin ierr = MPI_Comm_rank(((PetscObject)mat)->comm,&rank);CHKERRQ(ierr); 9277adad957SLisandro Dalcin ierr = MPI_Comm_size(((PetscObject)mat)->comm,&size);CHKERRQ(ierr); 9288e2fed03SBarry Smith nz = A->nz + B->nz; 929958c9bccSBarry Smith if (!rank) { 9308e2fed03SBarry Smith header[0] = MAT_FILE_COOKIE; 931d0f46423SBarry Smith header[1] = mat->rmap->N; 932d0f46423SBarry Smith header[2] = mat->cmap->N; 9337adad957SLisandro Dalcin ierr = MPI_Reduce(&nz,&header[3],1,MPIU_INT,MPI_SUM,0,((PetscObject)mat)->comm);CHKERRQ(ierr); 9348e2fed03SBarry Smith ierr = PetscViewerBinaryGetDescriptor(viewer,&fd);CHKERRQ(ierr); 9356f69ff64SBarry Smith ierr = PetscBinaryWrite(fd,header,4,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr); 9368e2fed03SBarry Smith /* get largest number of rows any processor has */ 937d0f46423SBarry Smith rlen = mat->rmap->n; 938d0f46423SBarry Smith range = mat->rmap->range; 9398e2fed03SBarry Smith for (i=1; i<size; i++) { 9408e2fed03SBarry Smith rlen = PetscMax(rlen,range[i+1] - range[i]); 9418e2fed03SBarry Smith } 9428e2fed03SBarry Smith } else { 9437adad957SLisandro Dalcin ierr = MPI_Reduce(&nz,0,1,MPIU_INT,MPI_SUM,0,((PetscObject)mat)->comm);CHKERRQ(ierr); 944d0f46423SBarry Smith rlen = mat->rmap->n; 9458e2fed03SBarry Smith } 9468e2fed03SBarry Smith 9478e2fed03SBarry Smith /* load up the local row counts */ 948b1d57f15SBarry Smith ierr = PetscMalloc((rlen+1)*sizeof(PetscInt),&row_lengths);CHKERRQ(ierr); 949d0f46423SBarry Smith for (i=0; i<mat->rmap->n; i++) { 9508e2fed03SBarry Smith row_lengths[i] = A->i[i+1] - A->i[i] + B->i[i+1] - B->i[i]; 9518e2fed03SBarry Smith } 9528e2fed03SBarry Smith 9538e2fed03SBarry Smith /* store the row lengths to the file 
*/ 954958c9bccSBarry Smith if (!rank) { 9558e2fed03SBarry Smith MPI_Status status; 956d0f46423SBarry Smith ierr = PetscBinaryWrite(fd,row_lengths,mat->rmap->n,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr); 9578e2fed03SBarry Smith for (i=1; i<size; i++) { 9588e2fed03SBarry Smith rlen = range[i+1] - range[i]; 9597adad957SLisandro Dalcin ierr = MPI_Recv(row_lengths,rlen,MPIU_INT,i,tag,((PetscObject)mat)->comm,&status);CHKERRQ(ierr); 9606f69ff64SBarry Smith ierr = PetscBinaryWrite(fd,row_lengths,rlen,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr); 9618e2fed03SBarry Smith } 9628e2fed03SBarry Smith } else { 963d0f46423SBarry Smith ierr = MPI_Send(row_lengths,mat->rmap->n,MPIU_INT,0,tag,((PetscObject)mat)->comm);CHKERRQ(ierr); 9648e2fed03SBarry Smith } 9658e2fed03SBarry Smith ierr = PetscFree(row_lengths);CHKERRQ(ierr); 9668e2fed03SBarry Smith 9678e2fed03SBarry Smith /* load up the local column indices */ 9688e2fed03SBarry Smith nzmax = nz; /* )th processor needs space a largest processor needs */ 9697adad957SLisandro Dalcin ierr = MPI_Reduce(&nz,&nzmax,1,MPIU_INT,MPI_MAX,0,((PetscObject)mat)->comm);CHKERRQ(ierr); 970b1d57f15SBarry Smith ierr = PetscMalloc((nzmax+1)*sizeof(PetscInt),&column_indices);CHKERRQ(ierr); 9718e2fed03SBarry Smith cnt = 0; 972d0f46423SBarry Smith for (i=0; i<mat->rmap->n; i++) { 9738e2fed03SBarry Smith for (j=B->i[i]; j<B->i[i+1]; j++) { 9748e2fed03SBarry Smith if ( (col = garray[B->j[j]]) > cstart) break; 9758e2fed03SBarry Smith column_indices[cnt++] = col; 9768e2fed03SBarry Smith } 9778e2fed03SBarry Smith for (k=A->i[i]; k<A->i[i+1]; k++) { 9788e2fed03SBarry Smith column_indices[cnt++] = A->j[k] + cstart; 9798e2fed03SBarry Smith } 9808e2fed03SBarry Smith for (; j<B->i[i+1]; j++) { 9818e2fed03SBarry Smith column_indices[cnt++] = garray[B->j[j]]; 9828e2fed03SBarry Smith } 9838e2fed03SBarry Smith } 98477431f27SBarry Smith if (cnt != A->nz + B->nz) SETERRQ2(PETSC_ERR_LIB,"Internal PETSc error: cnt = %D nz = %D",cnt,A->nz+B->nz); 9858e2fed03SBarry Smith 9868e2fed03SBarry 
Smith /* store the column indices to the file */ 987958c9bccSBarry Smith if (!rank) { 9888e2fed03SBarry Smith MPI_Status status; 9896f69ff64SBarry Smith ierr = PetscBinaryWrite(fd,column_indices,nz,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr); 9908e2fed03SBarry Smith for (i=1; i<size; i++) { 9917adad957SLisandro Dalcin ierr = MPI_Recv(&rnz,1,MPIU_INT,i,tag,((PetscObject)mat)->comm,&status);CHKERRQ(ierr); 99277431f27SBarry Smith if (rnz > nzmax) SETERRQ2(PETSC_ERR_LIB,"Internal PETSc error: nz = %D nzmax = %D",nz,nzmax); 9937adad957SLisandro Dalcin ierr = MPI_Recv(column_indices,rnz,MPIU_INT,i,tag,((PetscObject)mat)->comm,&status);CHKERRQ(ierr); 9946f69ff64SBarry Smith ierr = PetscBinaryWrite(fd,column_indices,rnz,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr); 9958e2fed03SBarry Smith } 9968e2fed03SBarry Smith } else { 9977adad957SLisandro Dalcin ierr = MPI_Send(&nz,1,MPIU_INT,0,tag,((PetscObject)mat)->comm);CHKERRQ(ierr); 9987adad957SLisandro Dalcin ierr = MPI_Send(column_indices,nz,MPIU_INT,0,tag,((PetscObject)mat)->comm);CHKERRQ(ierr); 9998e2fed03SBarry Smith } 10008e2fed03SBarry Smith ierr = PetscFree(column_indices);CHKERRQ(ierr); 10018e2fed03SBarry Smith 10028e2fed03SBarry Smith /* load up the local column values */ 10038e2fed03SBarry Smith ierr = PetscMalloc((nzmax+1)*sizeof(PetscScalar),&column_values);CHKERRQ(ierr); 10048e2fed03SBarry Smith cnt = 0; 1005d0f46423SBarry Smith for (i=0; i<mat->rmap->n; i++) { 10068e2fed03SBarry Smith for (j=B->i[i]; j<B->i[i+1]; j++) { 10078e2fed03SBarry Smith if ( garray[B->j[j]] > cstart) break; 10088e2fed03SBarry Smith column_values[cnt++] = B->a[j]; 10098e2fed03SBarry Smith } 10108e2fed03SBarry Smith for (k=A->i[i]; k<A->i[i+1]; k++) { 10118e2fed03SBarry Smith column_values[cnt++] = A->a[k]; 10128e2fed03SBarry Smith } 10138e2fed03SBarry Smith for (; j<B->i[i+1]; j++) { 10148e2fed03SBarry Smith column_values[cnt++] = B->a[j]; 10158e2fed03SBarry Smith } 10168e2fed03SBarry Smith } 101777431f27SBarry Smith if (cnt != A->nz + B->nz) 
SETERRQ2(PETSC_ERR_PLIB,"Internal PETSc error: cnt = %D nz = %D",cnt,A->nz+B->nz); 10188e2fed03SBarry Smith 10198e2fed03SBarry Smith /* store the column values to the file */ 1020958c9bccSBarry Smith if (!rank) { 10218e2fed03SBarry Smith MPI_Status status; 10226f69ff64SBarry Smith ierr = PetscBinaryWrite(fd,column_values,nz,PETSC_SCALAR,PETSC_TRUE);CHKERRQ(ierr); 10238e2fed03SBarry Smith for (i=1; i<size; i++) { 10247adad957SLisandro Dalcin ierr = MPI_Recv(&rnz,1,MPIU_INT,i,tag,((PetscObject)mat)->comm,&status);CHKERRQ(ierr); 102577431f27SBarry Smith if (rnz > nzmax) SETERRQ2(PETSC_ERR_LIB,"Internal PETSc error: nz = %D nzmax = %D",nz,nzmax); 10267adad957SLisandro Dalcin ierr = MPI_Recv(column_values,rnz,MPIU_SCALAR,i,tag,((PetscObject)mat)->comm,&status);CHKERRQ(ierr); 10276f69ff64SBarry Smith ierr = PetscBinaryWrite(fd,column_values,rnz,PETSC_SCALAR,PETSC_TRUE);CHKERRQ(ierr); 10288e2fed03SBarry Smith } 10298e2fed03SBarry Smith } else { 10307adad957SLisandro Dalcin ierr = MPI_Send(&nz,1,MPIU_INT,0,tag,((PetscObject)mat)->comm);CHKERRQ(ierr); 10317adad957SLisandro Dalcin ierr = MPI_Send(column_values,nz,MPIU_SCALAR,0,tag,((PetscObject)mat)->comm);CHKERRQ(ierr); 10328e2fed03SBarry Smith } 10338e2fed03SBarry Smith ierr = PetscFree(column_values);CHKERRQ(ierr); 10348e2fed03SBarry Smith PetscFunctionReturn(0); 10358e2fed03SBarry Smith } 10368e2fed03SBarry Smith 10378e2fed03SBarry Smith #undef __FUNCT__ 10384a2ae208SSatish Balay #define __FUNCT__ "MatView_MPIAIJ_ASCIIorDraworSocket" 1039dfbe8321SBarry Smith PetscErrorCode MatView_MPIAIJ_ASCIIorDraworSocket(Mat mat,PetscViewer viewer) 1040416022c9SBarry Smith { 104144a69424SLois Curfman McInnes Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 1042dfbe8321SBarry Smith PetscErrorCode ierr; 104332dcc486SBarry Smith PetscMPIInt rank = aij->rank,size = aij->size; 1044d38fa0fbSBarry Smith PetscTruth isdraw,iascii,isbinary; 1045b0a32e0cSBarry Smith PetscViewer sviewer; 1046f3ef73ceSBarry Smith PetscViewerFormat format; 
1047416022c9SBarry Smith 10483a40ed3dSBarry Smith PetscFunctionBegin; 1049fb9695e5SSatish Balay ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_DRAW,&isdraw);CHKERRQ(ierr); 105032077d6dSBarry Smith ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr); 10518e2fed03SBarry Smith ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_BINARY,&isbinary);CHKERRQ(ierr); 105232077d6dSBarry Smith if (iascii) { 1053b0a32e0cSBarry Smith ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr); 1054456192e2SBarry Smith if (format == PETSC_VIEWER_ASCII_INFO_DETAIL) { 10554e220ebcSLois Curfman McInnes MatInfo info; 1056923f20ffSKris Buschelman PetscTruth inodes; 1057923f20ffSKris Buschelman 10587adad957SLisandro Dalcin ierr = MPI_Comm_rank(((PetscObject)mat)->comm,&rank);CHKERRQ(ierr); 1059888f2ed8SSatish Balay ierr = MatGetInfo(mat,MAT_LOCAL,&info);CHKERRQ(ierr); 1060923f20ffSKris Buschelman ierr = MatInodeGetInodeSizes(aij->A,PETSC_NULL,(PetscInt **)&inodes,PETSC_NULL);CHKERRQ(ierr); 1061923f20ffSKris Buschelman if (!inodes) { 106277431f27SBarry Smith ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] Local rows %D nz %D nz alloced %D mem %D, not using I-node routines\n", 1063d0f46423SBarry Smith rank,mat->rmap->n,(PetscInt)info.nz_used,(PetscInt)info.nz_allocated,(PetscInt)info.memory);CHKERRQ(ierr); 10646831982aSBarry Smith } else { 106577431f27SBarry Smith ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] Local rows %D nz %D nz alloced %D mem %D, using I-node routines\n", 1066d0f46423SBarry Smith rank,mat->rmap->n,(PetscInt)info.nz_used,(PetscInt)info.nz_allocated,(PetscInt)info.memory);CHKERRQ(ierr); 10676831982aSBarry Smith } 1068888f2ed8SSatish Balay ierr = MatGetInfo(aij->A,MAT_LOCAL,&info);CHKERRQ(ierr); 106977431f27SBarry Smith ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] on-diagonal part: nz %D \n",rank,(PetscInt)info.nz_used);CHKERRQ(ierr); 1070888f2ed8SSatish Balay ierr = 
MatGetInfo(aij->B,MAT_LOCAL,&info);CHKERRQ(ierr); 107177431f27SBarry Smith ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] off-diagonal part: nz %D \n",rank,(PetscInt)info.nz_used);CHKERRQ(ierr); 1072b0a32e0cSBarry Smith ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 107307d81ca4SBarry Smith ierr = PetscViewerASCIIPrintf(viewer,"Information on VecScatter used in matrix-vector product: \n");CHKERRQ(ierr); 1074a40aa06bSLois Curfman McInnes ierr = VecScatterView(aij->Mvctx,viewer);CHKERRQ(ierr); 10753a40ed3dSBarry Smith PetscFunctionReturn(0); 1076fb9695e5SSatish Balay } else if (format == PETSC_VIEWER_ASCII_INFO) { 1077923f20ffSKris Buschelman PetscInt inodecount,inodelimit,*inodes; 1078923f20ffSKris Buschelman ierr = MatInodeGetInodeSizes(aij->A,&inodecount,&inodes,&inodelimit);CHKERRQ(ierr); 1079923f20ffSKris Buschelman if (inodes) { 1080923f20ffSKris Buschelman ierr = PetscViewerASCIIPrintf(viewer,"using I-node (on process 0) routines: found %D nodes, limit used is %D\n",inodecount,inodelimit);CHKERRQ(ierr); 1081d38fa0fbSBarry Smith } else { 1082d38fa0fbSBarry Smith ierr = PetscViewerASCIIPrintf(viewer,"not using I-node (on process 0) routines\n");CHKERRQ(ierr); 1083d38fa0fbSBarry Smith } 10843a40ed3dSBarry Smith PetscFunctionReturn(0); 10854aedb280SBarry Smith } else if (format == PETSC_VIEWER_ASCII_FACTOR_INFO) { 10864aedb280SBarry Smith PetscFunctionReturn(0); 108708480c60SBarry Smith } 10888e2fed03SBarry Smith } else if (isbinary) { 10898e2fed03SBarry Smith if (size == 1) { 10907adad957SLisandro Dalcin ierr = PetscObjectSetName((PetscObject)aij->A,((PetscObject)mat)->name);CHKERRQ(ierr); 10918e2fed03SBarry Smith ierr = MatView(aij->A,viewer);CHKERRQ(ierr); 10928e2fed03SBarry Smith } else { 10938e2fed03SBarry Smith ierr = MatView_MPIAIJ_Binary(mat,viewer);CHKERRQ(ierr); 10948e2fed03SBarry Smith } 10958e2fed03SBarry Smith PetscFunctionReturn(0); 10960f5bd95cSBarry Smith } else if (isdraw) { 1097b0a32e0cSBarry Smith PetscDraw draw; 109819bcc07fSBarry Smith 
PetscTruth isnull; 1099b0a32e0cSBarry Smith ierr = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr); 1100b0a32e0cSBarry Smith ierr = PetscDrawIsNull(draw,&isnull);CHKERRQ(ierr); if (isnull) PetscFunctionReturn(0); 110119bcc07fSBarry Smith } 110219bcc07fSBarry Smith 110317699dbbSLois Curfman McInnes if (size == 1) { 11047adad957SLisandro Dalcin ierr = PetscObjectSetName((PetscObject)aij->A,((PetscObject)mat)->name);CHKERRQ(ierr); 110578b31e54SBarry Smith ierr = MatView(aij->A,viewer);CHKERRQ(ierr); 11063a40ed3dSBarry Smith } else { 110795373324SBarry Smith /* assemble the entire matrix onto first processor. */ 110895373324SBarry Smith Mat A; 1109ec8511deSBarry Smith Mat_SeqAIJ *Aloc; 1110d0f46423SBarry Smith PetscInt M = mat->rmap->N,N = mat->cmap->N,m,*ai,*aj,row,*cols,i,*ct; 1111dd6ea824SBarry Smith MatScalar *a; 11122ee70a88SLois Curfman McInnes 1113d0f46423SBarry Smith if (mat->rmap->N > 1024) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"ASCII matrix output not allowed for matrices with more than 512 rows, use binary format instead"); 11140805154bSBarry Smith 11157adad957SLisandro Dalcin ierr = MatCreate(((PetscObject)mat)->comm,&A);CHKERRQ(ierr); 111617699dbbSLois Curfman McInnes if (!rank) { 1117f69a0ea3SMatthew Knepley ierr = MatSetSizes(A,M,N,M,N);CHKERRQ(ierr); 11183a40ed3dSBarry Smith } else { 1119f69a0ea3SMatthew Knepley ierr = MatSetSizes(A,0,0,M,N);CHKERRQ(ierr); 112095373324SBarry Smith } 1121f204ca49SKris Buschelman /* This is just a temporary matrix, so explicitly using MATMPIAIJ is probably best */ 1122f204ca49SKris Buschelman ierr = MatSetType(A,MATMPIAIJ);CHKERRQ(ierr); 1123f204ca49SKris Buschelman ierr = MatMPIAIJSetPreallocation(A,0,PETSC_NULL,0,PETSC_NULL);CHKERRQ(ierr); 112452e6d16bSBarry Smith ierr = PetscLogObjectParent(mat,A);CHKERRQ(ierr); 1125416022c9SBarry Smith 112695373324SBarry Smith /* copy over the A part */ 1127ec8511deSBarry Smith Aloc = (Mat_SeqAIJ*)aij->A->data; 1128d0f46423SBarry Smith m = aij->A->rmap->n; ai = Aloc->i; aj = Aloc->j; 
a = Aloc->a; 1129d0f46423SBarry Smith row = mat->rmap->rstart; 1130d0f46423SBarry Smith for (i=0; i<ai[m]; i++) {aj[i] += mat->cmap->rstart ;} 113195373324SBarry Smith for (i=0; i<m; i++) { 1132416022c9SBarry Smith ierr = MatSetValues(A,1,&row,ai[i+1]-ai[i],aj,a,INSERT_VALUES);CHKERRQ(ierr); 113395373324SBarry Smith row++; a += ai[i+1]-ai[i]; aj += ai[i+1]-ai[i]; 113495373324SBarry Smith } 11352ee70a88SLois Curfman McInnes aj = Aloc->j; 1136d0f46423SBarry Smith for (i=0; i<ai[m]; i++) {aj[i] -= mat->cmap->rstart;} 113795373324SBarry Smith 113895373324SBarry Smith /* copy over the B part */ 1139ec8511deSBarry Smith Aloc = (Mat_SeqAIJ*)aij->B->data; 1140d0f46423SBarry Smith m = aij->B->rmap->n; ai = Aloc->i; aj = Aloc->j; a = Aloc->a; 1141d0f46423SBarry Smith row = mat->rmap->rstart; 1142b1d57f15SBarry Smith ierr = PetscMalloc((ai[m]+1)*sizeof(PetscInt),&cols);CHKERRQ(ierr); 1143b0a32e0cSBarry Smith ct = cols; 1144bfec09a0SHong Zhang for (i=0; i<ai[m]; i++) {cols[i] = aij->garray[aj[i]];} 114595373324SBarry Smith for (i=0; i<m; i++) { 1146416022c9SBarry Smith ierr = MatSetValues(A,1,&row,ai[i+1]-ai[i],cols,a,INSERT_VALUES);CHKERRQ(ierr); 114795373324SBarry Smith row++; a += ai[i+1]-ai[i]; cols += ai[i+1]-ai[i]; 114895373324SBarry Smith } 1149606d414cSSatish Balay ierr = PetscFree(ct);CHKERRQ(ierr); 11506d4a8577SBarry Smith ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 11516d4a8577SBarry Smith ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 115255843e3eSBarry Smith /* 115355843e3eSBarry Smith Everyone has to call to draw the matrix since the graphics waits are 1154b0a32e0cSBarry Smith synchronized across all processors that share the PetscDraw object 115555843e3eSBarry Smith */ 1156b0a32e0cSBarry Smith ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr); 1157e03a110bSBarry Smith if (!rank) { 11587adad957SLisandro Dalcin ierr = PetscObjectSetName((PetscObject)((Mat_MPIAIJ*)(A->data))->A,((PetscObject)mat)->name);CHKERRQ(ierr); 
11596831982aSBarry Smith ierr = MatView(((Mat_MPIAIJ*)(A->data))->A,sviewer);CHKERRQ(ierr); 116095373324SBarry Smith } 1161b0a32e0cSBarry Smith ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr); 116278b31e54SBarry Smith ierr = MatDestroy(A);CHKERRQ(ierr); 116395373324SBarry Smith } 11643a40ed3dSBarry Smith PetscFunctionReturn(0); 11651eb62cbbSBarry Smith } 11661eb62cbbSBarry Smith 11674a2ae208SSatish Balay #undef __FUNCT__ 11684a2ae208SSatish Balay #define __FUNCT__ "MatView_MPIAIJ" 1169dfbe8321SBarry Smith PetscErrorCode MatView_MPIAIJ(Mat mat,PetscViewer viewer) 1170416022c9SBarry Smith { 1171dfbe8321SBarry Smith PetscErrorCode ierr; 117232077d6dSBarry Smith PetscTruth iascii,isdraw,issocket,isbinary; 1173416022c9SBarry Smith 11743a40ed3dSBarry Smith PetscFunctionBegin; 117532077d6dSBarry Smith ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr); 1176fb9695e5SSatish Balay ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_DRAW,&isdraw);CHKERRQ(ierr); 1177fb9695e5SSatish Balay ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_BINARY,&isbinary);CHKERRQ(ierr); 1178b0a32e0cSBarry Smith ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_SOCKET,&issocket);CHKERRQ(ierr); 117932077d6dSBarry Smith if (iascii || isdraw || isbinary || issocket) { 11807b2a1423SBarry Smith ierr = MatView_MPIAIJ_ASCIIorDraworSocket(mat,viewer);CHKERRQ(ierr); 11815cd90555SBarry Smith } else { 118279a5c55eSBarry Smith SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported by MPIAIJ matrices",((PetscObject)viewer)->type_name); 1183416022c9SBarry Smith } 11843a40ed3dSBarry Smith PetscFunctionReturn(0); 1185416022c9SBarry Smith } 1186416022c9SBarry Smith 11874a2ae208SSatish Balay #undef __FUNCT__ 11884a2ae208SSatish Balay #define __FUNCT__ "MatRelax_MPIAIJ" 1189b1d57f15SBarry Smith PetscErrorCode MatRelax_MPIAIJ(Mat matin,Vec bb,PetscReal omega,MatSORType flag,PetscReal fshift,PetscInt its,PetscInt lits,Vec xx) 11908a729477SBarry 
Smith { 119144a69424SLois Curfman McInnes Mat_MPIAIJ *mat = (Mat_MPIAIJ*)matin->data; 1192dfbe8321SBarry Smith PetscErrorCode ierr; 1193c14dc6b6SHong Zhang Vec bb1; 11948a729477SBarry Smith 11953a40ed3dSBarry Smith PetscFunctionBegin; 1196c14dc6b6SHong Zhang ierr = VecDuplicate(bb,&bb1);CHKERRQ(ierr); 11972798e883SHong Zhang 1198c16cb8f2SBarry Smith if ((flag & SOR_LOCAL_SYMMETRIC_SWEEP) == SOR_LOCAL_SYMMETRIC_SWEEP){ 1199da3a660dSBarry Smith if (flag & SOR_ZERO_INITIAL_GUESS) { 1200bd3bf7d3SHong Zhang ierr = (*mat->A->ops->relax)(mat->A,bb,omega,flag,fshift,lits,lits,xx);CHKERRQ(ierr); 12012798e883SHong Zhang its--; 1202da3a660dSBarry Smith } 12032798e883SHong Zhang 12042798e883SHong Zhang while (its--) { 1205ca9f406cSSatish Balay ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1206ca9f406cSSatish Balay ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 12072798e883SHong Zhang 1208c14dc6b6SHong Zhang /* update rhs: bb1 = bb - B*x */ 1209efb30889SBarry Smith ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr); 1210c14dc6b6SHong Zhang ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr); 12112798e883SHong Zhang 1212c14dc6b6SHong Zhang /* local sweep */ 121371f1c65dSBarry Smith ierr = (*mat->A->ops->relax)(mat->A,bb1,omega,SOR_SYMMETRIC_SWEEP,fshift,lits,lits,xx);CHKERRQ(ierr); 12142798e883SHong Zhang } 12153a40ed3dSBarry Smith } else if (flag & SOR_LOCAL_FORWARD_SWEEP){ 1216da3a660dSBarry Smith if (flag & SOR_ZERO_INITIAL_GUESS) { 1217c14dc6b6SHong Zhang ierr = (*mat->A->ops->relax)(mat->A,bb,omega,flag,fshift,lits,PETSC_NULL,xx);CHKERRQ(ierr); 12182798e883SHong Zhang its--; 1219da3a660dSBarry Smith } 12202798e883SHong Zhang while (its--) { 1221ca9f406cSSatish Balay ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1222ca9f406cSSatish Balay ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 
12232798e883SHong Zhang 1224c14dc6b6SHong Zhang /* update rhs: bb1 = bb - B*x */ 1225efb30889SBarry Smith ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr); 1226c14dc6b6SHong Zhang ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr); 1227c14dc6b6SHong Zhang 1228c14dc6b6SHong Zhang /* local sweep */ 122971f1c65dSBarry Smith ierr = (*mat->A->ops->relax)(mat->A,bb1,omega,SOR_FORWARD_SWEEP,fshift,lits,PETSC_NULL,xx);CHKERRQ(ierr); 12302798e883SHong Zhang } 12313a40ed3dSBarry Smith } else if (flag & SOR_LOCAL_BACKWARD_SWEEP){ 1232da3a660dSBarry Smith if (flag & SOR_ZERO_INITIAL_GUESS) { 1233c14dc6b6SHong Zhang ierr = (*mat->A->ops->relax)(mat->A,bb,omega,flag,fshift,lits,PETSC_NULL,xx);CHKERRQ(ierr); 12342798e883SHong Zhang its--; 1235da3a660dSBarry Smith } 12362798e883SHong Zhang while (its--) { 1237ca9f406cSSatish Balay ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1238ca9f406cSSatish Balay ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 12392798e883SHong Zhang 1240c14dc6b6SHong Zhang /* update rhs: bb1 = bb - B*x */ 1241efb30889SBarry Smith ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr); 1242c14dc6b6SHong Zhang ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr); 12432798e883SHong Zhang 1244c14dc6b6SHong Zhang /* local sweep */ 124571f1c65dSBarry Smith ierr = (*mat->A->ops->relax)(mat->A,bb1,omega,SOR_BACKWARD_SWEEP,fshift,lits,PETSC_NULL,xx);CHKERRQ(ierr); 12462798e883SHong Zhang } 12473a40ed3dSBarry Smith } else { 124829bbc08cSBarry Smith SETERRQ(PETSC_ERR_SUP,"Parallel SOR not supported"); 1249c16cb8f2SBarry Smith } 1250c14dc6b6SHong Zhang 1251c14dc6b6SHong Zhang ierr = VecDestroy(bb1);CHKERRQ(ierr); 12523a40ed3dSBarry Smith PetscFunctionReturn(0); 12538a729477SBarry Smith } 1254a66be287SLois Curfman McInnes 12554a2ae208SSatish Balay #undef __FUNCT__ 125642e855d1Svictor #define __FUNCT__ "MatPermute_MPIAIJ" 125742e855d1Svictor PetscErrorCode 
MatPermute_MPIAIJ(Mat A,IS rowp,IS colp,Mat *B) 125842e855d1Svictor { 125942e855d1Svictor MPI_Comm comm,pcomm; 12605d0c19d7SBarry Smith PetscInt first,local_size,nrows; 12615d0c19d7SBarry Smith const PetscInt *rows; 1262*dbf0e21dSBarry Smith PetscMPIInt size; 126342e855d1Svictor IS crowp,growp,irowp,lrowp,lcolp,icolp; 126442e855d1Svictor PetscErrorCode ierr; 126542e855d1Svictor 126642e855d1Svictor PetscFunctionBegin; 126742e855d1Svictor ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr); 126842e855d1Svictor /* make a collective version of 'rowp' */ 126942e855d1Svictor ierr = PetscObjectGetComm((PetscObject)rowp,&pcomm);CHKERRQ(ierr); 127042e855d1Svictor if (pcomm==comm) { 127142e855d1Svictor crowp = rowp; 127242e855d1Svictor } else { 127342e855d1Svictor ierr = ISGetSize(rowp,&nrows);CHKERRQ(ierr); 127442e855d1Svictor ierr = ISGetIndices(rowp,&rows);CHKERRQ(ierr); 127542e855d1Svictor ierr = ISCreateGeneral(comm,nrows,rows,&crowp);CHKERRQ(ierr); 127642e855d1Svictor ierr = ISRestoreIndices(rowp,&rows);CHKERRQ(ierr); 127742e855d1Svictor } 127842e855d1Svictor /* collect the global row permutation and invert it */ 127942e855d1Svictor ierr = ISAllGather(crowp,&growp);CHKERRQ(ierr); 128042e855d1Svictor ierr = ISSetPermutation(growp);CHKERRQ(ierr); 128142e855d1Svictor if (pcomm!=comm) { 128242e855d1Svictor ierr = ISDestroy(crowp);CHKERRQ(ierr); 128342e855d1Svictor } 128442e855d1Svictor ierr = ISInvertPermutation(growp,PETSC_DECIDE,&irowp);CHKERRQ(ierr); 128542e855d1Svictor /* get the local target indices */ 128642e855d1Svictor ierr = MatGetOwnershipRange(A,&first,PETSC_NULL);CHKERRQ(ierr); 128742e855d1Svictor ierr = MatGetLocalSize(A,&local_size,PETSC_NULL);CHKERRQ(ierr); 128842e855d1Svictor ierr = ISGetIndices(irowp,&rows);CHKERRQ(ierr); 128942e855d1Svictor ierr = ISCreateGeneral(MPI_COMM_SELF,local_size,rows+first,&lrowp);CHKERRQ(ierr); 129042e855d1Svictor ierr = ISRestoreIndices(irowp,&rows);CHKERRQ(ierr); 129142e855d1Svictor ierr = 
ISDestroy(irowp);CHKERRQ(ierr); 129242e855d1Svictor /* the column permutation is so much easier; 129342e855d1Svictor make a local version of 'colp' and invert it */ 129442e855d1Svictor ierr = PetscObjectGetComm((PetscObject)colp,&pcomm);CHKERRQ(ierr); 1295*dbf0e21dSBarry Smith ierr = MPI_Comm_size(pcomm,&size);CHKERRQ(ierr); 1296*dbf0e21dSBarry Smith if (size==1) { 129742e855d1Svictor lcolp = colp; 129842e855d1Svictor } else { 129942e855d1Svictor ierr = ISGetSize(colp,&nrows);CHKERRQ(ierr); 130042e855d1Svictor ierr = ISGetIndices(colp,&rows);CHKERRQ(ierr); 130142e855d1Svictor ierr = ISCreateGeneral(MPI_COMM_SELF,nrows,rows,&lcolp);CHKERRQ(ierr); 130242e855d1Svictor } 1303*dbf0e21dSBarry Smith ierr = ISSetPermutation(lcolp);CHKERRQ(ierr); 130442e855d1Svictor ierr = ISInvertPermutation(lcolp,PETSC_DECIDE,&icolp);CHKERRQ(ierr); 130542e855d1Svictor ierr = ISSetPermutation(lcolp);CHKERRQ(ierr); 1306*dbf0e21dSBarry Smith if (size>1) { 130742e855d1Svictor ierr = ISRestoreIndices(colp,&rows);CHKERRQ(ierr); 130842e855d1Svictor ierr = ISDestroy(lcolp);CHKERRQ(ierr); 130942e855d1Svictor } 131042e855d1Svictor /* now we just get the submatrix */ 131142e855d1Svictor ierr = MatGetSubMatrix(A,lrowp,icolp,local_size,MAT_INITIAL_MATRIX,B);CHKERRQ(ierr); 131242e855d1Svictor /* clean up */ 131342e855d1Svictor ierr = ISDestroy(lrowp);CHKERRQ(ierr); 131442e855d1Svictor ierr = ISDestroy(icolp);CHKERRQ(ierr); 131542e855d1Svictor PetscFunctionReturn(0); 131642e855d1Svictor } 131742e855d1Svictor 131842e855d1Svictor #undef __FUNCT__ 13194a2ae208SSatish Balay #define __FUNCT__ "MatGetInfo_MPIAIJ" 1320dfbe8321SBarry Smith PetscErrorCode MatGetInfo_MPIAIJ(Mat matin,MatInfoType flag,MatInfo *info) 1321a66be287SLois Curfman McInnes { 1322a66be287SLois Curfman McInnes Mat_MPIAIJ *mat = (Mat_MPIAIJ*)matin->data; 1323a66be287SLois Curfman McInnes Mat A = mat->A,B = mat->B; 1324dfbe8321SBarry Smith PetscErrorCode ierr; 1325329f5518SBarry Smith PetscReal isend[5],irecv[5]; 1326a66be287SLois Curfman 
McInnes 13273a40ed3dSBarry Smith PetscFunctionBegin; 13284e220ebcSLois Curfman McInnes info->block_size = 1.0; 13294e220ebcSLois Curfman McInnes ierr = MatGetInfo(A,MAT_LOCAL,info);CHKERRQ(ierr); 13304e220ebcSLois Curfman McInnes isend[0] = info->nz_used; isend[1] = info->nz_allocated; isend[2] = info->nz_unneeded; 13314e220ebcSLois Curfman McInnes isend[3] = info->memory; isend[4] = info->mallocs; 13324e220ebcSLois Curfman McInnes ierr = MatGetInfo(B,MAT_LOCAL,info);CHKERRQ(ierr); 13334e220ebcSLois Curfman McInnes isend[0] += info->nz_used; isend[1] += info->nz_allocated; isend[2] += info->nz_unneeded; 13344e220ebcSLois Curfman McInnes isend[3] += info->memory; isend[4] += info->mallocs; 1335a66be287SLois Curfman McInnes if (flag == MAT_LOCAL) { 13364e220ebcSLois Curfman McInnes info->nz_used = isend[0]; 13374e220ebcSLois Curfman McInnes info->nz_allocated = isend[1]; 13384e220ebcSLois Curfman McInnes info->nz_unneeded = isend[2]; 13394e220ebcSLois Curfman McInnes info->memory = isend[3]; 13404e220ebcSLois Curfman McInnes info->mallocs = isend[4]; 1341a66be287SLois Curfman McInnes } else if (flag == MAT_GLOBAL_MAX) { 13427adad957SLisandro Dalcin ierr = MPI_Allreduce(isend,irecv,5,MPIU_REAL,MPI_MAX,((PetscObject)matin)->comm);CHKERRQ(ierr); 13434e220ebcSLois Curfman McInnes info->nz_used = irecv[0]; 13444e220ebcSLois Curfman McInnes info->nz_allocated = irecv[1]; 13454e220ebcSLois Curfman McInnes info->nz_unneeded = irecv[2]; 13464e220ebcSLois Curfman McInnes info->memory = irecv[3]; 13474e220ebcSLois Curfman McInnes info->mallocs = irecv[4]; 1348a66be287SLois Curfman McInnes } else if (flag == MAT_GLOBAL_SUM) { 13497adad957SLisandro Dalcin ierr = MPI_Allreduce(isend,irecv,5,MPIU_REAL,MPI_SUM,((PetscObject)matin)->comm);CHKERRQ(ierr); 13504e220ebcSLois Curfman McInnes info->nz_used = irecv[0]; 13514e220ebcSLois Curfman McInnes info->nz_allocated = irecv[1]; 13524e220ebcSLois Curfman McInnes info->nz_unneeded = irecv[2]; 13534e220ebcSLois Curfman McInnes 
info->memory = irecv[3]; 13544e220ebcSLois Curfman McInnes info->mallocs = irecv[4]; 1355a66be287SLois Curfman McInnes } 13564e220ebcSLois Curfman McInnes info->fill_ratio_given = 0; /* no parallel LU/ILU/Cholesky */ 13574e220ebcSLois Curfman McInnes info->fill_ratio_needed = 0; 13584e220ebcSLois Curfman McInnes info->factor_mallocs = 0; 13594e220ebcSLois Curfman McInnes 13603a40ed3dSBarry Smith PetscFunctionReturn(0); 1361a66be287SLois Curfman McInnes } 1362a66be287SLois Curfman McInnes 13634a2ae208SSatish Balay #undef __FUNCT__ 13644a2ae208SSatish Balay #define __FUNCT__ "MatSetOption_MPIAIJ" 13654e0d8c25SBarry Smith PetscErrorCode MatSetOption_MPIAIJ(Mat A,MatOption op,PetscTruth flg) 1366c74985f6SBarry Smith { 1367c0bbcb79SLois Curfman McInnes Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 1368dfbe8321SBarry Smith PetscErrorCode ierr; 1369c74985f6SBarry Smith 13703a40ed3dSBarry Smith PetscFunctionBegin; 137112c028f9SKris Buschelman switch (op) { 1372512a5fc5SBarry Smith case MAT_NEW_NONZERO_LOCATIONS: 137312c028f9SKris Buschelman case MAT_NEW_NONZERO_ALLOCATION_ERR: 137428b2fa4aSMatthew Knepley case MAT_UNUSED_NONZERO_LOCATION_ERR: 137512c028f9SKris Buschelman case MAT_KEEP_ZEROED_ROWS: 137612c028f9SKris Buschelman case MAT_NEW_NONZERO_LOCATION_ERR: 137712c028f9SKris Buschelman case MAT_USE_INODES: 137812c028f9SKris Buschelman case MAT_IGNORE_ZERO_ENTRIES: 13794e0d8c25SBarry Smith ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr); 13804e0d8c25SBarry Smith ierr = MatSetOption(a->B,op,flg);CHKERRQ(ierr); 138112c028f9SKris Buschelman break; 138212c028f9SKris Buschelman case MAT_ROW_ORIENTED: 13834e0d8c25SBarry Smith a->roworiented = flg; 13844e0d8c25SBarry Smith ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr); 13854e0d8c25SBarry Smith ierr = MatSetOption(a->B,op,flg);CHKERRQ(ierr); 138612c028f9SKris Buschelman break; 13874e0d8c25SBarry Smith case MAT_NEW_DIAGONALS: 1388290bbb0aSBarry Smith ierr = PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr); 
#undef __FUNCT__
#define __FUNCT__ "MatGetRow_MPIAIJ"
/*
   MatGetRow_MPIAIJ - Returns one locally-owned row of an MPIAIJ matrix as a
   merged, column-sorted list of (global column, value) pairs.

   The row is split between the diagonal block A (columns in [cstart,cend))
   and the off-diagonal block B (columns mapped to global indices through
   mat->garray).  Since each block's row is already sorted, the merge only
   needs to find 'imark', the number of leading B entries whose global column
   precedes cstart; the output is then B[0..imark) | A | B[imark..nzB).

   Output arrays point into per-matrix scratch buffers (mat->rowvalues /
   mat->rowindices); they remain valid until MatRestoreRow() is called, and
   only one row may be active at a time (guarded by mat->getrowactive).
*/
PetscErrorCode MatGetRow_MPIAIJ(Mat matin,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  Mat_MPIAIJ     *mat = (Mat_MPIAIJ*)matin->data;
  PetscScalar    *vworkA,*vworkB,**pvA,**pvB,*v_p;
  PetscErrorCode ierr;
  PetscInt       i,*cworkA,*cworkB,**pcA,**pcB,cstart = matin->cmap->rstart;
  PetscInt       nztot,nzA,nzB,lrow,rstart = matin->rmap->rstart,rend = matin->rmap->rend;
  PetscInt       *cmap,*idx_p;

  PetscFunctionBegin;
  /* only one outstanding MatGetRow() is allowed per matrix */
  if (mat->getrowactive) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Already active");
  mat->getrowactive = PETSC_TRUE;

  if (!mat->rowvalues && (idx || v)) {
    /*
        allocate enough space to hold information from the longest row.
        (one shared buffer: values first, then indices aliased behind them)
    */
    Mat_SeqAIJ *Aa = (Mat_SeqAIJ*)mat->A->data,*Ba = (Mat_SeqAIJ*)mat->B->data;
    PetscInt   max = 1,tmp;
    for (i=0; i<matin->rmap->n; i++) {
      tmp = Aa->i[i+1] - Aa->i[i] + Ba->i[i+1] - Ba->i[i];
      if (max < tmp) { max = tmp; }
    }
    ierr = PetscMalloc(max*(sizeof(PetscInt)+sizeof(PetscScalar)),&mat->rowvalues);CHKERRQ(ierr);
    mat->rowindices = (PetscInt*)(mat->rowvalues + max);
  }

  if (row < rstart || row >= rend) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Only local rows")
  lrow = row - rstart;

  /* request from each block only what the caller asked for */
  pvA = &vworkA; pcA = &cworkA; pvB = &vworkB; pcB = &cworkB;
  if (!v)   {pvA = 0; pvB = 0;}
  if (!idx) {pcA = 0; if (!v) pcB = 0;}
  ierr = (*mat->A->ops->getrow)(mat->A,lrow,&nzA,pcA,pvA);CHKERRQ(ierr);
  ierr = (*mat->B->ops->getrow)(mat->B,lrow,&nzB,pcB,pvB);CHKERRQ(ierr);
  nztot = nzA + nzB;

  cmap  = mat->garray; /* local B column -> global column */
  if (v  || idx) {
    if (nztot) {
      /* Sort by increasing column numbers, assuming A and B already sorted */
      PetscInt imark = -1;
      if (v) {
        *v = v_p = mat->rowvalues;
        /* copy the B entries whose global column lies before the A block */
        for (i=0; i<nzB; i++) {
          if (cmap[cworkB[i]] < cstart)   v_p[i] = vworkB[i];
          else break;
        }
        imark = i;
        for (i=0; i<nzA; i++)     v_p[imark+i] = vworkA[i];
        for (i=imark; i<nzB; i++) v_p[nzA+i]   = vworkB[i];
      }
      if (idx) {
        *idx = idx_p = mat->rowindices;
        if (imark > -1) {
          /* imark already determined by the value pass above */
          for (i=0; i<imark; i++) {
            idx_p[i] = cmap[cworkB[i]];
          }
        } else {
          /* no value pass: find imark here */
          for (i=0; i<nzB; i++) {
            if (cmap[cworkB[i]] < cstart)   idx_p[i] = cmap[cworkB[i]];
            else break;
          }
          imark = i;
        }
        for (i=0; i<nzA; i++)     idx_p[imark+i] = cstart + cworkA[i];
        for (i=imark; i<nzB; i++) idx_p[nzA+i]   = cmap[cworkB[i]];
      }
    } else {
      /* empty row: hand back null pointers rather than scratch storage */
      if (idx) *idx = 0;
      if (v)   *v   = 0;
    }
  }
  *nz = nztot;
  /* release the block rows obtained above */
  ierr = (*mat->A->ops->restorerow)(mat->A,lrow,&nzA,pcA,pvA);CHKERRQ(ierr);
  ierr = (*mat->B->ops->restorerow)(mat->B,lrow,&nzB,pcB,pvB);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
i; 145870f0671dSBarry Smith for (i=0; i<nzA; i++) v_p[imark+i] = vworkA[i]; 145970f0671dSBarry Smith for (i=imark; i<nzB; i++) v_p[nzA+i] = vworkB[i]; 1460154123eaSLois Curfman McInnes } 1461154123eaSLois Curfman McInnes if (idx) { 146270f0671dSBarry Smith *idx = idx_p = mat->rowindices; 146370f0671dSBarry Smith if (imark > -1) { 146470f0671dSBarry Smith for (i=0; i<imark; i++) { 146570f0671dSBarry Smith idx_p[i] = cmap[cworkB[i]]; 146670f0671dSBarry Smith } 146770f0671dSBarry Smith } else { 1468154123eaSLois Curfman McInnes for (i=0; i<nzB; i++) { 146970f0671dSBarry Smith if (cmap[cworkB[i]] < cstart) idx_p[i] = cmap[cworkB[i]]; 1470154123eaSLois Curfman McInnes else break; 1471154123eaSLois Curfman McInnes } 1472154123eaSLois Curfman McInnes imark = i; 147370f0671dSBarry Smith } 147470f0671dSBarry Smith for (i=0; i<nzA; i++) idx_p[imark+i] = cstart + cworkA[i]; 147570f0671dSBarry Smith for (i=imark; i<nzB; i++) idx_p[nzA+i] = cmap[cworkB[i]]; 147639e00950SLois Curfman McInnes } 14773f97c4b0SBarry Smith } else { 14781ca473b0SSatish Balay if (idx) *idx = 0; 14791ca473b0SSatish Balay if (v) *v = 0; 14801ca473b0SSatish Balay } 1481154123eaSLois Curfman McInnes } 148239e00950SLois Curfman McInnes *nz = nztot; 1483f830108cSBarry Smith ierr = (*mat->A->ops->restorerow)(mat->A,lrow,&nzA,pcA,pvA);CHKERRQ(ierr); 1484f830108cSBarry Smith ierr = (*mat->B->ops->restorerow)(mat->B,lrow,&nzB,pcB,pvB);CHKERRQ(ierr); 14853a40ed3dSBarry Smith PetscFunctionReturn(0); 148639e00950SLois Curfman McInnes } 148739e00950SLois Curfman McInnes 14884a2ae208SSatish Balay #undef __FUNCT__ 14894a2ae208SSatish Balay #define __FUNCT__ "MatRestoreRow_MPIAIJ" 1490b1d57f15SBarry Smith PetscErrorCode MatRestoreRow_MPIAIJ(Mat mat,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v) 149139e00950SLois Curfman McInnes { 14927a0afa10SBarry Smith Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 14933a40ed3dSBarry Smith 14943a40ed3dSBarry Smith PetscFunctionBegin; 1495abc0a331SBarry Smith if 
#undef __FUNCT__
#define __FUNCT__ "MatNorm_MPIAIJ"
/*
   MatNorm_MPIAIJ - Computes the Frobenius, 1-, or infinity-norm of an
   MPIAIJ matrix.

   On a single process the work is delegated to the sequential block A.
   Otherwise each process accumulates over its diagonal (A) and
   off-diagonal (B) blocks and the results are combined with MPI_Allreduce:
     - NORM_FROBENIUS: sum of |a_ij|^2 reduced with MPI_SUM, then sqrt.
     - NORM_1 (max column sum): per-column absolute sums over the full
       global column space, reduced with MPI_SUM, then the maximum taken.
     - NORM_INFINITY (max row sum): per-row absolute sums (rows are not
       split across processes), reduced with MPI_MAX.
   NORM_2 is not supported.
*/
PetscErrorCode MatNorm_MPIAIJ(Mat mat,NormType type,PetscReal *norm)
{
  Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
  Mat_SeqAIJ     *amat = (Mat_SeqAIJ*)aij->A->data,*bmat = (Mat_SeqAIJ*)aij->B->data;
  PetscErrorCode ierr;
  PetscInt       i,j,cstart = mat->cmap->rstart;
  PetscReal      sum = 0.0;
  MatScalar      *v;

  PetscFunctionBegin;
  if (aij->size == 1) {
    /* uniprocess case: the diagonal block holds the whole matrix */
    ierr = MatNorm(aij->A,type,norm);CHKERRQ(ierr);
  } else {
    if (type == NORM_FROBENIUS) {
      v = amat->a;
      for (i=0; i<amat->nz; i++) {
#if defined(PETSC_USE_COMPLEX)
        sum += PetscRealPart(PetscConj(*v)*(*v)); v++;
#else
        sum += (*v)*(*v); v++;
#endif
      }
      v = bmat->a;
      for (i=0; i<bmat->nz; i++) {
#if defined(PETSC_USE_COMPLEX)
        sum += PetscRealPart(PetscConj(*v)*(*v)); v++;
#else
        sum += (*v)*(*v); v++;
#endif
      }
      ierr = MPI_Allreduce(&sum,norm,1,MPIU_REAL,MPI_SUM,((PetscObject)mat)->comm);CHKERRQ(ierr);
      *norm = sqrt(*norm);
    } else if (type == NORM_1) { /* max column norm */
      PetscReal *tmp,*tmp2;
      PetscInt  *jj,*garray = aij->garray;
      /* column sums are accumulated over the FULL global column space */
      ierr = PetscMalloc((mat->cmap->N+1)*sizeof(PetscReal),&tmp);CHKERRQ(ierr);
      ierr = PetscMalloc((mat->cmap->N+1)*sizeof(PetscReal),&tmp2);CHKERRQ(ierr);
      ierr = PetscMemzero(tmp,mat->cmap->N*sizeof(PetscReal));CHKERRQ(ierr);
      *norm = 0.0;
      v = amat->a; jj = amat->j;
      for (j=0; j<amat->nz; j++) {
        /* A stores local column indices; shift to global */
        tmp[cstart + *jj++ ] += PetscAbsScalar(*v); v++;
      }
      v = bmat->a; jj = bmat->j;
      for (j=0; j<bmat->nz; j++) {
        /* B column indices are mapped to global via garray */
        tmp[garray[*jj++]] += PetscAbsScalar(*v); v++;
      }
      ierr = MPI_Allreduce(tmp,tmp2,mat->cmap->N,MPIU_REAL,MPI_SUM,((PetscObject)mat)->comm);CHKERRQ(ierr);
      for (j=0; j<mat->cmap->N; j++) {
        if (tmp2[j] > *norm) *norm = tmp2[j];
      }
      ierr = PetscFree(tmp);CHKERRQ(ierr);
      ierr = PetscFree(tmp2);CHKERRQ(ierr);
    } else if (type == NORM_INFINITY) { /* max row norm */
      PetscReal ntemp = 0.0;
      for (j=0; j<aij->A->rmap->n; j++) {
        v = amat->a + amat->i[j];
        sum = 0.0;
        for (i=0; i<amat->i[j+1]-amat->i[j]; i++) {
          sum += PetscAbsScalar(*v); v++;
        }
        v = bmat->a + bmat->i[j];
        for (i=0; i<bmat->i[j+1]-bmat->i[j]; i++) {
          sum += PetscAbsScalar(*v); v++;
        }
        if (sum > ntemp) ntemp = sum;
      }
      ierr = MPI_Allreduce(&ntemp,norm,1,MPIU_REAL,MPI_MAX,((PetscObject)mat)->comm);CHKERRQ(ierr);
    } else {
      SETERRQ(PETSC_ERR_SUP,"No support for two norm");
    }
  }
  PetscFunctionReturn(0);
}
#undef __FUNCT__
#define __FUNCT__ "MatTranspose_MPIAIJ"
/*
   MatTranspose_MPIAIJ - Forms the transpose of an MPIAIJ matrix.

   For MAT_INITIAL_MATRIX (or in-place reuse, *matout == A) a new matrix B
   with swapped sizes is created; its diagonal preallocation d_nnz is taken
   from the column counts of the local A block, and the off-diagonal count
   is approximated by the same array to avoid communication.  The entries of
   both local blocks are then inserted column-wise (one column of A becomes
   one row of B) with MatSetValues, and for true in-place transposition the
   assembled B replaces A via MatHeaderCopy().

   Note: Aloc->j is temporarily shifted to global column indices during the
   insertion of the A block and shifted back afterwards — the matrix is
   briefly in an inconsistent state, so this routine is not reentrant.

   NOTE(review): when reuse == MAT_REUSE_MATRIX with *matout != A, the
   "+= cstart" shift in the preallocation branch is skipped, yet the
   "-= cstart" un-shift below runs unconditionally — verify this path
   against upstream before relying on out-of-place reuse.
*/
PetscErrorCode MatTranspose_MPIAIJ(Mat A,MatReuse reuse,Mat *matout)
{
  Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
  Mat_SeqAIJ     *Aloc=(Mat_SeqAIJ*)a->A->data,*Bloc=(Mat_SeqAIJ*)a->B->data;
  PetscErrorCode ierr;
  PetscInt       M = A->rmap->N,N = A->cmap->N,ma,na,mb,*ai,*aj,*bi,*bj,row,*cols,*cols_tmp,i,*d_nnz;
  PetscInt       cstart=A->cmap->rstart,ncol;
  Mat            B;
  MatScalar      *array;

  PetscFunctionBegin;
  if (reuse == MAT_REUSE_MATRIX && A == *matout && M != N) SETERRQ(PETSC_ERR_ARG_SIZ,"Square matrix only for in-place");

  ma = A->rmap->n; na = A->cmap->n; mb = a->B->rmap->n;
  ai = Aloc->i; aj = Aloc->j;
  bi = Bloc->i; bj = Bloc->j;
  if (reuse == MAT_INITIAL_MATRIX || *matout == A) {
    /* compute d_nnz for preallocation; o_nnz is approximated by d_nnz to avoid communication */
    ierr = PetscMalloc((1+na)*sizeof(PetscInt),&d_nnz);CHKERRQ(ierr);
    ierr = PetscMemzero(d_nnz,(1+na)*sizeof(PetscInt));CHKERRQ(ierr);
    for (i=0; i<ai[ma]; i++){
      d_nnz[aj[i]] ++;
      aj[i] += cstart; /* global col index to be used by MatSetValues() */
    }

    ierr = MatCreate(((PetscObject)A)->comm,&B);CHKERRQ(ierr);
    ierr = MatSetSizes(B,A->cmap->n,A->rmap->n,N,M);CHKERRQ(ierr);
    ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr);
    ierr = MatMPIAIJSetPreallocation(B,0,d_nnz,0,d_nnz);CHKERRQ(ierr);
    ierr = PetscFree(d_nnz);CHKERRQ(ierr);
  } else {
    B = *matout;
  }

  /* copy over the A part: column j of A contributes to row j of B */
  array = Aloc->a;
  row = A->rmap->rstart;
  for (i=0; i<ma; i++) {
    ncol = ai[i+1]-ai[i];
    ierr = MatSetValues(B,ncol,aj,1,&row,array,INSERT_VALUES);CHKERRQ(ierr);
    row++; array += ncol; aj += ncol;
  }
  aj = Aloc->j;
  for (i=0; i<ai[ma]; i++) aj[i] -= cstart; /* resume local col index */

  /* copy over the B part: translate its local columns to global via garray */
  ierr = PetscMalloc(bi[mb]*sizeof(PetscInt),&cols);CHKERRQ(ierr);
  ierr = PetscMemzero(cols,bi[mb]*sizeof(PetscInt));CHKERRQ(ierr);
  array = Bloc->a;
  row = A->rmap->rstart;
  for (i=0; i<bi[mb]; i++) {cols[i] = a->garray[bj[i]];}
  cols_tmp = cols;
  for (i=0; i<mb; i++) {
    ncol = bi[i+1]-bi[i];
    ierr = MatSetValues(B,ncol,cols_tmp,1,&row,array,INSERT_VALUES);CHKERRQ(ierr);
    row++; array += ncol; cols_tmp += ncol;
  }
  ierr = PetscFree(cols);CHKERRQ(ierr);

  ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (reuse == MAT_INITIAL_MATRIX || *matout != A) {
    *matout = B;
  } else {
    /* in-place: move B's guts into A and discard the shell of B */
    ierr = MatHeaderCopy(A,B);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
Bloc->a; 1630d0f46423SBarry Smith row = A->rmap->rstart; 1631da668accSHong Zhang for (i=0; i<bi[mb]; i++) {cols[i] = a->garray[bj[i]];} 163261a2fbbaSHong Zhang cols_tmp = cols; 1633da668accSHong Zhang for (i=0; i<mb; i++) { 1634da668accSHong Zhang ncol = bi[i+1]-bi[i]; 163561a2fbbaSHong Zhang ierr = MatSetValues(B,ncol,cols_tmp,1,&row,array,INSERT_VALUES);CHKERRQ(ierr); 163661a2fbbaSHong Zhang row++; array += ncol; cols_tmp += ncol; 1637b7c46309SBarry Smith } 1638fc73b1b3SBarry Smith ierr = PetscFree(cols);CHKERRQ(ierr); 1639fc73b1b3SBarry Smith 16406d4a8577SBarry Smith ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 16416d4a8577SBarry Smith ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1642815cbec1SBarry Smith if (reuse == MAT_INITIAL_MATRIX || *matout != A) { 16430de55854SLois Curfman McInnes *matout = B; 16440de55854SLois Curfman McInnes } else { 1645273d9f13SBarry Smith ierr = MatHeaderCopy(A,B);CHKERRQ(ierr); 16460de55854SLois Curfman McInnes } 16473a40ed3dSBarry Smith PetscFunctionReturn(0); 1648b7c46309SBarry Smith } 1649b7c46309SBarry Smith 16504a2ae208SSatish Balay #undef __FUNCT__ 16514a2ae208SSatish Balay #define __FUNCT__ "MatDiagonalScale_MPIAIJ" 1652dfbe8321SBarry Smith PetscErrorCode MatDiagonalScale_MPIAIJ(Mat mat,Vec ll,Vec rr) 1653a008b906SSatish Balay { 16544b967eb1SSatish Balay Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 16554b967eb1SSatish Balay Mat a = aij->A,b = aij->B; 1656dfbe8321SBarry Smith PetscErrorCode ierr; 1657b1d57f15SBarry Smith PetscInt s1,s2,s3; 1658a008b906SSatish Balay 16593a40ed3dSBarry Smith PetscFunctionBegin; 16604b967eb1SSatish Balay ierr = MatGetLocalSize(mat,&s2,&s3);CHKERRQ(ierr); 16614b967eb1SSatish Balay if (rr) { 1662e1311b90SBarry Smith ierr = VecGetLocalSize(rr,&s1);CHKERRQ(ierr); 166329bbc08cSBarry Smith if (s1!=s3) SETERRQ(PETSC_ERR_ARG_SIZ,"right vector non-conforming local size"); 16644b967eb1SSatish Balay /* Overlap communication with computation. 
*/ 1665ca9f406cSSatish Balay ierr = VecScatterBegin(aij->Mvctx,rr,aij->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1666a008b906SSatish Balay } 16674b967eb1SSatish Balay if (ll) { 1668e1311b90SBarry Smith ierr = VecGetLocalSize(ll,&s1);CHKERRQ(ierr); 166929bbc08cSBarry Smith if (s1!=s2) SETERRQ(PETSC_ERR_ARG_SIZ,"left vector non-conforming local size"); 1670f830108cSBarry Smith ierr = (*b->ops->diagonalscale)(b,ll,0);CHKERRQ(ierr); 16714b967eb1SSatish Balay } 16724b967eb1SSatish Balay /* scale the diagonal block */ 1673f830108cSBarry Smith ierr = (*a->ops->diagonalscale)(a,ll,rr);CHKERRQ(ierr); 16744b967eb1SSatish Balay 16754b967eb1SSatish Balay if (rr) { 16764b967eb1SSatish Balay /* Do a scatter end and then right scale the off-diagonal block */ 1677ca9f406cSSatish Balay ierr = VecScatterEnd(aij->Mvctx,rr,aij->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1678f830108cSBarry Smith ierr = (*b->ops->diagonalscale)(b,0,aij->lvec);CHKERRQ(ierr); 16794b967eb1SSatish Balay } 16804b967eb1SSatish Balay 16813a40ed3dSBarry Smith PetscFunctionReturn(0); 1682a008b906SSatish Balay } 1683a008b906SSatish Balay 16844a2ae208SSatish Balay #undef __FUNCT__ 1685521d7252SBarry Smith #define __FUNCT__ "MatSetBlockSize_MPIAIJ" 1686521d7252SBarry Smith PetscErrorCode MatSetBlockSize_MPIAIJ(Mat A,PetscInt bs) 16875a838052SSatish Balay { 1688521d7252SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 1689521d7252SBarry Smith PetscErrorCode ierr; 1690521d7252SBarry Smith 16913a40ed3dSBarry Smith PetscFunctionBegin; 1692521d7252SBarry Smith ierr = MatSetBlockSize(a->A,bs);CHKERRQ(ierr); 1693521d7252SBarry Smith ierr = MatSetBlockSize(a->B,bs);CHKERRQ(ierr); 16943a40ed3dSBarry Smith PetscFunctionReturn(0); 16955a838052SSatish Balay } 16964a2ae208SSatish Balay #undef __FUNCT__ 16974a2ae208SSatish Balay #define __FUNCT__ "MatSetUnfactored_MPIAIJ" 1698dfbe8321SBarry Smith PetscErrorCode MatSetUnfactored_MPIAIJ(Mat A) 1699bb5a7306SBarry Smith { 1700bb5a7306SBarry Smith Mat_MPIAIJ *a = 
(Mat_MPIAIJ*)A->data; 1701dfbe8321SBarry Smith PetscErrorCode ierr; 17023a40ed3dSBarry Smith 17033a40ed3dSBarry Smith PetscFunctionBegin; 1704bb5a7306SBarry Smith ierr = MatSetUnfactored(a->A);CHKERRQ(ierr); 17053a40ed3dSBarry Smith PetscFunctionReturn(0); 1706bb5a7306SBarry Smith } 1707bb5a7306SBarry Smith 17084a2ae208SSatish Balay #undef __FUNCT__ 17094a2ae208SSatish Balay #define __FUNCT__ "MatEqual_MPIAIJ" 1710dfbe8321SBarry Smith PetscErrorCode MatEqual_MPIAIJ(Mat A,Mat B,PetscTruth *flag) 1711d4bb536fSBarry Smith { 1712d4bb536fSBarry Smith Mat_MPIAIJ *matB = (Mat_MPIAIJ*)B->data,*matA = (Mat_MPIAIJ*)A->data; 1713d4bb536fSBarry Smith Mat a,b,c,d; 1714d4bb536fSBarry Smith PetscTruth flg; 1715dfbe8321SBarry Smith PetscErrorCode ierr; 1716d4bb536fSBarry Smith 17173a40ed3dSBarry Smith PetscFunctionBegin; 1718d4bb536fSBarry Smith a = matA->A; b = matA->B; 1719d4bb536fSBarry Smith c = matB->A; d = matB->B; 1720d4bb536fSBarry Smith 1721d4bb536fSBarry Smith ierr = MatEqual(a,c,&flg);CHKERRQ(ierr); 1722abc0a331SBarry Smith if (flg) { 1723d4bb536fSBarry Smith ierr = MatEqual(b,d,&flg);CHKERRQ(ierr); 1724d4bb536fSBarry Smith } 17257adad957SLisandro Dalcin ierr = MPI_Allreduce(&flg,flag,1,MPI_INT,MPI_LAND,((PetscObject)A)->comm);CHKERRQ(ierr); 17263a40ed3dSBarry Smith PetscFunctionReturn(0); 1727d4bb536fSBarry Smith } 1728d4bb536fSBarry Smith 17294a2ae208SSatish Balay #undef __FUNCT__ 17304a2ae208SSatish Balay #define __FUNCT__ "MatCopy_MPIAIJ" 1731dfbe8321SBarry Smith PetscErrorCode MatCopy_MPIAIJ(Mat A,Mat B,MatStructure str) 1732cb5b572fSBarry Smith { 1733dfbe8321SBarry Smith PetscErrorCode ierr; 1734cb5b572fSBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ *)A->data; 1735cb5b572fSBarry Smith Mat_MPIAIJ *b = (Mat_MPIAIJ *)B->data; 1736cb5b572fSBarry Smith 1737cb5b572fSBarry Smith PetscFunctionBegin; 173833f4a19fSKris Buschelman /* If the two matrices don't have the same copy implementation, they aren't compatible for fast copy. 
*/ 173933f4a19fSKris Buschelman if ((str != SAME_NONZERO_PATTERN) || (A->ops->copy != B->ops->copy)) { 1740cb5b572fSBarry Smith /* because of the column compression in the off-processor part of the matrix a->B, 1741cb5b572fSBarry Smith the number of columns in a->B and b->B may be different, hence we cannot call 1742cb5b572fSBarry Smith the MatCopy() directly on the two parts. If need be, we can provide a more 1743cb5b572fSBarry Smith efficient copy than the MatCopy_Basic() by first uncompressing the a->B matrices 1744cb5b572fSBarry Smith then copying the submatrices */ 1745cb5b572fSBarry Smith ierr = MatCopy_Basic(A,B,str);CHKERRQ(ierr); 1746cb5b572fSBarry Smith } else { 1747cb5b572fSBarry Smith ierr = MatCopy(a->A,b->A,str);CHKERRQ(ierr); 1748cb5b572fSBarry Smith ierr = MatCopy(a->B,b->B,str);CHKERRQ(ierr); 1749cb5b572fSBarry Smith } 1750cb5b572fSBarry Smith PetscFunctionReturn(0); 1751cb5b572fSBarry Smith } 1752cb5b572fSBarry Smith 17534a2ae208SSatish Balay #undef __FUNCT__ 17544a2ae208SSatish Balay #define __FUNCT__ "MatSetUpPreallocation_MPIAIJ" 1755dfbe8321SBarry Smith PetscErrorCode MatSetUpPreallocation_MPIAIJ(Mat A) 1756273d9f13SBarry Smith { 1757dfbe8321SBarry Smith PetscErrorCode ierr; 1758273d9f13SBarry Smith 1759273d9f13SBarry Smith PetscFunctionBegin; 1760273d9f13SBarry Smith ierr = MatMPIAIJSetPreallocation(A,PETSC_DEFAULT,0,PETSC_DEFAULT,0);CHKERRQ(ierr); 1761273d9f13SBarry Smith PetscFunctionReturn(0); 1762273d9f13SBarry Smith } 1763273d9f13SBarry Smith 1764ac90fabeSBarry Smith #include "petscblaslapack.h" 1765ac90fabeSBarry Smith #undef __FUNCT__ 1766ac90fabeSBarry Smith #define __FUNCT__ "MatAXPY_MPIAIJ" 1767f4df32b1SMatthew Knepley PetscErrorCode MatAXPY_MPIAIJ(Mat Y,PetscScalar a,Mat X,MatStructure str) 1768ac90fabeSBarry Smith { 1769dfbe8321SBarry Smith PetscErrorCode ierr; 1770b1d57f15SBarry Smith PetscInt i; 1771ac90fabeSBarry Smith Mat_MPIAIJ *xx = (Mat_MPIAIJ *)X->data,*yy = (Mat_MPIAIJ *)Y->data; 17724ce68768SBarry Smith PetscBLASInt 
bnz,one=1; 1773ac90fabeSBarry Smith Mat_SeqAIJ *x,*y; 1774ac90fabeSBarry Smith 1775ac90fabeSBarry Smith PetscFunctionBegin; 1776ac90fabeSBarry Smith if (str == SAME_NONZERO_PATTERN) { 1777f4df32b1SMatthew Knepley PetscScalar alpha = a; 1778ac90fabeSBarry Smith x = (Mat_SeqAIJ *)xx->A->data; 1779ac90fabeSBarry Smith y = (Mat_SeqAIJ *)yy->A->data; 17800805154bSBarry Smith bnz = PetscBLASIntCast(x->nz); 1781f4df32b1SMatthew Knepley BLASaxpy_(&bnz,&alpha,x->a,&one,y->a,&one); 1782ac90fabeSBarry Smith x = (Mat_SeqAIJ *)xx->B->data; 1783ac90fabeSBarry Smith y = (Mat_SeqAIJ *)yy->B->data; 17840805154bSBarry Smith bnz = PetscBLASIntCast(x->nz); 1785f4df32b1SMatthew Knepley BLASaxpy_(&bnz,&alpha,x->a,&one,y->a,&one); 1786a30b2313SHong Zhang } else if (str == SUBSET_NONZERO_PATTERN) { 1787f4df32b1SMatthew Knepley ierr = MatAXPY_SeqAIJ(yy->A,a,xx->A,str);CHKERRQ(ierr); 1788c537a176SHong Zhang 1789c537a176SHong Zhang x = (Mat_SeqAIJ *)xx->B->data; 1790a30b2313SHong Zhang y = (Mat_SeqAIJ *)yy->B->data; 1791a30b2313SHong Zhang if (y->xtoy && y->XtoY != xx->B) { 1792a30b2313SHong Zhang ierr = PetscFree(y->xtoy);CHKERRQ(ierr); 1793a30b2313SHong Zhang ierr = MatDestroy(y->XtoY);CHKERRQ(ierr); 1794c537a176SHong Zhang } 1795a30b2313SHong Zhang if (!y->xtoy) { /* get xtoy */ 1796d0f46423SBarry Smith ierr = MatAXPYGetxtoy_Private(xx->B->rmap->n,x->i,x->j,xx->garray,y->i,y->j,yy->garray,&y->xtoy);CHKERRQ(ierr); 1797a30b2313SHong Zhang y->XtoY = xx->B; 1798407f6b05SHong Zhang ierr = PetscObjectReference((PetscObject)xx->B);CHKERRQ(ierr); 1799c537a176SHong Zhang } 1800f4df32b1SMatthew Knepley for (i=0; i<x->nz; i++) y->a[y->xtoy[i]] += a*(x->a[i]); 1801ac90fabeSBarry Smith } else { 1802f4df32b1SMatthew Knepley ierr = MatAXPY_Basic(Y,a,X,str);CHKERRQ(ierr); 1803ac90fabeSBarry Smith } 1804ac90fabeSBarry Smith PetscFunctionReturn(0); 1805ac90fabeSBarry Smith } 1806ac90fabeSBarry Smith 1807354c94deSBarry Smith EXTERN PetscErrorCode PETSCMAT_DLLEXPORT MatConjugate_SeqAIJ(Mat); 
1808354c94deSBarry Smith 1809354c94deSBarry Smith #undef __FUNCT__ 1810354c94deSBarry Smith #define __FUNCT__ "MatConjugate_MPIAIJ" 1811354c94deSBarry Smith PetscErrorCode PETSCMAT_DLLEXPORT MatConjugate_MPIAIJ(Mat mat) 1812354c94deSBarry Smith { 1813354c94deSBarry Smith #if defined(PETSC_USE_COMPLEX) 1814354c94deSBarry Smith PetscErrorCode ierr; 1815354c94deSBarry Smith Mat_MPIAIJ *aij = (Mat_MPIAIJ *)mat->data; 1816354c94deSBarry Smith 1817354c94deSBarry Smith PetscFunctionBegin; 1818354c94deSBarry Smith ierr = MatConjugate_SeqAIJ(aij->A);CHKERRQ(ierr); 1819354c94deSBarry Smith ierr = MatConjugate_SeqAIJ(aij->B);CHKERRQ(ierr); 1820354c94deSBarry Smith #else 1821354c94deSBarry Smith PetscFunctionBegin; 1822354c94deSBarry Smith #endif 1823354c94deSBarry Smith PetscFunctionReturn(0); 1824354c94deSBarry Smith } 1825354c94deSBarry Smith 182699cafbc1SBarry Smith #undef __FUNCT__ 182799cafbc1SBarry Smith #define __FUNCT__ "MatRealPart_MPIAIJ" 182899cafbc1SBarry Smith PetscErrorCode MatRealPart_MPIAIJ(Mat A) 182999cafbc1SBarry Smith { 183099cafbc1SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 183199cafbc1SBarry Smith PetscErrorCode ierr; 183299cafbc1SBarry Smith 183399cafbc1SBarry Smith PetscFunctionBegin; 183499cafbc1SBarry Smith ierr = MatRealPart(a->A);CHKERRQ(ierr); 183599cafbc1SBarry Smith ierr = MatRealPart(a->B);CHKERRQ(ierr); 183699cafbc1SBarry Smith PetscFunctionReturn(0); 183799cafbc1SBarry Smith } 183899cafbc1SBarry Smith 183999cafbc1SBarry Smith #undef __FUNCT__ 184099cafbc1SBarry Smith #define __FUNCT__ "MatImaginaryPart_MPIAIJ" 184199cafbc1SBarry Smith PetscErrorCode MatImaginaryPart_MPIAIJ(Mat A) 184299cafbc1SBarry Smith { 184399cafbc1SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 184499cafbc1SBarry Smith PetscErrorCode ierr; 184599cafbc1SBarry Smith 184699cafbc1SBarry Smith PetscFunctionBegin; 184799cafbc1SBarry Smith ierr = MatImaginaryPart(a->A);CHKERRQ(ierr); 184899cafbc1SBarry Smith ierr = MatImaginaryPart(a->B);CHKERRQ(ierr); 
184999cafbc1SBarry Smith PetscFunctionReturn(0); 185099cafbc1SBarry Smith } 185199cafbc1SBarry Smith 1852103bf8bdSMatthew Knepley #ifdef PETSC_HAVE_PBGL 1853103bf8bdSMatthew Knepley 1854103bf8bdSMatthew Knepley #include <boost/parallel/mpi/bsp_process_group.hpp> 1855a2c909beSMatthew Knepley #include <boost/graph/distributed/ilu_default_graph.hpp> 1856a2c909beSMatthew Knepley #include <boost/graph/distributed/ilu_0_block.hpp> 1857a2c909beSMatthew Knepley #include <boost/graph/distributed/ilu_preconditioner.hpp> 1858103bf8bdSMatthew Knepley #include <boost/graph/distributed/petsc/interface.hpp> 1859a2c909beSMatthew Knepley #include <boost/multi_array.hpp> 1860d0f46423SBarry Smith #include <boost/parallel/distributed_property_map->hpp> 1861103bf8bdSMatthew Knepley 1862103bf8bdSMatthew Knepley #undef __FUNCT__ 1863103bf8bdSMatthew Knepley #define __FUNCT__ "MatILUFactorSymbolic_MPIAIJ" 1864103bf8bdSMatthew Knepley /* 1865103bf8bdSMatthew Knepley This uses the parallel ILU factorization of Peter Gottschling <pgottsch@osl.iu.edu> 1866103bf8bdSMatthew Knepley */ 18670481f469SBarry Smith PetscErrorCode MatILUFactorSymbolic_MPIAIJ(Mat fact,Mat A, IS isrow, IS iscol, const MatFactorInfo *info) 1868103bf8bdSMatthew Knepley { 1869a2c909beSMatthew Knepley namespace petsc = boost::distributed::petsc; 1870a2c909beSMatthew Knepley 1871a2c909beSMatthew Knepley namespace graph_dist = boost::graph::distributed; 1872a2c909beSMatthew Knepley using boost::graph::distributed::ilu_default::process_group_type; 1873a2c909beSMatthew Knepley using boost::graph::ilu_permuted; 1874a2c909beSMatthew Knepley 1875103bf8bdSMatthew Knepley PetscTruth row_identity, col_identity; 1876776b82aeSLisandro Dalcin PetscContainer c; 1877103bf8bdSMatthew Knepley PetscInt m, n, M, N; 1878103bf8bdSMatthew Knepley PetscErrorCode ierr; 1879103bf8bdSMatthew Knepley 1880103bf8bdSMatthew Knepley PetscFunctionBegin; 1881103bf8bdSMatthew Knepley if (info->levels != 0) SETERRQ(PETSC_ERR_SUP,"Only levels = 0 supported 
for parallel ilu"); 1882103bf8bdSMatthew Knepley ierr = ISIdentity(isrow, &row_identity);CHKERRQ(ierr); 1883103bf8bdSMatthew Knepley ierr = ISIdentity(iscol, &col_identity);CHKERRQ(ierr); 1884103bf8bdSMatthew Knepley if (!row_identity || !col_identity) { 1885103bf8bdSMatthew Knepley SETERRQ(PETSC_ERR_ARG_WRONG,"Row and column permutations must be identity for parallel ILU"); 1886103bf8bdSMatthew Knepley } 1887103bf8bdSMatthew Knepley 1888103bf8bdSMatthew Knepley process_group_type pg; 1889a2c909beSMatthew Knepley typedef graph_dist::ilu_default::ilu_level_graph_type lgraph_type; 1890a2c909beSMatthew Knepley lgraph_type* lgraph_p = new lgraph_type(petsc::num_global_vertices(A), pg, petsc::matrix_distribution(A, pg)); 1891a2c909beSMatthew Knepley lgraph_type& level_graph = *lgraph_p; 1892a2c909beSMatthew Knepley graph_dist::ilu_default::graph_type& graph(level_graph.graph); 1893a2c909beSMatthew Knepley 1894103bf8bdSMatthew Knepley petsc::read_matrix(A, graph, get(boost::edge_weight, graph)); 1895a2c909beSMatthew Knepley ilu_permuted(level_graph); 1896103bf8bdSMatthew Knepley 1897103bf8bdSMatthew Knepley /* put together the new matrix */ 18987adad957SLisandro Dalcin ierr = MatCreate(((PetscObject)A)->comm, fact);CHKERRQ(ierr); 1899103bf8bdSMatthew Knepley ierr = MatGetLocalSize(A, &m, &n);CHKERRQ(ierr); 1900103bf8bdSMatthew Knepley ierr = MatGetSize(A, &M, &N);CHKERRQ(ierr); 1901719d5645SBarry Smith ierr = MatSetSizes(fact, m, n, M, N);CHKERRQ(ierr); 1902719d5645SBarry Smith ierr = MatSetType(fact, ((PetscObject)A)->type_name);CHKERRQ(ierr); 1903719d5645SBarry Smith ierr = MatAssemblyBegin(fact, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1904719d5645SBarry Smith ierr = MatAssemblyEnd(fact, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1905103bf8bdSMatthew Knepley 19067adad957SLisandro Dalcin ierr = PetscContainerCreate(((PetscObject)A)->comm, &c); 1907776b82aeSLisandro Dalcin ierr = PetscContainerSetPointer(c, lgraph_p); 1908719d5645SBarry Smith ierr = PetscObjectCompose((PetscObject) 
(fact), "graph", (PetscObject) c); 1909103bf8bdSMatthew Knepley PetscFunctionReturn(0); 1910103bf8bdSMatthew Knepley } 1911103bf8bdSMatthew Knepley 1912103bf8bdSMatthew Knepley #undef __FUNCT__ 1913103bf8bdSMatthew Knepley #define __FUNCT__ "MatLUFactorNumeric_MPIAIJ" 19140481f469SBarry Smith PetscErrorCode MatLUFactorNumeric_MPIAIJ(Mat B,Mat A, const MatFactorInfo *info) 1915103bf8bdSMatthew Knepley { 1916103bf8bdSMatthew Knepley PetscFunctionBegin; 1917103bf8bdSMatthew Knepley PetscFunctionReturn(0); 1918103bf8bdSMatthew Knepley } 1919103bf8bdSMatthew Knepley 1920103bf8bdSMatthew Knepley #undef __FUNCT__ 1921103bf8bdSMatthew Knepley #define __FUNCT__ "MatSolve_MPIAIJ" 1922103bf8bdSMatthew Knepley /* 1923103bf8bdSMatthew Knepley This uses the parallel ILU factorization of Peter Gottschling <pgottsch@osl.iu.edu> 1924103bf8bdSMatthew Knepley */ 1925103bf8bdSMatthew Knepley PetscErrorCode MatSolve_MPIAIJ(Mat A, Vec b, Vec x) 1926103bf8bdSMatthew Knepley { 1927a2c909beSMatthew Knepley namespace graph_dist = boost::graph::distributed; 1928a2c909beSMatthew Knepley 1929a2c909beSMatthew Knepley typedef graph_dist::ilu_default::ilu_level_graph_type lgraph_type; 1930a2c909beSMatthew Knepley lgraph_type* lgraph_p; 1931776b82aeSLisandro Dalcin PetscContainer c; 1932103bf8bdSMatthew Knepley PetscErrorCode ierr; 1933103bf8bdSMatthew Knepley 1934103bf8bdSMatthew Knepley PetscFunctionBegin; 1935103bf8bdSMatthew Knepley ierr = PetscObjectQuery((PetscObject) A, "graph", (PetscObject *) &c);CHKERRQ(ierr); 1936776b82aeSLisandro Dalcin ierr = PetscContainerGetPointer(c, (void **) &lgraph_p);CHKERRQ(ierr); 1937103bf8bdSMatthew Knepley ierr = VecCopy(b, x);CHKERRQ(ierr); 1938a2c909beSMatthew Knepley 1939a2c909beSMatthew Knepley PetscScalar* array_x; 1940a2c909beSMatthew Knepley ierr = VecGetArray(x, &array_x);CHKERRQ(ierr); 1941a2c909beSMatthew Knepley PetscInt sx; 1942a2c909beSMatthew Knepley ierr = VecGetSize(x, &sx);CHKERRQ(ierr); 1943a2c909beSMatthew Knepley 1944a2c909beSMatthew 
Knepley PetscScalar* array_b; 1945a2c909beSMatthew Knepley ierr = VecGetArray(b, &array_b);CHKERRQ(ierr); 1946a2c909beSMatthew Knepley PetscInt sb; 1947a2c909beSMatthew Knepley ierr = VecGetSize(b, &sb);CHKERRQ(ierr); 1948a2c909beSMatthew Knepley 1949a2c909beSMatthew Knepley lgraph_type& level_graph = *lgraph_p; 1950a2c909beSMatthew Knepley graph_dist::ilu_default::graph_type& graph(level_graph.graph); 1951a2c909beSMatthew Knepley 1952a2c909beSMatthew Knepley typedef boost::multi_array_ref<PetscScalar, 1> array_ref_type; 1953a2c909beSMatthew Knepley array_ref_type ref_b(array_b, boost::extents[num_vertices(graph)]), 1954a2c909beSMatthew Knepley ref_x(array_x, boost::extents[num_vertices(graph)]); 1955a2c909beSMatthew Knepley 1956a2c909beSMatthew Knepley typedef boost::iterator_property_map<array_ref_type::iterator, 1957a2c909beSMatthew Knepley boost::property_map<graph_dist::ilu_default::graph_type, boost::vertex_index_t>::type> gvector_type; 1958a2c909beSMatthew Knepley gvector_type vector_b(ref_b.begin(), get(boost::vertex_index, graph)), 1959a2c909beSMatthew Knepley vector_x(ref_x.begin(), get(boost::vertex_index, graph)); 1960a2c909beSMatthew Knepley 1961a2c909beSMatthew Knepley ilu_set_solve(*lgraph_p, vector_b, vector_x); 1962a2c909beSMatthew Knepley 1963103bf8bdSMatthew Knepley PetscFunctionReturn(0); 1964103bf8bdSMatthew Knepley } 1965103bf8bdSMatthew Knepley #endif 1966103bf8bdSMatthew Knepley 196769db28dcSHong Zhang typedef struct { /* used by MatGetRedundantMatrix() for reusing matredundant */ 196869db28dcSHong Zhang PetscInt nzlocal,nsends,nrecvs; 1969aa5bb8c0SSatish Balay PetscMPIInt *send_rank; 1970aa5bb8c0SSatish Balay PetscInt *sbuf_nz,*sbuf_j,**rbuf_j; 197169db28dcSHong Zhang PetscScalar *sbuf_a,**rbuf_a; 197269db28dcSHong Zhang PetscErrorCode (*MatDestroy)(Mat); 197369db28dcSHong Zhang } Mat_Redundant; 197469db28dcSHong Zhang 197569db28dcSHong Zhang #undef __FUNCT__ 197669db28dcSHong Zhang #define __FUNCT__ "PetscContainerDestroy_MatRedundant" 
197769db28dcSHong Zhang PetscErrorCode PetscContainerDestroy_MatRedundant(void *ptr) 197869db28dcSHong Zhang { 197969db28dcSHong Zhang PetscErrorCode ierr; 198069db28dcSHong Zhang Mat_Redundant *redund=(Mat_Redundant*)ptr; 198169db28dcSHong Zhang PetscInt i; 198269db28dcSHong Zhang 198369db28dcSHong Zhang PetscFunctionBegin; 198469db28dcSHong Zhang ierr = PetscFree(redund->send_rank);CHKERRQ(ierr); 198569db28dcSHong Zhang ierr = PetscFree(redund->sbuf_j);CHKERRQ(ierr); 198669db28dcSHong Zhang ierr = PetscFree(redund->sbuf_a);CHKERRQ(ierr); 198769db28dcSHong Zhang for (i=0; i<redund->nrecvs; i++){ 198869db28dcSHong Zhang ierr = PetscFree(redund->rbuf_j[i]);CHKERRQ(ierr); 198969db28dcSHong Zhang ierr = PetscFree(redund->rbuf_a[i]);CHKERRQ(ierr); 199069db28dcSHong Zhang } 199169db28dcSHong Zhang ierr = PetscFree3(redund->sbuf_nz,redund->rbuf_j,redund->rbuf_a);CHKERRQ(ierr); 199269db28dcSHong Zhang ierr = PetscFree(redund);CHKERRQ(ierr); 199369db28dcSHong Zhang PetscFunctionReturn(0); 199469db28dcSHong Zhang } 199569db28dcSHong Zhang 199669db28dcSHong Zhang #undef __FUNCT__ 199769db28dcSHong Zhang #define __FUNCT__ "MatDestroy_MatRedundant" 199869db28dcSHong Zhang PetscErrorCode MatDestroy_MatRedundant(Mat A) 199969db28dcSHong Zhang { 200069db28dcSHong Zhang PetscErrorCode ierr; 200169db28dcSHong Zhang PetscContainer container; 200269db28dcSHong Zhang Mat_Redundant *redund=PETSC_NULL; 200369db28dcSHong Zhang 200469db28dcSHong Zhang PetscFunctionBegin; 200569db28dcSHong Zhang ierr = PetscObjectQuery((PetscObject)A,"Mat_Redundant",(PetscObject *)&container);CHKERRQ(ierr); 200669db28dcSHong Zhang if (container) { 200769db28dcSHong Zhang ierr = PetscContainerGetPointer(container,(void **)&redund);CHKERRQ(ierr); 200869db28dcSHong Zhang } else { 200969db28dcSHong Zhang SETERRQ(PETSC_ERR_PLIB,"Container does not exit"); 201069db28dcSHong Zhang } 201169db28dcSHong Zhang A->ops->destroy = redund->MatDestroy; 201269db28dcSHong Zhang ierr = 
PetscObjectCompose((PetscObject)A,"Mat_Redundant",0);CHKERRQ(ierr); 201369db28dcSHong Zhang ierr = (*A->ops->destroy)(A);CHKERRQ(ierr); 201469db28dcSHong Zhang ierr = PetscContainerDestroy(container);CHKERRQ(ierr); 201569db28dcSHong Zhang PetscFunctionReturn(0); 201669db28dcSHong Zhang } 201769db28dcSHong Zhang 201869db28dcSHong Zhang #undef __FUNCT__ 201969db28dcSHong Zhang #define __FUNCT__ "MatGetRedundantMatrix_MPIAIJ" 202069db28dcSHong Zhang PetscErrorCode MatGetRedundantMatrix_MPIAIJ(Mat mat,PetscInt nsubcomm,MPI_Comm subcomm,PetscInt mlocal_sub,MatReuse reuse,Mat *matredundant) 202169db28dcSHong Zhang { 202269db28dcSHong Zhang PetscMPIInt rank,size; 20237adad957SLisandro Dalcin MPI_Comm comm=((PetscObject)mat)->comm; 202469db28dcSHong Zhang PetscErrorCode ierr; 202569db28dcSHong Zhang PetscInt nsends=0,nrecvs=0,i,rownz_max=0; 202669db28dcSHong Zhang PetscMPIInt *send_rank=PETSC_NULL,*recv_rank=PETSC_NULL; 2027d0f46423SBarry Smith PetscInt *rowrange=mat->rmap->range; 202869db28dcSHong Zhang Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 202969db28dcSHong Zhang Mat A=aij->A,B=aij->B,C=*matredundant; 203069db28dcSHong Zhang Mat_SeqAIJ *a=(Mat_SeqAIJ*)A->data,*b=(Mat_SeqAIJ*)B->data; 203169db28dcSHong Zhang PetscScalar *sbuf_a; 203269db28dcSHong Zhang PetscInt nzlocal=a->nz+b->nz; 2033d0f46423SBarry Smith PetscInt j,cstart=mat->cmap->rstart,cend=mat->cmap->rend,row,nzA,nzB,ncols,*cworkA,*cworkB; 2034d0f46423SBarry Smith PetscInt rstart=mat->rmap->rstart,rend=mat->rmap->rend,*bmap=aij->garray,M,N; 203569db28dcSHong Zhang PetscInt *cols,ctmp,lwrite,*rptr,l,*sbuf_j; 2036a77337e4SBarry Smith MatScalar *aworkA,*aworkB; 2037a77337e4SBarry Smith PetscScalar *vals; 203869db28dcSHong Zhang PetscMPIInt tag1,tag2,tag3,imdex; 203969db28dcSHong Zhang MPI_Request *s_waits1=PETSC_NULL,*s_waits2=PETSC_NULL,*s_waits3=PETSC_NULL, 204069db28dcSHong Zhang *r_waits1=PETSC_NULL,*r_waits2=PETSC_NULL,*r_waits3=PETSC_NULL; 204169db28dcSHong Zhang MPI_Status recv_status,*send_status; 
204269db28dcSHong Zhang PetscInt *sbuf_nz=PETSC_NULL,*rbuf_nz=PETSC_NULL,count; 204369db28dcSHong Zhang PetscInt **rbuf_j=PETSC_NULL; 204469db28dcSHong Zhang PetscScalar **rbuf_a=PETSC_NULL; 204569db28dcSHong Zhang Mat_Redundant *redund=PETSC_NULL; 204669db28dcSHong Zhang PetscContainer container; 204769db28dcSHong Zhang 204869db28dcSHong Zhang PetscFunctionBegin; 204969db28dcSHong Zhang ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 205069db28dcSHong Zhang ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 205169db28dcSHong Zhang 205269db28dcSHong Zhang if (reuse == MAT_REUSE_MATRIX) { 205369db28dcSHong Zhang ierr = MatGetSize(C,&M,&N);CHKERRQ(ierr); 2054d0f46423SBarry Smith if (M != N || M != mat->rmap->N) SETERRQ(PETSC_ERR_ARG_SIZ,"Cannot reuse matrix. Wrong global size"); 205569db28dcSHong Zhang ierr = MatGetLocalSize(C,&M,&N);CHKERRQ(ierr); 205669db28dcSHong Zhang if (M != N || M != mlocal_sub) SETERRQ(PETSC_ERR_ARG_SIZ,"Cannot reuse matrix. Wrong local size"); 205769db28dcSHong Zhang ierr = PetscObjectQuery((PetscObject)C,"Mat_Redundant",(PetscObject *)&container);CHKERRQ(ierr); 205869db28dcSHong Zhang if (container) { 205969db28dcSHong Zhang ierr = PetscContainerGetPointer(container,(void **)&redund);CHKERRQ(ierr); 206069db28dcSHong Zhang } else { 206169db28dcSHong Zhang SETERRQ(PETSC_ERR_PLIB,"Container does not exit"); 206269db28dcSHong Zhang } 206369db28dcSHong Zhang if (nzlocal != redund->nzlocal) SETERRQ(PETSC_ERR_ARG_SIZ,"Cannot reuse matrix. 
Wrong nzlocal"); 206469db28dcSHong Zhang 206569db28dcSHong Zhang nsends = redund->nsends; 206669db28dcSHong Zhang nrecvs = redund->nrecvs; 206769db28dcSHong Zhang send_rank = redund->send_rank; recv_rank = send_rank + size; 206869db28dcSHong Zhang sbuf_nz = redund->sbuf_nz; rbuf_nz = sbuf_nz + nsends; 206969db28dcSHong Zhang sbuf_j = redund->sbuf_j; 207069db28dcSHong Zhang sbuf_a = redund->sbuf_a; 207169db28dcSHong Zhang rbuf_j = redund->rbuf_j; 207269db28dcSHong Zhang rbuf_a = redund->rbuf_a; 207369db28dcSHong Zhang } 207469db28dcSHong Zhang 207569db28dcSHong Zhang if (reuse == MAT_INITIAL_MATRIX){ 207669db28dcSHong Zhang PetscMPIInt subrank,subsize; 207769db28dcSHong Zhang PetscInt nleftover,np_subcomm; 207869db28dcSHong Zhang /* get the destination processors' id send_rank, nsends and nrecvs */ 207969db28dcSHong Zhang ierr = MPI_Comm_rank(subcomm,&subrank);CHKERRQ(ierr); 208069db28dcSHong Zhang ierr = MPI_Comm_size(subcomm,&subsize);CHKERRQ(ierr); 208169db28dcSHong Zhang ierr = PetscMalloc((2*size+1)*sizeof(PetscMPIInt),&send_rank); 208269db28dcSHong Zhang recv_rank = send_rank + size; 208369db28dcSHong Zhang np_subcomm = size/nsubcomm; 208469db28dcSHong Zhang nleftover = size - nsubcomm*np_subcomm; 208569db28dcSHong Zhang nsends = 0; nrecvs = 0; 208669db28dcSHong Zhang for (i=0; i<size; i++){ /* i=rank*/ 208769db28dcSHong Zhang if (subrank == i/nsubcomm && rank != i){ /* my_subrank == other's subrank */ 208869db28dcSHong Zhang send_rank[nsends] = i; nsends++; 208969db28dcSHong Zhang recv_rank[nrecvs++] = i; 209069db28dcSHong Zhang } 209169db28dcSHong Zhang } 209269db28dcSHong Zhang if (rank >= size - nleftover){/* this proc is a leftover processor */ 209369db28dcSHong Zhang i = size-nleftover-1; 209469db28dcSHong Zhang j = 0; 209569db28dcSHong Zhang while (j < nsubcomm - nleftover){ 209669db28dcSHong Zhang send_rank[nsends++] = i; 209769db28dcSHong Zhang i--; j++; 209869db28dcSHong Zhang } 209969db28dcSHong Zhang } 210069db28dcSHong Zhang 210169db28dcSHong 
Zhang if (nleftover && subsize == size/nsubcomm && subrank==subsize-1){ /* this proc recvs from leftover processors */ 210269db28dcSHong Zhang for (i=0; i<nleftover; i++){ 210369db28dcSHong Zhang recv_rank[nrecvs++] = size-nleftover+i; 210469db28dcSHong Zhang } 210569db28dcSHong Zhang } 210669db28dcSHong Zhang 210769db28dcSHong Zhang /* allocate sbuf_j, sbuf_a */ 210869db28dcSHong Zhang i = nzlocal + rowrange[rank+1] - rowrange[rank] + 2; 210969db28dcSHong Zhang ierr = PetscMalloc(i*sizeof(PetscInt),&sbuf_j);CHKERRQ(ierr); 211069db28dcSHong Zhang ierr = PetscMalloc((nzlocal+1)*sizeof(PetscScalar),&sbuf_a);CHKERRQ(ierr); 211169db28dcSHong Zhang } /* endof if (reuse == MAT_INITIAL_MATRIX) */ 211269db28dcSHong Zhang 211369db28dcSHong Zhang /* copy mat's local entries into the buffers */ 211469db28dcSHong Zhang if (reuse == MAT_INITIAL_MATRIX){ 211569db28dcSHong Zhang rownz_max = 0; 211669db28dcSHong Zhang rptr = sbuf_j; 211769db28dcSHong Zhang cols = sbuf_j + rend-rstart + 1; 211869db28dcSHong Zhang vals = sbuf_a; 211969db28dcSHong Zhang rptr[0] = 0; 212069db28dcSHong Zhang for (i=0; i<rend-rstart; i++){ 212169db28dcSHong Zhang row = i + rstart; 212269db28dcSHong Zhang nzA = a->i[i+1] - a->i[i]; nzB = b->i[i+1] - b->i[i]; 212369db28dcSHong Zhang ncols = nzA + nzB; 212469db28dcSHong Zhang cworkA = a->j + a->i[i]; cworkB = b->j + b->i[i]; 212569db28dcSHong Zhang aworkA = a->a + a->i[i]; aworkB = b->a + b->i[i]; 212669db28dcSHong Zhang /* load the column indices for this row into cols */ 212769db28dcSHong Zhang lwrite = 0; 212869db28dcSHong Zhang for (l=0; l<nzB; l++) { 212969db28dcSHong Zhang if ((ctmp = bmap[cworkB[l]]) < cstart){ 213069db28dcSHong Zhang vals[lwrite] = aworkB[l]; 213169db28dcSHong Zhang cols[lwrite++] = ctmp; 213269db28dcSHong Zhang } 213369db28dcSHong Zhang } 213469db28dcSHong Zhang for (l=0; l<nzA; l++){ 213569db28dcSHong Zhang vals[lwrite] = aworkA[l]; 213669db28dcSHong Zhang cols[lwrite++] = cstart + cworkA[l]; 213769db28dcSHong Zhang } 
213869db28dcSHong Zhang for (l=0; l<nzB; l++) { 213969db28dcSHong Zhang if ((ctmp = bmap[cworkB[l]]) >= cend){ 214069db28dcSHong Zhang vals[lwrite] = aworkB[l]; 214169db28dcSHong Zhang cols[lwrite++] = ctmp; 214269db28dcSHong Zhang } 214369db28dcSHong Zhang } 214469db28dcSHong Zhang vals += ncols; 214569db28dcSHong Zhang cols += ncols; 214669db28dcSHong Zhang rptr[i+1] = rptr[i] + ncols; 214769db28dcSHong Zhang if (rownz_max < ncols) rownz_max = ncols; 214869db28dcSHong Zhang } 214969db28dcSHong Zhang if (rptr[rend-rstart] != a->nz + b->nz) SETERRQ4(1, "rptr[%d] %d != %d + %d",rend-rstart,rptr[rend-rstart+1],a->nz,b->nz); 215069db28dcSHong Zhang } else { /* only copy matrix values into sbuf_a */ 215169db28dcSHong Zhang rptr = sbuf_j; 215269db28dcSHong Zhang vals = sbuf_a; 215369db28dcSHong Zhang rptr[0] = 0; 215469db28dcSHong Zhang for (i=0; i<rend-rstart; i++){ 215569db28dcSHong Zhang row = i + rstart; 215669db28dcSHong Zhang nzA = a->i[i+1] - a->i[i]; nzB = b->i[i+1] - b->i[i]; 215769db28dcSHong Zhang ncols = nzA + nzB; 215869db28dcSHong Zhang cworkA = a->j + a->i[i]; cworkB = b->j + b->i[i]; 215969db28dcSHong Zhang aworkA = a->a + a->i[i]; aworkB = b->a + b->i[i]; 216069db28dcSHong Zhang lwrite = 0; 216169db28dcSHong Zhang for (l=0; l<nzB; l++) { 216269db28dcSHong Zhang if ((ctmp = bmap[cworkB[l]]) < cstart) vals[lwrite++] = aworkB[l]; 216369db28dcSHong Zhang } 216469db28dcSHong Zhang for (l=0; l<nzA; l++) vals[lwrite++] = aworkA[l]; 216569db28dcSHong Zhang for (l=0; l<nzB; l++) { 216669db28dcSHong Zhang if ((ctmp = bmap[cworkB[l]]) >= cend) vals[lwrite++] = aworkB[l]; 216769db28dcSHong Zhang } 216869db28dcSHong Zhang vals += ncols; 216969db28dcSHong Zhang rptr[i+1] = rptr[i] + ncols; 217069db28dcSHong Zhang } 217169db28dcSHong Zhang } /* endof if (reuse == MAT_INITIAL_MATRIX) */ 217269db28dcSHong Zhang 217369db28dcSHong Zhang /* send nzlocal to others, and recv other's nzlocal */ 217469db28dcSHong Zhang /*--------------------------------------------------*/ 
217569db28dcSHong Zhang if (reuse == MAT_INITIAL_MATRIX){ 217669db28dcSHong Zhang ierr = PetscMalloc2(3*(nsends + nrecvs)+1,MPI_Request,&s_waits3,nsends+1,MPI_Status,&send_status);CHKERRQ(ierr); 217769db28dcSHong Zhang s_waits2 = s_waits3 + nsends; 217869db28dcSHong Zhang s_waits1 = s_waits2 + nsends; 217969db28dcSHong Zhang r_waits1 = s_waits1 + nsends; 218069db28dcSHong Zhang r_waits2 = r_waits1 + nrecvs; 218169db28dcSHong Zhang r_waits3 = r_waits2 + nrecvs; 218269db28dcSHong Zhang } else { 218369db28dcSHong Zhang ierr = PetscMalloc2(nsends + nrecvs +1,MPI_Request,&s_waits3,nsends+1,MPI_Status,&send_status);CHKERRQ(ierr); 218469db28dcSHong Zhang r_waits3 = s_waits3 + nsends; 218569db28dcSHong Zhang } 218669db28dcSHong Zhang 218769db28dcSHong Zhang ierr = PetscObjectGetNewTag((PetscObject)mat,&tag3);CHKERRQ(ierr); 218869db28dcSHong Zhang if (reuse == MAT_INITIAL_MATRIX){ 218969db28dcSHong Zhang /* get new tags to keep the communication clean */ 219069db28dcSHong Zhang ierr = PetscObjectGetNewTag((PetscObject)mat,&tag1);CHKERRQ(ierr); 219169db28dcSHong Zhang ierr = PetscObjectGetNewTag((PetscObject)mat,&tag2);CHKERRQ(ierr); 219269db28dcSHong Zhang ierr = PetscMalloc3(nsends+nrecvs+1,PetscInt,&sbuf_nz,nrecvs,PetscInt*,&rbuf_j,nrecvs,PetscScalar*,&rbuf_a);CHKERRQ(ierr); 219369db28dcSHong Zhang rbuf_nz = sbuf_nz + nsends; 219469db28dcSHong Zhang 219569db28dcSHong Zhang /* post receives of other's nzlocal */ 219669db28dcSHong Zhang for (i=0; i<nrecvs; i++){ 219769db28dcSHong Zhang ierr = MPI_Irecv(rbuf_nz+i,1,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,r_waits1+i);CHKERRQ(ierr); 219869db28dcSHong Zhang } 219969db28dcSHong Zhang /* send nzlocal to others */ 220069db28dcSHong Zhang for (i=0; i<nsends; i++){ 220169db28dcSHong Zhang sbuf_nz[i] = nzlocal; 220269db28dcSHong Zhang ierr = MPI_Isend(sbuf_nz+i,1,MPIU_INT,send_rank[i],tag1,comm,s_waits1+i);CHKERRQ(ierr); 220369db28dcSHong Zhang } 220469db28dcSHong Zhang /* wait on receives of nzlocal; allocate space for rbuf_j, rbuf_a */ 
220569db28dcSHong Zhang count = nrecvs; 220669db28dcSHong Zhang while (count) { 220769db28dcSHong Zhang ierr = MPI_Waitany(nrecvs,r_waits1,&imdex,&recv_status);CHKERRQ(ierr); 220869db28dcSHong Zhang recv_rank[imdex] = recv_status.MPI_SOURCE; 220969db28dcSHong Zhang /* allocate rbuf_a and rbuf_j; then post receives of rbuf_j */ 221069db28dcSHong Zhang ierr = PetscMalloc((rbuf_nz[imdex]+1)*sizeof(PetscScalar),&rbuf_a[imdex]);CHKERRQ(ierr); 221169db28dcSHong Zhang 221269db28dcSHong Zhang i = rowrange[recv_status.MPI_SOURCE+1] - rowrange[recv_status.MPI_SOURCE]; /* number of expected mat->i */ 221369db28dcSHong Zhang rbuf_nz[imdex] += i + 2; 221469db28dcSHong Zhang ierr = PetscMalloc(rbuf_nz[imdex]*sizeof(PetscInt),&rbuf_j[imdex]);CHKERRQ(ierr); 221569db28dcSHong Zhang ierr = MPI_Irecv(rbuf_j[imdex],rbuf_nz[imdex],MPIU_INT,recv_status.MPI_SOURCE,tag2,comm,r_waits2+imdex);CHKERRQ(ierr); 221669db28dcSHong Zhang count--; 221769db28dcSHong Zhang } 221869db28dcSHong Zhang /* wait on sends of nzlocal */ 221969db28dcSHong Zhang if (nsends) {ierr = MPI_Waitall(nsends,s_waits1,send_status);CHKERRQ(ierr);} 222069db28dcSHong Zhang /* send mat->i,j to others, and recv from other's */ 222169db28dcSHong Zhang /*------------------------------------------------*/ 222269db28dcSHong Zhang for (i=0; i<nsends; i++){ 222369db28dcSHong Zhang j = nzlocal + rowrange[rank+1] - rowrange[rank] + 1; 222469db28dcSHong Zhang ierr = MPI_Isend(sbuf_j,j,MPIU_INT,send_rank[i],tag2,comm,s_waits2+i);CHKERRQ(ierr); 222569db28dcSHong Zhang } 222669db28dcSHong Zhang /* wait on receives of mat->i,j */ 222769db28dcSHong Zhang /*------------------------------*/ 222869db28dcSHong Zhang count = nrecvs; 222969db28dcSHong Zhang while (count) { 223069db28dcSHong Zhang ierr = MPI_Waitany(nrecvs,r_waits2,&imdex,&recv_status);CHKERRQ(ierr); 223169db28dcSHong Zhang if (recv_rank[imdex] != recv_status.MPI_SOURCE) SETERRQ2(1, "recv_rank %d != MPI_SOURCE %d",recv_rank[imdex],recv_status.MPI_SOURCE); 223269db28dcSHong 
Zhang count--; 223369db28dcSHong Zhang } 223469db28dcSHong Zhang /* wait on sends of mat->i,j */ 223569db28dcSHong Zhang /*---------------------------*/ 223669db28dcSHong Zhang if (nsends) { 223769db28dcSHong Zhang ierr = MPI_Waitall(nsends,s_waits2,send_status);CHKERRQ(ierr); 223869db28dcSHong Zhang } 223969db28dcSHong Zhang } /* endof if (reuse == MAT_INITIAL_MATRIX) */ 224069db28dcSHong Zhang 224169db28dcSHong Zhang /* post receives, send and receive mat->a */ 224269db28dcSHong Zhang /*----------------------------------------*/ 224369db28dcSHong Zhang for (imdex=0; imdex<nrecvs; imdex++) { 224469db28dcSHong Zhang ierr = MPI_Irecv(rbuf_a[imdex],rbuf_nz[imdex],MPIU_SCALAR,recv_rank[imdex],tag3,comm,r_waits3+imdex);CHKERRQ(ierr); 224569db28dcSHong Zhang } 224669db28dcSHong Zhang for (i=0; i<nsends; i++){ 224769db28dcSHong Zhang ierr = MPI_Isend(sbuf_a,nzlocal,MPIU_SCALAR,send_rank[i],tag3,comm,s_waits3+i);CHKERRQ(ierr); 224869db28dcSHong Zhang } 224969db28dcSHong Zhang count = nrecvs; 225069db28dcSHong Zhang while (count) { 225169db28dcSHong Zhang ierr = MPI_Waitany(nrecvs,r_waits3,&imdex,&recv_status);CHKERRQ(ierr); 225269db28dcSHong Zhang if (recv_rank[imdex] != recv_status.MPI_SOURCE) SETERRQ2(1, "recv_rank %d != MPI_SOURCE %d",recv_rank[imdex],recv_status.MPI_SOURCE); 225369db28dcSHong Zhang count--; 225469db28dcSHong Zhang } 225569db28dcSHong Zhang if (nsends) { 225669db28dcSHong Zhang ierr = MPI_Waitall(nsends,s_waits3,send_status);CHKERRQ(ierr); 225769db28dcSHong Zhang } 225869db28dcSHong Zhang 225969db28dcSHong Zhang ierr = PetscFree2(s_waits3,send_status);CHKERRQ(ierr); 226069db28dcSHong Zhang 226169db28dcSHong Zhang /* create redundant matrix */ 226269db28dcSHong Zhang /*-------------------------*/ 226369db28dcSHong Zhang if (reuse == MAT_INITIAL_MATRIX){ 226469db28dcSHong Zhang /* compute rownz_max for preallocation */ 226569db28dcSHong Zhang for (imdex=0; imdex<nrecvs; imdex++){ 226669db28dcSHong Zhang j = rowrange[recv_rank[imdex]+1] - 
rowrange[recv_rank[imdex]]; 226769db28dcSHong Zhang rptr = rbuf_j[imdex]; 226869db28dcSHong Zhang for (i=0; i<j; i++){ 226969db28dcSHong Zhang ncols = rptr[i+1] - rptr[i]; 227069db28dcSHong Zhang if (rownz_max < ncols) rownz_max = ncols; 227169db28dcSHong Zhang } 227269db28dcSHong Zhang } 227369db28dcSHong Zhang 227469db28dcSHong Zhang ierr = MatCreate(subcomm,&C);CHKERRQ(ierr); 227569db28dcSHong Zhang ierr = MatSetSizes(C,mlocal_sub,mlocal_sub,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr); 227669db28dcSHong Zhang ierr = MatSetFromOptions(C);CHKERRQ(ierr); 227769db28dcSHong Zhang ierr = MatSeqAIJSetPreallocation(C,rownz_max,PETSC_NULL);CHKERRQ(ierr); 227869db28dcSHong Zhang ierr = MatMPIAIJSetPreallocation(C,rownz_max,PETSC_NULL,rownz_max,PETSC_NULL);CHKERRQ(ierr); 227969db28dcSHong Zhang } else { 228069db28dcSHong Zhang C = *matredundant; 228169db28dcSHong Zhang } 228269db28dcSHong Zhang 228369db28dcSHong Zhang /* insert local matrix entries */ 228469db28dcSHong Zhang rptr = sbuf_j; 228569db28dcSHong Zhang cols = sbuf_j + rend-rstart + 1; 228669db28dcSHong Zhang vals = sbuf_a; 228769db28dcSHong Zhang for (i=0; i<rend-rstart; i++){ 228869db28dcSHong Zhang row = i + rstart; 228969db28dcSHong Zhang ncols = rptr[i+1] - rptr[i]; 229069db28dcSHong Zhang ierr = MatSetValues(C,1,&row,ncols,cols,vals,INSERT_VALUES);CHKERRQ(ierr); 229169db28dcSHong Zhang vals += ncols; 229269db28dcSHong Zhang cols += ncols; 229369db28dcSHong Zhang } 229469db28dcSHong Zhang /* insert received matrix entries */ 229569db28dcSHong Zhang for (imdex=0; imdex<nrecvs; imdex++){ 229669db28dcSHong Zhang rstart = rowrange[recv_rank[imdex]]; 229769db28dcSHong Zhang rend = rowrange[recv_rank[imdex]+1]; 229869db28dcSHong Zhang rptr = rbuf_j[imdex]; 229969db28dcSHong Zhang cols = rbuf_j[imdex] + rend-rstart + 1; 230069db28dcSHong Zhang vals = rbuf_a[imdex]; 230169db28dcSHong Zhang for (i=0; i<rend-rstart; i++){ 230269db28dcSHong Zhang row = i + rstart; 230369db28dcSHong Zhang ncols = rptr[i+1] - rptr[i]; 
230469db28dcSHong Zhang ierr = MatSetValues(C,1,&row,ncols,cols,vals,INSERT_VALUES);CHKERRQ(ierr); 230569db28dcSHong Zhang vals += ncols; 230669db28dcSHong Zhang cols += ncols; 230769db28dcSHong Zhang } 230869db28dcSHong Zhang } 230969db28dcSHong Zhang ierr = MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 231069db28dcSHong Zhang ierr = MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 231169db28dcSHong Zhang ierr = MatGetSize(C,&M,&N);CHKERRQ(ierr); 2312d0f46423SBarry Smith if (M != mat->rmap->N || N != mat->cmap->N) SETERRQ2(PETSC_ERR_ARG_INCOMP,"redundant mat size %d != input mat size %d",M,mat->rmap->N); 231369db28dcSHong Zhang if (reuse == MAT_INITIAL_MATRIX){ 231469db28dcSHong Zhang PetscContainer container; 231569db28dcSHong Zhang *matredundant = C; 231669db28dcSHong Zhang /* create a supporting struct and attach it to C for reuse */ 231738f2d2fdSLisandro Dalcin ierr = PetscNewLog(C,Mat_Redundant,&redund);CHKERRQ(ierr); 231869db28dcSHong Zhang ierr = PetscContainerCreate(PETSC_COMM_SELF,&container);CHKERRQ(ierr); 231969db28dcSHong Zhang ierr = PetscContainerSetPointer(container,redund);CHKERRQ(ierr); 232069db28dcSHong Zhang ierr = PetscObjectCompose((PetscObject)C,"Mat_Redundant",(PetscObject)container);CHKERRQ(ierr); 232169db28dcSHong Zhang ierr = PetscContainerSetUserDestroy(container,PetscContainerDestroy_MatRedundant);CHKERRQ(ierr); 232269db28dcSHong Zhang 232369db28dcSHong Zhang redund->nzlocal = nzlocal; 232469db28dcSHong Zhang redund->nsends = nsends; 232569db28dcSHong Zhang redund->nrecvs = nrecvs; 232669db28dcSHong Zhang redund->send_rank = send_rank; 232769db28dcSHong Zhang redund->sbuf_nz = sbuf_nz; 232869db28dcSHong Zhang redund->sbuf_j = sbuf_j; 232969db28dcSHong Zhang redund->sbuf_a = sbuf_a; 233069db28dcSHong Zhang redund->rbuf_j = rbuf_j; 233169db28dcSHong Zhang redund->rbuf_a = rbuf_a; 233269db28dcSHong Zhang 233369db28dcSHong Zhang redund->MatDestroy = C->ops->destroy; 233469db28dcSHong Zhang C->ops->destroy = 
MatDestroy_MatRedundant; 233569db28dcSHong Zhang } 233669db28dcSHong Zhang PetscFunctionReturn(0); 233769db28dcSHong Zhang } 233869db28dcSHong Zhang 233903bc72f1SMatthew Knepley #undef __FUNCT__ 2340c91732d9SHong Zhang #define __FUNCT__ "MatGetRowMaxAbs_MPIAIJ" 2341c91732d9SHong Zhang PetscErrorCode MatGetRowMaxAbs_MPIAIJ(Mat A, Vec v, PetscInt idx[]) 2342c91732d9SHong Zhang { 2343c91732d9SHong Zhang Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 2344c91732d9SHong Zhang PetscErrorCode ierr; 2345c91732d9SHong Zhang PetscInt i,*idxb = 0; 2346c91732d9SHong Zhang PetscScalar *va,*vb; 2347c91732d9SHong Zhang Vec vtmp; 2348c91732d9SHong Zhang 2349c91732d9SHong Zhang PetscFunctionBegin; 2350c91732d9SHong Zhang ierr = MatGetRowMaxAbs(a->A,v,idx);CHKERRQ(ierr); 2351c91732d9SHong Zhang ierr = VecGetArray(v,&va);CHKERRQ(ierr); 2352c91732d9SHong Zhang if (idx) { 2353192daf7cSBarry Smith for (i=0; i<A->rmap->n; i++) { 2354d0f46423SBarry Smith if (PetscAbsScalar(va[i])) idx[i] += A->cmap->rstart; 2355c91732d9SHong Zhang } 2356c91732d9SHong Zhang } 2357c91732d9SHong Zhang 2358d0f46423SBarry Smith ierr = VecCreateSeq(PETSC_COMM_SELF,A->rmap->n,&vtmp);CHKERRQ(ierr); 2359c91732d9SHong Zhang if (idx) { 2360d0f46423SBarry Smith ierr = PetscMalloc(A->rmap->n*sizeof(PetscInt),&idxb);CHKERRQ(ierr); 2361c91732d9SHong Zhang } 2362c91732d9SHong Zhang ierr = MatGetRowMaxAbs(a->B,vtmp,idxb);CHKERRQ(ierr); 2363c91732d9SHong Zhang ierr = VecGetArray(vtmp,&vb);CHKERRQ(ierr); 2364c91732d9SHong Zhang 2365d0f46423SBarry Smith for (i=0; i<A->rmap->n; i++){ 2366c91732d9SHong Zhang if (PetscAbsScalar(va[i]) < PetscAbsScalar(vb[i])) { 2367c91732d9SHong Zhang va[i] = vb[i]; 2368c91732d9SHong Zhang if (idx) idx[i] = a->garray[idxb[i]]; 2369c91732d9SHong Zhang } 2370c91732d9SHong Zhang } 2371c91732d9SHong Zhang 2372c91732d9SHong Zhang ierr = VecRestoreArray(v,&va);CHKERRQ(ierr); 2373c91732d9SHong Zhang ierr = VecRestoreArray(vtmp,&vb);CHKERRQ(ierr); 2374c91732d9SHong Zhang if (idxb) { 2375c91732d9SHong Zhang ierr 
= PetscFree(idxb);CHKERRQ(ierr); 2376c91732d9SHong Zhang } 2377c91732d9SHong Zhang ierr = VecDestroy(vtmp);CHKERRQ(ierr); 2378c91732d9SHong Zhang PetscFunctionReturn(0); 2379c91732d9SHong Zhang } 2380c91732d9SHong Zhang 2381c91732d9SHong Zhang #undef __FUNCT__ 2382c87e5d42SMatthew Knepley #define __FUNCT__ "MatGetRowMinAbs_MPIAIJ" 2383c87e5d42SMatthew Knepley PetscErrorCode MatGetRowMinAbs_MPIAIJ(Mat A, Vec v, PetscInt idx[]) 2384c87e5d42SMatthew Knepley { 2385c87e5d42SMatthew Knepley Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 2386c87e5d42SMatthew Knepley PetscErrorCode ierr; 2387c87e5d42SMatthew Knepley PetscInt i,*idxb = 0; 2388c87e5d42SMatthew Knepley PetscScalar *va,*vb; 2389c87e5d42SMatthew Knepley Vec vtmp; 2390c87e5d42SMatthew Knepley 2391c87e5d42SMatthew Knepley PetscFunctionBegin; 2392c87e5d42SMatthew Knepley ierr = MatGetRowMinAbs(a->A,v,idx);CHKERRQ(ierr); 2393c87e5d42SMatthew Knepley ierr = VecGetArray(v,&va);CHKERRQ(ierr); 2394c87e5d42SMatthew Knepley if (idx) { 2395c87e5d42SMatthew Knepley for (i=0; i<A->cmap->n; i++) { 2396c87e5d42SMatthew Knepley if (PetscAbsScalar(va[i])) idx[i] += A->cmap->rstart; 2397c87e5d42SMatthew Knepley } 2398c87e5d42SMatthew Knepley } 2399c87e5d42SMatthew Knepley 2400c87e5d42SMatthew Knepley ierr = VecCreateSeq(PETSC_COMM_SELF,A->rmap->n,&vtmp);CHKERRQ(ierr); 2401c87e5d42SMatthew Knepley if (idx) { 2402c87e5d42SMatthew Knepley ierr = PetscMalloc(A->rmap->n*sizeof(PetscInt),&idxb);CHKERRQ(ierr); 2403c87e5d42SMatthew Knepley } 2404c87e5d42SMatthew Knepley ierr = MatGetRowMinAbs(a->B,vtmp,idxb);CHKERRQ(ierr); 2405c87e5d42SMatthew Knepley ierr = VecGetArray(vtmp,&vb);CHKERRQ(ierr); 2406c87e5d42SMatthew Knepley 2407c87e5d42SMatthew Knepley for (i=0; i<A->rmap->n; i++){ 2408c87e5d42SMatthew Knepley if (PetscAbsScalar(va[i]) > PetscAbsScalar(vb[i])) { 2409c87e5d42SMatthew Knepley va[i] = vb[i]; 2410c87e5d42SMatthew Knepley if (idx) idx[i] = a->garray[idxb[i]]; 2411c87e5d42SMatthew Knepley } 2412c87e5d42SMatthew Knepley } 
2413c87e5d42SMatthew Knepley 2414c87e5d42SMatthew Knepley ierr = VecRestoreArray(v,&va);CHKERRQ(ierr); 2415c87e5d42SMatthew Knepley ierr = VecRestoreArray(vtmp,&vb);CHKERRQ(ierr); 2416c87e5d42SMatthew Knepley if (idxb) { 2417c87e5d42SMatthew Knepley ierr = PetscFree(idxb);CHKERRQ(ierr); 2418c87e5d42SMatthew Knepley } 2419c87e5d42SMatthew Knepley ierr = VecDestroy(vtmp);CHKERRQ(ierr); 2420c87e5d42SMatthew Knepley PetscFunctionReturn(0); 2421c87e5d42SMatthew Knepley } 2422c87e5d42SMatthew Knepley 2423c87e5d42SMatthew Knepley #undef __FUNCT__ 242403bc72f1SMatthew Knepley #define __FUNCT__ "MatGetRowMin_MPIAIJ" 242503bc72f1SMatthew Knepley PetscErrorCode MatGetRowMin_MPIAIJ(Mat A, Vec v, PetscInt idx[]) 242603bc72f1SMatthew Knepley { 242703bc72f1SMatthew Knepley Mat_MPIAIJ *mat = (Mat_MPIAIJ *) A->data; 2428d0f46423SBarry Smith PetscInt n = A->rmap->n; 2429d0f46423SBarry Smith PetscInt cstart = A->cmap->rstart; 243003bc72f1SMatthew Knepley PetscInt *cmap = mat->garray; 243103bc72f1SMatthew Knepley PetscInt *diagIdx, *offdiagIdx; 243203bc72f1SMatthew Knepley Vec diagV, offdiagV; 243303bc72f1SMatthew Knepley PetscScalar *a, *diagA, *offdiagA; 243403bc72f1SMatthew Knepley PetscInt r; 243503bc72f1SMatthew Knepley PetscErrorCode ierr; 243603bc72f1SMatthew Knepley 243703bc72f1SMatthew Knepley PetscFunctionBegin; 243803bc72f1SMatthew Knepley ierr = PetscMalloc2(n,PetscInt,&diagIdx,n,PetscInt,&offdiagIdx);CHKERRQ(ierr); 2439e64afeacSLisandro Dalcin ierr = VecCreateSeq(((PetscObject)A)->comm, n, &diagV);CHKERRQ(ierr); 2440e64afeacSLisandro Dalcin ierr = VecCreateSeq(((PetscObject)A)->comm, n, &offdiagV);CHKERRQ(ierr); 244103bc72f1SMatthew Knepley ierr = MatGetRowMin(mat->A, diagV, diagIdx);CHKERRQ(ierr); 244203bc72f1SMatthew Knepley ierr = MatGetRowMin(mat->B, offdiagV, offdiagIdx);CHKERRQ(ierr); 244303bc72f1SMatthew Knepley ierr = VecGetArray(v, &a);CHKERRQ(ierr); 244403bc72f1SMatthew Knepley ierr = VecGetArray(diagV, &diagA);CHKERRQ(ierr); 244503bc72f1SMatthew Knepley ierr = 
VecGetArray(offdiagV, &offdiagA);CHKERRQ(ierr); 244603bc72f1SMatthew Knepley for(r = 0; r < n; ++r) { 2447028cd4eaSSatish Balay if (PetscAbsScalar(diagA[r]) <= PetscAbsScalar(offdiagA[r])) { 244803bc72f1SMatthew Knepley a[r] = diagA[r]; 244903bc72f1SMatthew Knepley idx[r] = cstart + diagIdx[r]; 245003bc72f1SMatthew Knepley } else { 245103bc72f1SMatthew Knepley a[r] = offdiagA[r]; 245203bc72f1SMatthew Knepley idx[r] = cmap[offdiagIdx[r]]; 245303bc72f1SMatthew Knepley } 245403bc72f1SMatthew Knepley } 245503bc72f1SMatthew Knepley ierr = VecRestoreArray(v, &a);CHKERRQ(ierr); 245603bc72f1SMatthew Knepley ierr = VecRestoreArray(diagV, &diagA);CHKERRQ(ierr); 245703bc72f1SMatthew Knepley ierr = VecRestoreArray(offdiagV, &offdiagA);CHKERRQ(ierr); 245803bc72f1SMatthew Knepley ierr = VecDestroy(diagV);CHKERRQ(ierr); 245903bc72f1SMatthew Knepley ierr = VecDestroy(offdiagV);CHKERRQ(ierr); 246003bc72f1SMatthew Knepley ierr = PetscFree2(diagIdx, offdiagIdx);CHKERRQ(ierr); 246103bc72f1SMatthew Knepley PetscFunctionReturn(0); 246203bc72f1SMatthew Knepley } 246303bc72f1SMatthew Knepley 24645494a064SHong Zhang #undef __FUNCT__ 2465c87e5d42SMatthew Knepley #define __FUNCT__ "MatGetRowMax_MPIAIJ" 2466c87e5d42SMatthew Knepley PetscErrorCode MatGetRowMax_MPIAIJ(Mat A, Vec v, PetscInt idx[]) 2467c87e5d42SMatthew Knepley { 2468c87e5d42SMatthew Knepley Mat_MPIAIJ *mat = (Mat_MPIAIJ *) A->data; 2469c87e5d42SMatthew Knepley PetscInt n = A->rmap->n; 2470c87e5d42SMatthew Knepley PetscInt cstart = A->cmap->rstart; 2471c87e5d42SMatthew Knepley PetscInt *cmap = mat->garray; 2472c87e5d42SMatthew Knepley PetscInt *diagIdx, *offdiagIdx; 2473c87e5d42SMatthew Knepley Vec diagV, offdiagV; 2474c87e5d42SMatthew Knepley PetscScalar *a, *diagA, *offdiagA; 2475c87e5d42SMatthew Knepley PetscInt r; 2476c87e5d42SMatthew Knepley PetscErrorCode ierr; 2477c87e5d42SMatthew Knepley 2478c87e5d42SMatthew Knepley PetscFunctionBegin; 2479c87e5d42SMatthew Knepley ierr = 
PetscMalloc2(n,PetscInt,&diagIdx,n,PetscInt,&offdiagIdx);CHKERRQ(ierr); 2480c87e5d42SMatthew Knepley ierr = VecCreateSeq(((PetscObject)A)->comm, n, &diagV);CHKERRQ(ierr); 2481c87e5d42SMatthew Knepley ierr = VecCreateSeq(((PetscObject)A)->comm, n, &offdiagV);CHKERRQ(ierr); 2482c87e5d42SMatthew Knepley ierr = MatGetRowMax(mat->A, diagV, diagIdx);CHKERRQ(ierr); 2483c87e5d42SMatthew Knepley ierr = MatGetRowMax(mat->B, offdiagV, offdiagIdx);CHKERRQ(ierr); 2484c87e5d42SMatthew Knepley ierr = VecGetArray(v, &a);CHKERRQ(ierr); 2485c87e5d42SMatthew Knepley ierr = VecGetArray(diagV, &diagA);CHKERRQ(ierr); 2486c87e5d42SMatthew Knepley ierr = VecGetArray(offdiagV, &offdiagA);CHKERRQ(ierr); 2487c87e5d42SMatthew Knepley for(r = 0; r < n; ++r) { 2488c87e5d42SMatthew Knepley if (PetscAbsScalar(diagA[r]) >= PetscAbsScalar(offdiagA[r])) { 2489c87e5d42SMatthew Knepley a[r] = diagA[r]; 2490c87e5d42SMatthew Knepley idx[r] = cstart + diagIdx[r]; 2491c87e5d42SMatthew Knepley } else { 2492c87e5d42SMatthew Knepley a[r] = offdiagA[r]; 2493c87e5d42SMatthew Knepley idx[r] = cmap[offdiagIdx[r]]; 2494c87e5d42SMatthew Knepley } 2495c87e5d42SMatthew Knepley } 2496c87e5d42SMatthew Knepley ierr = VecRestoreArray(v, &a);CHKERRQ(ierr); 2497c87e5d42SMatthew Knepley ierr = VecRestoreArray(diagV, &diagA);CHKERRQ(ierr); 2498c87e5d42SMatthew Knepley ierr = VecRestoreArray(offdiagV, &offdiagA);CHKERRQ(ierr); 2499c87e5d42SMatthew Knepley ierr = VecDestroy(diagV);CHKERRQ(ierr); 2500c87e5d42SMatthew Knepley ierr = VecDestroy(offdiagV);CHKERRQ(ierr); 2501c87e5d42SMatthew Knepley ierr = PetscFree2(diagIdx, offdiagIdx);CHKERRQ(ierr); 2502c87e5d42SMatthew Knepley PetscFunctionReturn(0); 2503c87e5d42SMatthew Knepley } 2504c87e5d42SMatthew Knepley 2505c87e5d42SMatthew Knepley #undef __FUNCT__ 2506829201f2SHong Zhang #define __FUNCT__ "MatGetSeqNonzerostructure_MPIAIJ" 2507829201f2SHong Zhang PetscErrorCode MatGetSeqNonzerostructure_MPIAIJ(Mat mat,Mat *newmat[]) 25085494a064SHong Zhang { 25095494a064SHong Zhang 
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* delegate to the all-rows submatrix routine, extracting only the nonzero
     pattern (MAT_DO_NOT_GET_VALUES: no numerical values are gathered) */
  ierr = MatGetSubMatrix_MPIAIJ_All(mat,MAT_DO_NOT_GET_VALUES,MAT_INITIAL_MATRIX,newmat);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------*/
/*
   Operation table for the MPIAIJ matrix type.  The ordering is positional:
   each entry fills the slot of struct _MatOps indicated by the numeric
   comments, and a 0 entry means the operation is not implemented for this
   type.  Do NOT reorder entries.
*/
static struct _MatOps MatOps_Values = {MatSetValues_MPIAIJ,
       MatGetRow_MPIAIJ,
       MatRestoreRow_MPIAIJ,
       MatMult_MPIAIJ,
/* 4*/ MatMultAdd_MPIAIJ,
       MatMultTranspose_MPIAIJ,
       MatMultTransposeAdd_MPIAIJ,
#ifdef PETSC_HAVE_PBGL
       MatSolve_MPIAIJ,
#else
       0,
#endif
       0,
       0,
/*10*/ 0,
       0,
       0,
       MatRelax_MPIAIJ,
       MatTranspose_MPIAIJ,
/*15*/ MatGetInfo_MPIAIJ,
       MatEqual_MPIAIJ,
       MatGetDiagonal_MPIAIJ,
       MatDiagonalScale_MPIAIJ,
       MatNorm_MPIAIJ,
/*20*/ MatAssemblyBegin_MPIAIJ,
       MatAssemblyEnd_MPIAIJ,
       0,
       MatSetOption_MPIAIJ,
       MatZeroEntries_MPIAIJ,
/*25*/ MatZeroRows_MPIAIJ,
       0,
#ifdef PETSC_HAVE_PBGL
       0,
#else
       0,
#endif
       0,
       0,
/*30*/ MatSetUpPreallocation_MPIAIJ,
#ifdef PETSC_HAVE_PBGL
       0,
#else
       0,
#endif
       0,
       0,
       0,
/*35*/ MatDuplicate_MPIAIJ,
       0,
       0,
       0,
       0,
/*40*/ MatAXPY_MPIAIJ,
       MatGetSubMatrices_MPIAIJ,
       MatIncreaseOverlap_MPIAIJ,
       MatGetValues_MPIAIJ,
       MatCopy_MPIAIJ,
/*45*/ MatGetRowMax_MPIAIJ,
       MatScale_MPIAIJ,
       0,
       0,
       0,
/*50*/ MatSetBlockSize_MPIAIJ,
       0,
       0,
       0,
       0,
/*55*/ MatFDColoringCreate_MPIAIJ,
       0,
       MatSetUnfactored_MPIAIJ,
       MatPermute_MPIAIJ,
       0,
/*60*/ MatGetSubMatrix_MPIAIJ,
       MatDestroy_MPIAIJ,
       MatView_MPIAIJ,
       0,
       0,
/*65*/ 0,
       0,
       0,
       0,
       0,
/*70*/ MatGetRowMaxAbs_MPIAIJ,
       MatGetRowMinAbs_MPIAIJ,
       0,
       MatSetColoring_MPIAIJ,
#if defined(PETSC_HAVE_ADIC)
       MatSetValuesAdic_MPIAIJ,
#else
       0,
#endif
       MatSetValuesAdifor_MPIAIJ,
/*75*/ 0,
       0,
       0,
       0,
       0,
/*80*/ 0,
       0,
       0,
/*84*/ MatLoad_MPIAIJ,
       0,
       0,
       0,
       0,
       0,
/*90*/ MatMatMult_MPIAIJ_MPIAIJ,
       MatMatMultSymbolic_MPIAIJ_MPIAIJ,
       MatMatMultNumeric_MPIAIJ_MPIAIJ,
       MatPtAP_Basic,
       MatPtAPSymbolic_MPIAIJ,
/*95*/ MatPtAPNumeric_MPIAIJ,
       0,
       0,
       0,
       0,
/*100*/0,
       MatPtAPSymbolic_MPIAIJ_MPIAIJ,
       MatPtAPNumeric_MPIAIJ_MPIAIJ,
       MatConjugate_MPIAIJ,
       0,
/*105*/MatSetValuesRow_MPIAIJ,
       MatRealPart_MPIAIJ,
       MatImaginaryPart_MPIAIJ,
       0,
       0,
/*110*/0,
       MatGetRedundantMatrix_MPIAIJ,
       MatGetRowMin_MPIAIJ,
       0,
       0,
/*115*/MatGetSeqNonzerostructure_MPIAIJ};

/* ----------------------------------------------------------------------------------------*/

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatStoreValues_MPIAIJ"
/*
   MatStoreValues_MPIAIJ - Saves the numerical values of the matrix so they
   can later be restored with MatRetrieveValues_MPIAIJ().  Simply delegates
   to the diagonal (A) and off-diagonal (B) sequential blocks.
*/
PetscErrorCode PETSCMAT_DLLEXPORT MatStoreValues_MPIAIJ(Mat mat)
{
  Mat_MPIAIJ     *aij = (Mat_MPIAIJ *)mat->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatStoreValues(aij->A);CHKERRQ(ierr);
  ierr = MatStoreValues(aij->B);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatRetrieveValues_MPIAIJ"
/*
   MatRetrieveValues_MPIAIJ - Restores the numerical values previously saved
   with MatStoreValues_MPIAIJ(), again delegating to the two SeqAIJ blocks.
*/
PetscErrorCode PETSCMAT_DLLEXPORT MatRetrieveValues_MPIAIJ(Mat mat)
{
  Mat_MPIAIJ     *aij = (Mat_MPIAIJ *)mat->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatRetrieveValues(aij->A);CHKERRQ(ierr);
  ierr = MatRetrieveValues(aij->B);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

#include "petscpc.h"
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatMPIAIJSetPreallocation_MPIAIJ"
/*
   MatMPIAIJSetPreallocation_MPIAIJ - Preallocates the two sequential blocks
   of an MPIAIJ matrix.

   d_nz/d_nnz - nonzeros per row of the diagonal block (uniform count or
                per-row array; PETSC_DEFAULT/PETSC_DECIDE selects 5)
   o_nz/o_nnz - nonzeros per row of the off-diagonal block (default 2)
*/
PetscErrorCode PETSCMAT_DLLEXPORT MatMPIAIJSetPreallocation_MPIAIJ(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[])
{
  Mat_MPIAIJ     *b;
  PetscErrorCode ierr;
  PetscInt       i;

  PetscFunctionBegin;
  B->preallocated = PETSC_TRUE;
  /* fall back to default per-row estimates when the caller gave no hint */
  if (d_nz == PETSC_DEFAULT || d_nz == PETSC_DECIDE) d_nz = 5;
  if (o_nz == PETSC_DEFAULT || o_nz == PETSC_DECIDE) o_nz = 2;
  if (d_nz < 0) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"d_nz cannot be less than 0: value %D",d_nz);
  if (o_nz < 0) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"o_nz cannot be less than 0: value %D",o_nz);

  /* AIJ has no block structure: force block size 1 and set up the layouts */
  ierr = PetscMapSetBlockSize(B->rmap,1);CHKERRQ(ierr);
  ierr = PetscMapSetBlockSize(B->cmap,1);CHKERRQ(ierr);
  ierr = PetscMapSetUp(B->rmap);CHKERRQ(ierr);
  ierr = PetscMapSetUp(B->cmap);CHKERRQ(ierr);
  /* validate any per-row counts before handing them to the SeqAIJ blocks */
  if (d_nnz) {
    for (i=0; i<B->rmap->n; i++) {
      if (d_nnz[i] < 0) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"d_nnz cannot be less than 0: local row %D value %D",i,d_nnz[i]);
    }
  }
  if (o_nnz) {
    for (i=0; i<B->rmap->n; i++) {
      if (o_nnz[i] < 0) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"o_nnz cannot be less than 0: local row %D value %D",i,o_nnz[i]);
    }
  }
  b = (Mat_MPIAIJ*)B->data;

  /* Explicitly create 2 MATSEQAIJ matrices.
  */
  ierr = MatCreate(PETSC_COMM_SELF,&b->A);CHKERRQ(ierr);
  ierr = MatSetSizes(b->A,B->rmap->n,B->cmap->n,B->rmap->n,B->cmap->n);CHKERRQ(ierr);
  ierr = MatSetType(b->A,MATSEQAIJ);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(B,b->A);CHKERRQ(ierr);
  /* the off-diagonal block is stored with the full global column space */
  ierr = MatCreate(PETSC_COMM_SELF,&b->B);CHKERRQ(ierr);
  ierr = MatSetSizes(b->B,B->rmap->n,B->cmap->N,B->rmap->n,B->cmap->N);CHKERRQ(ierr);
  ierr = MatSetType(b->B,MATSEQAIJ);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(B,b->B);CHKERRQ(ierr);

  ierr = MatSeqAIJSetPreallocation(b->A,d_nz,d_nnz);CHKERRQ(ierr);
  ierr = MatSeqAIJSetPreallocation(b->B,o_nz,o_nnz);CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "MatDuplicate_MPIAIJ"
/*
   MatDuplicate_MPIAIJ - Duplicates an MPIAIJ matrix: copies the parallel
   layout and communication structures (colmap, garray, lvec, Mvctx) and,
   depending on cpvalues, the numerical values of both sequential blocks.
*/
PetscErrorCode MatDuplicate_MPIAIJ(Mat matin,MatDuplicateOption cpvalues,Mat *newmat)
{
  Mat            mat;
  Mat_MPIAIJ     *a,*oldmat = (Mat_MPIAIJ*)matin->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  *newmat = 0;
  ierr = MatCreate(((PetscObject)matin)->comm,&mat);CHKERRQ(ierr);
  ierr = MatSetSizes(mat,matin->rmap->n,matin->cmap->n,matin->rmap->N,matin->cmap->N);CHKERRQ(ierr);
  ierr = MatSetType(mat,((PetscObject)matin)->type_name);CHKERRQ(ierr);
  /* copy the whole operation table so any per-object overrides survive */
  ierr = PetscMemcpy(mat->ops,matin->ops,sizeof(struct _MatOps));CHKERRQ(ierr);
  a    = (Mat_MPIAIJ*)mat->data;

  mat->factor       = matin->factor;
  mat->rmap->bs     = matin->rmap->bs;
  mat->assembled    = PETSC_TRUE;
  mat->insertmode   = NOT_SET_VALUES;
  mat->preallocated = PETSC_TRUE;

  a->size         = oldmat->size;
  a->rank         = oldmat->rank;
  a->donotstash   = oldmat->donotstash;
  a->roworiented  = oldmat->roworiented;
  /* per-use MatGetRow() scratch state is NOT copied */
  a->rowindices   = 0;
  a->rowvalues    = 0;
  a->getrowactive = PETSC_FALSE;

  ierr = PetscMapCopy(((PetscObject)mat)->comm,matin->rmap,mat->rmap);CHKERRQ(ierr);
  ierr = PetscMapCopy(((PetscObject)mat)->comm,matin->cmap,mat->cmap);CHKERRQ(ierr);

  ierr = MatStashCreate_Private(((PetscObject)matin)->comm,1,&mat->stash);CHKERRQ(ierr);
  /* global-to-local column map: a hash table or a dense array depending on
     the configuration */
  if (oldmat->colmap) {
#if defined (PETSC_USE_CTABLE)
    ierr = PetscTableCreateCopy(oldmat->colmap,&a->colmap);CHKERRQ(ierr);
#else
    ierr = PetscMalloc((mat->cmap->N)*sizeof(PetscInt),&a->colmap);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory(mat,(mat->cmap->N)*sizeof(PetscInt));CHKERRQ(ierr);
    ierr = PetscMemcpy(a->colmap,oldmat->colmap,(mat->cmap->N)*sizeof(PetscInt));CHKERRQ(ierr);
#endif
  } else a->colmap = 0;
  /* garray maps local columns of the off-diagonal block to global columns */
  if (oldmat->garray) {
    PetscInt len;
    len  = oldmat->B->cmap->n;
    ierr = PetscMalloc((len+1)*sizeof(PetscInt),&a->garray);CHKERRQ(ierr);
    ierr = PetscLogObjectMemory(mat,len*sizeof(PetscInt));CHKERRQ(ierr);
    if (len) { ierr = PetscMemcpy(a->garray,oldmat->garray,len*sizeof(PetscInt));CHKERRQ(ierr); }
  } else a->garray = 0;

  ierr = VecDuplicate(oldmat->lvec,&a->lvec);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(mat,a->lvec);CHKERRQ(ierr);
  ierr = VecScatterCopy(oldmat->Mvctx,&a->Mvctx);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(mat,a->Mvctx);CHKERRQ(ierr);
  ierr = MatDuplicate(oldmat->A,cpvalues,&a->A);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(mat,a->A);CHKERRQ(ierr);
  ierr = MatDuplicate(oldmat->B,cpvalues,&a->B);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(mat,a->B);CHKERRQ(ierr);
  ierr = PetscFListDuplicate(((PetscObject)matin)->qlist,&((PetscObject)mat)->qlist);CHKERRQ(ierr);
  *newmat = mat;
  PetscFunctionReturn(0);
}

#include "petscsys.h"

#undef __FUNCT__
#define __FUNCT__ "MatLoad_MPIAIJ"
/*
   MatLoad_MPIAIJ - Loads an MPIAIJ matrix from a binary viewer; rank 0 reads
   the file and ships each process its share of the rows.
*/
PetscErrorCode MatLoad_MPIAIJ(PetscViewer viewer, const MatType type,Mat *newmat)
{
  Mat            A;
  PetscScalar    *vals,*svals;
  MPI_Comm       comm = ((PetscObject)viewer)->comm;
  MPI_Status     status;
  PetscErrorCode ierr;
  PetscMPIInt    rank,size,tag = ((PetscObject)viewer)->tag,maxnz;
  PetscInt       i,nz,j,rstart,rend,mmax;
Smith PetscInt header[4],*rowlengths = 0,M,N,m,*cols; 2810910ba992SMatthew Knepley PetscInt *ourlens = PETSC_NULL,*procsnz = PETSC_NULL,*offlens = PETSC_NULL,jj,*mycols,*smycols; 2811dc231df0SBarry Smith PetscInt cend,cstart,n,*rowners; 2812b1d57f15SBarry Smith int fd; 2813416022c9SBarry Smith 28143a40ed3dSBarry Smith PetscFunctionBegin; 28151dab6e02SBarry Smith ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 28161dab6e02SBarry Smith ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 281717699dbbSLois Curfman McInnes if (!rank) { 2818b0a32e0cSBarry Smith ierr = PetscViewerBinaryGetDescriptor(viewer,&fd);CHKERRQ(ierr); 28190752156aSBarry Smith ierr = PetscBinaryRead(fd,(char *)header,4,PETSC_INT);CHKERRQ(ierr); 2820552e946dSBarry Smith if (header[0] != MAT_FILE_COOKIE) SETERRQ(PETSC_ERR_FILE_UNEXPECTED,"not matrix object"); 28216c5fab8fSBarry Smith } 28226c5fab8fSBarry Smith 2823b1d57f15SBarry Smith ierr = MPI_Bcast(header+1,3,MPIU_INT,0,comm);CHKERRQ(ierr); 2824416022c9SBarry Smith M = header[1]; N = header[2]; 2825416022c9SBarry Smith /* determine ownership of all rows */ 282629cdbbc8SSatish Balay m = M/size + ((M % size) > rank); 2827dc231df0SBarry Smith ierr = PetscMalloc((size+1)*sizeof(PetscInt),&rowners);CHKERRQ(ierr); 2828dc231df0SBarry Smith ierr = MPI_Allgather(&m,1,MPIU_INT,rowners+1,1,MPIU_INT,comm);CHKERRQ(ierr); 2829167e7480SBarry Smith 2830167e7480SBarry Smith /* First process needs enough room for process with most rows */ 2831167e7480SBarry Smith if (!rank) { 2832167e7480SBarry Smith mmax = rowners[1]; 2833167e7480SBarry Smith for (i=2; i<size; i++) { 2834167e7480SBarry Smith mmax = PetscMax(mmax,rowners[i]); 2835167e7480SBarry Smith } 2836167e7480SBarry Smith } else mmax = m; 2837167e7480SBarry Smith 2838416022c9SBarry Smith rowners[0] = 0; 283917699dbbSLois Curfman McInnes for (i=2; i<=size; i++) { 2840416022c9SBarry Smith rowners[i] += rowners[i-1]; 2841416022c9SBarry Smith } 284217699dbbSLois Curfman McInnes rstart = rowners[rank]; 
284317699dbbSLois Curfman McInnes rend = rowners[rank+1]; 2844416022c9SBarry Smith 2845416022c9SBarry Smith /* distribute row lengths to all processors */ 2846167e7480SBarry Smith ierr = PetscMalloc2(mmax,PetscInt,&ourlens,mmax,PetscInt,&offlens);CHKERRQ(ierr); 284717699dbbSLois Curfman McInnes if (!rank) { 2848dc231df0SBarry Smith ierr = PetscBinaryRead(fd,ourlens,m,PETSC_INT);CHKERRQ(ierr); 2849dc231df0SBarry Smith ierr = PetscMalloc(m*sizeof(PetscInt),&rowlengths);CHKERRQ(ierr); 2850b1d57f15SBarry Smith ierr = PetscMalloc(size*sizeof(PetscInt),&procsnz);CHKERRQ(ierr); 2851b1d57f15SBarry Smith ierr = PetscMemzero(procsnz,size*sizeof(PetscInt));CHKERRQ(ierr); 2852dc231df0SBarry Smith for (j=0; j<m; j++) { 2853dc231df0SBarry Smith procsnz[0] += ourlens[j]; 2854dc231df0SBarry Smith } 2855dc231df0SBarry Smith for (i=1; i<size; i++) { 2856dc231df0SBarry Smith ierr = PetscBinaryRead(fd,rowlengths,rowners[i+1]-rowners[i],PETSC_INT);CHKERRQ(ierr); 2857dc231df0SBarry Smith /* calculate the number of nonzeros on each processor */ 2858dc231df0SBarry Smith for (j=0; j<rowners[i+1]-rowners[i]; j++) { 2859416022c9SBarry Smith procsnz[i] += rowlengths[j]; 2860416022c9SBarry Smith } 2861dc231df0SBarry Smith ierr = MPI_Send(rowlengths,rowners[i+1]-rowners[i],MPIU_INT,i,tag,comm);CHKERRQ(ierr); 2862416022c9SBarry Smith } 2863606d414cSSatish Balay ierr = PetscFree(rowlengths);CHKERRQ(ierr); 2864dc231df0SBarry Smith } else { 2865dc231df0SBarry Smith ierr = MPI_Recv(ourlens,m,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr); 2866dc231df0SBarry Smith } 2867416022c9SBarry Smith 2868dc231df0SBarry Smith if (!rank) { 2869416022c9SBarry Smith /* determine max buffer needed and allocate it */ 2870416022c9SBarry Smith maxnz = 0; 28718a8e0b3aSBarry Smith for (i=0; i<size; i++) { 28720452661fSBarry Smith maxnz = PetscMax(maxnz,procsnz[i]); 2873416022c9SBarry Smith } 2874b1d57f15SBarry Smith ierr = PetscMalloc(maxnz*sizeof(PetscInt),&cols);CHKERRQ(ierr); 2875416022c9SBarry Smith 2876416022c9SBarry 
Smith /* read in my part of the matrix column indices */ 2877416022c9SBarry Smith nz = procsnz[0]; 2878b1d57f15SBarry Smith ierr = PetscMalloc(nz*sizeof(PetscInt),&mycols);CHKERRQ(ierr); 28790752156aSBarry Smith ierr = PetscBinaryRead(fd,mycols,nz,PETSC_INT);CHKERRQ(ierr); 2880d65a2f8fSBarry Smith 2881d65a2f8fSBarry Smith /* read in every one elses and ship off */ 288217699dbbSLois Curfman McInnes for (i=1; i<size; i++) { 2883d65a2f8fSBarry Smith nz = procsnz[i]; 28840752156aSBarry Smith ierr = PetscBinaryRead(fd,cols,nz,PETSC_INT);CHKERRQ(ierr); 2885b1d57f15SBarry Smith ierr = MPI_Send(cols,nz,MPIU_INT,i,tag,comm);CHKERRQ(ierr); 2886d65a2f8fSBarry Smith } 2887606d414cSSatish Balay ierr = PetscFree(cols);CHKERRQ(ierr); 28883a40ed3dSBarry Smith } else { 2889416022c9SBarry Smith /* determine buffer space needed for message */ 2890416022c9SBarry Smith nz = 0; 2891416022c9SBarry Smith for (i=0; i<m; i++) { 2892416022c9SBarry Smith nz += ourlens[i]; 2893416022c9SBarry Smith } 2894dc231df0SBarry Smith ierr = PetscMalloc(nz*sizeof(PetscInt),&mycols);CHKERRQ(ierr); 2895416022c9SBarry Smith 2896416022c9SBarry Smith /* receive message of column indices*/ 2897b1d57f15SBarry Smith ierr = MPI_Recv(mycols,nz,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr); 2898b1d57f15SBarry Smith ierr = MPI_Get_count(&status,MPIU_INT,&maxnz);CHKERRQ(ierr); 289929bbc08cSBarry Smith if (maxnz != nz) SETERRQ(PETSC_ERR_FILE_UNEXPECTED,"something is wrong with file"); 2900416022c9SBarry Smith } 2901416022c9SBarry Smith 2902b362ba68SBarry Smith /* determine column ownership if matrix is not square */ 2903b362ba68SBarry Smith if (N != M) { 2904b362ba68SBarry Smith n = N/size + ((N % size) > rank); 2905b1d57f15SBarry Smith ierr = MPI_Scan(&n,&cend,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr); 2906b362ba68SBarry Smith cstart = cend - n; 2907b362ba68SBarry Smith } else { 2908b362ba68SBarry Smith cstart = rstart; 2909b362ba68SBarry Smith cend = rend; 2910fb2e594dSBarry Smith n = cend - cstart; 2911b362ba68SBarry 
Smith } 2912b362ba68SBarry Smith 2913416022c9SBarry Smith /* loop over local rows, determining number of off diagonal entries */ 2914b1d57f15SBarry Smith ierr = PetscMemzero(offlens,m*sizeof(PetscInt));CHKERRQ(ierr); 2915416022c9SBarry Smith jj = 0; 2916416022c9SBarry Smith for (i=0; i<m; i++) { 2917416022c9SBarry Smith for (j=0; j<ourlens[i]; j++) { 2918b362ba68SBarry Smith if (mycols[jj] < cstart || mycols[jj] >= cend) offlens[i]++; 2919416022c9SBarry Smith jj++; 2920416022c9SBarry Smith } 2921416022c9SBarry Smith } 2922d65a2f8fSBarry Smith 2923d65a2f8fSBarry Smith /* create our matrix */ 2924416022c9SBarry Smith for (i=0; i<m; i++) { 2925416022c9SBarry Smith ourlens[i] -= offlens[i]; 2926416022c9SBarry Smith } 2927f69a0ea3SMatthew Knepley ierr = MatCreate(comm,&A);CHKERRQ(ierr); 2928f69a0ea3SMatthew Knepley ierr = MatSetSizes(A,m,n,M,N);CHKERRQ(ierr); 2929d10c748bSKris Buschelman ierr = MatSetType(A,type);CHKERRQ(ierr); 2930d10c748bSKris Buschelman ierr = MatMPIAIJSetPreallocation(A,0,ourlens,0,offlens);CHKERRQ(ierr); 2931d10c748bSKris Buschelman 2932d65a2f8fSBarry Smith for (i=0; i<m; i++) { 2933d65a2f8fSBarry Smith ourlens[i] += offlens[i]; 2934d65a2f8fSBarry Smith } 2935416022c9SBarry Smith 293617699dbbSLois Curfman McInnes if (!rank) { 2937906b51c7SHong Zhang ierr = PetscMalloc((maxnz+1)*sizeof(PetscScalar),&vals);CHKERRQ(ierr); 2938416022c9SBarry Smith 2939416022c9SBarry Smith /* read in my part of the matrix numerical values */ 2940416022c9SBarry Smith nz = procsnz[0]; 29410752156aSBarry Smith ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr); 2942d65a2f8fSBarry Smith 2943d65a2f8fSBarry Smith /* insert into matrix */ 2944d65a2f8fSBarry Smith jj = rstart; 2945d65a2f8fSBarry Smith smycols = mycols; 2946d65a2f8fSBarry Smith svals = vals; 2947d65a2f8fSBarry Smith for (i=0; i<m; i++) { 2948dc231df0SBarry Smith ierr = MatSetValues_MPIAIJ(A,1,&jj,ourlens[i],smycols,svals,INSERT_VALUES);CHKERRQ(ierr); 2949d65a2f8fSBarry Smith smycols += ourlens[i]; 
2950d65a2f8fSBarry Smith svals += ourlens[i]; 2951d65a2f8fSBarry Smith jj++; 2952416022c9SBarry Smith } 2953416022c9SBarry Smith 2954d65a2f8fSBarry Smith /* read in other processors and ship out */ 295517699dbbSLois Curfman McInnes for (i=1; i<size; i++) { 2956416022c9SBarry Smith nz = procsnz[i]; 29570752156aSBarry Smith ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr); 29587adad957SLisandro Dalcin ierr = MPI_Send(vals,nz,MPIU_SCALAR,i,((PetscObject)A)->tag,comm);CHKERRQ(ierr); 2959416022c9SBarry Smith } 2960606d414cSSatish Balay ierr = PetscFree(procsnz);CHKERRQ(ierr); 29613a40ed3dSBarry Smith } else { 2962d65a2f8fSBarry Smith /* receive numeric values */ 296387828ca2SBarry Smith ierr = PetscMalloc((nz+1)*sizeof(PetscScalar),&vals);CHKERRQ(ierr); 2964416022c9SBarry Smith 2965d65a2f8fSBarry Smith /* receive message of values*/ 29667adad957SLisandro Dalcin ierr = MPI_Recv(vals,nz,MPIU_SCALAR,0,((PetscObject)A)->tag,comm,&status);CHKERRQ(ierr); 2967ca161407SBarry Smith ierr = MPI_Get_count(&status,MPIU_SCALAR,&maxnz);CHKERRQ(ierr); 296829bbc08cSBarry Smith if (maxnz != nz) SETERRQ(PETSC_ERR_FILE_UNEXPECTED,"something is wrong with file"); 2969d65a2f8fSBarry Smith 2970d65a2f8fSBarry Smith /* insert into matrix */ 2971d65a2f8fSBarry Smith jj = rstart; 2972d65a2f8fSBarry Smith smycols = mycols; 2973d65a2f8fSBarry Smith svals = vals; 2974d65a2f8fSBarry Smith for (i=0; i<m; i++) { 2975dc231df0SBarry Smith ierr = MatSetValues_MPIAIJ(A,1,&jj,ourlens[i],smycols,svals,INSERT_VALUES);CHKERRQ(ierr); 2976d65a2f8fSBarry Smith smycols += ourlens[i]; 2977d65a2f8fSBarry Smith svals += ourlens[i]; 2978d65a2f8fSBarry Smith jj++; 2979d65a2f8fSBarry Smith } 2980d65a2f8fSBarry Smith } 2981dc231df0SBarry Smith ierr = PetscFree2(ourlens,offlens);CHKERRQ(ierr); 2982606d414cSSatish Balay ierr = PetscFree(vals);CHKERRQ(ierr); 2983606d414cSSatish Balay ierr = PetscFree(mycols);CHKERRQ(ierr); 2984606d414cSSatish Balay ierr = PetscFree(rowners);CHKERRQ(ierr); 2985d65a2f8fSBarry 
Smith 29866d4a8577SBarry Smith ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 29876d4a8577SBarry Smith ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 2988d10c748bSKris Buschelman *newmat = A; 29893a40ed3dSBarry Smith PetscFunctionReturn(0); 2990416022c9SBarry Smith } 2991a0ff6018SBarry Smith 29924a2ae208SSatish Balay #undef __FUNCT__ 29934a2ae208SSatish Balay #define __FUNCT__ "MatGetSubMatrix_MPIAIJ" 2994a0ff6018SBarry Smith /* 299529da9460SBarry Smith Not great since it makes two copies of the submatrix, first an SeqAIJ 299629da9460SBarry Smith in local and then by concatenating the local matrices the end result. 299729da9460SBarry Smith Writing it directly would be much like MatGetSubMatrices_MPIAIJ() 2998a0ff6018SBarry Smith */ 2999b1d57f15SBarry Smith PetscErrorCode MatGetSubMatrix_MPIAIJ(Mat mat,IS isrow,IS iscol,PetscInt csize,MatReuse call,Mat *newmat) 3000a0ff6018SBarry Smith { 3001dfbe8321SBarry Smith PetscErrorCode ierr; 300232dcc486SBarry Smith PetscMPIInt rank,size; 3003b1d57f15SBarry Smith PetscInt i,m,n,rstart,row,rend,nz,*cwork,j; 3004b1d57f15SBarry Smith PetscInt *ii,*jj,nlocal,*dlens,*olens,dlen,olen,jend,mglobal; 3005fee21e36SBarry Smith Mat *local,M,Mreuse; 3006a77337e4SBarry Smith MatScalar *vwork,*aa; 30077adad957SLisandro Dalcin MPI_Comm comm = ((PetscObject)mat)->comm; 300800e6dbe6SBarry Smith Mat_SeqAIJ *aij; 30097e2c5f70SBarry Smith 3010a0ff6018SBarry Smith 3011a0ff6018SBarry Smith PetscFunctionBegin; 30121dab6e02SBarry Smith ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 30131dab6e02SBarry Smith ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 301400e6dbe6SBarry Smith 3015fee21e36SBarry Smith if (call == MAT_REUSE_MATRIX) { 3016fee21e36SBarry Smith ierr = PetscObjectQuery((PetscObject)*newmat,"SubMatrix",(PetscObject *)&Mreuse);CHKERRQ(ierr); 3017e005ede5SBarry Smith if (!Mreuse) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Submatrix passed in was not used before, cannot reuse"); 3018fee21e36SBarry Smith local = &Mreuse; 
3019fee21e36SBarry Smith ierr = MatGetSubMatrices(mat,1,&isrow,&iscol,MAT_REUSE_MATRIX,&local);CHKERRQ(ierr); 3020fee21e36SBarry Smith } else { 3021a0ff6018SBarry Smith ierr = MatGetSubMatrices(mat,1,&isrow,&iscol,MAT_INITIAL_MATRIX,&local);CHKERRQ(ierr); 3022fee21e36SBarry Smith Mreuse = *local; 3023606d414cSSatish Balay ierr = PetscFree(local);CHKERRQ(ierr); 3024fee21e36SBarry Smith } 3025a0ff6018SBarry Smith 3026a0ff6018SBarry Smith /* 3027a0ff6018SBarry Smith m - number of local rows 3028a0ff6018SBarry Smith n - number of columns (same on all processors) 3029a0ff6018SBarry Smith rstart - first row in new global matrix generated 3030a0ff6018SBarry Smith */ 3031fee21e36SBarry Smith ierr = MatGetSize(Mreuse,&m,&n);CHKERRQ(ierr); 3032a0ff6018SBarry Smith if (call == MAT_INITIAL_MATRIX) { 3033fee21e36SBarry Smith aij = (Mat_SeqAIJ*)(Mreuse)->data; 303400e6dbe6SBarry Smith ii = aij->i; 303500e6dbe6SBarry Smith jj = aij->j; 303600e6dbe6SBarry Smith 3037a0ff6018SBarry Smith /* 303800e6dbe6SBarry Smith Determine the number of non-zeros in the diagonal and off-diagonal 303900e6dbe6SBarry Smith portions of the matrix in order to do correct preallocation 3040a0ff6018SBarry Smith */ 304100e6dbe6SBarry Smith 304200e6dbe6SBarry Smith /* first get start and end of "diagonal" columns */ 30436a6a5d1dSBarry Smith if (csize == PETSC_DECIDE) { 3044ab50ec6bSBarry Smith ierr = ISGetSize(isrow,&mglobal);CHKERRQ(ierr); 3045ab50ec6bSBarry Smith if (mglobal == n) { /* square matrix */ 3046e2c4fddaSBarry Smith nlocal = m; 30476a6a5d1dSBarry Smith } else { 3048ab50ec6bSBarry Smith nlocal = n/size + ((n % size) > rank); 3049ab50ec6bSBarry Smith } 3050ab50ec6bSBarry Smith } else { 30516a6a5d1dSBarry Smith nlocal = csize; 30526a6a5d1dSBarry Smith } 3053b1d57f15SBarry Smith ierr = MPI_Scan(&nlocal,&rend,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr); 305400e6dbe6SBarry Smith rstart = rend - nlocal; 30556a6a5d1dSBarry Smith if (rank == size - 1 && rend != n) { 305677431f27SBarry Smith 
SETERRQ2(PETSC_ERR_ARG_SIZ,"Local column sizes %D do not add up to total number of columns %D",rend,n); 30576a6a5d1dSBarry Smith } 305800e6dbe6SBarry Smith 305900e6dbe6SBarry Smith /* next, compute all the lengths */ 3060b1d57f15SBarry Smith ierr = PetscMalloc((2*m+1)*sizeof(PetscInt),&dlens);CHKERRQ(ierr); 306100e6dbe6SBarry Smith olens = dlens + m; 306200e6dbe6SBarry Smith for (i=0; i<m; i++) { 306300e6dbe6SBarry Smith jend = ii[i+1] - ii[i]; 306400e6dbe6SBarry Smith olen = 0; 306500e6dbe6SBarry Smith dlen = 0; 306600e6dbe6SBarry Smith for (j=0; j<jend; j++) { 306700e6dbe6SBarry Smith if (*jj < rstart || *jj >= rend) olen++; 306800e6dbe6SBarry Smith else dlen++; 306900e6dbe6SBarry Smith jj++; 307000e6dbe6SBarry Smith } 307100e6dbe6SBarry Smith olens[i] = olen; 307200e6dbe6SBarry Smith dlens[i] = dlen; 307300e6dbe6SBarry Smith } 3074f69a0ea3SMatthew Knepley ierr = MatCreate(comm,&M);CHKERRQ(ierr); 3075f69a0ea3SMatthew Knepley ierr = MatSetSizes(M,m,nlocal,PETSC_DECIDE,n);CHKERRQ(ierr); 30767adad957SLisandro Dalcin ierr = MatSetType(M,((PetscObject)mat)->type_name);CHKERRQ(ierr); 3077e2d9671bSKris Buschelman ierr = MatMPIAIJSetPreallocation(M,0,dlens,0,olens);CHKERRQ(ierr); 3078606d414cSSatish Balay ierr = PetscFree(dlens);CHKERRQ(ierr); 3079a0ff6018SBarry Smith } else { 3080b1d57f15SBarry Smith PetscInt ml,nl; 3081a0ff6018SBarry Smith 3082a0ff6018SBarry Smith M = *newmat; 3083a0ff6018SBarry Smith ierr = MatGetLocalSize(M,&ml,&nl);CHKERRQ(ierr); 308429bbc08cSBarry Smith if (ml != m) SETERRQ(PETSC_ERR_ARG_SIZ,"Previous matrix must be same size/layout as request"); 3085a0ff6018SBarry Smith ierr = MatZeroEntries(M);CHKERRQ(ierr); 3086c48de900SBarry Smith /* 3087c48de900SBarry Smith The next two lines are needed so we may call MatSetValues_MPIAIJ() below directly, 3088c48de900SBarry Smith rather than the slower MatSetValues(). 
3089c48de900SBarry Smith */ 3090c48de900SBarry Smith M->was_assembled = PETSC_TRUE; 3091c48de900SBarry Smith M->assembled = PETSC_FALSE; 3092a0ff6018SBarry Smith } 3093a0ff6018SBarry Smith ierr = MatGetOwnershipRange(M,&rstart,&rend);CHKERRQ(ierr); 3094fee21e36SBarry Smith aij = (Mat_SeqAIJ*)(Mreuse)->data; 309500e6dbe6SBarry Smith ii = aij->i; 309600e6dbe6SBarry Smith jj = aij->j; 309700e6dbe6SBarry Smith aa = aij->a; 3098a0ff6018SBarry Smith for (i=0; i<m; i++) { 3099a0ff6018SBarry Smith row = rstart + i; 310000e6dbe6SBarry Smith nz = ii[i+1] - ii[i]; 310100e6dbe6SBarry Smith cwork = jj; jj += nz; 310200e6dbe6SBarry Smith vwork = aa; aa += nz; 31038c638d02SBarry Smith ierr = MatSetValues_MPIAIJ(M,1,&row,nz,cwork,vwork,INSERT_VALUES);CHKERRQ(ierr); 3104a0ff6018SBarry Smith } 3105a0ff6018SBarry Smith 3106a0ff6018SBarry Smith ierr = MatAssemblyBegin(M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 3107a0ff6018SBarry Smith ierr = MatAssemblyEnd(M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 3108a0ff6018SBarry Smith *newmat = M; 3109fee21e36SBarry Smith 3110fee21e36SBarry Smith /* save submatrix used in processor for next request */ 3111fee21e36SBarry Smith if (call == MAT_INITIAL_MATRIX) { 3112fee21e36SBarry Smith ierr = PetscObjectCompose((PetscObject)M,"SubMatrix",(PetscObject)Mreuse);CHKERRQ(ierr); 3113fee21e36SBarry Smith ierr = PetscObjectDereference((PetscObject)Mreuse);CHKERRQ(ierr); 3114fee21e36SBarry Smith } 3115fee21e36SBarry Smith 3116a0ff6018SBarry Smith PetscFunctionReturn(0); 3117a0ff6018SBarry Smith } 3118273d9f13SBarry Smith 3119e2e86b8fSSatish Balay EXTERN_C_BEGIN 31204a2ae208SSatish Balay #undef __FUNCT__ 3121ccd8e176SBarry Smith #define __FUNCT__ "MatMPIAIJSetPreallocationCSR_MPIAIJ" 3122b7940d39SSatish Balay PetscErrorCode PETSCMAT_DLLEXPORT MatMPIAIJSetPreallocationCSR_MPIAIJ(Mat B,const PetscInt Ii[],const PetscInt J[],const PetscScalar v[]) 3123ccd8e176SBarry Smith { 3124899cda47SBarry Smith PetscInt m,cstart, cend,j,nnz,i,d; 3125899cda47SBarry Smith PetscInt 
*d_nnz,*o_nnz,nnz_max = 0,rstart,ii; 3126ccd8e176SBarry Smith const PetscInt *JJ; 3127ccd8e176SBarry Smith PetscScalar *values; 3128ccd8e176SBarry Smith PetscErrorCode ierr; 3129ccd8e176SBarry Smith 3130ccd8e176SBarry Smith PetscFunctionBegin; 3131b7940d39SSatish Balay if (Ii[0]) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Ii[0] must be 0 it is %D",Ii[0]); 3132899cda47SBarry Smith 31337408324eSLisandro Dalcin ierr = PetscMapSetBlockSize(B->rmap,1);CHKERRQ(ierr); 31347408324eSLisandro Dalcin ierr = PetscMapSetBlockSize(B->cmap,1);CHKERRQ(ierr); 3135d0f46423SBarry Smith ierr = PetscMapSetUp(B->rmap);CHKERRQ(ierr); 3136d0f46423SBarry Smith ierr = PetscMapSetUp(B->cmap);CHKERRQ(ierr); 3137d0f46423SBarry Smith m = B->rmap->n; 3138d0f46423SBarry Smith cstart = B->cmap->rstart; 3139d0f46423SBarry Smith cend = B->cmap->rend; 3140d0f46423SBarry Smith rstart = B->rmap->rstart; 3141899cda47SBarry Smith 3142ccd8e176SBarry Smith ierr = PetscMalloc((2*m+1)*sizeof(PetscInt),&d_nnz);CHKERRQ(ierr); 3143ccd8e176SBarry Smith o_nnz = d_nnz + m; 3144ccd8e176SBarry Smith 3145ecc77c7aSBarry Smith #if defined(PETSC_USE_DEBUGGING) 3146ecc77c7aSBarry Smith for (i=0; i<m; i++) { 3147ecc77c7aSBarry Smith nnz = Ii[i+1]- Ii[i]; 3148ecc77c7aSBarry Smith JJ = J + Ii[i]; 3149ecc77c7aSBarry Smith if (nnz < 0) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Local row %D has a negative %D number of columns",i,nnz); 3150ecc77c7aSBarry Smith if (nnz && (JJ[0] < 0)) SETERRRQ1(PETSC_ERR_ARG_WRONGSTATE,"Row %D starts with negative column index",i,j); 3151d0f46423SBarry Smith if (nnz && (JJ[nnz-1] >= B->cmap->N) SETERRRQ3(PETSC_ERR_ARG_WRONGSTATE,"Row %D ends with too large a column index %D (max allowed %D)",i,JJ[nnz-1],B->cmap->N); 3152ecc77c7aSBarry Smith for (j=1; j<nnz; j++) { 3153ecc77c7aSBarry Smith if (JJ[i] <= JJ[i-1]) SETERRRQ(PETSC_ERR_ARG_WRONGSTATE,"Row %D has unsorted column index at %D location in column indices",i,j); 3154ecc77c7aSBarry Smith } 3155ecc77c7aSBarry Smith } 3156ecc77c7aSBarry Smith #endif 
3157ecc77c7aSBarry Smith 3158ccd8e176SBarry Smith for (i=0; i<m; i++) { 3159b7940d39SSatish Balay nnz = Ii[i+1]- Ii[i]; 3160b7940d39SSatish Balay JJ = J + Ii[i]; 3161ccd8e176SBarry Smith nnz_max = PetscMax(nnz_max,nnz); 3162ccd8e176SBarry Smith for (j=0; j<nnz; j++) { 3163ccd8e176SBarry Smith if (*JJ >= cstart) break; 3164ccd8e176SBarry Smith JJ++; 3165ccd8e176SBarry Smith } 3166ccd8e176SBarry Smith d = 0; 3167ccd8e176SBarry Smith for (; j<nnz; j++) { 3168ccd8e176SBarry Smith if (*JJ++ >= cend) break; 3169ccd8e176SBarry Smith d++; 3170ccd8e176SBarry Smith } 3171ccd8e176SBarry Smith d_nnz[i] = d; 3172ccd8e176SBarry Smith o_nnz[i] = nnz - d; 3173ccd8e176SBarry Smith } 3174ccd8e176SBarry Smith ierr = MatMPIAIJSetPreallocation(B,0,d_nnz,0,o_nnz);CHKERRQ(ierr); 3175ccd8e176SBarry Smith ierr = PetscFree(d_nnz);CHKERRQ(ierr); 3176ccd8e176SBarry Smith 3177ccd8e176SBarry Smith if (v) values = (PetscScalar*)v; 3178ccd8e176SBarry Smith else { 3179ccd8e176SBarry Smith ierr = PetscMalloc((nnz_max+1)*sizeof(PetscScalar),&values);CHKERRQ(ierr); 3180ccd8e176SBarry Smith ierr = PetscMemzero(values,nnz_max*sizeof(PetscScalar));CHKERRQ(ierr); 3181ccd8e176SBarry Smith } 3182ccd8e176SBarry Smith 3183ccd8e176SBarry Smith for (i=0; i<m; i++) { 3184ccd8e176SBarry Smith ii = i + rstart; 3185b7940d39SSatish Balay nnz = Ii[i+1]- Ii[i]; 3186b7940d39SSatish Balay ierr = MatSetValues_MPIAIJ(B,1,&ii,nnz,J+Ii[i],values+(v ? 
Ii[i] : 0),INSERT_VALUES);CHKERRQ(ierr); 3187ccd8e176SBarry Smith } 3188ccd8e176SBarry Smith ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 3189ccd8e176SBarry Smith ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 3190ccd8e176SBarry Smith 3191ccd8e176SBarry Smith if (!v) { 3192ccd8e176SBarry Smith ierr = PetscFree(values);CHKERRQ(ierr); 3193ccd8e176SBarry Smith } 3194ccd8e176SBarry Smith PetscFunctionReturn(0); 3195ccd8e176SBarry Smith } 3196e2e86b8fSSatish Balay EXTERN_C_END 3197ccd8e176SBarry Smith 3198ccd8e176SBarry Smith #undef __FUNCT__ 3199ccd8e176SBarry Smith #define __FUNCT__ "MatMPIAIJSetPreallocationCSR" 32001eea217eSSatish Balay /*@ 3201ccd8e176SBarry Smith MatMPIAIJSetPreallocationCSR - Allocates memory for a sparse parallel matrix in AIJ format 3202ccd8e176SBarry Smith (the default parallel PETSc format). 3203ccd8e176SBarry Smith 3204ccd8e176SBarry Smith Collective on MPI_Comm 3205ccd8e176SBarry Smith 3206ccd8e176SBarry Smith Input Parameters: 3207a1661176SMatthew Knepley + B - the matrix 3208ccd8e176SBarry Smith . i - the indices into j for the start of each local row (starts with zero) 3209ccd8e176SBarry Smith . j - the column indices for each local row (starts with zero) these must be sorted for each row 3210ccd8e176SBarry Smith - v - optional values in the matrix 3211ccd8e176SBarry Smith 3212ccd8e176SBarry Smith Level: developer 3213ccd8e176SBarry Smith 321412251496SSatish Balay Notes: 321512251496SSatish Balay The i, j, and a arrays ARE copied by this routine into the internal format used by PETSc; 321612251496SSatish Balay thus you CANNOT change the matrix entries by changing the values of a[] after you have 321712251496SSatish Balay called this routine. Use MatCreateMPIAIJWithSplitArrays() to avoid needing to copy the arrays. 321812251496SSatish Balay 321912251496SSatish Balay The i and j indices are 0 based, and i indices are indices corresponding to the local j array. 
322012251496SSatish Balay 322112251496SSatish Balay The format which is used for the sparse matrix input, is equivalent to a 322212251496SSatish Balay row-major ordering.. i.e for the following matrix, the input data expected is 322312251496SSatish Balay as shown: 322412251496SSatish Balay 322512251496SSatish Balay 1 0 0 322612251496SSatish Balay 2 0 3 P0 322712251496SSatish Balay ------- 322812251496SSatish Balay 4 5 6 P1 322912251496SSatish Balay 323012251496SSatish Balay Process0 [P0]: rows_owned=[0,1] 323112251496SSatish Balay i = {0,1,3} [size = nrow+1 = 2+1] 323212251496SSatish Balay j = {0,0,2} [size = nz = 6] 323312251496SSatish Balay v = {1,2,3} [size = nz = 6] 323412251496SSatish Balay 323512251496SSatish Balay Process1 [P1]: rows_owned=[2] 323612251496SSatish Balay i = {0,3} [size = nrow+1 = 1+1] 323712251496SSatish Balay j = {0,1,2} [size = nz = 6] 323812251496SSatish Balay v = {4,5,6} [size = nz = 6] 323912251496SSatish Balay 3240ecc77c7aSBarry Smith The column indices for each row MUST be sorted. 
32412fb0ec9aSBarry Smith 3242ccd8e176SBarry Smith .keywords: matrix, aij, compressed row, sparse, parallel 3243ccd8e176SBarry Smith 32442fb0ec9aSBarry Smith .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIAIJSetPreallocation(), MatCreateMPIAIJ(), MPIAIJ, 32458d7a6e47SBarry Smith MatCreateSeqAIJWithArrays(), MatCreateMPIAIJWithSplitArrays() 3246ccd8e176SBarry Smith @*/ 3247be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatMPIAIJSetPreallocationCSR(Mat B,const PetscInt i[],const PetscInt j[], const PetscScalar v[]) 3248ccd8e176SBarry Smith { 3249ccd8e176SBarry Smith PetscErrorCode ierr,(*f)(Mat,const PetscInt[],const PetscInt[],const PetscScalar[]); 3250ccd8e176SBarry Smith 3251ccd8e176SBarry Smith PetscFunctionBegin; 3252ccd8e176SBarry Smith ierr = PetscObjectQueryFunction((PetscObject)B,"MatMPIAIJSetPreallocationCSR_C",(void (**)(void))&f);CHKERRQ(ierr); 3253ccd8e176SBarry Smith if (f) { 3254ccd8e176SBarry Smith ierr = (*f)(B,i,j,v);CHKERRQ(ierr); 3255ccd8e176SBarry Smith } 3256ccd8e176SBarry Smith PetscFunctionReturn(0); 3257ccd8e176SBarry Smith } 3258ccd8e176SBarry Smith 3259ccd8e176SBarry Smith #undef __FUNCT__ 32604a2ae208SSatish Balay #define __FUNCT__ "MatMPIAIJSetPreallocation" 3261273d9f13SBarry Smith /*@C 3262ccd8e176SBarry Smith MatMPIAIJSetPreallocation - Preallocates memory for a sparse parallel matrix in AIJ format 3263273d9f13SBarry Smith (the default parallel PETSc format). For good matrix assembly performance 3264273d9f13SBarry Smith the user should preallocate the matrix storage by setting the parameters 3265273d9f13SBarry Smith d_nz (or d_nnz) and o_nz (or o_nnz). By setting these parameters accurately, 3266273d9f13SBarry Smith performance can be increased by more than a factor of 50. 3267273d9f13SBarry Smith 3268273d9f13SBarry Smith Collective on MPI_Comm 3269273d9f13SBarry Smith 3270273d9f13SBarry Smith Input Parameters: 3271273d9f13SBarry Smith + A - the matrix 3272273d9f13SBarry Smith . 
d_nz - number of nonzeros per row in DIAGONAL portion of local submatrix 3273273d9f13SBarry Smith (same value is used for all local rows) 3274273d9f13SBarry Smith . d_nnz - array containing the number of nonzeros in the various rows of the 3275273d9f13SBarry Smith DIAGONAL portion of the local submatrix (possibly different for each row) 3276273d9f13SBarry Smith or PETSC_NULL, if d_nz is used to specify the nonzero structure. 3277273d9f13SBarry Smith The size of this array is equal to the number of local rows, i.e 'm'. 3278273d9f13SBarry Smith You must leave room for the diagonal entry even if it is zero. 3279273d9f13SBarry Smith . o_nz - number of nonzeros per row in the OFF-DIAGONAL portion of local 3280273d9f13SBarry Smith submatrix (same value is used for all local rows). 3281273d9f13SBarry Smith - o_nnz - array containing the number of nonzeros in the various rows of the 3282273d9f13SBarry Smith OFF-DIAGONAL portion of the local submatrix (possibly different for 3283273d9f13SBarry Smith each row) or PETSC_NULL, if o_nz is used to specify the nonzero 3284273d9f13SBarry Smith structure. The size of this array is equal to the number 3285273d9f13SBarry Smith of local rows, i.e 'm'. 3286273d9f13SBarry Smith 328749a6f317SBarry Smith If the *_nnz parameter is given then the *_nz parameter is ignored 328849a6f317SBarry Smith 3289273d9f13SBarry Smith The AIJ format (also called the Yale sparse matrix format or 3290ccd8e176SBarry Smith compressed row storage (CSR)), is fully compatible with standard Fortran 77 3291ccd8e176SBarry Smith storage. The stored row and column indices begin with zero. See the users manual for details. 3292273d9f13SBarry Smith 3293273d9f13SBarry Smith The parallel matrix is partitioned such that the first m0 rows belong to 3294273d9f13SBarry Smith process 0, the next m1 rows belong to process 1, the next m2 rows belong 3295273d9f13SBarry Smith to process 2 etc.. where m0,m1,m2... are the input parameter 'm'. 
3296273d9f13SBarry Smith 3297273d9f13SBarry Smith The DIAGONAL portion of the local submatrix of a processor can be defined 3298273d9f13SBarry Smith as the submatrix which is obtained by extracting the part corresponding 3299273d9f13SBarry Smith to the rows r1-r2 and columns r1-r2 of the global matrix, where r1 is the 3300273d9f13SBarry Smith first row that belongs to the processor, and r2 is the last row belonging 3301273d9f13SBarry Smith to this processor. This is a square mxm matrix. The remaining portion 3302273d9f13SBarry Smith of the local submatrix (mxN) constitutes the OFF-DIAGONAL portion. 3303273d9f13SBarry Smith 3304273d9f13SBarry Smith If o_nnz, d_nnz are specified, then o_nz, and d_nz are ignored. 3305273d9f13SBarry Smith 3306aa95bbe8SBarry Smith You can call MatGetInfo() to get information on how effective the preallocation was; 3307aa95bbe8SBarry Smith for example the fields mallocs,nz_allocated,nz_used,nz_unneeded; 3308aa95bbe8SBarry Smith You can also run with the option -info and look for messages with the string 3309aa95bbe8SBarry Smith malloc in them to see if additional memory allocation was needed. 3310aa95bbe8SBarry Smith 3311273d9f13SBarry Smith Example usage: 3312273d9f13SBarry Smith 3313273d9f13SBarry Smith Consider the following 8x8 matrix with 34 non-zero values, that is 3314273d9f13SBarry Smith assembled across 3 processors. Let's assume that proc0 owns 3 rows, 3315273d9f13SBarry Smith proc1 owns 3 rows, proc2 owns 2 rows.
This division can be shown 3316273d9f13SBarry Smith as follows: 3317273d9f13SBarry Smith 3318273d9f13SBarry Smith .vb 3319273d9f13SBarry Smith 1 2 0 | 0 3 0 | 0 4 3320273d9f13SBarry Smith Proc0 0 5 6 | 7 0 0 | 8 0 3321273d9f13SBarry Smith 9 0 10 | 11 0 0 | 12 0 3322273d9f13SBarry Smith ------------------------------------- 3323273d9f13SBarry Smith 13 0 14 | 15 16 17 | 0 0 3324273d9f13SBarry Smith Proc1 0 18 0 | 19 20 21 | 0 0 3325273d9f13SBarry Smith 0 0 0 | 22 23 0 | 24 0 3326273d9f13SBarry Smith ------------------------------------- 3327273d9f13SBarry Smith Proc2 25 26 27 | 0 0 28 | 29 0 3328273d9f13SBarry Smith 30 0 0 | 31 32 33 | 0 34 3329273d9f13SBarry Smith .ve 3330273d9f13SBarry Smith 3331273d9f13SBarry Smith This can be represented as a collection of submatrices as: 3332273d9f13SBarry Smith 3333273d9f13SBarry Smith .vb 3334273d9f13SBarry Smith A B C 3335273d9f13SBarry Smith D E F 3336273d9f13SBarry Smith G H I 3337273d9f13SBarry Smith .ve 3338273d9f13SBarry Smith 3339273d9f13SBarry Smith Where the submatrices A,B,C are owned by proc0, D,E,F are 3340273d9f13SBarry Smith owned by proc1, G,H,I are owned by proc2. 3341273d9f13SBarry Smith 3342273d9f13SBarry Smith The 'm' parameters for proc0,proc1,proc2 are 3,3,2 respectively. 3343273d9f13SBarry Smith The 'n' parameters for proc0,proc1,proc2 are 3,3,2 respectively. 3344273d9f13SBarry Smith The 'M','N' parameters are 8,8, and have the same values on all procs. 3345273d9f13SBarry Smith 3346273d9f13SBarry Smith The DIAGONAL submatrices corresponding to proc0,proc1,proc2 are 3347273d9f13SBarry Smith submatrices [A], [E], [I] respectively. The OFF-DIAGONAL submatrices 3348273d9f13SBarry Smith corresponding to proc0,proc1,proc2 are [BC], [DF], [GH] respectively. 3349273d9f13SBarry Smith Internally, each processor stores the DIAGONAL part, and the OFF-DIAGONAL 3350273d9f13SBarry Smith part as SeqAIJ matrices. For example, proc1 will store [E] as a SeqAIJ 3351273d9f13SBarry Smith matrix, and [DF] as another SeqAIJ matrix.
3352273d9f13SBarry Smith 3353273d9f13SBarry Smith When d_nz, o_nz parameters are specified, d_nz storage elements are 3354273d9f13SBarry Smith allocated for every row of the local diagonal submatrix, and o_nz 3355273d9f13SBarry Smith storage locations are allocated for every row of the OFF-DIAGONAL submat. 3356273d9f13SBarry Smith One way to choose d_nz and o_nz is to use the max nonzeros per local 3357273d9f13SBarry Smith rows for each of the local DIAGONAL, and the OFF-DIAGONAL submatrices. 3358273d9f13SBarry Smith In this case, the values of d_nz,o_nz are: 3359273d9f13SBarry Smith .vb 3360273d9f13SBarry Smith proc0 : dnz = 2, o_nz = 2 3361273d9f13SBarry Smith proc1 : dnz = 3, o_nz = 2 3362273d9f13SBarry Smith proc2 : dnz = 1, o_nz = 4 3363273d9f13SBarry Smith .ve 3364273d9f13SBarry Smith We are allocating m*(d_nz+o_nz) storage locations for every proc. This 3365273d9f13SBarry Smith translates to 3*(2+2)=12 for proc0, 3*(3+2)=15 for proc1, 2*(1+4)=10 3366273d9f13SBarry Smith for proc2. i.e we are using 12+15+10=37 storage locations to store 3367273d9f13SBarry Smith 34 values. 3368273d9f13SBarry Smith 3369273d9f13SBarry Smith When d_nnz, o_nnz parameters are specified, the storage is specified 3370273d9f13SBarry Smith for every row, corresponding to both DIAGONAL and OFF-DIAGONAL submatrices. 3371273d9f13SBarry Smith In the above case the values for d_nnz,o_nnz are: 3372273d9f13SBarry Smith .vb 3373273d9f13SBarry Smith proc0: d_nnz = [2,2,2] and o_nnz = [2,2,2] 3374273d9f13SBarry Smith proc1: d_nnz = [3,3,2] and o_nnz = [2,1,1] 3375273d9f13SBarry Smith proc2: d_nnz = [1,1] and o_nnz = [4,4] 3376273d9f13SBarry Smith .ve 3377273d9f13SBarry Smith Here the space allocated is sum of all the above values i.e 34, and 3378273d9f13SBarry Smith hence pre-allocation is perfect.
3379273d9f13SBarry Smith 3380273d9f13SBarry Smith Level: intermediate 3381273d9f13SBarry Smith 3382273d9f13SBarry Smith .keywords: matrix, aij, compressed row, sparse, parallel 3383273d9f13SBarry Smith 3384ccd8e176SBarry Smith .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatCreateMPIAIJ(), MatMPIAIJSetPreallocationCSR(), 3385aa95bbe8SBarry Smith MPIAIJ, MatGetInfo() 3386273d9f13SBarry Smith @*/ 3387be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatMPIAIJSetPreallocation(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[]) 3388273d9f13SBarry Smith { 3389b1d57f15SBarry Smith PetscErrorCode ierr,(*f)(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]); 3390273d9f13SBarry Smith 3391273d9f13SBarry Smith PetscFunctionBegin; 3392a23d5eceSKris Buschelman ierr = PetscObjectQueryFunction((PetscObject)B,"MatMPIAIJSetPreallocation_C",(void (**)(void))&f);CHKERRQ(ierr); 3393a23d5eceSKris Buschelman if (f) { 3394a23d5eceSKris Buschelman ierr = (*f)(B,d_nz,d_nnz,o_nz,o_nnz);CHKERRQ(ierr); 3395273d9f13SBarry Smith } 3396273d9f13SBarry Smith PetscFunctionReturn(0); 3397273d9f13SBarry Smith } 3398273d9f13SBarry Smith 33994a2ae208SSatish Balay #undef __FUNCT__ 34002fb0ec9aSBarry Smith #define __FUNCT__ "MatCreateMPIAIJWithArrays" 340158d36128SBarry Smith /*@ 34022fb0ec9aSBarry Smith MatCreateMPIAIJWithArrays - creates a MPI AIJ matrix using arrays that contain in standard 34032fb0ec9aSBarry Smith CSR format the local rows. 34042fb0ec9aSBarry Smith 34052fb0ec9aSBarry Smith Collective on MPI_Comm 34062fb0ec9aSBarry Smith 34072fb0ec9aSBarry Smith Input Parameters: 34082fb0ec9aSBarry Smith + comm - MPI communicator 34092fb0ec9aSBarry Smith . m - number of local rows (Cannot be PETSC_DECIDE) 34102fb0ec9aSBarry Smith . n - This value should be the same as the local size used in creating the 34112fb0ec9aSBarry Smith x vector for the matrix-vector product y = Ax. 
(or PETSC_DECIDE to have 34122fb0ec9aSBarry Smith calculated if N is given) For square matrices n is almost always m. 34132fb0ec9aSBarry Smith . M - number of global rows (or PETSC_DETERMINE to have calculated if m is given) 34142fb0ec9aSBarry Smith . N - number of global columns (or PETSC_DETERMINE to have calculated if n is given) 34152fb0ec9aSBarry Smith . i - row indices 34162fb0ec9aSBarry Smith . j - column indices 34172fb0ec9aSBarry Smith - a - matrix values 34182fb0ec9aSBarry Smith 34192fb0ec9aSBarry Smith Output Parameter: 34202fb0ec9aSBarry Smith . mat - the matrix 342103bfb495SBarry Smith 34222fb0ec9aSBarry Smith Level: intermediate 34232fb0ec9aSBarry Smith 34242fb0ec9aSBarry Smith Notes: 34252fb0ec9aSBarry Smith The i, j, and a arrays ARE copied by this routine into the internal format used by PETSc; 34262fb0ec9aSBarry Smith thus you CANNOT change the matrix entries by changing the values of a[] after you have 34278d7a6e47SBarry Smith called this routine. Use MatCreateMPIAIJWithSplitArrays() to avoid needing to copy the arrays. 34282fb0ec9aSBarry Smith 342912251496SSatish Balay The i and j indices are 0 based, and i indices are indices corresponding to the local j array. 343012251496SSatish Balay 343112251496SSatish Balay The format which is used for the sparse matrix input, is equivalent to a 343212251496SSatish Balay row-major ordering.. 
i.e for the following matrix, the input data expected is 343312251496SSatish Balay as shown: 343412251496SSatish Balay 343512251496SSatish Balay 1 0 0 343612251496SSatish Balay 2 0 3 P0 343712251496SSatish Balay ------- 343812251496SSatish Balay 4 5 6 P1 343912251496SSatish Balay 344012251496SSatish Balay Process0 [P0]: rows_owned=[0,1] 344112251496SSatish Balay i = {0,1,3} [size = nrow+1 = 2+1] 344212251496SSatish Balay j = {0,0,2} [size = nz = 6] 344312251496SSatish Balay v = {1,2,3} [size = nz = 6] 344412251496SSatish Balay 344512251496SSatish Balay Process1 [P1]: rows_owned=[2] 344612251496SSatish Balay i = {0,3} [size = nrow+1 = 1+1] 344712251496SSatish Balay j = {0,1,2} [size = nz = 6] 344812251496SSatish Balay v = {4,5,6} [size = nz = 6] 34492fb0ec9aSBarry Smith 34502fb0ec9aSBarry Smith .keywords: matrix, aij, compressed row, sparse, parallel 34512fb0ec9aSBarry Smith 34522fb0ec9aSBarry Smith .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIAIJSetPreallocation(), MatMPIAIJSetPreallocationCSR(), 34538d7a6e47SBarry Smith MPIAIJ, MatCreateMPIAIJ(), MatCreateMPIAIJWithSplitArrays() 34542fb0ec9aSBarry Smith @*/ 345582b90586SSatish Balay PetscErrorCode PETSCMAT_DLLEXPORT MatCreateMPIAIJWithArrays(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt i[],const PetscInt j[],const PetscScalar a[],Mat *mat) 34562fb0ec9aSBarry Smith { 34572fb0ec9aSBarry Smith PetscErrorCode ierr; 34582fb0ec9aSBarry Smith 34592fb0ec9aSBarry Smith PetscFunctionBegin; 34602fb0ec9aSBarry Smith if (i[0]) { 34612fb0ec9aSBarry Smith SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"i (row indices) must start with 0"); 34622fb0ec9aSBarry Smith } 34632fb0ec9aSBarry Smith if (m < 0) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"local number of rows (m) cannot be PETSC_DECIDE, or negative"); 34642fb0ec9aSBarry Smith ierr = MatCreate(comm,mat);CHKERRQ(ierr); 3465d4146a68SBarry Smith ierr = MatSetSizes(*mat,m,n,M,N);CHKERRQ(ierr); 34662fb0ec9aSBarry Smith ierr = 
MatSetType(*mat,MATMPIAIJ);CHKERRQ(ierr); 34672fb0ec9aSBarry Smith ierr = MatMPIAIJSetPreallocationCSR(*mat,i,j,a);CHKERRQ(ierr); 34682fb0ec9aSBarry Smith PetscFunctionReturn(0); 34692fb0ec9aSBarry Smith } 34702fb0ec9aSBarry Smith 34712fb0ec9aSBarry Smith #undef __FUNCT__ 34724a2ae208SSatish Balay #define __FUNCT__ "MatCreateMPIAIJ" 3473273d9f13SBarry Smith /*@C 3474273d9f13SBarry Smith MatCreateMPIAIJ - Creates a sparse parallel matrix in AIJ format 3475273d9f13SBarry Smith (the default parallel PETSc format). For good matrix assembly performance 3476273d9f13SBarry Smith the user should preallocate the matrix storage by setting the parameters 3477273d9f13SBarry Smith d_nz (or d_nnz) and o_nz (or o_nnz). By setting these parameters accurately, 3478273d9f13SBarry Smith performance can be increased by more than a factor of 50. 3479273d9f13SBarry Smith 3480273d9f13SBarry Smith Collective on MPI_Comm 3481273d9f13SBarry Smith 3482273d9f13SBarry Smith Input Parameters: 3483273d9f13SBarry Smith + comm - MPI communicator 3484273d9f13SBarry Smith . m - number of local rows (or PETSC_DECIDE to have calculated if M is given) 3485273d9f13SBarry Smith This value should be the same as the local size used in creating the 3486273d9f13SBarry Smith y vector for the matrix-vector product y = Ax. 3487273d9f13SBarry Smith . n - This value should be the same as the local size used in creating the 3488273d9f13SBarry Smith x vector for the matrix-vector product y = Ax. (or PETSC_DECIDE to have 3489273d9f13SBarry Smith calculated if N is given) For square matrices n is almost always m. 3490273d9f13SBarry Smith . M - number of global rows (or PETSC_DETERMINE to have calculated if m is given) 3491273d9f13SBarry Smith . N - number of global columns (or PETSC_DETERMINE to have calculated if n is given) 3492273d9f13SBarry Smith . d_nz - number of nonzeros per row in DIAGONAL portion of local submatrix 3493273d9f13SBarry Smith (same value is used for all local rows) 3494273d9f13SBarry Smith . 
d_nnz - array containing the number of nonzeros in the various rows of the 3495273d9f13SBarry Smith DIAGONAL portion of the local submatrix (possibly different for each row) 3496273d9f13SBarry Smith or PETSC_NULL, if d_nz is used to specify the nonzero structure. 3497273d9f13SBarry Smith The size of this array is equal to the number of local rows, i.e 'm'. 3498273d9f13SBarry Smith You must leave room for the diagonal entry even if it is zero. 3499273d9f13SBarry Smith . o_nz - number of nonzeros per row in the OFF-DIAGONAL portion of local 3500273d9f13SBarry Smith submatrix (same value is used for all local rows). 3501273d9f13SBarry Smith - o_nnz - array containing the number of nonzeros in the various rows of the 3502273d9f13SBarry Smith OFF-DIAGONAL portion of the local submatrix (possibly different for 3503273d9f13SBarry Smith each row) or PETSC_NULL, if o_nz is used to specify the nonzero 3504273d9f13SBarry Smith structure. The size of this array is equal to the number 3505273d9f13SBarry Smith of local rows, i.e 'm'. 3506273d9f13SBarry Smith 3507273d9f13SBarry Smith Output Parameter: 3508273d9f13SBarry Smith . A - the matrix 3509273d9f13SBarry Smith 3510175b88e8SBarry Smith It is recommended that one use the MatCreate(), MatSetType() and/or MatSetFromOptions(), 3511ae1d86c5SBarry Smith MatXXXXSetPreallocation() paradgm instead of this routine directly. 3512175b88e8SBarry Smith [MatXXXXSetPreallocation() is, for example, MatSeqAIJSetPreallocation] 3513175b88e8SBarry Smith 3514273d9f13SBarry Smith Notes: 351549a6f317SBarry Smith If the *_nnz parameter is given then the *_nz parameter is ignored 351649a6f317SBarry Smith 3517273d9f13SBarry Smith m,n,M,N parameters specify the size of the matrix, and its partitioning across 3518273d9f13SBarry Smith processors, while d_nz,d_nnz,o_nz,o_nnz parameters specify the approximate 3519273d9f13SBarry Smith storage requirements for this matrix. 
3520273d9f13SBarry Smith 3521273d9f13SBarry Smith If PETSC_DECIDE or PETSC_DETERMINE is used for a particular argument on one 3522273d9f13SBarry Smith processor than it must be used on all processors that share the object for 3523273d9f13SBarry Smith that argument. 3524273d9f13SBarry Smith 3525273d9f13SBarry Smith The user MUST specify either the local or global matrix dimensions 3526273d9f13SBarry Smith (possibly both). 3527273d9f13SBarry Smith 352833a7c187SSatish Balay The parallel matrix is partitioned across processors such that the 352933a7c187SSatish Balay first m0 rows belong to process 0, the next m1 rows belong to 353033a7c187SSatish Balay process 1, the next m2 rows belong to process 2 etc.. where 353133a7c187SSatish Balay m0,m1,m2,.. are the input parameter 'm'. i.e each processor stores 353233a7c187SSatish Balay values corresponding to [m x N] submatrix. 3533273d9f13SBarry Smith 353433a7c187SSatish Balay The columns are logically partitioned with the n0 columns belonging 353533a7c187SSatish Balay to 0th partition, the next n1 columns belonging to the next 353633a7c187SSatish Balay partition etc.. where n0,n1,n2... are the the input parameter 'n'. 353733a7c187SSatish Balay 353833a7c187SSatish Balay The DIAGONAL portion of the local submatrix on any given processor 353933a7c187SSatish Balay is the submatrix corresponding to the rows and columns m,n 354033a7c187SSatish Balay corresponding to the given processor. i.e diagonal matrix on 354133a7c187SSatish Balay process 0 is [m0 x n0], diagonal matrix on process 1 is [m1 x n1] 354233a7c187SSatish Balay etc. The remaining portion of the local submatrix [m x (N-n)] 354333a7c187SSatish Balay constitute the OFF-DIAGONAL portion. The example below better 354433a7c187SSatish Balay illustrates this concept. 
354533a7c187SSatish Balay 354633a7c187SSatish Balay For a square global matrix we define each processor's diagonal portion 354733a7c187SSatish Balay to be its local rows and the corresponding columns (a square submatrix); 354833a7c187SSatish Balay each processor's off-diagonal portion encompasses the remainder of the 354933a7c187SSatish Balay local matrix (a rectangular submatrix). 3550273d9f13SBarry Smith 3551273d9f13SBarry Smith If o_nnz, d_nnz are specified, then o_nz, and d_nz are ignored. 3552273d9f13SBarry Smith 355397d05335SKris Buschelman When calling this routine with a single process communicator, a matrix of 355497d05335SKris Buschelman type SEQAIJ is returned. If a matrix of type MPIAIJ is desired for this 355597d05335SKris Buschelman type of communicator, use the construction mechanism: 355697d05335SKris Buschelman MatCreate(...,&A); MatSetType(A,MPIAIJ); MatMPIAIJSetPreallocation(A,...); 355797d05335SKris Buschelman 3558273d9f13SBarry Smith By default, this format uses inodes (identical nodes) when possible. 3559273d9f13SBarry Smith We search for consecutive rows with the same nonzero structure, thereby 3560273d9f13SBarry Smith reusing matrix information to achieve increased efficiency. 3561273d9f13SBarry Smith 3562273d9f13SBarry Smith Options Database Keys: 3563923f20ffSKris Buschelman + -mat_no_inode - Do not use inodes 3564923f20ffSKris Buschelman . -mat_inode_limit <limit> - Sets inode limit (max limit=5) 3565273d9f13SBarry Smith - -mat_aij_oneindex - Internally use indexing starting at 1 3566273d9f13SBarry Smith rather than 0. Note that when calling MatSetValues(), 3567273d9f13SBarry Smith the user still MUST index entries starting at 0! 3568273d9f13SBarry Smith 3569273d9f13SBarry Smith 3570273d9f13SBarry Smith Example usage: 3571273d9f13SBarry Smith 3572273d9f13SBarry Smith Consider the following 8x8 matrix with 34 non-zero values, that is 3573273d9f13SBarry Smith assembled across 3 processors. 
Lets assume that proc0 owns 3 rows, 3574273d9f13SBarry Smith proc1 owns 3 rows, proc2 owns 2 rows. This division can be shown 3575273d9f13SBarry Smith as follows: 3576273d9f13SBarry Smith 3577273d9f13SBarry Smith .vb 3578273d9f13SBarry Smith 1 2 0 | 0 3 0 | 0 4 3579273d9f13SBarry Smith Proc0 0 5 6 | 7 0 0 | 8 0 3580273d9f13SBarry Smith 9 0 10 | 11 0 0 | 12 0 3581273d9f13SBarry Smith ------------------------------------- 3582273d9f13SBarry Smith 13 0 14 | 15 16 17 | 0 0 3583273d9f13SBarry Smith Proc1 0 18 0 | 19 20 21 | 0 0 3584273d9f13SBarry Smith 0 0 0 | 22 23 0 | 24 0 3585273d9f13SBarry Smith ------------------------------------- 3586273d9f13SBarry Smith Proc2 25 26 27 | 0 0 28 | 29 0 3587273d9f13SBarry Smith 30 0 0 | 31 32 33 | 0 34 3588273d9f13SBarry Smith .ve 3589273d9f13SBarry Smith 3590273d9f13SBarry Smith This can be represented as a collection of submatrices as: 3591273d9f13SBarry Smith 3592273d9f13SBarry Smith .vb 3593273d9f13SBarry Smith A B C 3594273d9f13SBarry Smith D E F 3595273d9f13SBarry Smith G H I 3596273d9f13SBarry Smith .ve 3597273d9f13SBarry Smith 3598273d9f13SBarry Smith Where the submatrices A,B,C are owned by proc0, D,E,F are 3599273d9f13SBarry Smith owned by proc1, G,H,I are owned by proc2. 3600273d9f13SBarry Smith 3601273d9f13SBarry Smith The 'm' parameters for proc0,proc1,proc2 are 3,3,2 respectively. 3602273d9f13SBarry Smith The 'n' parameters for proc0,proc1,proc2 are 3,3,2 respectively. 3603273d9f13SBarry Smith The 'M','N' parameters are 8,8, and have the same values on all procs. 3604273d9f13SBarry Smith 3605273d9f13SBarry Smith The DIAGONAL submatrices corresponding to proc0,proc1,proc2 are 3606273d9f13SBarry Smith submatrices [A], [E], [I] respectively. The OFF-DIAGONAL submatrices 3607273d9f13SBarry Smith corresponding to proc0,proc1,proc2 are [BC], [DF], [GH] respectively. 3608273d9f13SBarry Smith Internally, each processor stores the DIAGONAL part, and the OFF-DIAGONAL 3609273d9f13SBarry Smith part as SeqAIJ matrices. 
for eg: proc1 will store [E] as a SeqAIJ 3610273d9f13SBarry Smith matrix, ans [DF] as another SeqAIJ matrix. 3611273d9f13SBarry Smith 3612273d9f13SBarry Smith When d_nz, o_nz parameters are specified, d_nz storage elements are 3613273d9f13SBarry Smith allocated for every row of the local diagonal submatrix, and o_nz 3614273d9f13SBarry Smith storage locations are allocated for every row of the OFF-DIAGONAL submat. 3615273d9f13SBarry Smith One way to choose d_nz and o_nz is to use the max nonzerors per local 3616273d9f13SBarry Smith rows for each of the local DIAGONAL, and the OFF-DIAGONAL submatrices. 3617273d9f13SBarry Smith In this case, the values of d_nz,o_nz are: 3618273d9f13SBarry Smith .vb 3619273d9f13SBarry Smith proc0 : dnz = 2, o_nz = 2 3620273d9f13SBarry Smith proc1 : dnz = 3, o_nz = 2 3621273d9f13SBarry Smith proc2 : dnz = 1, o_nz = 4 3622273d9f13SBarry Smith .ve 3623273d9f13SBarry Smith We are allocating m*(d_nz+o_nz) storage locations for every proc. This 3624273d9f13SBarry Smith translates to 3*(2+2)=12 for proc0, 3*(3+2)=15 for proc1, 2*(1+4)=10 3625273d9f13SBarry Smith for proc3. i.e we are using 12+15+10=37 storage locations to store 3626273d9f13SBarry Smith 34 values. 3627273d9f13SBarry Smith 3628273d9f13SBarry Smith When d_nnz, o_nnz parameters are specified, the storage is specified 3629273d9f13SBarry Smith for every row, coresponding to both DIAGONAL and OFF-DIAGONAL submatrices. 3630273d9f13SBarry Smith In the above case the values for d_nnz,o_nnz are: 3631273d9f13SBarry Smith .vb 3632273d9f13SBarry Smith proc0: d_nnz = [2,2,2] and o_nnz = [2,2,2] 3633273d9f13SBarry Smith proc1: d_nnz = [3,3,2] and o_nnz = [2,1,1] 3634273d9f13SBarry Smith proc2: d_nnz = [1,1] and o_nnz = [4,4] 3635273d9f13SBarry Smith .ve 3636273d9f13SBarry Smith Here the space allocated is sum of all the above values i.e 34, and 3637273d9f13SBarry Smith hence pre-allocation is perfect. 
3638273d9f13SBarry Smith 3639273d9f13SBarry Smith Level: intermediate 3640273d9f13SBarry Smith 3641273d9f13SBarry Smith .keywords: matrix, aij, compressed row, sparse, parallel 3642273d9f13SBarry Smith 3643ccd8e176SBarry Smith .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIAIJSetPreallocation(), MatMPIAIJSetPreallocationCSR(), 36442fb0ec9aSBarry Smith MPIAIJ, MatCreateMPIAIJWithArrays() 3645273d9f13SBarry Smith @*/ 3646be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatCreateMPIAIJ(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[],Mat *A) 3647273d9f13SBarry Smith { 36486849ba73SBarry Smith PetscErrorCode ierr; 3649b1d57f15SBarry Smith PetscMPIInt size; 3650273d9f13SBarry Smith 3651273d9f13SBarry Smith PetscFunctionBegin; 3652f69a0ea3SMatthew Knepley ierr = MatCreate(comm,A);CHKERRQ(ierr); 3653f69a0ea3SMatthew Knepley ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr); 3654273d9f13SBarry Smith ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 3655273d9f13SBarry Smith if (size > 1) { 3656273d9f13SBarry Smith ierr = MatSetType(*A,MATMPIAIJ);CHKERRQ(ierr); 3657273d9f13SBarry Smith ierr = MatMPIAIJSetPreallocation(*A,d_nz,d_nnz,o_nz,o_nnz);CHKERRQ(ierr); 3658273d9f13SBarry Smith } else { 3659273d9f13SBarry Smith ierr = MatSetType(*A,MATSEQAIJ);CHKERRQ(ierr); 3660273d9f13SBarry Smith ierr = MatSeqAIJSetPreallocation(*A,d_nz,d_nnz);CHKERRQ(ierr); 3661273d9f13SBarry Smith } 3662273d9f13SBarry Smith PetscFunctionReturn(0); 3663273d9f13SBarry Smith } 3664195d93cdSBarry Smith 36654a2ae208SSatish Balay #undef __FUNCT__ 36664a2ae208SSatish Balay #define __FUNCT__ "MatMPIAIJGetSeqAIJ" 3667be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatMPIAIJGetSeqAIJ(Mat A,Mat *Ad,Mat *Ao,PetscInt *colmap[]) 3668195d93cdSBarry Smith { 3669195d93cdSBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ *)A->data; 3670b1d57f15SBarry Smith 3671195d93cdSBarry Smith PetscFunctionBegin; 
3672195d93cdSBarry Smith *Ad = a->A; 3673195d93cdSBarry Smith *Ao = a->B; 3674195d93cdSBarry Smith *colmap = a->garray; 3675195d93cdSBarry Smith PetscFunctionReturn(0); 3676195d93cdSBarry Smith } 3677a2243be0SBarry Smith 3678a2243be0SBarry Smith #undef __FUNCT__ 3679a2243be0SBarry Smith #define __FUNCT__ "MatSetColoring_MPIAIJ" 3680dfbe8321SBarry Smith PetscErrorCode MatSetColoring_MPIAIJ(Mat A,ISColoring coloring) 3681a2243be0SBarry Smith { 3682dfbe8321SBarry Smith PetscErrorCode ierr; 3683b1d57f15SBarry Smith PetscInt i; 3684a2243be0SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 3685a2243be0SBarry Smith 3686a2243be0SBarry Smith PetscFunctionBegin; 36878ee2e534SBarry Smith if (coloring->ctype == IS_COLORING_GLOBAL) { 368808b6dcc0SBarry Smith ISColoringValue *allcolors,*colors; 3689a2243be0SBarry Smith ISColoring ocoloring; 3690a2243be0SBarry Smith 3691a2243be0SBarry Smith /* set coloring for diagonal portion */ 3692a2243be0SBarry Smith ierr = MatSetColoring_SeqAIJ(a->A,coloring);CHKERRQ(ierr); 3693a2243be0SBarry Smith 3694a2243be0SBarry Smith /* set coloring for off-diagonal portion */ 36957adad957SLisandro Dalcin ierr = ISAllGatherColors(((PetscObject)A)->comm,coloring->n,coloring->colors,PETSC_NULL,&allcolors);CHKERRQ(ierr); 3696d0f46423SBarry Smith ierr = PetscMalloc((a->B->cmap->n+1)*sizeof(ISColoringValue),&colors);CHKERRQ(ierr); 3697d0f46423SBarry Smith for (i=0; i<a->B->cmap->n; i++) { 3698a2243be0SBarry Smith colors[i] = allcolors[a->garray[i]]; 3699a2243be0SBarry Smith } 3700a2243be0SBarry Smith ierr = PetscFree(allcolors);CHKERRQ(ierr); 3701d0f46423SBarry Smith ierr = ISColoringCreate(MPI_COMM_SELF,coloring->n,a->B->cmap->n,colors,&ocoloring);CHKERRQ(ierr); 3702a2243be0SBarry Smith ierr = MatSetColoring_SeqAIJ(a->B,ocoloring);CHKERRQ(ierr); 3703a2243be0SBarry Smith ierr = ISColoringDestroy(ocoloring);CHKERRQ(ierr); 3704a2243be0SBarry Smith } else if (coloring->ctype == IS_COLORING_GHOSTED) { 370508b6dcc0SBarry Smith ISColoringValue *colors; 
3706b1d57f15SBarry Smith PetscInt *larray; 3707a2243be0SBarry Smith ISColoring ocoloring; 3708a2243be0SBarry Smith 3709a2243be0SBarry Smith /* set coloring for diagonal portion */ 3710d0f46423SBarry Smith ierr = PetscMalloc((a->A->cmap->n+1)*sizeof(PetscInt),&larray);CHKERRQ(ierr); 3711d0f46423SBarry Smith for (i=0; i<a->A->cmap->n; i++) { 3712d0f46423SBarry Smith larray[i] = i + A->cmap->rstart; 3713a2243be0SBarry Smith } 3714d0f46423SBarry Smith ierr = ISGlobalToLocalMappingApply(A->mapping,IS_GTOLM_MASK,a->A->cmap->n,larray,PETSC_NULL,larray);CHKERRQ(ierr); 3715d0f46423SBarry Smith ierr = PetscMalloc((a->A->cmap->n+1)*sizeof(ISColoringValue),&colors);CHKERRQ(ierr); 3716d0f46423SBarry Smith for (i=0; i<a->A->cmap->n; i++) { 3717a2243be0SBarry Smith colors[i] = coloring->colors[larray[i]]; 3718a2243be0SBarry Smith } 3719a2243be0SBarry Smith ierr = PetscFree(larray);CHKERRQ(ierr); 3720d0f46423SBarry Smith ierr = ISColoringCreate(PETSC_COMM_SELF,coloring->n,a->A->cmap->n,colors,&ocoloring);CHKERRQ(ierr); 3721a2243be0SBarry Smith ierr = MatSetColoring_SeqAIJ(a->A,ocoloring);CHKERRQ(ierr); 3722a2243be0SBarry Smith ierr = ISColoringDestroy(ocoloring);CHKERRQ(ierr); 3723a2243be0SBarry Smith 3724a2243be0SBarry Smith /* set coloring for off-diagonal portion */ 3725d0f46423SBarry Smith ierr = PetscMalloc((a->B->cmap->n+1)*sizeof(PetscInt),&larray);CHKERRQ(ierr); 3726d0f46423SBarry Smith ierr = ISGlobalToLocalMappingApply(A->mapping,IS_GTOLM_MASK,a->B->cmap->n,a->garray,PETSC_NULL,larray);CHKERRQ(ierr); 3727d0f46423SBarry Smith ierr = PetscMalloc((a->B->cmap->n+1)*sizeof(ISColoringValue),&colors);CHKERRQ(ierr); 3728d0f46423SBarry Smith for (i=0; i<a->B->cmap->n; i++) { 3729a2243be0SBarry Smith colors[i] = coloring->colors[larray[i]]; 3730a2243be0SBarry Smith } 3731a2243be0SBarry Smith ierr = PetscFree(larray);CHKERRQ(ierr); 3732d0f46423SBarry Smith ierr = ISColoringCreate(MPI_COMM_SELF,coloring->n,a->B->cmap->n,colors,&ocoloring);CHKERRQ(ierr); 3733a2243be0SBarry Smith ierr 
= MatSetColoring_SeqAIJ(a->B,ocoloring);CHKERRQ(ierr); 3734a2243be0SBarry Smith ierr = ISColoringDestroy(ocoloring);CHKERRQ(ierr); 3735a2243be0SBarry Smith } else { 373677431f27SBarry Smith SETERRQ1(PETSC_ERR_SUP,"No support ISColoringType %d",(int)coloring->ctype); 3737a2243be0SBarry Smith } 3738a2243be0SBarry Smith 3739a2243be0SBarry Smith PetscFunctionReturn(0); 3740a2243be0SBarry Smith } 3741a2243be0SBarry Smith 3742dcf5cc72SBarry Smith #if defined(PETSC_HAVE_ADIC) 3743a2243be0SBarry Smith #undef __FUNCT__ 3744779c1a83SBarry Smith #define __FUNCT__ "MatSetValuesAdic_MPIAIJ" 3745dfbe8321SBarry Smith PetscErrorCode MatSetValuesAdic_MPIAIJ(Mat A,void *advalues) 3746a2243be0SBarry Smith { 3747a2243be0SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 3748dfbe8321SBarry Smith PetscErrorCode ierr; 3749a2243be0SBarry Smith 3750a2243be0SBarry Smith PetscFunctionBegin; 3751779c1a83SBarry Smith ierr = MatSetValuesAdic_SeqAIJ(a->A,advalues);CHKERRQ(ierr); 3752779c1a83SBarry Smith ierr = MatSetValuesAdic_SeqAIJ(a->B,advalues);CHKERRQ(ierr); 3753779c1a83SBarry Smith PetscFunctionReturn(0); 3754779c1a83SBarry Smith } 3755dcf5cc72SBarry Smith #endif 3756779c1a83SBarry Smith 3757779c1a83SBarry Smith #undef __FUNCT__ 3758779c1a83SBarry Smith #define __FUNCT__ "MatSetValuesAdifor_MPIAIJ" 3759b1d57f15SBarry Smith PetscErrorCode MatSetValuesAdifor_MPIAIJ(Mat A,PetscInt nl,void *advalues) 3760779c1a83SBarry Smith { 3761779c1a83SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 3762dfbe8321SBarry Smith PetscErrorCode ierr; 3763779c1a83SBarry Smith 3764779c1a83SBarry Smith PetscFunctionBegin; 3765779c1a83SBarry Smith ierr = MatSetValuesAdifor_SeqAIJ(a->A,nl,advalues);CHKERRQ(ierr); 3766779c1a83SBarry Smith ierr = MatSetValuesAdifor_SeqAIJ(a->B,nl,advalues);CHKERRQ(ierr); 3767a2243be0SBarry Smith PetscFunctionReturn(0); 3768a2243be0SBarry Smith } 3769c5d6d63eSBarry Smith 3770c5d6d63eSBarry Smith #undef __FUNCT__ 377151dd7536SBarry Smith #define __FUNCT__ "MatMerge" 3772bc08b0f1SBarry 
Smith /*@ 377351dd7536SBarry Smith MatMerge - Creates a single large PETSc matrix by concatinating sequential 377451dd7536SBarry Smith matrices from each processor 3775c5d6d63eSBarry Smith 3776c5d6d63eSBarry Smith Collective on MPI_Comm 3777c5d6d63eSBarry Smith 3778c5d6d63eSBarry Smith Input Parameters: 377951dd7536SBarry Smith + comm - the communicators the parallel matrix will live on 3780d6bb3c2dSHong Zhang . inmat - the input sequential matrices 37810e36024fSHong Zhang . n - number of local columns (or PETSC_DECIDE) 3782d6bb3c2dSHong Zhang - scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX 378351dd7536SBarry Smith 378451dd7536SBarry Smith Output Parameter: 378551dd7536SBarry Smith . outmat - the parallel matrix generated 3786c5d6d63eSBarry Smith 37877e25d530SSatish Balay Level: advanced 37887e25d530SSatish Balay 3789f08fae4eSHong Zhang Notes: The number of columns of the matrix in EACH processor MUST be the same. 3790c5d6d63eSBarry Smith 3791c5d6d63eSBarry Smith @*/ 3792be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatMerge(MPI_Comm comm,Mat inmat,PetscInt n,MatReuse scall,Mat *outmat) 3793c5d6d63eSBarry Smith { 3794dfbe8321SBarry Smith PetscErrorCode ierr; 3795b7940d39SSatish Balay PetscInt m,N,i,rstart,nnz,Ii,*dnz,*onz; 3796ba8c8a56SBarry Smith PetscInt *indx; 3797ba8c8a56SBarry Smith PetscScalar *values; 3798c5d6d63eSBarry Smith 3799c5d6d63eSBarry Smith PetscFunctionBegin; 38000e36024fSHong Zhang ierr = MatGetSize(inmat,&m,&N);CHKERRQ(ierr); 3801d6bb3c2dSHong Zhang if (scall == MAT_INITIAL_MATRIX){ 3802d6bb3c2dSHong Zhang /* count nonzeros in each row, for diagonal and off diagonal portion of matrix */ 38030e36024fSHong Zhang if (n == PETSC_DECIDE){ 3804357abbc8SBarry Smith ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr); 38050e36024fSHong Zhang } 3806357abbc8SBarry Smith ierr = MPI_Scan(&m, &rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr); 3807357abbc8SBarry Smith rstart -= m; 3808d6bb3c2dSHong Zhang 3809d6bb3c2dSHong Zhang ierr = 
MatPreallocateInitialize(comm,m,n,dnz,onz);CHKERRQ(ierr); 3810d6bb3c2dSHong Zhang for (i=0;i<m;i++) { 3811ba8c8a56SBarry Smith ierr = MatGetRow_SeqAIJ(inmat,i,&nnz,&indx,PETSC_NULL);CHKERRQ(ierr); 3812d6bb3c2dSHong Zhang ierr = MatPreallocateSet(i+rstart,nnz,indx,dnz,onz);CHKERRQ(ierr); 3813ba8c8a56SBarry Smith ierr = MatRestoreRow_SeqAIJ(inmat,i,&nnz,&indx,PETSC_NULL);CHKERRQ(ierr); 3814d6bb3c2dSHong Zhang } 3815d6bb3c2dSHong Zhang /* This routine will ONLY return MPIAIJ type matrix */ 3816f69a0ea3SMatthew Knepley ierr = MatCreate(comm,outmat);CHKERRQ(ierr); 3817f69a0ea3SMatthew Knepley ierr = MatSetSizes(*outmat,m,n,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr); 3818d6bb3c2dSHong Zhang ierr = MatSetType(*outmat,MATMPIAIJ);CHKERRQ(ierr); 3819d6bb3c2dSHong Zhang ierr = MatMPIAIJSetPreallocation(*outmat,0,dnz,0,onz);CHKERRQ(ierr); 3820d6bb3c2dSHong Zhang ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr); 3821d6bb3c2dSHong Zhang 3822d6bb3c2dSHong Zhang } else if (scall == MAT_REUSE_MATRIX){ 3823d6bb3c2dSHong Zhang ierr = MatGetOwnershipRange(*outmat,&rstart,PETSC_NULL);CHKERRQ(ierr); 3824d6bb3c2dSHong Zhang } else { 382577431f27SBarry Smith SETERRQ1(PETSC_ERR_ARG_WRONG,"Invalid MatReuse %d",(int)scall); 3826d6bb3c2dSHong Zhang } 3827d6bb3c2dSHong Zhang 3828d6bb3c2dSHong Zhang for (i=0;i<m;i++) { 3829ba8c8a56SBarry Smith ierr = MatGetRow_SeqAIJ(inmat,i,&nnz,&indx,&values);CHKERRQ(ierr); 3830b7940d39SSatish Balay Ii = i + rstart; 3831b7940d39SSatish Balay ierr = MatSetValues(*outmat,1,&Ii,nnz,indx,values,INSERT_VALUES);CHKERRQ(ierr); 3832ba8c8a56SBarry Smith ierr = MatRestoreRow_SeqAIJ(inmat,i,&nnz,&indx,&values);CHKERRQ(ierr); 3833d6bb3c2dSHong Zhang } 3834d6bb3c2dSHong Zhang ierr = MatDestroy(inmat);CHKERRQ(ierr); 3835d6bb3c2dSHong Zhang ierr = MatAssemblyBegin(*outmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 3836d6bb3c2dSHong Zhang ierr = MatAssemblyEnd(*outmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 383751dd7536SBarry Smith 3838c5d6d63eSBarry Smith PetscFunctionReturn(0); 
3839c5d6d63eSBarry Smith } 3840c5d6d63eSBarry Smith 3841c5d6d63eSBarry Smith #undef __FUNCT__ 3842c5d6d63eSBarry Smith #define __FUNCT__ "MatFileSplit" 3843dfbe8321SBarry Smith PetscErrorCode MatFileSplit(Mat A,char *outfile) 3844c5d6d63eSBarry Smith { 3845dfbe8321SBarry Smith PetscErrorCode ierr; 384632dcc486SBarry Smith PetscMPIInt rank; 3847b1d57f15SBarry Smith PetscInt m,N,i,rstart,nnz; 3848de4209c5SBarry Smith size_t len; 3849b1d57f15SBarry Smith const PetscInt *indx; 3850c5d6d63eSBarry Smith PetscViewer out; 3851c5d6d63eSBarry Smith char *name; 3852c5d6d63eSBarry Smith Mat B; 3853b3cc6726SBarry Smith const PetscScalar *values; 3854c5d6d63eSBarry Smith 3855c5d6d63eSBarry Smith PetscFunctionBegin; 3856c5d6d63eSBarry Smith ierr = MatGetLocalSize(A,&m,0);CHKERRQ(ierr); 3857c5d6d63eSBarry Smith ierr = MatGetSize(A,0,&N);CHKERRQ(ierr); 3858f204ca49SKris Buschelman /* Should this be the type of the diagonal block of A? */ 3859f69a0ea3SMatthew Knepley ierr = MatCreate(PETSC_COMM_SELF,&B);CHKERRQ(ierr); 3860f69a0ea3SMatthew Knepley ierr = MatSetSizes(B,m,N,m,N);CHKERRQ(ierr); 3861f204ca49SKris Buschelman ierr = MatSetType(B,MATSEQAIJ);CHKERRQ(ierr); 3862f204ca49SKris Buschelman ierr = MatSeqAIJSetPreallocation(B,0,PETSC_NULL);CHKERRQ(ierr); 3863c5d6d63eSBarry Smith ierr = MatGetOwnershipRange(A,&rstart,0);CHKERRQ(ierr); 3864c5d6d63eSBarry Smith for (i=0;i<m;i++) { 3865c5d6d63eSBarry Smith ierr = MatGetRow(A,i+rstart,&nnz,&indx,&values);CHKERRQ(ierr); 3866c5d6d63eSBarry Smith ierr = MatSetValues(B,1,&i,nnz,indx,values,INSERT_VALUES);CHKERRQ(ierr); 3867c5d6d63eSBarry Smith ierr = MatRestoreRow(A,i+rstart,&nnz,&indx,&values);CHKERRQ(ierr); 3868c5d6d63eSBarry Smith } 3869c5d6d63eSBarry Smith ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 3870c5d6d63eSBarry Smith ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 3871c5d6d63eSBarry Smith 38727adad957SLisandro Dalcin ierr = MPI_Comm_rank(((PetscObject)A)->comm,&rank);CHKERRQ(ierr); 3873c5d6d63eSBarry 
Smith ierr = PetscStrlen(outfile,&len);CHKERRQ(ierr); 3874c5d6d63eSBarry Smith ierr = PetscMalloc((len+5)*sizeof(char),&name);CHKERRQ(ierr); 3875c5d6d63eSBarry Smith sprintf(name,"%s.%d",outfile,rank); 3876852598b0SBarry Smith ierr = PetscViewerBinaryOpen(PETSC_COMM_SELF,name,FILE_MODE_APPEND,&out);CHKERRQ(ierr); 3877c5d6d63eSBarry Smith ierr = PetscFree(name); 3878c5d6d63eSBarry Smith ierr = MatView(B,out);CHKERRQ(ierr); 3879c5d6d63eSBarry Smith ierr = PetscViewerDestroy(out);CHKERRQ(ierr); 3880c5d6d63eSBarry Smith ierr = MatDestroy(B);CHKERRQ(ierr); 3881c5d6d63eSBarry Smith PetscFunctionReturn(0); 3882c5d6d63eSBarry Smith } 3883e5f2cdd8SHong Zhang 388451a7d1a8SHong Zhang EXTERN PetscErrorCode MatDestroy_MPIAIJ(Mat); 388551a7d1a8SHong Zhang #undef __FUNCT__ 388651a7d1a8SHong Zhang #define __FUNCT__ "MatDestroy_MPIAIJ_SeqsToMPI" 3887be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatDestroy_MPIAIJ_SeqsToMPI(Mat A) 388851a7d1a8SHong Zhang { 388951a7d1a8SHong Zhang PetscErrorCode ierr; 3890671beff6SHong Zhang Mat_Merge_SeqsToMPI *merge; 3891776b82aeSLisandro Dalcin PetscContainer container; 389251a7d1a8SHong Zhang 389351a7d1a8SHong Zhang PetscFunctionBegin; 3894671beff6SHong Zhang ierr = PetscObjectQuery((PetscObject)A,"MatMergeSeqsToMPI",(PetscObject *)&container);CHKERRQ(ierr); 3895671beff6SHong Zhang if (container) { 3896776b82aeSLisandro Dalcin ierr = PetscContainerGetPointer(container,(void **)&merge);CHKERRQ(ierr); 389751a7d1a8SHong Zhang ierr = PetscFree(merge->id_r);CHKERRQ(ierr); 38983e06a4e6SHong Zhang ierr = PetscFree(merge->len_s);CHKERRQ(ierr); 38993e06a4e6SHong Zhang ierr = PetscFree(merge->len_r);CHKERRQ(ierr); 390051a7d1a8SHong Zhang ierr = PetscFree(merge->bi);CHKERRQ(ierr); 390151a7d1a8SHong Zhang ierr = PetscFree(merge->bj);CHKERRQ(ierr); 390202c68681SHong Zhang ierr = PetscFree(merge->buf_ri);CHKERRQ(ierr); 390302c68681SHong Zhang ierr = PetscFree(merge->buf_rj);CHKERRQ(ierr); 390405b42c5fSBarry Smith ierr = 
PetscFree(merge->coi);CHKERRQ(ierr); 390505b42c5fSBarry Smith ierr = PetscFree(merge->coj);CHKERRQ(ierr); 390605b42c5fSBarry Smith ierr = PetscFree(merge->owners_co);CHKERRQ(ierr); 39072c72b5baSSatish Balay ierr = PetscFree(merge->rowmap.range);CHKERRQ(ierr); 3908671beff6SHong Zhang 3909776b82aeSLisandro Dalcin ierr = PetscContainerDestroy(container);CHKERRQ(ierr); 3910671beff6SHong Zhang ierr = PetscObjectCompose((PetscObject)A,"MatMergeSeqsToMPI",0);CHKERRQ(ierr); 3911671beff6SHong Zhang } 391251a7d1a8SHong Zhang ierr = PetscFree(merge);CHKERRQ(ierr); 391351a7d1a8SHong Zhang 391451a7d1a8SHong Zhang ierr = MatDestroy_MPIAIJ(A);CHKERRQ(ierr); 391551a7d1a8SHong Zhang PetscFunctionReturn(0); 391651a7d1a8SHong Zhang } 391751a7d1a8SHong Zhang 39187c4f633dSBarry Smith #include "../src/mat/utils/freespace.h" 3919be0fcf8dSHong Zhang #include "petscbt.h" 39204ebed01fSBarry Smith 3921e5f2cdd8SHong Zhang #undef __FUNCT__ 392238f152feSBarry Smith #define __FUNCT__ "MatMerge_SeqsToMPINumeric" 3923e5f2cdd8SHong Zhang /*@C 3924f08fae4eSHong Zhang MatMerge_SeqsToMPI - Creates a MPIAIJ matrix by adding sequential 3925e5f2cdd8SHong Zhang matrices from each processor 3926e5f2cdd8SHong Zhang 3927e5f2cdd8SHong Zhang Collective on MPI_Comm 3928e5f2cdd8SHong Zhang 3929e5f2cdd8SHong Zhang Input Parameters: 3930e5f2cdd8SHong Zhang + comm - the communicators the parallel matrix will live on 3931f08fae4eSHong Zhang . seqmat - the input sequential matrices 39320e36024fSHong Zhang . m - number of local rows (or PETSC_DECIDE) 39330e36024fSHong Zhang . n - number of local columns (or PETSC_DECIDE) 3934e5f2cdd8SHong Zhang - scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX 3935e5f2cdd8SHong Zhang 3936e5f2cdd8SHong Zhang Output Parameter: 3937f08fae4eSHong Zhang . 
mpimat - the parallel matrix generated 3938e5f2cdd8SHong Zhang 3939e5f2cdd8SHong Zhang Level: advanced 3940e5f2cdd8SHong Zhang 3941affca5deSHong Zhang Notes: 3942affca5deSHong Zhang The dimensions of the sequential matrix in each processor MUST be the same. 3943affca5deSHong Zhang The input seqmat is included into the container "Mat_Merge_SeqsToMPI", and will be 3944affca5deSHong Zhang destroyed when mpimat is destroyed. Call PetscObjectQuery() to access seqmat. 3945e5f2cdd8SHong Zhang @*/ 3946be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatMerge_SeqsToMPINumeric(Mat seqmat,Mat mpimat) 394755d1abb9SHong Zhang { 394855d1abb9SHong Zhang PetscErrorCode ierr; 39497adad957SLisandro Dalcin MPI_Comm comm=((PetscObject)mpimat)->comm; 395055d1abb9SHong Zhang Mat_SeqAIJ *a=(Mat_SeqAIJ*)seqmat->data; 3951b1d57f15SBarry Smith PetscMPIInt size,rank,taga,*len_s; 3952d0f46423SBarry Smith PetscInt N=mpimat->cmap->N,i,j,*owners,*ai=a->i,*aj=a->j; 3953b1d57f15SBarry Smith PetscInt proc,m; 3954b1d57f15SBarry Smith PetscInt **buf_ri,**buf_rj; 3955b1d57f15SBarry Smith PetscInt k,anzi,*bj_i,*bi,*bj,arow,bnzi,nextaj; 3956b1d57f15SBarry Smith PetscInt nrows,**buf_ri_k,**nextrow,**nextai; 395755d1abb9SHong Zhang MPI_Request *s_waits,*r_waits; 395855d1abb9SHong Zhang MPI_Status *status; 3959a77337e4SBarry Smith MatScalar *aa=a->a; 3960dd6ea824SBarry Smith MatScalar **abuf_r,*ba_i; 396155d1abb9SHong Zhang Mat_Merge_SeqsToMPI *merge; 3962776b82aeSLisandro Dalcin PetscContainer container; 396355d1abb9SHong Zhang 396455d1abb9SHong Zhang PetscFunctionBegin; 39654ebed01fSBarry Smith ierr = PetscLogEventBegin(MAT_Seqstompinum,seqmat,0,0,0);CHKERRQ(ierr); 39663c2c1871SHong Zhang 396755d1abb9SHong Zhang ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 396855d1abb9SHong Zhang ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 396955d1abb9SHong Zhang 397055d1abb9SHong Zhang ierr = PetscObjectQuery((PetscObject)mpimat,"MatMergeSeqsToMPI",(PetscObject *)&container);CHKERRQ(ierr); 
397155d1abb9SHong Zhang if (container) { 3972776b82aeSLisandro Dalcin ierr = PetscContainerGetPointer(container,(void **)&merge);CHKERRQ(ierr); 397355d1abb9SHong Zhang } 397455d1abb9SHong Zhang bi = merge->bi; 397555d1abb9SHong Zhang bj = merge->bj; 397655d1abb9SHong Zhang buf_ri = merge->buf_ri; 397755d1abb9SHong Zhang buf_rj = merge->buf_rj; 397855d1abb9SHong Zhang 397955d1abb9SHong Zhang ierr = PetscMalloc(size*sizeof(MPI_Status),&status);CHKERRQ(ierr); 3980357abbc8SBarry Smith owners = merge->rowmap.range; 398155d1abb9SHong Zhang len_s = merge->len_s; 398255d1abb9SHong Zhang 398355d1abb9SHong Zhang /* send and recv matrix values */ 398455d1abb9SHong Zhang /*-----------------------------*/ 3985357abbc8SBarry Smith ierr = PetscObjectGetNewTag((PetscObject)mpimat,&taga);CHKERRQ(ierr); 398655d1abb9SHong Zhang ierr = PetscPostIrecvScalar(comm,taga,merge->nrecv,merge->id_r,merge->len_r,&abuf_r,&r_waits);CHKERRQ(ierr); 398755d1abb9SHong Zhang 398855d1abb9SHong Zhang ierr = PetscMalloc((merge->nsend+1)*sizeof(MPI_Request),&s_waits);CHKERRQ(ierr); 398955d1abb9SHong Zhang for (proc=0,k=0; proc<size; proc++){ 399055d1abb9SHong Zhang if (!len_s[proc]) continue; 399155d1abb9SHong Zhang i = owners[proc]; 399255d1abb9SHong Zhang ierr = MPI_Isend(aa+ai[i],len_s[proc],MPIU_MATSCALAR,proc,taga,comm,s_waits+k);CHKERRQ(ierr); 399355d1abb9SHong Zhang k++; 399455d1abb9SHong Zhang } 399555d1abb9SHong Zhang 39960c468ba9SBarry Smith if (merge->nrecv) {ierr = MPI_Waitall(merge->nrecv,r_waits,status);CHKERRQ(ierr);} 39970c468ba9SBarry Smith if (merge->nsend) {ierr = MPI_Waitall(merge->nsend,s_waits,status);CHKERRQ(ierr);} 399855d1abb9SHong Zhang ierr = PetscFree(status);CHKERRQ(ierr); 399955d1abb9SHong Zhang 400055d1abb9SHong Zhang ierr = PetscFree(s_waits);CHKERRQ(ierr); 400155d1abb9SHong Zhang ierr = PetscFree(r_waits);CHKERRQ(ierr); 400255d1abb9SHong Zhang 400355d1abb9SHong Zhang /* insert mat values of mpimat */ 400455d1abb9SHong Zhang /*----------------------------*/ 
4005a77337e4SBarry Smith ierr = PetscMalloc(N*sizeof(PetscScalar),&ba_i);CHKERRQ(ierr); 4006b1d57f15SBarry Smith ierr = PetscMalloc((3*merge->nrecv+1)*sizeof(PetscInt**),&buf_ri_k);CHKERRQ(ierr); 400755d1abb9SHong Zhang nextrow = buf_ri_k + merge->nrecv; 400855d1abb9SHong Zhang nextai = nextrow + merge->nrecv; 400955d1abb9SHong Zhang 401055d1abb9SHong Zhang for (k=0; k<merge->nrecv; k++){ 401155d1abb9SHong Zhang buf_ri_k[k] = buf_ri[k]; /* beginning of k-th recved i-structure */ 401255d1abb9SHong Zhang nrows = *(buf_ri_k[k]); 401355d1abb9SHong Zhang nextrow[k] = buf_ri_k[k]+1; /* next row number of k-th recved i-structure */ 401455d1abb9SHong Zhang nextai[k] = buf_ri_k[k] + (nrows + 1);/* poins to the next i-structure of k-th recved i-structure */ 401555d1abb9SHong Zhang } 401655d1abb9SHong Zhang 401755d1abb9SHong Zhang /* set values of ba */ 4018357abbc8SBarry Smith m = merge->rowmap.n; 401955d1abb9SHong Zhang for (i=0; i<m; i++) { 402055d1abb9SHong Zhang arow = owners[rank] + i; 402155d1abb9SHong Zhang bj_i = bj+bi[i]; /* col indices of the i-th row of mpimat */ 402255d1abb9SHong Zhang bnzi = bi[i+1] - bi[i]; 4023a77337e4SBarry Smith ierr = PetscMemzero(ba_i,bnzi*sizeof(PetscScalar));CHKERRQ(ierr); 402455d1abb9SHong Zhang 402555d1abb9SHong Zhang /* add local non-zero vals of this proc's seqmat into ba */ 402655d1abb9SHong Zhang anzi = ai[arow+1] - ai[arow]; 402755d1abb9SHong Zhang aj = a->j + ai[arow]; 402855d1abb9SHong Zhang aa = a->a + ai[arow]; 402955d1abb9SHong Zhang nextaj = 0; 403055d1abb9SHong Zhang for (j=0; nextaj<anzi; j++){ 403155d1abb9SHong Zhang if (*(bj_i + j) == aj[nextaj]){ /* bcol == acol */ 403255d1abb9SHong Zhang ba_i[j] += aa[nextaj++]; 403355d1abb9SHong Zhang } 403455d1abb9SHong Zhang } 403555d1abb9SHong Zhang 403655d1abb9SHong Zhang /* add received vals into ba */ 403755d1abb9SHong Zhang for (k=0; k<merge->nrecv; k++){ /* k-th received message */ 403855d1abb9SHong Zhang /* i-th row */ 403955d1abb9SHong Zhang if (i == *nextrow[k]) { 
404055d1abb9SHong Zhang anzi = *(nextai[k]+1) - *nextai[k]; 404155d1abb9SHong Zhang aj = buf_rj[k] + *(nextai[k]); 404255d1abb9SHong Zhang aa = abuf_r[k] + *(nextai[k]); 404355d1abb9SHong Zhang nextaj = 0; 404455d1abb9SHong Zhang for (j=0; nextaj<anzi; j++){ 404555d1abb9SHong Zhang if (*(bj_i + j) == aj[nextaj]){ /* bcol == acol */ 404655d1abb9SHong Zhang ba_i[j] += aa[nextaj++]; 404755d1abb9SHong Zhang } 404855d1abb9SHong Zhang } 404955d1abb9SHong Zhang nextrow[k]++; nextai[k]++; 405055d1abb9SHong Zhang } 405155d1abb9SHong Zhang } 405255d1abb9SHong Zhang ierr = MatSetValues(mpimat,1,&arow,bnzi,bj_i,ba_i,INSERT_VALUES);CHKERRQ(ierr); 405355d1abb9SHong Zhang } 405455d1abb9SHong Zhang ierr = MatAssemblyBegin(mpimat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 405555d1abb9SHong Zhang ierr = MatAssemblyEnd(mpimat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 405655d1abb9SHong Zhang 405755d1abb9SHong Zhang ierr = PetscFree(abuf_r);CHKERRQ(ierr); 405855d1abb9SHong Zhang ierr = PetscFree(ba_i);CHKERRQ(ierr); 405955d1abb9SHong Zhang ierr = PetscFree(buf_ri_k);CHKERRQ(ierr); 40604ebed01fSBarry Smith ierr = PetscLogEventEnd(MAT_Seqstompinum,seqmat,0,0,0);CHKERRQ(ierr); 406155d1abb9SHong Zhang PetscFunctionReturn(0); 406255d1abb9SHong Zhang } 406338f152feSBarry Smith 406438f152feSBarry Smith #undef __FUNCT__ 406538f152feSBarry Smith #define __FUNCT__ "MatMerge_SeqsToMPISymbolic" 4066be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatMerge_SeqsToMPISymbolic(MPI_Comm comm,Mat seqmat,PetscInt m,PetscInt n,Mat *mpimat) 4067e5f2cdd8SHong Zhang { 4068f08fae4eSHong Zhang PetscErrorCode ierr; 406955a3bba9SHong Zhang Mat B_mpi; 4070c2234fe3SHong Zhang Mat_SeqAIJ *a=(Mat_SeqAIJ*)seqmat->data; 4071b1d57f15SBarry Smith PetscMPIInt size,rank,tagi,tagj,*len_s,*len_si,*len_ri; 4072b1d57f15SBarry Smith PetscInt **buf_rj,**buf_ri,**buf_ri_k; 4073d0f46423SBarry Smith PetscInt M=seqmat->rmap->n,N=seqmat->cmap->n,i,*owners,*ai=a->i,*aj=a->j; 4074b1d57f15SBarry Smith PetscInt len,proc,*dnz,*onz; 
  PetscInt             k,anzi,*bi,*bj,*lnk,nlnk,arow,bnzi,nspacedouble=0;
  PetscInt             nrows,*buf_s,*buf_si,*buf_si_i,**nextrow,**nextai;
  MPI_Request          *si_waits,*sj_waits,*ri_waits,*rj_waits;
  MPI_Status           *status;
  PetscFreeSpaceList   free_space=PETSC_NULL,current_space=PETSC_NULL;
  PetscBT              lnkbt;
  Mat_Merge_SeqsToMPI  *merge;
  PetscContainer       container;

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(MAT_Seqstompisym,seqmat,0,0,0);CHKERRQ(ierr);

  /* make sure it is a PETSc comm */
  ierr = PetscCommDuplicate(comm,&comm,PETSC_NULL);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);

  ierr = PetscNew(Mat_Merge_SeqsToMPI,&merge);CHKERRQ(ierr);
  ierr = PetscMalloc(size*sizeof(MPI_Status),&status);CHKERRQ(ierr);

  /* determine row ownership */
  /*---------------------------------------------------------*/
  ierr = PetscMapInitialize(comm,&merge->rowmap);CHKERRQ(ierr);
  merge->rowmap.n  = m;
  merge->rowmap.N  = M;
  merge->rowmap.bs = 1;
  ierr = PetscMapSetUp(&merge->rowmap);CHKERRQ(ierr);
  ierr = PetscMalloc(size*sizeof(PetscMPIInt),&len_si);CHKERRQ(ierr);
  ierr = PetscMalloc(size*sizeof(PetscMPIInt),&merge->len_s);CHKERRQ(ierr);

  /* PetscMapSetUp may have resolved PETSC_DECIDE values; re-read them */
  m      = merge->rowmap.n;
  M      = merge->rowmap.N;
  owners = merge->rowmap.range;

  /* determine the number of messages to send, their lengths */
  /*---------------------------------------------------------*/
  len_s = merge->len_s;

  len = 0;  /* length of buf_si[] */
  merge->nsend = 0;
  for (proc=0; proc<size; proc++){
    len_si[proc] = 0;
    if (proc == rank){
      len_s[proc] = 0;
    } else {
      len_si[proc] = owners[proc+1] - owners[proc] + 1;
      len_s[proc] = ai[owners[proc+1]] - ai[owners[proc]]; /* num of rows to be sent to [proc] */
    }
    if (len_s[proc]) {
      merge->nsend++;
      /* only rows with at least one nonzero are described in the i-structure,
         two entries (row index + offset) per row plus a 2-entry header */
      nrows = 0;
      for (i=owners[proc]; i<owners[proc+1]; i++){
        if (ai[i+1] > ai[i]) nrows++;
      }
      len_si[proc] = 2*(nrows+1);
      len += len_si[proc];
    }
  }

  /* determine the number and length of messages to receive for ij-structure */
  /*-------------------------------------------------------------------------*/
  ierr = PetscGatherNumberOfMessages(comm,PETSC_NULL,len_s,&merge->nrecv);CHKERRQ(ierr);
  ierr = PetscGatherMessageLengths2(comm,merge->nsend,merge->nrecv,len_s,len_si,&merge->id_r,&merge->len_r,&len_ri);CHKERRQ(ierr);

  /* post the Irecv of j-structure */
  /*-------------------------------*/
  ierr = PetscCommGetNewTag(comm,&tagj);CHKERRQ(ierr);
  ierr = PetscPostIrecvInt(comm,tagj,merge->nrecv,merge->id_r,merge->len_r,&buf_rj,&rj_waits);CHKERRQ(ierr);

  /* post the Isend of j-structure */
  /*--------------------------------*/
  ierr = PetscMalloc((2*merge->nsend+1)*sizeof(MPI_Request),&si_waits);CHKERRQ(ierr);
  sj_waits = si_waits + merge->nsend;

  for (proc=0, k=0; proc<size; proc++){
    if (!len_s[proc]) continue;
    i = owners[proc];
    /* column indices for [proc]'s rows are contiguous in aj */
    ierr = MPI_Isend(aj+ai[i],len_s[proc],MPIU_INT,proc,tagj,comm,sj_waits+k);CHKERRQ(ierr);
    k++;
  }

  /* receives and sends of j-structure are complete */
  /*------------------------------------------------*/
  if (merge->nrecv) {ierr = MPI_Waitall(merge->nrecv,rj_waits,status);CHKERRQ(ierr);}
  if (merge->nsend) {ierr = MPI_Waitall(merge->nsend,sj_waits,status);CHKERRQ(ierr);}

  /* send and recv i-structure */
  /*---------------------------*/
  ierr = PetscCommGetNewTag(comm,&tagi);CHKERRQ(ierr);
  ierr = PetscPostIrecvInt(comm,tagi,merge->nrecv,merge->id_r,len_ri,&buf_ri,&ri_waits);CHKERRQ(ierr);

  ierr = PetscMalloc((len+1)*sizeof(PetscInt),&buf_s);CHKERRQ(ierr);
  buf_si = buf_s;  /* points to the beginning of k-th msg to be sent */
  for (proc=0,k=0; proc<size; proc++){
    if (!len_s[proc]) continue;
    /* form outgoing message for i-structure:
         buf_si[0]:                 nrows to be sent
               [1:nrows]:           row index (global)
               [nrows+1:2*nrows+1]: i-structure index
    */
    /*-------------------------------------------*/
    nrows = len_si[proc]/2 - 1;
    buf_si_i    = buf_si + nrows+1;
    buf_si[0]   = nrows;
    buf_si_i[0] = 0;
    nrows = 0;
    for (i=owners[proc]; i<owners[proc+1]; i++){
      anzi = ai[i+1] - ai[i];
      if (anzi) {
        buf_si_i[nrows+1] = buf_si_i[nrows] + anzi; /* i-structure */
        buf_si[nrows+1] = i-owners[proc]; /* local row index */
        nrows++;
      }
    }
    ierr = MPI_Isend(buf_si,len_si[proc],MPIU_INT,proc,tagi,comm,si_waits+k);CHKERRQ(ierr);
    k++;
    buf_si += len_si[proc];
  }

  if (merge->nrecv) {ierr = MPI_Waitall(merge->nrecv,ri_waits,status);CHKERRQ(ierr);}
  if (merge->nsend) {ierr = MPI_Waitall(merge->nsend,si_waits,status);CHKERRQ(ierr);}

  ierr = PetscInfo2(seqmat,"nsend: %D, nrecv: %D\n",merge->nsend,merge->nrecv);CHKERRQ(ierr);
  for (i=0; i<merge->nrecv; i++){
    ierr = PetscInfo3(seqmat,"recv len_ri=%D, len_rj=%D from [%D]\n",len_ri[i],merge->len_r[i],merge->id_r[i]);CHKERRQ(ierr);
  }

  ierr = PetscFree(len_si);CHKERRQ(ierr);
  ierr = PetscFree(len_ri);CHKERRQ(ierr);
  ierr = PetscFree(rj_waits);CHKERRQ(ierr);
  ierr = PetscFree(si_waits);CHKERRQ(ierr);
  ierr = PetscFree(ri_waits);CHKERRQ(ierr);
  ierr = PetscFree(buf_s);CHKERRQ(ierr);
  ierr = PetscFree(status);CHKERRQ(ierr);

  /* compute a local seq matrix in each processor */
  /*----------------------------------------------*/
  /* allocate bi array and free space for accumulating nonzero column info */
  ierr = PetscMalloc((m+1)*sizeof(PetscInt),&bi);CHKERRQ(ierr);
  bi[0] = 0;

  /* create and initialize a linked list (sorted column merge helper) */
  nlnk = N+1;
  ierr = PetscLLCreate(N,N,nlnk,lnk,lnkbt);CHKERRQ(ierr);

  /* initial FreeSpace size is 2*(num of local nnz(seqmat)) */
  len = 0;
  len = ai[owners[rank+1]] - ai[owners[rank]];
  ierr = PetscFreeSpaceGet((PetscInt)(2*len+1),&free_space);CHKERRQ(ierr);
  current_space = free_space;

  /* determine symbolic info for each local row */
  ierr = PetscMalloc((3*merge->nrecv+1)*sizeof(PetscInt**),&buf_ri_k);CHKERRQ(ierr);
  nextrow = buf_ri_k + merge->nrecv;
  nextai  = nextrow + merge->nrecv;
  for (k=0; k<merge->nrecv; k++){
    buf_ri_k[k] = buf_ri[k]; /* beginning of k-th recved i-structure */
    nrows = *buf_ri_k[k];
    nextrow[k] = buf_ri_k[k] + 1;  /* next row number of k-th recved i-structure */
    nextai[k] = buf_ri_k[k] + (nrows + 1);/* points to the next i-structure of k-th recved i-structure */
  }

  ierr = MatPreallocateInitialize(comm,m,n,dnz,onz);CHKERRQ(ierr);
  len = 0;  /* reused: now tracks max row nonzero count */
  for (i=0;i<m;i++) {
    bnzi = 0;
    /* add local non-zero cols of this proc's seqmat into lnk */
    arow = owners[rank] + i;
    anzi = ai[arow+1] - ai[arow];
    aj   = a->j + ai[arow];
    ierr = PetscLLAdd(anzi,aj,N,nlnk,lnk,lnkbt);CHKERRQ(ierr);
    bnzi += nlnk;
    /* add received col data into lnk */
    for (k=0; k<merge->nrecv; k++){ /* k-th received message */
      if (i == *nextrow[k]) { /* i-th row */
        anzi = *(nextai[k]+1) - *nextai[k];
        aj   = buf_rj[k] + *nextai[k];
        ierr = PetscLLAdd(anzi,aj,N,nlnk,lnk,lnkbt);CHKERRQ(ierr);
        bnzi += nlnk;
        nextrow[k]++; nextai[k]++;
      }
    }
    if (len < bnzi) len = bnzi;  /* =max(bnzi) */

    /* if free space is not available, make more free space */
    if (current_space->local_remaining<bnzi) {
      ierr = PetscFreeSpaceGet(bnzi+current_space->total_array_size,&current_space);CHKERRQ(ierr);
      nspacedouble++;
    }
    /* copy data into free space, then initialize lnk */
    ierr = PetscLLClean(N,N,bnzi,lnk,current_space->array,lnkbt);CHKERRQ(ierr);
    ierr = MatPreallocateSet(i+owners[rank],bnzi,current_space->array,dnz,onz);CHKERRQ(ierr);

    current_space->array           += bnzi;
    current_space->local_used      += bnzi;
    current_space->local_remaining -= bnzi;

    bi[i+1] = bi[i] + bnzi;
  }

  ierr = PetscFree(buf_ri_k);CHKERRQ(ierr);

  ierr = PetscMalloc((bi[m]+1)*sizeof(PetscInt),&bj);CHKERRQ(ierr);
  ierr = PetscFreeSpaceContiguous(&free_space,bj);CHKERRQ(ierr);
  ierr = PetscLLDestroy(lnk,lnkbt);CHKERRQ(ierr);

  /* create symbolic parallel matrix B_mpi */
  /*---------------------------------------*/
  ierr = MatCreate(comm,&B_mpi);CHKERRQ(ierr);
  if (n==PETSC_DECIDE) {
    ierr = MatSetSizes(B_mpi,m,n,PETSC_DETERMINE,N);CHKERRQ(ierr);
  } else {
    ierr = MatSetSizes(B_mpi,m,n,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
  }
  ierr = MatSetType(B_mpi,MATMPIAIJ);CHKERRQ(ierr);
  ierr = MatMPIAIJSetPreallocation(B_mpi,0,dnz,0,onz);CHKERRQ(ierr);
  ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr);

  /* B_mpi is not ready for use - assembly will be done by MatMerge_SeqsToMPINumeric() */
  B_mpi->assembled    = PETSC_FALSE;
  B_mpi->ops->destroy = MatDestroy_MPIAIJ_SeqsToMPI;
  merge->bi        = bi;
  merge->bj        = bj;
  merge->buf_ri    = buf_ri;
  merge->buf_rj    = buf_rj;
  merge->coi       = PETSC_NULL;
  merge->coj       = PETSC_NULL;
  merge->owners_co = PETSC_NULL;

  /* attach the supporting struct to B_mpi for reuse */
  ierr = PetscContainerCreate(PETSC_COMM_SELF,&container);CHKERRQ(ierr);
  ierr =
PetscContainerSetPointer(container,merge);CHKERRQ(ierr);
  ierr = PetscObjectCompose((PetscObject)B_mpi,"MatMergeSeqsToMPI",(PetscObject)container);CHKERRQ(ierr);
  *mpimat = B_mpi;

  /* release the reference taken by PetscCommDuplicate() at entry */
  ierr = PetscCommDestroy(&comm);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(MAT_Seqstompisym,seqmat,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMerge_SeqsToMPI"
/*
   Driver chaining the two merge phases: on MAT_INITIAL_MATRIX the parallel
   matrix is first created symbolically, then (in all cases) the numeric
   values from seqmat are assembled into *mpimat.
*/
PetscErrorCode PETSCMAT_DLLEXPORT MatMerge_SeqsToMPI(MPI_Comm comm,Mat seqmat,PetscInt m,PetscInt n,MatReuse scall,Mat *mpimat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(MAT_Seqstompi,seqmat,0,0,0);CHKERRQ(ierr);
  if (scall == MAT_INITIAL_MATRIX){
    ierr = MatMerge_SeqsToMPISymbolic(comm,seqmat,m,n,mpimat);CHKERRQ(ierr);
  }
  ierr = MatMerge_SeqsToMPINumeric(seqmat,*mpimat);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(MAT_Seqstompi,seqmat,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetLocalMat"
/*@
     MatGetLocalMat - Creates a SeqAIJ matrix by taking all its local rows

    Not Collective

   Input Parameters:
+    A - the matrix
. 
scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX

   Output Parameter:
.    A_loc - the local sequential matrix generated

   Level: developer

@*/
PetscErrorCode PETSCMAT_DLLEXPORT MatGetLocalMat(Mat A,MatReuse scall,Mat *A_loc)
{
  PetscErrorCode ierr;
  Mat_MPIAIJ     *mpimat=(Mat_MPIAIJ*)A->data;
  Mat_SeqAIJ     *mat,*a=(Mat_SeqAIJ*)(mpimat->A)->data,*b=(Mat_SeqAIJ*)(mpimat->B)->data;
  PetscInt       *ai=a->i,*aj=a->j,*bi=b->i,*bj=b->j,*cmap=mpimat->garray;
  MatScalar      *aa=a->a,*ba=b->a,*cam;
  PetscScalar    *ca;
  PetscInt       am=A->rmap->n,i,j,k,cstart=A->cmap->rstart;
  PetscInt       *ci,*cj,col,ncols_d,ncols_o,jo;

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(MAT_Getlocalmat,A,0,0,0);CHKERRQ(ierr);
  if (scall == MAT_INITIAL_MATRIX){
    /* row pointers of the merged matrix: each row gets its diagonal-block and
       off-diagonal-block nonzeros combined */
    ierr = PetscMalloc((1+am)*sizeof(PetscInt),&ci);CHKERRQ(ierr);
    ci[0] = 0;
    for (i=0; i<am; i++){
      ci[i+1] = ci[i] + (ai[i+1] - ai[i]) + (bi[i+1] - bi[i]);
    }
    ierr = PetscMalloc((1+ci[am])*sizeof(PetscInt),&cj);CHKERRQ(ierr);
    ierr = PetscMalloc((1+ci[am])*sizeof(PetscScalar),&ca);CHKERRQ(ierr);
    k = 0;
    /* merge each row in global-column order: off-diagonal columns left of the
       diagonal block first, then the diagonal block, then the rest.
       NOTE(review): relies on cmap (garray) being sorted ascending — the
       MPIAIJ convention for the off-diagonal column map. */
    for (i=0; i<am; i++) {
      ncols_o = bi[i+1] - bi[i];
      ncols_d = ai[i+1] - ai[i];
      /* off-diagonal portion of A */
      for (jo=0; jo<ncols_o; jo++) {
        col = cmap[*bj];
        if (col >= cstart) break;
        cj[k] = col; bj++;
        ca[k++] = *ba++;
      }
      /* diagonal portion of A */
      for (j=0; j<ncols_d; j++) {
        cj[k] = cstart + *aj++;
        ca[k++] = *aa++;
      }
      /* off-diagonal portion of A */
      for (j=jo; j<ncols_o; j++) {
        cj[k] = cmap[*bj++];
        ca[k++] = *ba++;
      }
    }
    /* put together the new matrix */
    ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,am,A->cmap->N,ci,cj,ca,A_loc);CHKERRQ(ierr);
    /* MatCreateSeqAIJWithArrays flags matrix so PETSc doesn't free the user's arrays. */
    /* Since these are PETSc arrays, change flags to free them as necessary. */
    mat = (Mat_SeqAIJ*)(*A_loc)->data;
    mat->free_a  = PETSC_TRUE;
    mat->free_ij = PETSC_TRUE;
    mat->nonew   = 0;
  } else if (scall == MAT_REUSE_MATRIX){
    /* sparsity pattern is assumed unchanged since the INITIAL call: only the
       numerical values are rewritten, walking the same merge order */
    mat=(Mat_SeqAIJ*)(*A_loc)->data;
    ci = mat->i; cj = mat->j; cam = mat->a;
    for (i=0; i<am; i++) {
      /* off-diagonal portion of A */
      ncols_o = bi[i+1] - bi[i];
      for (jo=0; jo<ncols_o; jo++) {
        col = cmap[*bj];
        if (col >= cstart) break;
        *cam++ = *ba++; bj++;
      }
      /* diagonal portion of A */
      ncols_d = ai[i+1] - ai[i];
      for (j=0; j<ncols_d; j++) *cam++ = *aa++;
      /* off-diagonal portion of A */
      for (j=jo; j<ncols_o; j++) {
        *cam++ = *ba++; bj++;
      }
    }
  } else {
    SETERRQ1(PETSC_ERR_ARG_WRONG,"Invalid MatReuse %d",(int)scall);
  }

  ierr = PetscLogEventEnd(MAT_Getlocalmat,A,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetLocalMatCondensed"
/*@C
     MatGetLocalMatCondensed - Creates a SeqAIJ matrix by taking all its local rows and NON-ZERO columns

    Not Collective

   Input Parameters:
+    A - the matrix
. 
scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
-    row, col - index sets of rows and columns to extract (or PETSC_NULL)

   Output Parameter:
.    A_loc - the local sequential matrix generated

   Level: developer

@*/
PetscErrorCode PETSCMAT_DLLEXPORT MatGetLocalMatCondensed(Mat A,MatReuse scall,IS *row,IS *col,Mat *A_loc)
{
  Mat_MPIAIJ     *a=(Mat_MPIAIJ*)A->data;
  PetscErrorCode ierr;
  PetscInt       i,start,end,ncols,nzA,nzB,*cmap,imark,*idx;
  IS             isrowa,iscola;
  Mat            *aloc;

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(MAT_Getlocalmatcondensed,A,0,0,0);CHKERRQ(ierr);
  /* default row IS: all locally owned rows */
  if (!row){
    start = A->rmap->rstart; end = A->rmap->rend;
    ierr = ISCreateStride(PETSC_COMM_SELF,end-start,start,1,&isrowa);CHKERRQ(ierr);
  } else {
    isrowa = *row;
  }
  /* default column IS: the columns with local nonzeros, in ascending global
     order - off-diagonal columns left of the diagonal block, the diagonal
     block itself, then the remaining off-diagonal columns */
  if (!col){
    start = A->cmap->rstart;
    cmap  = a->garray;
    nzA   = a->A->cmap->n;
    nzB   = a->B->cmap->n;
    ierr  = PetscMalloc((nzA+nzB)*sizeof(PetscInt), &idx);CHKERRQ(ierr);
    ncols = 0;
    for (i=0; i<nzB; i++) {
      if (cmap[i] < start) idx[ncols++] = cmap[i];
      else break;
    }
    imark = i;
    for (i=0; i<nzA; i++) idx[ncols++] = start + i;
    for (i=imark; i<nzB; i++) idx[ncols++] = cmap[i];
    ierr = ISCreateGeneral(PETSC_COMM_SELF,ncols,idx,&iscola);CHKERRQ(ierr);
    ierr = PetscFree(idx);CHKERRQ(ierr);
  } else {
    iscola = *col;
  }
  /* MatGetSubMatrices allocates the output array itself for
     MAT_INITIAL_MATRIX; for reuse we must hand it the existing matrix */
  if (scall != MAT_INITIAL_MATRIX){
    ierr = PetscMalloc(sizeof(Mat),&aloc);CHKERRQ(ierr);
    aloc[0] = *A_loc;
  }
  ierr = MatGetSubMatrices(A,1,&isrowa,&iscola,scall,&aloc);CHKERRQ(ierr);
  *A_loc = aloc[0];
  ierr = PetscFree(aloc);CHKERRQ(ierr);
  /* destroy only the index sets created here, never caller-supplied ones */
  if (!row){
    ierr = ISDestroy(isrowa);CHKERRQ(ierr);
  }
  if (!col){
    ierr = ISDestroy(iscola);CHKERRQ(ierr);
  }
  ierr = PetscLogEventEnd(MAT_Getlocalmatcondensed,A,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetBrowsOfAcols"
/*@C
     MatGetBrowsOfAcols - Creates a SeqAIJ matrix by taking rows of B that equal to nonzero columns of local A

    Collective on Mat

   Input Parameters:
+    A,B - the matrices in mpiaij format
.    scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
-    rowb, colb - index sets of rows and columns of B to extract (or PETSC_NULL)

   Output Parameter:
+    rowb, colb - index sets of rows and columns of B to extract
. 
brstart - row index of B_seq from which next B->rmap->n rows are taken from B's local rows 451125616d81SHong Zhang - B_seq - the sequential matrix generated 451225616d81SHong Zhang 451325616d81SHong Zhang Level: developer 451425616d81SHong Zhang 451525616d81SHong Zhang @*/ 4516be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatGetBrowsOfAcols(Mat A,Mat B,MatReuse scall,IS *rowb,IS *colb,PetscInt *brstart,Mat *B_seq) 451725616d81SHong Zhang { 4518899cda47SBarry Smith Mat_MPIAIJ *a=(Mat_MPIAIJ*)A->data; 451925616d81SHong Zhang PetscErrorCode ierr; 4520b1d57f15SBarry Smith PetscInt *idx,i,start,ncols,nzA,nzB,*cmap,imark; 452125616d81SHong Zhang IS isrowb,iscolb; 452225616d81SHong Zhang Mat *bseq; 452325616d81SHong Zhang 452425616d81SHong Zhang PetscFunctionBegin; 4525d0f46423SBarry Smith if (A->cmap->rstart != B->rmap->rstart || A->cmap->rend != B->rmap->rend){ 4526d0f46423SBarry Smith SETERRQ4(PETSC_ERR_ARG_SIZ,"Matrix local dimensions are incompatible, (%D, %D) != (%D,%D)",A->cmap->rstart,A->cmap->rend,B->rmap->rstart,B->rmap->rend); 452725616d81SHong Zhang } 45284ebed01fSBarry Smith ierr = PetscLogEventBegin(MAT_GetBrowsOfAcols,A,B,0,0);CHKERRQ(ierr); 452925616d81SHong Zhang 453025616d81SHong Zhang if (scall == MAT_INITIAL_MATRIX){ 4531d0f46423SBarry Smith start = A->cmap->rstart; 453225616d81SHong Zhang cmap = a->garray; 4533d0f46423SBarry Smith nzA = a->A->cmap->n; 4534d0f46423SBarry Smith nzB = a->B->cmap->n; 4535b1d57f15SBarry Smith ierr = PetscMalloc((nzA+nzB)*sizeof(PetscInt), &idx);CHKERRQ(ierr); 453625616d81SHong Zhang ncols = 0; 45370390132cSHong Zhang for (i=0; i<nzB; i++) { /* row < local row index */ 453825616d81SHong Zhang if (cmap[i] < start) idx[ncols++] = cmap[i]; 453925616d81SHong Zhang else break; 454025616d81SHong Zhang } 454125616d81SHong Zhang imark = i; 45420390132cSHong Zhang for (i=0; i<nzA; i++) idx[ncols++] = start + i; /* local rows */ 45430390132cSHong Zhang for (i=imark; i<nzB; i++) idx[ncols++] = cmap[i]; /* row > local row 
index */ 454425616d81SHong Zhang ierr = ISCreateGeneral(PETSC_COMM_SELF,ncols,idx,&isrowb);CHKERRQ(ierr); 454525616d81SHong Zhang ierr = PetscFree(idx);CHKERRQ(ierr); 454625616d81SHong Zhang *brstart = imark; 4547d0f46423SBarry Smith ierr = ISCreateStride(PETSC_COMM_SELF,B->cmap->N,0,1,&iscolb);CHKERRQ(ierr); 454825616d81SHong Zhang } else { 454925616d81SHong Zhang if (!rowb || !colb) SETERRQ(PETSC_ERR_SUP,"IS rowb and colb must be provided for MAT_REUSE_MATRIX"); 455025616d81SHong Zhang isrowb = *rowb; iscolb = *colb; 455125616d81SHong Zhang ierr = PetscMalloc(sizeof(Mat),&bseq);CHKERRQ(ierr); 455225616d81SHong Zhang bseq[0] = *B_seq; 455325616d81SHong Zhang } 455425616d81SHong Zhang ierr = MatGetSubMatrices(B,1,&isrowb,&iscolb,scall,&bseq);CHKERRQ(ierr); 455525616d81SHong Zhang *B_seq = bseq[0]; 455625616d81SHong Zhang ierr = PetscFree(bseq);CHKERRQ(ierr); 455725616d81SHong Zhang if (!rowb){ 455825616d81SHong Zhang ierr = ISDestroy(isrowb);CHKERRQ(ierr); 455925616d81SHong Zhang } else { 456025616d81SHong Zhang *rowb = isrowb; 456125616d81SHong Zhang } 456225616d81SHong Zhang if (!colb){ 456325616d81SHong Zhang ierr = ISDestroy(iscolb);CHKERRQ(ierr); 456425616d81SHong Zhang } else { 456525616d81SHong Zhang *colb = iscolb; 456625616d81SHong Zhang } 45674ebed01fSBarry Smith ierr = PetscLogEventEnd(MAT_GetBrowsOfAcols,A,B,0,0);CHKERRQ(ierr); 456825616d81SHong Zhang PetscFunctionReturn(0); 456925616d81SHong Zhang } 4570429d309bSHong Zhang 4571a61c8c0fSHong Zhang #undef __FUNCT__ 4572a61c8c0fSHong Zhang #define __FUNCT__ "MatGetBrowsOfAoCols" 4573429d309bSHong Zhang /*@C 4574429d309bSHong Zhang MatGetBrowsOfAoCols - Creates a SeqAIJ matrix by taking rows of B that equal to nonzero columns 457501b7ae99SHong Zhang of the OFF-DIAGONAL portion of local A 4576429d309bSHong Zhang 4577429d309bSHong Zhang Collective on Mat 4578429d309bSHong Zhang 4579429d309bSHong Zhang Input Parameters: 4580429d309bSHong Zhang + A,B - the matrices in mpiaij format 458187025532SHong Zhang . 
scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX 458287025532SHong Zhang . startsj - starting point in B's sending and receiving j-arrays, saved for MAT_REUSE (or PETSC_NULL) 458387025532SHong Zhang - bufa_ptr - array for sending matrix values, saved for MAT_REUSE (or PETSC_NULL) 4584429d309bSHong Zhang 4585429d309bSHong Zhang Output Parameter: 458687025532SHong Zhang + B_oth - the sequential matrix generated 4587429d309bSHong Zhang 4588429d309bSHong Zhang Level: developer 4589429d309bSHong Zhang 4590429d309bSHong Zhang @*/ 4591dd6ea824SBarry Smith PetscErrorCode PETSCMAT_DLLEXPORT MatGetBrowsOfAoCols(Mat A,Mat B,MatReuse scall,PetscInt **startsj,MatScalar **bufa_ptr,Mat *B_oth) 4592429d309bSHong Zhang { 4593a6b2eed2SHong Zhang VecScatter_MPI_General *gen_to,*gen_from; 4594429d309bSHong Zhang PetscErrorCode ierr; 4595899cda47SBarry Smith Mat_MPIAIJ *a=(Mat_MPIAIJ*)A->data; 459687025532SHong Zhang Mat_SeqAIJ *b_oth; 4597a6b2eed2SHong Zhang VecScatter ctx=a->Mvctx; 45987adad957SLisandro Dalcin MPI_Comm comm=((PetscObject)ctx)->comm; 45997adad957SLisandro Dalcin PetscMPIInt *rprocs,*sprocs,tag=((PetscObject)ctx)->tag,rank; 4600d0f46423SBarry Smith PetscInt *rowlen,*bufj,*bufJ,ncols,aBn=a->B->cmap->n,row,*b_othi,*b_othj; 4601dd6ea824SBarry Smith PetscScalar *rvalues,*svalues; 4602dd6ea824SBarry Smith MatScalar *b_otha,*bufa,*bufA; 4603e42f35eeSHong Zhang PetscInt i,j,k,l,ll,nrecvs,nsends,nrows,*srow,*rstarts,*rstartsj = 0,*sstarts,*sstartsj,len; 4604910ba992SMatthew Knepley MPI_Request *rwaits = PETSC_NULL,*swaits = PETSC_NULL; 460587025532SHong Zhang MPI_Status *sstatus,rstatus; 4606aa5bb8c0SSatish Balay PetscMPIInt jj; 4607e42f35eeSHong Zhang PetscInt *cols,sbs,rbs; 4608ba8c8a56SBarry Smith PetscScalar *vals; 4609429d309bSHong Zhang 4610429d309bSHong Zhang PetscFunctionBegin; 4611d0f46423SBarry Smith if (A->cmap->rstart != B->rmap->rstart || A->cmap->rend != B->rmap->rend){ 4612d0f46423SBarry Smith SETERRQ4(PETSC_ERR_ARG_SIZ,"Matrix local dimensions are 
incompatible, (%d, %d) != (%d,%d)",A->cmap->rstart,A->cmap->rend,B->rmap->rstart,B->rmap->rend); 4613429d309bSHong Zhang } 46144ebed01fSBarry Smith ierr = PetscLogEventBegin(MAT_GetBrowsOfAocols,A,B,0,0);CHKERRQ(ierr); 4615a6b2eed2SHong Zhang ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 4616a6b2eed2SHong Zhang 4617a6b2eed2SHong Zhang gen_to = (VecScatter_MPI_General*)ctx->todata; 4618a6b2eed2SHong Zhang gen_from = (VecScatter_MPI_General*)ctx->fromdata; 4619e42f35eeSHong Zhang rvalues = gen_from->values; /* holds the length of receiving row */ 4620e42f35eeSHong Zhang svalues = gen_to->values; /* holds the length of sending row */ 4621a6b2eed2SHong Zhang nrecvs = gen_from->n; 4622a6b2eed2SHong Zhang nsends = gen_to->n; 4623d7ee0231SBarry Smith 4624d7ee0231SBarry Smith ierr = PetscMalloc2(nrecvs,MPI_Request,&rwaits,nsends,MPI_Request,&swaits);CHKERRQ(ierr); 4625a6b2eed2SHong Zhang srow = gen_to->indices; /* local row index to be sent */ 4626a6b2eed2SHong Zhang sstarts = gen_to->starts; 4627a6b2eed2SHong Zhang sprocs = gen_to->procs; 4628a6b2eed2SHong Zhang sstatus = gen_to->sstatus; 4629e42f35eeSHong Zhang sbs = gen_to->bs; 4630e42f35eeSHong Zhang rstarts = gen_from->starts; 4631e42f35eeSHong Zhang rprocs = gen_from->procs; 4632e42f35eeSHong Zhang rbs = gen_from->bs; 4633429d309bSHong Zhang 4634dea91ad1SHong Zhang if (!startsj || !bufa_ptr) scall = MAT_INITIAL_MATRIX; 4635429d309bSHong Zhang if (scall == MAT_INITIAL_MATRIX){ 4636a6b2eed2SHong Zhang /* i-array */ 4637a6b2eed2SHong Zhang /*---------*/ 4638a6b2eed2SHong Zhang /* post receives */ 4639a6b2eed2SHong Zhang for (i=0; i<nrecvs; i++){ 4640e42f35eeSHong Zhang rowlen = (PetscInt*)rvalues + rstarts[i]*rbs; 4641e42f35eeSHong Zhang nrows = (rstarts[i+1]-rstarts[i])*rbs; /* num of indices to be received */ 464287025532SHong Zhang ierr = MPI_Irecv(rowlen,nrows,MPIU_INT,rprocs[i],tag,comm,rwaits+i);CHKERRQ(ierr); 4643429d309bSHong Zhang } 4644a6b2eed2SHong Zhang 4645a6b2eed2SHong Zhang /* pack the outgoing message 
*/ 464687025532SHong Zhang ierr = PetscMalloc((nsends+nrecvs+3)*sizeof(PetscInt),&sstartsj);CHKERRQ(ierr); 4647a6b2eed2SHong Zhang rstartsj = sstartsj + nsends +1; 4648a6b2eed2SHong Zhang sstartsj[0] = 0; rstartsj[0] = 0; 4649a6b2eed2SHong Zhang len = 0; /* total length of j or a array to be sent */ 4650a6b2eed2SHong Zhang k = 0; 4651a6b2eed2SHong Zhang for (i=0; i<nsends; i++){ 4652e42f35eeSHong Zhang rowlen = (PetscInt*)svalues + sstarts[i]*sbs; 4653e42f35eeSHong Zhang nrows = sstarts[i+1]-sstarts[i]; /* num of block rows */ 465487025532SHong Zhang for (j=0; j<nrows; j++) { 4655d0f46423SBarry Smith row = srow[k] + B->rmap->range[rank]; /* global row idx */ 4656e42f35eeSHong Zhang for (l=0; l<sbs; l++){ 4657e42f35eeSHong Zhang ierr = MatGetRow_MPIAIJ(B,row+l,&ncols,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr); /* rowlength */ 4658e42f35eeSHong Zhang rowlen[j*sbs+l] = ncols; 4659e42f35eeSHong Zhang len += ncols; 4660e42f35eeSHong Zhang ierr = MatRestoreRow_MPIAIJ(B,row+l,&ncols,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr); 4661e42f35eeSHong Zhang } 4662a6b2eed2SHong Zhang k++; 4663429d309bSHong Zhang } 4664e42f35eeSHong Zhang ierr = MPI_Isend(rowlen,nrows*sbs,MPIU_INT,sprocs[i],tag,comm,swaits+i);CHKERRQ(ierr); 4665dea91ad1SHong Zhang sstartsj[i+1] = len; /* starting point of (i+1)-th outgoing msg in bufj and bufa */ 4666429d309bSHong Zhang } 466787025532SHong Zhang /* recvs and sends of i-array are completed */ 466887025532SHong Zhang i = nrecvs; 466987025532SHong Zhang while (i--) { 4670aa5bb8c0SSatish Balay ierr = MPI_Waitany(nrecvs,rwaits,&jj,&rstatus);CHKERRQ(ierr); 467187025532SHong Zhang } 46720c468ba9SBarry Smith if (nsends) {ierr = MPI_Waitall(nsends,swaits,sstatus);CHKERRQ(ierr);} 4673e42f35eeSHong Zhang 4674a6b2eed2SHong Zhang /* allocate buffers for sending j and a arrays */ 4675a6b2eed2SHong Zhang ierr = PetscMalloc((len+1)*sizeof(PetscInt),&bufj);CHKERRQ(ierr); 4676a6b2eed2SHong Zhang ierr = PetscMalloc((len+1)*sizeof(PetscScalar),&bufa);CHKERRQ(ierr); 
4677a6b2eed2SHong Zhang 467887025532SHong Zhang /* create i-array of B_oth */ 467987025532SHong Zhang ierr = PetscMalloc((aBn+2)*sizeof(PetscInt),&b_othi);CHKERRQ(ierr); 468087025532SHong Zhang b_othi[0] = 0; 4681a6b2eed2SHong Zhang len = 0; /* total length of j or a array to be received */ 4682a6b2eed2SHong Zhang k = 0; 4683a6b2eed2SHong Zhang for (i=0; i<nrecvs; i++){ 4684fd0ff01cSHong Zhang rowlen = (PetscInt*)rvalues + rstarts[i]*rbs; 4685e42f35eeSHong Zhang nrows = rbs*(rstarts[i+1]-rstarts[i]); /* num of rows to be recieved */ 468687025532SHong Zhang for (j=0; j<nrows; j++) { 468787025532SHong Zhang b_othi[k+1] = b_othi[k] + rowlen[j]; 4688a6b2eed2SHong Zhang len += rowlen[j]; k++; 4689a6b2eed2SHong Zhang } 4690dea91ad1SHong Zhang rstartsj[i+1] = len; /* starting point of (i+1)-th incoming msg in bufj and bufa */ 4691a6b2eed2SHong Zhang } 4692a6b2eed2SHong Zhang 469387025532SHong Zhang /* allocate space for j and a arrrays of B_oth */ 469487025532SHong Zhang ierr = PetscMalloc((b_othi[aBn]+1)*sizeof(PetscInt),&b_othj);CHKERRQ(ierr); 4695dd6ea824SBarry Smith ierr = PetscMalloc((b_othi[aBn]+1)*sizeof(MatScalar),&b_otha);CHKERRQ(ierr); 4696a6b2eed2SHong Zhang 469787025532SHong Zhang /* j-array */ 469887025532SHong Zhang /*---------*/ 4699a6b2eed2SHong Zhang /* post receives of j-array */ 4700a6b2eed2SHong Zhang for (i=0; i<nrecvs; i++){ 470187025532SHong Zhang nrows = rstartsj[i+1]-rstartsj[i]; /* length of the msg received */ 470287025532SHong Zhang ierr = MPI_Irecv(b_othj+rstartsj[i],nrows,MPIU_INT,rprocs[i],tag,comm,rwaits+i);CHKERRQ(ierr); 4703a6b2eed2SHong Zhang } 4704e42f35eeSHong Zhang 4705e42f35eeSHong Zhang /* pack the outgoing message j-array */ 4706a6b2eed2SHong Zhang k = 0; 4707a6b2eed2SHong Zhang for (i=0; i<nsends; i++){ 4708e42f35eeSHong Zhang nrows = sstarts[i+1]-sstarts[i]; /* num of block rows */ 4709a6b2eed2SHong Zhang bufJ = bufj+sstartsj[i]; 471087025532SHong Zhang for (j=0; j<nrows; j++) { 4711d0f46423SBarry Smith row = srow[k++] + 
B->rmap->range[rank]; /* global row idx */ 4712e42f35eeSHong Zhang for (ll=0; ll<sbs; ll++){ 4713e42f35eeSHong Zhang ierr = MatGetRow_MPIAIJ(B,row+ll,&ncols,&cols,PETSC_NULL);CHKERRQ(ierr); 4714a6b2eed2SHong Zhang for (l=0; l<ncols; l++){ 4715a6b2eed2SHong Zhang *bufJ++ = cols[l]; 471687025532SHong Zhang } 4717e42f35eeSHong Zhang ierr = MatRestoreRow_MPIAIJ(B,row+ll,&ncols,&cols,PETSC_NULL);CHKERRQ(ierr); 4718e42f35eeSHong Zhang } 471987025532SHong Zhang } 472087025532SHong Zhang ierr = MPI_Isend(bufj+sstartsj[i],sstartsj[i+1]-sstartsj[i],MPIU_INT,sprocs[i],tag,comm,swaits+i);CHKERRQ(ierr); 472187025532SHong Zhang } 472287025532SHong Zhang 472387025532SHong Zhang /* recvs and sends of j-array are completed */ 472487025532SHong Zhang i = nrecvs; 472587025532SHong Zhang while (i--) { 4726aa5bb8c0SSatish Balay ierr = MPI_Waitany(nrecvs,rwaits,&jj,&rstatus);CHKERRQ(ierr); 472787025532SHong Zhang } 47280c468ba9SBarry Smith if (nsends) {ierr = MPI_Waitall(nsends,swaits,sstatus);CHKERRQ(ierr);} 472987025532SHong Zhang } else if (scall == MAT_REUSE_MATRIX){ 473087025532SHong Zhang sstartsj = *startsj; 473187025532SHong Zhang rstartsj = sstartsj + nsends +1; 473287025532SHong Zhang bufa = *bufa_ptr; 473387025532SHong Zhang b_oth = (Mat_SeqAIJ*)(*B_oth)->data; 473487025532SHong Zhang b_otha = b_oth->a; 473587025532SHong Zhang } else { 473687025532SHong Zhang SETERRQ(PETSC_ERR_ARG_WRONGSTATE, "Matrix P does not posses an object container"); 473787025532SHong Zhang } 473887025532SHong Zhang 473987025532SHong Zhang /* a-array */ 474087025532SHong Zhang /*---------*/ 474187025532SHong Zhang /* post receives of a-array */ 474287025532SHong Zhang for (i=0; i<nrecvs; i++){ 474387025532SHong Zhang nrows = rstartsj[i+1]-rstartsj[i]; /* length of the msg received */ 474487025532SHong Zhang ierr = MPI_Irecv(b_otha+rstartsj[i],nrows,MPIU_SCALAR,rprocs[i],tag,comm,rwaits+i);CHKERRQ(ierr); 474587025532SHong Zhang } 4746e42f35eeSHong Zhang 4747e42f35eeSHong Zhang /* pack the outgoing 
message a-array */ 474887025532SHong Zhang k = 0; 474987025532SHong Zhang for (i=0; i<nsends; i++){ 4750e42f35eeSHong Zhang nrows = sstarts[i+1]-sstarts[i]; /* num of block rows */ 475187025532SHong Zhang bufA = bufa+sstartsj[i]; 475287025532SHong Zhang for (j=0; j<nrows; j++) { 4753d0f46423SBarry Smith row = srow[k++] + B->rmap->range[rank]; /* global row idx */ 4754e42f35eeSHong Zhang for (ll=0; ll<sbs; ll++){ 4755e42f35eeSHong Zhang ierr = MatGetRow_MPIAIJ(B,row+ll,&ncols,PETSC_NULL,&vals);CHKERRQ(ierr); 475687025532SHong Zhang for (l=0; l<ncols; l++){ 4757a6b2eed2SHong Zhang *bufA++ = vals[l]; 4758a6b2eed2SHong Zhang } 4759e42f35eeSHong Zhang ierr = MatRestoreRow_MPIAIJ(B,row+ll,&ncols,PETSC_NULL,&vals);CHKERRQ(ierr); 4760e42f35eeSHong Zhang } 4761a6b2eed2SHong Zhang } 476287025532SHong Zhang ierr = MPI_Isend(bufa+sstartsj[i],sstartsj[i+1]-sstartsj[i],MPIU_SCALAR,sprocs[i],tag,comm,swaits+i);CHKERRQ(ierr); 4763a6b2eed2SHong Zhang } 476487025532SHong Zhang /* recvs and sends of a-array are completed */ 476587025532SHong Zhang i = nrecvs; 476687025532SHong Zhang while (i--) { 4767aa5bb8c0SSatish Balay ierr = MPI_Waitany(nrecvs,rwaits,&jj,&rstatus);CHKERRQ(ierr); 476887025532SHong Zhang } 47690c468ba9SBarry Smith if (nsends) {ierr = MPI_Waitall(nsends,swaits,sstatus);CHKERRQ(ierr);} 4770d7ee0231SBarry Smith ierr = PetscFree2(rwaits,swaits);CHKERRQ(ierr); 4771a6b2eed2SHong Zhang 477287025532SHong Zhang if (scall == MAT_INITIAL_MATRIX){ 4773a6b2eed2SHong Zhang /* put together the new matrix */ 4774d0f46423SBarry Smith ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,aBn,B->cmap->N,b_othi,b_othj,b_otha,B_oth);CHKERRQ(ierr); 4775a6b2eed2SHong Zhang 4776a6b2eed2SHong Zhang /* MatCreateSeqAIJWithArrays flags matrix so PETSc doesn't free the user's arrays. */ 4777a6b2eed2SHong Zhang /* Since these are PETSc arrays, change flags to free them as necessary. 
*/ 477887025532SHong Zhang b_oth = (Mat_SeqAIJ *)(*B_oth)->data; 4779e6b907acSBarry Smith b_oth->free_a = PETSC_TRUE; 4780e6b907acSBarry Smith b_oth->free_ij = PETSC_TRUE; 478187025532SHong Zhang b_oth->nonew = 0; 4782a6b2eed2SHong Zhang 4783a6b2eed2SHong Zhang ierr = PetscFree(bufj);CHKERRQ(ierr); 4784dea91ad1SHong Zhang if (!startsj || !bufa_ptr){ 4785dea91ad1SHong Zhang ierr = PetscFree(sstartsj);CHKERRQ(ierr); 4786dea91ad1SHong Zhang ierr = PetscFree(bufa_ptr);CHKERRQ(ierr); 4787dea91ad1SHong Zhang } else { 478887025532SHong Zhang *startsj = sstartsj; 478987025532SHong Zhang *bufa_ptr = bufa; 479087025532SHong Zhang } 4791dea91ad1SHong Zhang } 47924ebed01fSBarry Smith ierr = PetscLogEventEnd(MAT_GetBrowsOfAocols,A,B,0,0);CHKERRQ(ierr); 4793429d309bSHong Zhang PetscFunctionReturn(0); 4794429d309bSHong Zhang } 4795ccd8e176SBarry Smith 479643eb5e2fSMatthew Knepley #undef __FUNCT__ 479743eb5e2fSMatthew Knepley #define __FUNCT__ "MatGetCommunicationStructs" 479843eb5e2fSMatthew Knepley /*@C 479943eb5e2fSMatthew Knepley MatGetCommunicationStructs - Provides access to the communication structures used in matrix-vector multiplication. 480043eb5e2fSMatthew Knepley 480143eb5e2fSMatthew Knepley Not Collective 480243eb5e2fSMatthew Knepley 480343eb5e2fSMatthew Knepley Input Parameters: 480443eb5e2fSMatthew Knepley . A - The matrix in mpiaij format 480543eb5e2fSMatthew Knepley 480643eb5e2fSMatthew Knepley Output Parameter: 480743eb5e2fSMatthew Knepley + lvec - The local vector holding off-process values from the argument to a matrix-vector product 480843eb5e2fSMatthew Knepley . 
colmap - A map from global column index to local index into lvec 480943eb5e2fSMatthew Knepley - multScatter - A scatter from the argument of a matrix-vector product to lvec 481043eb5e2fSMatthew Knepley 481143eb5e2fSMatthew Knepley Level: developer 481243eb5e2fSMatthew Knepley 481343eb5e2fSMatthew Knepley @*/ 481443eb5e2fSMatthew Knepley #if defined (PETSC_USE_CTABLE) 481543eb5e2fSMatthew Knepley PetscErrorCode PETSCMAT_DLLEXPORT MatGetCommunicationStructs(Mat A, Vec *lvec, PetscTable *colmap, VecScatter *multScatter) 481643eb5e2fSMatthew Knepley #else 481743eb5e2fSMatthew Knepley PetscErrorCode PETSCMAT_DLLEXPORT MatGetCommunicationStructs(Mat A, Vec *lvec, PetscInt *colmap[], VecScatter *multScatter) 481843eb5e2fSMatthew Knepley #endif 481943eb5e2fSMatthew Knepley { 482043eb5e2fSMatthew Knepley Mat_MPIAIJ *a; 482143eb5e2fSMatthew Knepley 482243eb5e2fSMatthew Knepley PetscFunctionBegin; 482343eb5e2fSMatthew Knepley PetscValidHeaderSpecific(A, MAT_COOKIE, 1); 482443eb5e2fSMatthew Knepley PetscValidPointer(lvec, 2) 482543eb5e2fSMatthew Knepley PetscValidPointer(colmap, 3) 482643eb5e2fSMatthew Knepley PetscValidPointer(multScatter, 4) 482743eb5e2fSMatthew Knepley a = (Mat_MPIAIJ *) A->data; 482843eb5e2fSMatthew Knepley if (lvec) *lvec = a->lvec; 482943eb5e2fSMatthew Knepley if (colmap) *colmap = a->colmap; 483043eb5e2fSMatthew Knepley if (multScatter) *multScatter = a->Mvctx; 483143eb5e2fSMatthew Knepley PetscFunctionReturn(0); 483243eb5e2fSMatthew Knepley } 483343eb5e2fSMatthew Knepley 483417667f90SBarry Smith EXTERN_C_BEGIN 48358cf70c4bSSatish Balay extern PetscErrorCode PETSCMAT_DLLEXPORT MatConvert_MPIAIJ_MPICRL(Mat,const MatType,MatReuse,Mat*); 48368cf70c4bSSatish Balay extern PetscErrorCode PETSCMAT_DLLEXPORT MatConvert_MPIAIJ_MPICSRPERM(Mat,const MatType,MatReuse,Mat*); 483717667f90SBarry Smith EXTERN_C_END 483817667f90SBarry Smith 48397c4f633dSBarry Smith #include "../src/mat/impls/dense/mpi/mpidense.h" 4840fc4dec0aSBarry Smith 4841fc4dec0aSBarry Smith #undef 
__FUNCT__ 4842fc4dec0aSBarry Smith #define __FUNCT__ "MatMatMultNumeric_MPIDense_MPIAIJ" 4843fc4dec0aSBarry Smith /* 4844fc4dec0aSBarry Smith Computes (B'*A')' since computing B*A directly is untenable 4845fc4dec0aSBarry Smith 4846fc4dec0aSBarry Smith n p p 4847fc4dec0aSBarry Smith ( ) ( ) ( ) 4848fc4dec0aSBarry Smith m ( A ) * n ( B ) = m ( C ) 4849fc4dec0aSBarry Smith ( ) ( ) ( ) 4850fc4dec0aSBarry Smith 4851fc4dec0aSBarry Smith */ 4852fc4dec0aSBarry Smith PetscErrorCode MatMatMultNumeric_MPIDense_MPIAIJ(Mat A,Mat B,Mat C) 4853fc4dec0aSBarry Smith { 4854fc4dec0aSBarry Smith PetscErrorCode ierr; 4855fc4dec0aSBarry Smith Mat At,Bt,Ct; 4856fc4dec0aSBarry Smith 4857fc4dec0aSBarry Smith PetscFunctionBegin; 4858fc4dec0aSBarry Smith ierr = MatTranspose(A,MAT_INITIAL_MATRIX,&At);CHKERRQ(ierr); 4859fc4dec0aSBarry Smith ierr = MatTranspose(B,MAT_INITIAL_MATRIX,&Bt);CHKERRQ(ierr); 4860fc4dec0aSBarry Smith ierr = MatMatMult(Bt,At,MAT_INITIAL_MATRIX,1.0,&Ct);CHKERRQ(ierr); 4861fc4dec0aSBarry Smith ierr = MatDestroy(At);CHKERRQ(ierr); 4862fc4dec0aSBarry Smith ierr = MatDestroy(Bt);CHKERRQ(ierr); 4863fc4dec0aSBarry Smith ierr = MatTranspose(Ct,MAT_REUSE_MATRIX,&C);CHKERRQ(ierr); 4864e5e4356aSBarry Smith ierr = MatDestroy(Ct);CHKERRQ(ierr); 4865fc4dec0aSBarry Smith PetscFunctionReturn(0); 4866fc4dec0aSBarry Smith } 4867fc4dec0aSBarry Smith 4868fc4dec0aSBarry Smith #undef __FUNCT__ 4869fc4dec0aSBarry Smith #define __FUNCT__ "MatMatMultSymbolic_MPIDense_MPIAIJ" 4870fc4dec0aSBarry Smith PetscErrorCode MatMatMultSymbolic_MPIDense_MPIAIJ(Mat A,Mat B,PetscReal fill,Mat *C) 4871fc4dec0aSBarry Smith { 4872fc4dec0aSBarry Smith PetscErrorCode ierr; 4873d0f46423SBarry Smith PetscInt m=A->rmap->n,n=B->cmap->n; 4874fc4dec0aSBarry Smith Mat Cmat; 4875fc4dec0aSBarry Smith 4876fc4dec0aSBarry Smith PetscFunctionBegin; 4877d0f46423SBarry Smith if (A->cmap->n != B->rmap->n) SETERRQ2(PETSC_ERR_ARG_SIZ,"A->cmap->n %d != B->rmap->n %d\n",A->cmap->n,B->rmap->n); 487839804f7cSBarry Smith ierr = 
MatCreate(((PetscObject)A)->comm,&Cmat);CHKERRQ(ierr); 4879fc4dec0aSBarry Smith ierr = MatSetSizes(Cmat,m,n,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr); 4880fc4dec0aSBarry Smith ierr = MatSetType(Cmat,MATMPIDENSE);CHKERRQ(ierr); 4881fc4dec0aSBarry Smith ierr = MatMPIDenseSetPreallocation(Cmat,PETSC_NULL);CHKERRQ(ierr); 488238556019SBarry Smith ierr = MatAssemblyBegin(Cmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 488338556019SBarry Smith ierr = MatAssemblyEnd(Cmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 4884fc4dec0aSBarry Smith *C = Cmat; 4885fc4dec0aSBarry Smith PetscFunctionReturn(0); 4886fc4dec0aSBarry Smith } 4887fc4dec0aSBarry Smith 4888fc4dec0aSBarry Smith /* ----------------------------------------------------------------*/ 4889fc4dec0aSBarry Smith #undef __FUNCT__ 4890fc4dec0aSBarry Smith #define __FUNCT__ "MatMatMult_MPIDense_MPIAIJ" 4891fc4dec0aSBarry Smith PetscErrorCode MatMatMult_MPIDense_MPIAIJ(Mat A,Mat B,MatReuse scall,PetscReal fill,Mat *C) 4892fc4dec0aSBarry Smith { 4893fc4dec0aSBarry Smith PetscErrorCode ierr; 4894fc4dec0aSBarry Smith 4895fc4dec0aSBarry Smith PetscFunctionBegin; 4896fc4dec0aSBarry Smith if (scall == MAT_INITIAL_MATRIX){ 4897fc4dec0aSBarry Smith ierr = MatMatMultSymbolic_MPIDense_MPIAIJ(A,B,fill,C);CHKERRQ(ierr); 4898fc4dec0aSBarry Smith } 4899fc4dec0aSBarry Smith ierr = MatMatMultNumeric_MPIDense_MPIAIJ(A,B,*C);CHKERRQ(ierr); 4900fc4dec0aSBarry Smith PetscFunctionReturn(0); 4901fc4dec0aSBarry Smith } 4902fc4dec0aSBarry Smith 49035c9eb25fSBarry Smith EXTERN_C_BEGIN 4904611f576cSBarry Smith #if defined(PETSC_HAVE_MUMPS) 49055c9eb25fSBarry Smith extern PetscErrorCode MatGetFactor_mpiaij_mumps(Mat,MatFactorType,Mat*); 4906611f576cSBarry Smith #endif 49073bf14a46SMatthew Knepley #if defined(PETSC_HAVE_PASTIX) 49083bf14a46SMatthew Knepley extern PetscErrorCode MatGetFactor_mpiaij_pastix(Mat,MatFactorType,Mat*); 49093bf14a46SMatthew Knepley #endif 4910611f576cSBarry Smith #if defined(PETSC_HAVE_SUPERLU_DIST) 49115c9eb25fSBarry Smith extern 
PetscErrorCode MatGetFactor_mpiaij_superlu_dist(Mat,MatFactorType,Mat*); 4912611f576cSBarry Smith #endif 4913611f576cSBarry Smith #if defined(PETSC_HAVE_SPOOLES) 49145c9eb25fSBarry Smith extern PetscErrorCode MatGetFactor_mpiaij_spooles(Mat,MatFactorType,Mat*); 4915611f576cSBarry Smith #endif 49165c9eb25fSBarry Smith EXTERN_C_END 49175c9eb25fSBarry Smith 4918ccd8e176SBarry Smith /*MC 4919ccd8e176SBarry Smith MATMPIAIJ - MATMPIAIJ = "mpiaij" - A matrix type to be used for parallel sparse matrices. 4920ccd8e176SBarry Smith 4921ccd8e176SBarry Smith Options Database Keys: 4922ccd8e176SBarry Smith . -mat_type mpiaij - sets the matrix type to "mpiaij" during a call to MatSetFromOptions() 4923ccd8e176SBarry Smith 4924ccd8e176SBarry Smith Level: beginner 4925ccd8e176SBarry Smith 4926175b88e8SBarry Smith .seealso: MatCreateMPIAIJ() 4927ccd8e176SBarry Smith M*/ 4928ccd8e176SBarry Smith 4929ccd8e176SBarry Smith EXTERN_C_BEGIN 4930ccd8e176SBarry Smith #undef __FUNCT__ 4931ccd8e176SBarry Smith #define __FUNCT__ "MatCreate_MPIAIJ" 4932be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatCreate_MPIAIJ(Mat B) 4933ccd8e176SBarry Smith { 4934ccd8e176SBarry Smith Mat_MPIAIJ *b; 4935ccd8e176SBarry Smith PetscErrorCode ierr; 4936ccd8e176SBarry Smith PetscMPIInt size; 4937ccd8e176SBarry Smith 4938ccd8e176SBarry Smith PetscFunctionBegin; 49397adad957SLisandro Dalcin ierr = MPI_Comm_size(((PetscObject)B)->comm,&size);CHKERRQ(ierr); 4940ccd8e176SBarry Smith 494138f2d2fdSLisandro Dalcin ierr = PetscNewLog(B,Mat_MPIAIJ,&b);CHKERRQ(ierr); 4942ccd8e176SBarry Smith B->data = (void*)b; 4943ccd8e176SBarry Smith ierr = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr); 4944d0f46423SBarry Smith B->rmap->bs = 1; 4945ccd8e176SBarry Smith B->assembled = PETSC_FALSE; 4946ccd8e176SBarry Smith B->mapping = 0; 4947ccd8e176SBarry Smith 4948ccd8e176SBarry Smith B->insertmode = NOT_SET_VALUES; 4949ccd8e176SBarry Smith b->size = size; 49507adad957SLisandro Dalcin ierr = 
MPI_Comm_rank(((PetscObject)B)->comm,&b->rank);CHKERRQ(ierr); 4951ccd8e176SBarry Smith 4952ccd8e176SBarry Smith /* build cache for off array entries formed */ 49537adad957SLisandro Dalcin ierr = MatStashCreate_Private(((PetscObject)B)->comm,1,&B->stash);CHKERRQ(ierr); 4954ccd8e176SBarry Smith b->donotstash = PETSC_FALSE; 4955ccd8e176SBarry Smith b->colmap = 0; 4956ccd8e176SBarry Smith b->garray = 0; 4957ccd8e176SBarry Smith b->roworiented = PETSC_TRUE; 4958ccd8e176SBarry Smith 4959ccd8e176SBarry Smith /* stuff used for matrix vector multiply */ 4960ccd8e176SBarry Smith b->lvec = PETSC_NULL; 4961ccd8e176SBarry Smith b->Mvctx = PETSC_NULL; 4962ccd8e176SBarry Smith 4963ccd8e176SBarry Smith /* stuff for MatGetRow() */ 4964ccd8e176SBarry Smith b->rowindices = 0; 4965ccd8e176SBarry Smith b->rowvalues = 0; 4966ccd8e176SBarry Smith b->getrowactive = PETSC_FALSE; 4967ccd8e176SBarry Smith 4968611f576cSBarry Smith #if defined(PETSC_HAVE_SPOOLES) 49695c9eb25fSBarry Smith ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatGetFactor_mpiaij_spooles_C", 49705c9eb25fSBarry Smith "MatGetFactor_mpiaij_spooles", 49715c9eb25fSBarry Smith MatGetFactor_mpiaij_spooles);CHKERRQ(ierr); 4972611f576cSBarry Smith #endif 4973611f576cSBarry Smith #if defined(PETSC_HAVE_MUMPS) 49745c9eb25fSBarry Smith ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatGetFactor_mpiaij_mumps_C", 49755c9eb25fSBarry Smith "MatGetFactor_mpiaij_mumps", 49765c9eb25fSBarry Smith MatGetFactor_mpiaij_mumps);CHKERRQ(ierr); 4977611f576cSBarry Smith #endif 49783bf14a46SMatthew Knepley #if defined(PETSC_HAVE_PASTIX) 49793bf14a46SMatthew Knepley ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatGetFactor_mpiaij_pastix_C", 49803bf14a46SMatthew Knepley "MatGetFactor_mpiaij_pastix", 49813bf14a46SMatthew Knepley MatGetFactor_mpiaij_pastix);CHKERRQ(ierr); 49823bf14a46SMatthew Knepley #endif 4983611f576cSBarry Smith #if defined(PETSC_HAVE_SUPERLU_DIST) 49845c9eb25fSBarry Smith ierr = 
PetscObjectComposeFunctionDynamic((PetscObject)B,"MatGetFactor_mpiaij_superlu_dist_C", 49855c9eb25fSBarry Smith "MatGetFactor_mpiaij_superlu_dist", 49865c9eb25fSBarry Smith MatGetFactor_mpiaij_superlu_dist);CHKERRQ(ierr); 4987611f576cSBarry Smith #endif 4988ccd8e176SBarry Smith ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatStoreValues_C", 4989ccd8e176SBarry Smith "MatStoreValues_MPIAIJ", 4990ccd8e176SBarry Smith MatStoreValues_MPIAIJ);CHKERRQ(ierr); 4991ccd8e176SBarry Smith ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatRetrieveValues_C", 4992ccd8e176SBarry Smith "MatRetrieveValues_MPIAIJ", 4993ccd8e176SBarry Smith MatRetrieveValues_MPIAIJ);CHKERRQ(ierr); 4994ccd8e176SBarry Smith ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatGetDiagonalBlock_C", 4995ccd8e176SBarry Smith "MatGetDiagonalBlock_MPIAIJ", 4996ccd8e176SBarry Smith MatGetDiagonalBlock_MPIAIJ);CHKERRQ(ierr); 4997ccd8e176SBarry Smith ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatIsTranspose_C", 4998ccd8e176SBarry Smith "MatIsTranspose_MPIAIJ", 4999ccd8e176SBarry Smith MatIsTranspose_MPIAIJ);CHKERRQ(ierr); 5000ccd8e176SBarry Smith ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAIJSetPreallocation_C", 5001ccd8e176SBarry Smith "MatMPIAIJSetPreallocation_MPIAIJ", 5002ccd8e176SBarry Smith MatMPIAIJSetPreallocation_MPIAIJ);CHKERRQ(ierr); 5003ccd8e176SBarry Smith ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAIJSetPreallocationCSR_C", 5004ccd8e176SBarry Smith "MatMPIAIJSetPreallocationCSR_MPIAIJ", 5005ccd8e176SBarry Smith MatMPIAIJSetPreallocationCSR_MPIAIJ);CHKERRQ(ierr); 5006ccd8e176SBarry Smith ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatDiagonalScaleLocal_C", 5007ccd8e176SBarry Smith "MatDiagonalScaleLocal_MPIAIJ", 5008ccd8e176SBarry Smith MatDiagonalScaleLocal_MPIAIJ);CHKERRQ(ierr); 500917667f90SBarry Smith ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatConvert_mpiaij_mpicsrperm_C", 
/* NOTE(review): this span is a blame-annotated listing; the "5010 17667f90
   Barry Smith"-style tokens are VCS annotations, not code.  The statements
   immediately below are the tail of an EXTERN_C constructor whose opening
   lies above this view: they register MatConvert/MatMatMult implementations
   on B via PetscObjectComposeFunctionDynamic() and set B's type name to
   MATMPIAIJ before returning. */
501017667f90SBarry Smith "MatConvert_MPIAIJ_MPICSRPERM", 501117667f90SBarry Smith MatConvert_MPIAIJ_MPICSRPERM);CHKERRQ(ierr); 501217667f90SBarry Smith ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatConvert_mpiaij_mpicrl_C", 501317667f90SBarry Smith "MatConvert_MPIAIJ_MPICRL", 501417667f90SBarry Smith MatConvert_MPIAIJ_MPICRL);CHKERRQ(ierr); 5015fc4dec0aSBarry Smith ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMatMult_mpidense_mpiaij_C", 5016fc4dec0aSBarry Smith "MatMatMult_MPIDense_MPIAIJ", 5017fc4dec0aSBarry Smith MatMatMult_MPIDense_MPIAIJ);CHKERRQ(ierr); 5018fc4dec0aSBarry Smith ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMatMultSymbolic_mpidense_mpiaij_C", 5019fc4dec0aSBarry Smith "MatMatMultSymbolic_MPIDense_MPIAIJ", 5020fc4dec0aSBarry Smith MatMatMultSymbolic_MPIDense_MPIAIJ);CHKERRQ(ierr); 5021fc4dec0aSBarry Smith ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMatMultNumeric_mpidense_mpiaij_C", 5022fc4dec0aSBarry Smith "MatMatMultNumeric_MPIDense_MPIAIJ", 5023fc4dec0aSBarry Smith MatMatMultNumeric_MPIDense_MPIAIJ);CHKERRQ(ierr); 502417667f90SBarry Smith ierr = PetscObjectChangeTypeName((PetscObject)B,MATMPIAIJ);CHKERRQ(ierr); 5025ccd8e176SBarry Smith PetscFunctionReturn(0); 5026ccd8e176SBarry Smith } 5027ccd8e176SBarry Smith EXTERN_C_END 502881824310SBarry Smith 502903bfb495SBarry Smith #undef __FUNCT__ 503003bfb495SBarry Smith #define __FUNCT__ "MatCreateMPIAIJWithSplitArrays" 503158d36128SBarry Smith /*@ 503203bfb495SBarry Smith MatCreateMPIAIJWithSplitArrays - creates an MPI AIJ matrix using arrays that contain the "diagonal" 503303bfb495SBarry Smith and "off-diagonal" part of the matrix in CSR format. 503403bfb495SBarry Smith 503503bfb495SBarry Smith Collective on MPI_Comm 503603bfb495SBarry Smith 503703bfb495SBarry Smith Input Parameters: 503803bfb495SBarry Smith + comm - MPI communicator 503903bfb495SBarry Smith . m - number of local rows (Cannot be PETSC_DECIDE) 504003bfb495SBarry Smith . 
n - This value should be the same as the local size used in creating the 504103bfb495SBarry Smith x vector for the matrix-vector product y = Ax. (or PETSC_DECIDE to have 504203bfb495SBarry Smith calculated if N is given) For square matrices n is almost always m. 504303bfb495SBarry Smith . M - number of global rows (or PETSC_DETERMINE to have calculated if m is given) 504403bfb495SBarry Smith . N - number of global columns (or PETSC_DETERMINE to have calculated if n is given) 504503bfb495SBarry Smith . i - row indices for "diagonal" portion of matrix 504603bfb495SBarry Smith . j - column indices 504703bfb495SBarry Smith . a - matrix values 504803bfb495SBarry Smith . oi - row indices for "off-diagonal" portion of matrix 504903bfb495SBarry Smith . oj - column indices 505003bfb495SBarry Smith - oa - matrix values 505103bfb495SBarry Smith 505203bfb495SBarry Smith Output Parameter: 505303bfb495SBarry Smith . mat - the matrix 505403bfb495SBarry Smith 505503bfb495SBarry Smith Level: advanced 505603bfb495SBarry Smith 505703bfb495SBarry Smith Notes: 505803bfb495SBarry Smith The i, j, and a arrays ARE NOT copied by this routine into the internal format used by PETSc. 
505903bfb495SBarry Smith 506003bfb495SBarry Smith The i and j indices are 0 based 506103bfb495SBarry Smith 506203bfb495SBarry Smith See MatCreateMPIAIJ() for the definition of "diagonal" and "off-diagonal" portion of the matrix 506303bfb495SBarry Smith 506403bfb495SBarry Smith 506503bfb495SBarry Smith .keywords: matrix, aij, compressed row, sparse, parallel 506603bfb495SBarry Smith 506703bfb495SBarry Smith .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIAIJSetPreallocation(), MatMPIAIJSetPreallocationCSR(), 50688d7a6e47SBarry Smith MPIAIJ, MatCreateMPIAIJ(), MatCreateMPIAIJWithArrays() 506903bfb495SBarry Smith @*/ 50708d7a6e47SBarry Smith PetscErrorCode PETSCMAT_DLLEXPORT MatCreateMPIAIJWithSplitArrays(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt i[],PetscInt j[],PetscScalar a[], 507103bfb495SBarry Smith PetscInt oi[], PetscInt oj[],PetscScalar oa[],Mat *mat) 507203bfb495SBarry Smith { 507303bfb495SBarry Smith PetscErrorCode ierr; 507403bfb495SBarry Smith Mat_MPIAIJ *maij; 507503bfb495SBarry Smith 507603bfb495SBarry Smith PetscFunctionBegin; 507703bfb495SBarry Smith if (m < 0) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"local number of rows (m) cannot be PETSC_DECIDE, or negative"); 507803bfb495SBarry Smith if (i[0]) { 507903bfb495SBarry Smith SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"i (row indices) must start with 0"); 508003bfb495SBarry Smith } 508103bfb495SBarry Smith if (oi[0]) { 508203bfb495SBarry Smith SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"oi (row indices) must start with 0"); 508303bfb495SBarry Smith } 508403bfb495SBarry Smith ierr = MatCreate(comm,mat);CHKERRQ(ierr); 508503bfb495SBarry Smith ierr = MatSetSizes(*mat,m,n,M,N);CHKERRQ(ierr); 508603bfb495SBarry Smith ierr = MatSetType(*mat,MATMPIAIJ);CHKERRQ(ierr); 508703bfb495SBarry Smith maij = (Mat_MPIAIJ*) (*mat)->data; 50888d7a6e47SBarry Smith maij->donotstash = PETSC_TRUE; 50898d7a6e47SBarry Smith (*mat)->preallocated = PETSC_TRUE; 509003bfb495SBarry Smith 50917408324eSLisandro Dalcin 
/* Body continues: force block size 1 on the row/column maps and set them up,
   wrap the caller's CSR arrays (NOT copied -- see Notes above) into the
   sequential "diagonal" block maij->A (m x n) and "off-diagonal" block
   maij->B (m x (*mat)->cmap->N), then assemble both blocks and the parallel
   matrix.  Since donotstash is PETSC_TRUE, the final MatAssemblyBegin/End on
   *mat performs no off-process communication of stashed values. */
ierr = PetscMapSetBlockSize((*mat)->rmap,1);CHKERRQ(ierr); 50927408324eSLisandro Dalcin ierr = PetscMapSetBlockSize((*mat)->cmap,1);CHKERRQ(ierr); 5093d0f46423SBarry Smith ierr = PetscMapSetUp((*mat)->rmap);CHKERRQ(ierr); 5094d0f46423SBarry Smith ierr = PetscMapSetUp((*mat)->cmap);CHKERRQ(ierr); 509503bfb495SBarry Smith 509603bfb495SBarry Smith ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,m,n,i,j,a,&maij->A);CHKERRQ(ierr); 5097d0f46423SBarry Smith ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,m,(*mat)->cmap->N,oi,oj,oa,&maij->B);CHKERRQ(ierr); 509803bfb495SBarry Smith 50998d7a6e47SBarry Smith ierr = MatAssemblyBegin(maij->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 51008d7a6e47SBarry Smith ierr = MatAssemblyEnd(maij->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 51018d7a6e47SBarry Smith ierr = MatAssemblyBegin(maij->B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 51028d7a6e47SBarry Smith ierr = MatAssemblyEnd(maij->B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 51038d7a6e47SBarry Smith 510403bfb495SBarry Smith ierr = MatAssemblyBegin(*mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 510503bfb495SBarry Smith ierr = MatAssemblyEnd(*mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 510603bfb495SBarry Smith PetscFunctionReturn(0); 510703bfb495SBarry Smith } 510803bfb495SBarry Smith 510981824310SBarry Smith /* 511081824310SBarry Smith Special version for direct calls from Fortran 511181824310SBarry Smith */ 511281824310SBarry Smith #if defined(PETSC_HAVE_FORTRAN_CAPS) 511381824310SBarry Smith #define matsetvaluesmpiaij_ MATSETVALUESMPIAIJ 511481824310SBarry Smith #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) 511581824310SBarry Smith #define matsetvaluesmpiaij_ matsetvaluesmpiaij 511681824310SBarry Smith #endif 511781824310SBarry Smith 511881824310SBarry Smith /* Change these macros so can be used in void function */ 511981824310SBarry Smith #undef CHKERRQ 51207adad957SLisandro Dalcin #define CHKERRQ(ierr) CHKERRABORT(((PetscObject)mat)->comm,ierr) 512181824310SBarry Smith #undef SETERRQ2 51227adad957SLisandro Dalcin 
/* NOTE(review): blame-annotated listing; "5123 81824310 ..." style tokens are
   VCS annotations, not code.  matsetvaluesmpiaij_() below is the MPIAIJ
   MatSetValues variant callable directly from Fortran.  It returns void, so
   CHKERRQ/SETERRQ/SETERRQ2 are redefined (here and just above) to
   CHKERRABORT on the matrix communicator instead of returning an error code.
   Rows owned locally (rstart <= im[i] < rend) are inserted straight into the
   sequential "diagonal" (A) or "off-diagonal" (B) block via the
   MatSetValues_SeqAIJ_{A,B}_Private() macros; other rows are stashed for
   communication at assembly time (unless aij->donotstash is set).  The local
   names declared below (rp1/ap1/..., rp2/ap2/..., bm, am, nonew, N, t, _i,
   lastcol1/2) are REQUIRED by those macros -- do not rename them. */
#define SETERRQ2(ierr,b,c,d) CHKERRABORT(((PetscObject)mat)->comm,ierr) 512381824310SBarry Smith #undef SETERRQ 51247adad957SLisandro Dalcin #define SETERRQ(ierr,b) CHKERRABORT(((PetscObject)mat)->comm,ierr) 512581824310SBarry Smith 512681824310SBarry Smith EXTERN_C_BEGIN 512781824310SBarry Smith #undef __FUNCT__ 512881824310SBarry Smith #define __FUNCT__ "matsetvaluesmpiaij_" 51291f6cc5b2SSatish Balay void PETSC_STDCALL matsetvaluesmpiaij_(Mat *mmat,PetscInt *mm,const PetscInt im[],PetscInt *mn,const PetscInt in[],const PetscScalar v[],InsertMode *maddv,PetscErrorCode *_ierr) 513081824310SBarry Smith { 513181824310SBarry Smith Mat mat = *mmat; 513281824310SBarry Smith PetscInt m = *mm, n = *mn; 513381824310SBarry Smith InsertMode addv = *maddv; 513481824310SBarry Smith Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 513581824310SBarry Smith PetscScalar value; 513681824310SBarry Smith PetscErrorCode ierr; 5137899cda47SBarry Smith 5138d9e2c085SLisandro Dalcin ierr = MatPreallocated(mat);CHKERRQ(ierr); 513981824310SBarry Smith if (mat->insertmode == NOT_SET_VALUES) { 514081824310SBarry Smith mat->insertmode = addv; 514181824310SBarry Smith } 514281824310SBarry Smith #if defined(PETSC_USE_DEBUG) 514381824310SBarry Smith else if (mat->insertmode != addv) { 514481824310SBarry Smith SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Cannot mix add values and insert values"); 514581824310SBarry Smith } 514681824310SBarry Smith #endif 514781824310SBarry Smith { 5148d0f46423SBarry Smith PetscInt i,j,rstart = mat->rmap->rstart,rend = mat->rmap->rend; 5149d0f46423SBarry Smith PetscInt cstart = mat->cmap->rstart,cend = mat->cmap->rend,row,col; 515081824310SBarry Smith PetscTruth roworiented = aij->roworiented; 515181824310SBarry Smith 515281824310SBarry Smith /* Some Variables required in the macro */ 515381824310SBarry Smith Mat A = aij->A; 515481824310SBarry Smith Mat_SeqAIJ *a = (Mat_SeqAIJ*)A->data; 515581824310SBarry Smith PetscInt *aimax = a->imax,*ai = a->i,*ailen = a->ilen,*aj = a->j; 
/* Cached CSR pointers/sizes of the diagonal block A and off-diagonal block B;
   consumed by the MatSetValues_SeqAIJ_{A,B}_Private() macros below. */
5156dd6ea824SBarry Smith MatScalar *aa = a->a; 515781824310SBarry Smith PetscTruth ignorezeroentries = (((a->ignorezeroentries)&&(addv==ADD_VALUES))?PETSC_TRUE:PETSC_FALSE); 515881824310SBarry Smith Mat B = aij->B; 515981824310SBarry Smith Mat_SeqAIJ *b = (Mat_SeqAIJ*)B->data; 5160d0f46423SBarry Smith PetscInt *bimax = b->imax,*bi = b->i,*bilen = b->ilen,*bj = b->j,bm = aij->B->rmap->n,am = aij->A->rmap->n; 5161dd6ea824SBarry Smith MatScalar *ba = b->a; 516281824310SBarry Smith 516381824310SBarry Smith PetscInt *rp1,*rp2,ii,nrow1,nrow2,_i,rmax1,rmax2,N,low1,high1,low2,high2,t,lastcol1,lastcol2; 516481824310SBarry Smith PetscInt nonew = a->nonew; 5165dd6ea824SBarry Smith MatScalar *ap1,*ap2; 516681824310SBarry Smith 516781824310SBarry Smith PetscFunctionBegin; 516881824310SBarry Smith for (i=0; i<m; i++) { 516981824310SBarry Smith if (im[i] < 0) continue; 517081824310SBarry Smith #if defined(PETSC_USE_DEBUG) 5171d0f46423SBarry Smith if (im[i] >= mat->rmap->N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",im[i],mat->rmap->N-1); 517281824310SBarry Smith #endif 517381824310SBarry Smith if (im[i] >= rstart && im[i] < rend) { 517481824310SBarry Smith row = im[i] - rstart; 517581824310SBarry Smith lastcol1 = -1; 517681824310SBarry Smith rp1 = aj + ai[row]; 517781824310SBarry Smith ap1 = aa + ai[row]; 517881824310SBarry Smith rmax1 = aimax[row]; 517981824310SBarry Smith nrow1 = ailen[row]; 518081824310SBarry Smith low1 = 0; 518181824310SBarry Smith high1 = nrow1; 518281824310SBarry Smith lastcol2 = -1; 518381824310SBarry Smith rp2 = bj + bi[row]; 518481824310SBarry Smith ap2 = ba + bi[row]; 518581824310SBarry Smith rmax2 = bimax[row]; 518681824310SBarry Smith nrow2 = bilen[row]; 518781824310SBarry Smith low2 = 0; 518881824310SBarry Smith high2 = nrow2; 518981824310SBarry Smith 519081824310SBarry Smith for (j=0; j<n; j++) { 519181824310SBarry Smith if (roworiented) value = v[i*n+j]; else value = v[i+j*m]; 519281824310SBarry Smith if (ignorezeroentries && 
value == 0.0 && (addv == ADD_VALUES)) continue; 519381824310SBarry Smith if (in[j] >= cstart && in[j] < cend){ 519481824310SBarry Smith col = in[j] - cstart; 519581824310SBarry Smith MatSetValues_SeqAIJ_A_Private(row,col,value,addv); 519681824310SBarry Smith } else if (in[j] < 0) continue; 519781824310SBarry Smith #if defined(PETSC_USE_DEBUG) 5198d0f46423SBarry Smith else if (in[j] >= mat->cmap->N) {SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",in[j],mat->cmap->N-1);} 519981824310SBarry Smith #endif 520081824310SBarry Smith else { 520181824310SBarry Smith if (mat->was_assembled) { 520281824310SBarry Smith if (!aij->colmap) { 520381824310SBarry Smith ierr = CreateColmap_MPIAIJ_Private(mat);CHKERRQ(ierr); 520481824310SBarry Smith } 520581824310SBarry Smith #if defined (PETSC_USE_CTABLE) 520681824310SBarry Smith ierr = PetscTableFind(aij->colmap,in[j]+1,&col);CHKERRQ(ierr); 520781824310SBarry Smith col--; 520881824310SBarry Smith #else 520981824310SBarry Smith col = aij->colmap[in[j]] - 1; 521081824310SBarry Smith #endif 521181824310SBarry Smith if (col < 0 && !((Mat_SeqAIJ*)(aij->A->data))->nonew) { 521281824310SBarry Smith ierr = DisAssemble_MPIAIJ(mat);CHKERRQ(ierr); 521381824310SBarry Smith col = in[j]; 521481824310SBarry Smith /* Reinitialize the variables required by MatSetValues_SeqAIJ_B_Private() */ 521581824310SBarry Smith B = aij->B; 521681824310SBarry Smith b = (Mat_SeqAIJ*)B->data; 521781824310SBarry Smith bimax = b->imax; bi = b->i; bilen = b->ilen; bj = b->j; 521881824310SBarry Smith rp2 = bj + bi[row]; 521981824310SBarry Smith ap2 = ba + bi[row]; 522081824310SBarry Smith rmax2 = bimax[row]; 522181824310SBarry Smith nrow2 = bilen[row]; 522281824310SBarry Smith low2 = 0; 522381824310SBarry Smith high2 = nrow2; 5224d0f46423SBarry Smith bm = aij->B->rmap->n; 522581824310SBarry Smith ba = b->a; 522681824310SBarry Smith } 522781824310SBarry Smith } else col = in[j]; 522881824310SBarry Smith 
/* Insert into the "off-diagonal" block B (cached pointers may have just been
   refreshed by the DisAssemble branch above). */
MatSetValues_SeqAIJ_B_Private(row,col,value,addv); 522981824310SBarry Smith } 523081824310SBarry Smith } 523181824310SBarry Smith } else { 523281824310SBarry Smith if (!aij->donotstash) { 523381824310SBarry Smith if (roworiented) { 523481824310SBarry Smith if (ignorezeroentries && v[i*n] == 0.0) continue; 523581824310SBarry Smith ierr = MatStashValuesRow_Private(&mat->stash,im[i],n,in,v+i*n);CHKERRQ(ierr); 523681824310SBarry Smith } else { 523781824310SBarry Smith if (ignorezeroentries && v[i] == 0.0) continue; 523881824310SBarry Smith ierr = MatStashValuesCol_Private(&mat->stash,im[i],n,in,v+i,m);CHKERRQ(ierr); 523981824310SBarry Smith } 524081824310SBarry Smith } 524181824310SBarry Smith } 524281824310SBarry Smith }} 524381824310SBarry Smith PetscFunctionReturnVoid(); 524481824310SBarry Smith } 524581824310SBarry Smith EXTERN_C_END 524603bfb495SBarry Smith 5247