#define PETSCMAT_DLL

#include "../src/mat/impls/aij/mpi/mpiaij.h"   /*I "petscmat.h" I*/

#undef __FUNCT__
#define __FUNCT__ "MatDistribute_MPIAIJ"
/*
    Distributes a SeqAIJ matrix across a set of processes. Code stolen from
    MatLoad_MPIAIJ(). Horrible lack of reuse. Should be a routine for each matrix type.

    Only for square matrices.

    comm   - communicator over which the new MPIAIJ matrix is distributed
    gmat   - the global SeqAIJ matrix; only accessed on rank 0
    m      - number of local rows this process is to own
    reuse  - MAT_INITIAL_MATRIX creates *inmat (structure + values);
             any other value assumes *inmat already has its nonzero structure
             and only refreshes the numerical values from rank 0
    inmat  - the resulting distributed matrix

    Rank 0 ships row lengths, column indices, and values to each process with
    raw MPI_Send/MPI_Recv on a fresh object tag; every process then builds its
    local diagonal (A) / off-diagonal (B) split.
*/
PetscErrorCode MatDistribute_MPIAIJ(MPI_Comm comm,Mat gmat,PetscInt m,MatReuse reuse,Mat *inmat)
{
  PetscMPIInt    rank,size;
  PetscInt       *rowners,*dlens,*olens,i,rstart,rend,j,jj,nz,*gmataj,cnt,row,*ld;
  PetscErrorCode ierr;
  Mat            mat;
  Mat_SeqAIJ     *gmata;
  PetscMPIInt    tag;
  MPI_Status     status;
  PetscTruth     aij;
  MatScalar      *gmataa,*ao,*ad,*gmataarestore=0;

  PetscFunctionBegin;
  CHKMEMQ;
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  if (!rank) {
    /* only rank 0 holds the global matrix, so only it can type-check it */
    ierr = PetscTypeCompare((PetscObject)gmat,MATSEQAIJ,&aij);CHKERRQ(ierr);
    if (!aij) SETERRQ1(PETSC_ERR_SUP,"Currently no support for input matrix of type %s\n",((PetscObject)gmat)->type_name);
  }
  if (reuse == MAT_INITIAL_MATRIX) {
    ierr = MatCreate(comm,&mat);CHKERRQ(ierr);
    ierr = MatSetSizes(mat,m,m,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr);
    ierr = MatSetType(mat,MATAIJ);CHKERRQ(ierr);
    ierr = PetscMalloc((size+1)*sizeof(PetscInt),&rowners);CHKERRQ(ierr);
    ierr = PetscMalloc2(m,PetscInt,&dlens,m,PetscInt,&olens);CHKERRQ(ierr);
    /* gather everyone's local row count, then prefix-sum into ownership ranges */
    ierr = MPI_Allgather(&m,1,MPIU_INT,rowners+1,1,MPIU_INT,comm);CHKERRQ(ierr);
    rowners[0] = 0;
    for (i=2; i<=size; i++) {
      rowners[i] += rowners[i-1];
    }
    rstart = rowners[rank];
    rend   = rowners[rank+1];
    /* fresh tag so these raw sends/recvs cannot collide with other traffic on this object */
    ierr   = PetscObjectGetNewTag((PetscObject)mat,&tag);CHKERRQ(ierr);
    if (!rank) {
      gmata = (Mat_SeqAIJ*) gmat->data;
      /* send row lengths to all processors */
      for (i=0; i<m; i++) dlens[i] = gmata->ilen[i];
      for (i=1; i<size; i++) {
        ierr = MPI_Send(gmata->ilen + rowners[i],rowners[i+1]-rowners[i],MPIU_INT,i,tag,comm);CHKERRQ(ierr);
      }
      /* determine number diagonal and off-diagonal counts; ld[i] counts entries
         strictly left of this process's diagonal block (used later in the reuse path) */
      ierr = PetscMemzero(olens,m*sizeof(PetscInt));CHKERRQ(ierr);
      ierr = PetscMalloc(m*sizeof(PetscInt),&ld);CHKERRQ(ierr);
      ierr = PetscMemzero(ld,m*sizeof(PetscInt));CHKERRQ(ierr);
      jj = 0;
      for (i=0; i<m; i++) {
        for (j=0; j<dlens[i]; j++) {
          if (gmata->j[jj] < rstart) ld[i]++;
          if (gmata->j[jj] < rstart || gmata->j[jj] >= rend) olens[i]++;
          jj++;
        }
      }
      /* send column indices to other processes */
      for (i=1; i<size; i++) {
        nz   = gmata->i[rowners[i+1]]-gmata->i[rowners[i]];
        ierr = MPI_Send(&nz,1,MPIU_INT,i,tag,comm);CHKERRQ(ierr);
        ierr = MPI_Send(gmata->j + gmata->i[rowners[i]],nz,MPIU_INT,i,tag,comm);CHKERRQ(ierr);
      }

      /* send numerical values to other processes */
      for (i=1; i<size; i++) {
        nz   = gmata->i[rowners[i+1]]-gmata->i[rowners[i]];
        ierr = MPI_Send(gmata->a + gmata->i[rowners[i]],nz,MPIU_SCALAR,i,tag,comm);CHKERRQ(ierr);
      }
      /* rank 0 reads its own slice directly from the global matrix, no copy */
      gmataa = gmata->a;
      gmataj = gmata->j;

    } else {
      /* receive row lengths */
      ierr = MPI_Recv(dlens,m,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr);
      /* receive column indices */
      ierr = MPI_Recv(&nz,1,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr);
      ierr = PetscMalloc2(nz,PetscScalar,&gmataa,nz,PetscInt,&gmataj);CHKERRQ(ierr);
      ierr = MPI_Recv(gmataj,nz,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr);
      /* determine number diagonal and off-diagonal counts (same scan as on rank 0) */
      ierr = PetscMemzero(olens,m*sizeof(PetscInt));CHKERRQ(ierr);
      ierr = PetscMalloc(m*sizeof(PetscInt),&ld);CHKERRQ(ierr);
      ierr = PetscMemzero(ld,m*sizeof(PetscInt));CHKERRQ(ierr);
      jj = 0;
      for (i=0; i<m; i++) {
        for (j=0; j<dlens[i]; j++) {
          if (gmataj[jj] < rstart) ld[i]++;
          if (gmataj[jj] < rstart || gmataj[jj] >= rend) olens[i]++;
          jj++;
        }
      }
      /* receive numerical values */
      ierr = PetscMemzero(gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr);
      ierr = MPI_Recv(gmataa,nz,MPIU_SCALAR,0,tag,comm,&status);CHKERRQ(ierr);
    }
    /* set preallocation: dlens currently holds total row lengths; subtract the
       off-diagonal counts to get the diagonal-block lengths */
    for (i=0; i<m; i++) {
      dlens[i] -= olens[i];
    }
    ierr = MatSeqAIJSetPreallocation(mat,0,dlens);CHKERRQ(ierr);
    ierr = MatMPIAIJSetPreallocation(mat,0,dlens,0,olens);CHKERRQ(ierr);

    /* restore dlens to full row lengths for the insertion loop below */
    for (i=0; i<m; i++) {
      dlens[i] += olens[i];
    }
    cnt  = 0;
    for (i=0; i<m; i++) {
      row  = rstart + i;
      ierr = MatSetValues(mat,1,&row,dlens[i],gmataj+cnt,gmataa+cnt,INSERT_VALUES);CHKERRQ(ierr);
      cnt += dlens[i];
    }
    if (rank) {
      /* non-root ranks allocated their receive buffers above; rank 0 aliases gmat's arrays */
      ierr = PetscFree2(gmataa,gmataj);CHKERRQ(ierr);
    }
    ierr = PetscFree2(dlens,olens);CHKERRQ(ierr);
    ierr = PetscFree(rowners);CHKERRQ(ierr);
    /* stash the left-of-diagonal counts; the MAT_REUSE path below depends on them */
    ((Mat_MPIAIJ*)(mat->data))->ld = ld;
    *inmat = mat;
  } else {   /* column indices are already set; only need to move over numerical values from process 0 */
    Mat_SeqAIJ *Ad = (Mat_SeqAIJ*)((Mat_MPIAIJ*)((*inmat)->data))->A->data;
    Mat_SeqAIJ *Ao = (Mat_SeqAIJ*)((Mat_MPIAIJ*)((*inmat)->data))->B->data;
    mat  = *inmat;
    ierr = PetscObjectGetNewTag((PetscObject)mat,&tag);CHKERRQ(ierr);
    if (!rank) {
      /* send numerical values to other processes */
      gmata  = (Mat_SeqAIJ*) gmat->data;
      ierr   = MatGetOwnershipRanges(mat,(const PetscInt**)&rowners);CHKERRQ(ierr);
      gmataa = gmata->a;
      for (i=1; i<size; i++) {
        nz   = gmata->i[rowners[i+1]]-gmata->i[rowners[i]];
        ierr = MPI_Send(gmataa + gmata->i[rowners[i]],nz,MPIU_SCALAR,i,tag,comm);CHKERRQ(ierr);
      }
      /* NOTE(review): this nz is never read afterwards -- the copy loops below
         are driven by mat->rmap->n and the Ad/Ao row pointers; looks vestigial. Confirm. */
      nz = gmata->i[rowners[1]]-gmata->i[rowners[0]];
    } else {
      /* receive numerical values from process 0 */
      nz   = Ad->nz + Ao->nz;
      ierr = PetscMalloc(nz*sizeof(PetscScalar),&gmataa);CHKERRQ(ierr); gmataarestore = gmataa;
      ierr = MPI_Recv(gmataa,nz,MPIU_SCALAR,0,tag,comm,&status);CHKERRQ(ierr);
    }
    /* transfer numerical values into the diagonal A and off diagonal B parts of mat.
       Within each global row the incoming values are ordered: entries left of the
       diagonal block, then the diagonal block, then entries right of it; ld[] tells
       how many land in the "left" piece. The first and last rows are peeled off so
       the middle loop can merge each row's right piece with the next row's left piece. */
    ld = ((Mat_MPIAIJ*)(mat->data))->ld;
    ad = Ad->a;
    ao = Ao->a;
    if (mat->rmap->n) {
      i  = 0;
      nz = ld[i];                  ierr = PetscMemcpy(ao,gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr); ao += nz; gmataa += nz;
      nz = Ad->i[i+1] - Ad->i[i];  ierr = PetscMemcpy(ad,gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr); ad += nz; gmataa += nz;
    }
    for (i=1; i<mat->rmap->n; i++) {
      nz = Ao->i[i] - Ao->i[i-1] - ld[i-1] + ld[i]; ierr = PetscMemcpy(ao,gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr); ao += nz; gmataa += nz;
      nz = Ad->i[i+1] - Ad->i[i];                   ierr = PetscMemcpy(ad,gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr); ad += nz; gmataa += nz;
    }
    i--;
    if (mat->rmap->n) {
      /* trailing right-of-diagonal piece of the last row */
      nz = Ao->i[i+1] - Ao->i[i] - ld[i]; ierr = PetscMemcpy(ao,gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr); ao += nz; gmataa += nz;
    }
    if (rank) {
      /* gmataa was advanced during the copies; free via the saved base pointer */
      ierr = PetscFree(gmataarestore);CHKERRQ(ierr);
    }
  }
  ierr = MatAssemblyBegin(mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  CHKMEMQ;
  PetscFunctionReturn(0);
}

/*
    Local utility routine that creates a mapping from the global column
    number to the local number in the off-diagonal part of the local
    storage of the matrix. When PETSC_USE_CTABLE is used this is scalable at
    a slightly higher hash-table cost; without it, it is not scalable (each
    processor has an order-N integer array) but is fast to access.

    Entries are stored shifted by +1 so that 0 can mean "not present".
*/
#undef __FUNCT__
#define __FUNCT__ "CreateColmap_MPIAIJ_Private"
PetscErrorCode CreateColmap_MPIAIJ_Private(Mat mat)
{
  Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
  PetscErrorCode ierr;
  PetscInt       n = aij->B->cmap->n,i;

  PetscFunctionBegin;
#if defined (PETSC_USE_CTABLE)
  /* hash table keyed by global column+1, mapping to local column+1 */
  ierr = PetscTableCreate(n,&aij->colmap);CHKERRQ(ierr);
  for (i=0; i<n; i++){
    ierr = PetscTableAdd(aij->colmap,aij->garray[i]+1,i+1);CHKERRQ(ierr);
  }
#else
  /* dense array over ALL global columns; 0 = column not in local off-diagonal part */
  ierr = PetscMalloc((mat->cmap->N+1)*sizeof(PetscInt),&aij->colmap);CHKERRQ(ierr);
  ierr = PetscLogObjectMemory(mat,mat->cmap->N*sizeof(PetscInt));CHKERRQ(ierr);
  ierr = PetscMemzero(aij->colmap,mat->cmap->N*sizeof(PetscInt));CHKERRQ(ierr);
  for (i=0; i<n; i++) aij->colmap[aij->garray[i]] = i+1;
#endif
  PetscFunctionReturn(0);
}


#define CHUNKSIZE   15

/*
    Inserts (row,col,value) into the diagonal block A, open-coded for speed
    inside MatSetValues_MPIAIJ. Relies on the caller having set up the
    per-row cursors rp1/ap1/nrow1/rmax1/low1/high1/lastcol1 and the
    A-matrix aliases a, aa, ai, aj, aimax, ailen, nonew, ignorezeroentries.
    Strategy: narrow the search window by bisection while it is larger than 5,
    finish with a linear scan; on a miss, grow the row if allowed
    (MatSeqXAIJReallocateAIJ) and shift later entries up to make room.
*/
#define MatSetValues_SeqAIJ_A_Private(row,col,value,addv) \
{ \
    if (col <= lastcol1)  low1 = 0; else high1 = nrow1; \
    lastcol1 = col;\
    while (high1-low1 > 5) { \
      t = (low1+high1)/2; \
      if (rp1[t] > col) high1 = t; \
      else              low1  = t; \
    } \
      for (_i=low1; _i<high1; _i++) { \
        if (rp1[_i] > col) break; \
        if (rp1[_i] == col) { \
          if (addv == ADD_VALUES) ap1[_i] += value;   \
          else                    ap1[_i] = value; \
          goto a_noinsert; \
        } \
      }  \
      if (value == 0.0 && ignorezeroentries) {low1 = 0; high1 = nrow1;goto a_noinsert;} \
      if (nonew == 1) {low1 = 0; high1 = nrow1; goto a_noinsert;} \
      if (nonew == -1) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, %D) into matrix", row, col); \
      MatSeqXAIJReallocateAIJ(A,am,1,nrow1,row,col,rmax1,aa,ai,aj,rp1,ap1,aimax,nonew,MatScalar); \
      N = nrow1++ - 1; a->nz++; high1++; \
      /* shift up all the later entries in this row */ \
      for (ii=N; ii>=_i; ii--) { \
        rp1[ii+1] = rp1[ii]; \
        ap1[ii+1] = ap1[ii]; \
      } \
      rp1[_i] = col;  \
      ap1[_i] = value;  \
      a_noinsert: ; \
      ailen[row] = nrow1; \
}


/*
    Same insertion logic as MatSetValues_SeqAIJ_A_Private, but targeting the
    off-diagonal block B via the parallel set of cursors
    rp2/ap2/nrow2/rmax2/low2/high2/lastcol2 and aliases b, ba, bi, bj, bimax, bilen.
*/
#define MatSetValues_SeqAIJ_B_Private(row,col,value,addv) \
{ \
    if (col <= lastcol2) low2 = 0; else high2 = nrow2; \
    lastcol2 = col;\
    while (high2-low2 > 5) { \
      t = (low2+high2)/2; \
      if (rp2[t] > col) high2 = t; \
      else              low2  = t; \
    } \
      for (_i=low2; _i<high2; _i++) { \
        if (rp2[_i] > col) break; \
        if (rp2[_i] == col) { \
          if (addv == ADD_VALUES) ap2[_i] += value; \
          else                    ap2[_i] = value; \
          goto b_noinsert; \
        } \
      } \
      if (value == 0.0 && ignorezeroentries) {low2 = 0; high2 = nrow2; goto b_noinsert;} \
      if (nonew == 1) {low2 = 0; high2 = nrow2; goto b_noinsert;} \
      if (nonew == -1) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, %D) into matrix", row, col); \
      MatSeqXAIJReallocateAIJ(B,bm,1,nrow2,row,col,rmax2,ba,bi,bj,rp2,ap2,bimax,nonew,MatScalar); \
      N = nrow2++ - 1; b->nz++; high2++; \
      /* shift up all the later entries in this row */ \
      for (ii=N; ii>=_i; ii--) { \
        rp2[ii+1] = rp2[ii]; \
        ap2[ii+1] = ap2[ii]; \
      } \
      rp2[_i] = col; \
      ap2[_i] = value; \
      b_noinsert: ; \
      bilen[row] = nrow2; \
}
#undef __FUNCT__
#define __FUNCT__ "MatSetValuesRow_MPIAIJ"
/*
    Overwrites one locally-owned row with the caller-supplied values v[],
    which must be ordered by ascending GLOBAL column index. The row is split
    into: off-diagonal entries left of the diagonal block (first l values),
    the diagonal block (A), then the remaining off-diagonal entries (B).
    row is a GLOBAL row number; it is shifted by the ownership start.
*/
PetscErrorCode MatSetValuesRow_MPIAIJ(Mat A,PetscInt row,const PetscScalar v[])
{
  Mat_MPIAIJ     *mat = (Mat_MPIAIJ*)A->data;
  Mat_SeqAIJ     *a = (Mat_SeqAIJ*)mat->A->data,*b = (Mat_SeqAIJ*)mat->B->data;
  PetscErrorCode ierr;
  PetscInt       l,*garray = mat->garray,diag;

  PetscFunctionBegin;
  /* code only works for square matrices A */

  /* find size of row to the left of the diagonal part */
  ierr = MatGetOwnershipRange(A,&diag,0);CHKERRQ(ierr);
  row  = row - diag;
  for (l=0; l<b->i[row+1]-b->i[row]; l++) {
    if (garray[b->j[b->i[row]+l]] > diag) break;
  }
  ierr = PetscMemcpy(b->a+b->i[row],v,l*sizeof(PetscScalar));CHKERRQ(ierr);

  /* diagonal part */
  ierr = PetscMemcpy(a->a+a->i[row],v+l,(a->i[row+1]-a->i[row])*sizeof(PetscScalar));CHKERRQ(ierr);

  /* right of diagonal part */
  ierr = PetscMemcpy(b->a+b->i[row]+l,v+l+a->i[row+1]-a->i[row],(b->i[row+1]-b->i[row]-l)*sizeof(PetscScalar));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetValues_MPIAIJ"
/*
    Inserts/adds an m-by-n logically dense block of values (global indices).
    Locally-owned rows go straight into the diagonal (A) or off-diagonal (B)
    sequential blocks via the open-coded macros above; rows owned elsewhere
    are stashed for communication at assembly time. Negative row/column
    indices are silently skipped.
*/
PetscErrorCode MatSetValues_MPIAIJ(Mat mat,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode addv)
{
  Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
  PetscScalar    value;
  PetscErrorCode ierr;
  PetscInt       i,j,rstart = mat->rmap->rstart,rend = mat->rmap->rend;
  PetscInt       cstart = mat->cmap->rstart,cend = mat->cmap->rend,row,col;
  PetscTruth     roworiented = aij->roworiented;

  /* Some Variables required in the macro */
  Mat            A = aij->A;
  Mat_SeqAIJ     *a = (Mat_SeqAIJ*)A->data;
  PetscInt       *aimax = a->imax,*ai = a->i,*ailen = a->ilen,*aj = a->j;
  MatScalar      *aa = a->a;
  PetscTruth     ignorezeroentries = a->ignorezeroentries;
  Mat            B = aij->B;
  Mat_SeqAIJ     *b = (Mat_SeqAIJ*)B->data;
  PetscInt       *bimax = b->imax,*bi = b->i,*bilen = b->ilen,*bj = b->j,bm = aij->B->rmap->n,am = aij->A->rmap->n;
  MatScalar      *ba = b->a;

  PetscInt       *rp1,*rp2,ii,nrow1,nrow2,_i,rmax1,rmax2,N,low1,high1,low2,high2,t,lastcol1,lastcol2;
  PetscInt       nonew = a->nonew;
  MatScalar      *ap1,*ap2;

  PetscFunctionBegin;
  for (i=0; i<m; i++) {
    if (im[i] < 0) continue;
#if defined(PETSC_USE_DEBUG)
    if (im[i] >= mat->rmap->N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",im[i],mat->rmap->N-1);
#endif
    if (im[i] >= rstart && im[i] < rend) {
      /* locally owned row: prime the search cursors the insertion macros use */
      row      = im[i] - rstart;
      lastcol1 = -1;
      rp1      = aj + ai[row];
      ap1      = aa + ai[row];
      rmax1    = aimax[row];
      nrow1    = ailen[row];
      low1     = 0;
      high1    = nrow1;
      lastcol2 = -1;
      rp2      = bj + bi[row];
      ap2      = ba + bi[row];
      rmax2    = bimax[row];
      nrow2    = bilen[row];
      low2     = 0;
      high2    = nrow2;

      for (j=0; j<n; j++) {
        /* v == NULL means insert structural zeros */
        if (v) {if (roworiented) value = v[i*n+j]; else value = v[i+j*m];} else value = 0.0;
        if (ignorezeroentries && value == 0.0 && (addv == ADD_VALUES)) continue;
        if (in[j] >= cstart && in[j] < cend){
          col = in[j] - cstart;
          MatSetValues_SeqAIJ_A_Private(row,col,value,addv);
        } else if (in[j] < 0) continue;
#if defined(PETSC_USE_DEBUG)
        else if (in[j] >= mat->cmap->N) {SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",in[j],mat->cmap->N-1);}
#endif
        else {
          if (mat->was_assembled) {
            /* after assembly, B uses compacted local column numbers: translate
               the global column through the colmap (entries stored +1, 0 = absent) */
            if (!aij->colmap) {
              ierr = CreateColmap_MPIAIJ_Private(mat);CHKERRQ(ierr);
            }
#if defined (PETSC_USE_CTABLE)
            ierr = PetscTableFind(aij->colmap,in[j]+1,&col);CHKERRQ(ierr);
            col--;
#else
            col = aij->colmap[in[j]] - 1;
#endif
            if (col < 0 && !((Mat_SeqAIJ*)(aij->A->data))->nonew) {
              /* new off-diagonal nonzero: fall back to global numbering for B */
              ierr = DisAssemble_MPIAIJ(mat);CHKERRQ(ierr);
              col  = in[j];
              /* Reinitialize the variables required by MatSetValues_SeqAIJ_B_Private();
                 DisAssemble replaced B so all cached pointers are stale */
              B = aij->B;
              b = (Mat_SeqAIJ*)B->data;
              bimax = b->imax; bi = b->i; bilen = b->ilen; bj = b->j; ba = b->a;
              rp2   = bj + bi[row];
              ap2   = ba + bi[row];
              rmax2 = bimax[row];
              nrow2 = bilen[row];
              low2  = 0;
              high2 = nrow2;
              bm    = aij->B->rmap->n;
              ba    = b->a;
            }
          } else col = in[j];
          MatSetValues_SeqAIJ_B_Private(row,col,value,addv);
        }
      }
    } else {
      /* off-process row: stash for MatAssemblyBegin/End to communicate */
      if (!aij->donotstash) {
        if (roworiented) {
          ierr = MatStashValuesRow_Private(&mat->stash,im[i],n,in,v+i*n,(PetscTruth)(ignorezeroentries && (addv == ADD_VALUES)));CHKERRQ(ierr);
        } else {
          ierr = MatStashValuesCol_Private(&mat->stash,im[i],n,in,v+i,m,(PetscTruth)(ignorezeroentries && (addv == ADD_VALUES)));CHKERRQ(ierr);
        }
      }
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetValues_MPIAIJ"
/*
    Retrieves an m-by-n block of values (global indices) into v (row-major).
    Only rows owned by this process may be requested; off-diagonal columns
    absent from the local nonzero structure come back as 0.0.
*/
PetscErrorCode MatGetValues_MPIAIJ(Mat mat,PetscInt m,const PetscInt idxm[],PetscInt n,const PetscInt idxn[],PetscScalar v[])
{
  Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
  PetscErrorCode ierr;
  PetscInt       i,j,rstart = mat->rmap->rstart,rend = mat->rmap->rend;
  PetscInt       cstart = mat->cmap->rstart,cend = mat->cmap->rend,row,col;

  PetscFunctionBegin;
  for (i=0; i<m; i++) {
    if (idxm[i] < 0) continue; /* SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Negative row: %D",idxm[i]);*/
    if (idxm[i] >= mat->rmap->N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",idxm[i],mat->rmap->N-1);
    if (idxm[i] >= rstart && idxm[i] < rend) {
      row = idxm[i] - rstart;
      for (j=0; j<n; j++) {
        if (idxn[j] < 0) continue; /* SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Negative column: %D",idxn[j]); */
        if (idxn[j] >= mat->cmap->N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",idxn[j],mat->cmap->N-1);
        if (idxn[j] >= cstart && idxn[j] < cend){
          col  = idxn[j] - cstart;
          ierr = MatGetValues(aij->A,1,&row,1,&col,v+i*n+j);CHKERRQ(ierr);
        } else {
          /* off-diagonal column: translate global -> local via colmap (+1 encoding) */
          if (!aij->colmap) {
            ierr = CreateColmap_MPIAIJ_Private(mat);CHKERRQ(ierr);
          }
#if defined (PETSC_USE_CTABLE)
          ierr = PetscTableFind(aij->colmap,idxn[j]+1,&col);CHKERRQ(ierr);
          col --;
#else
          col = aij->colmap[idxn[j]] - 1;
#endif
          /* garray check guards against a stale/aliased colmap entry */
          if ((col < 0) || (aij->garray[col] != idxn[j])) *(v+i*n+j) = 0.0;
          else {
            ierr = MatGetValues(aij->B,1,&row,1,&col,v+i*n+j);CHKERRQ(ierr);
          }
        }
      }
    } else {
      SETERRQ(PETSC_ERR_SUP,"Only local values currently supported");
    }
  }
  PetscFunctionReturn(0);
}

extern PetscErrorCode MatMultDiagonalBlock_MPIAIJ(Mat,Vec,Vec);

#undef __FUNCT__
#define __FUNCT__ "MatAssemblyBegin_MPIAIJ"
/*
    Starts assembly: verifies all ranks agree on INSERT vs ADD mode, then
    begins scattering the stashed off-process values to their owners.
*/
PetscErrorCode MatAssemblyBegin_MPIAIJ(Mat mat,MatAssemblyType mode)
{
  Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
  PetscErrorCode ierr;
  PetscInt       nstash,reallocs;
  InsertMode     addv;

  PetscFunctionBegin;
  if (aij->donotstash) {
    PetscFunctionReturn(0);
  }

  /* make sure all processors are either in INSERTMODE or ADDMODE */
  ierr = MPI_Allreduce(&mat->insertmode,&addv,1,MPI_INT,MPI_BOR,((PetscObject)mat)->comm);CHKERRQ(ierr);
  if (addv == (ADD_VALUES|INSERT_VALUES)) {
    SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Some processors inserted others added");
  }
  mat->insertmode = addv; /* in case this processor had no cache */

  ierr = MatStashScatterBegin_Private(mat,&mat->stash,mat->rmap->range);CHKERRQ(ierr);
  ierr = MatStashGetInfo_Private(&mat->stash,&nstash,&reallocs);CHKERRQ(ierr);
  ierr = PetscInfo2(aij->A,"Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatAssemblyEnd_MPIAIJ"
/*
    Completes assembly: drains the stash (inserting received values via
    MatSetValues_MPIAIJ), assembles the local A and B blocks, coordinates
    a collective disassemble if any rank's B structure changed, and
    (re)builds the parallel multiply machinery on first final assembly.
*/
PetscErrorCode MatAssemblyEnd_MPIAIJ(Mat mat,MatAssemblyType mode)
{
  Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
  Mat_SeqAIJ     *a=(Mat_SeqAIJ *)aij->A->data;
  PetscErrorCode ierr;
  PetscMPIInt    n;
  PetscInt       i,j,rstart,ncols,flg;
  PetscInt       *row,*col;
  PetscTruth     other_disassembled;
  PetscScalar    *val;
  InsertMode     addv = mat->insertmode;

  /* do not use 'b = (Mat_SeqAIJ *)aij->B->data' as B can be reset in disassembly */
  PetscFunctionBegin;
  if (!aij->donotstash) {
    while (1) {
      ierr = MatStashScatterGetMesg_Private(&mat->stash,&n,&row,&col,&val,&flg);CHKERRQ(ierr);
      if (!flg) break;

      for (i=0; i<n;) {
        /* Now identify the consecutive vals belonging to the same row */
        for (j=i,rstart=row[j]; j<n; j++) { if (row[j] != rstart) break; }
        if (j < n) ncols = j-i;
        else       ncols = n-i;
        /* Now assemble all these values with a single function call */
        ierr = MatSetValues_MPIAIJ(mat,1,row+i,ncols,col+i,val+i,addv);CHKERRQ(ierr);
        i = j;
      }
    }
    ierr = MatStashScatterEnd_Private(&mat->stash);CHKERRQ(ierr);
  }
  a->compressedrow.use = PETSC_FALSE;
  ierr = MatAssemblyBegin(aij->A,mode);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(aij->A,mode);CHKERRQ(ierr);

  /* determine if any processor has disassembled, if so we must
     also disassemble ourselves, in order that we may reassemble. */
  /*
     if nonzero structure of submatrix B cannot change then we know that
     no processor disassembled thus we can skip this stuff
  */
  if (!((Mat_SeqAIJ*)aij->B->data)->nonew)  {
    ierr = MPI_Allreduce(&mat->was_assembled,&other_disassembled,1,MPI_INT,MPI_PROD,((PetscObject)mat)->comm);CHKERRQ(ierr);
    if (mat->was_assembled && !other_disassembled) {
      ierr = DisAssemble_MPIAIJ(mat);CHKERRQ(ierr);
    }
  }
  if (!mat->was_assembled && mode == MAT_FINAL_ASSEMBLY) {
    ierr = MatSetUpMultiply_MPIAIJ(mat);CHKERRQ(ierr);
  }
  ierr = MatSetOption(aij->B,MAT_USE_INODES,PETSC_FALSE);CHKERRQ(ierr);
  ((Mat_SeqAIJ *)aij->B->data)->compressedrow.use = PETSC_TRUE; /* b->compressedrow.use */
  ierr = MatAssemblyBegin(aij->B,mode);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(aij->B,mode);CHKERRQ(ierr);

  /* invalidate the MatGetRow() work buffer; it is lazily re-created */
  ierr = PetscFree(aij->rowvalues);CHKERRQ(ierr);
  aij->rowvalues = 0;

  /* used by MatAXPY() */
  a->xtoy = 0; ((Mat_SeqAIJ *)aij->B->data)->xtoy = 0;  /* b->xtoy = 0 */
  a->XtoY = 0; ((Mat_SeqAIJ *)aij->B->data)->XtoY = 0;  /* b->XtoY = 0 */

  if (aij->diag) {ierr = VecDestroy(aij->diag);CHKERRQ(ierr);aij->diag = 0;}
  if (a->inode.size) mat->ops->multdiagonalblock = MatMultDiagonalBlock_MPIAIJ;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatZeroEntries_MPIAIJ"
/*
    Zeros all entries (structure preserved) by zeroing both local blocks.
*/
PetscErrorCode MatZeroEntries_MPIAIJ(Mat A)
{
  Mat_MPIAIJ     *l = (Mat_MPIAIJ*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatZeroEntries(l->A);CHKERRQ(ierr);
  ierr = MatZeroEntries(l->B);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatZeroRows_MPIAIJ"
/*
    Zeros the given (possibly off-process) global rows, placing diag on the
    diagonal. Phase shown here: each process figures out which listed rows
    belong to which owner, exchanges the counts/rows with nonblocking MPI,
    and collects the rows it must zero locally.
    (Function continues beyond the end of this file excerpt.)
*/
PetscErrorCode MatZeroRows_MPIAIJ(Mat A,PetscInt N,const PetscInt rows[],PetscScalar diag)
{
  Mat_MPIAIJ     *l = (Mat_MPIAIJ*)A->data;
  PetscErrorCode ierr;
  PetscMPIInt    size = l->size,imdex,n,rank = l->rank,tag = ((PetscObject)A)->tag,lastidx = -1;
  PetscInt       i,*owners = A->rmap->range;
  PetscInt       *nprocs,j,idx,nsends,row;
  PetscInt       nmax,*svalues,*starts,*owner,nrecvs;
  PetscInt       *rvalues,count,base,slen,*source;
  PetscInt       *lens,*lrows,*values,rstart=A->rmap->rstart;
  MPI_Comm       comm = ((PetscObject)A)->comm;
  MPI_Request    *send_waits,*recv_waits;
  MPI_Status     recv_status,*send_status;
#if defined(PETSC_DEBUG)
  PetscTruth     found = PETSC_FALSE;
#endif

  PetscFunctionBegin;
  /* first count number of contributors to each processor; nprocs[2*j] is the
     count destined for rank j, nprocs[2*j+1] is a 0/1 "any message" flag */
  ierr  = PetscMalloc(2*size*sizeof(PetscInt),&nprocs);CHKERRQ(ierr);
  ierr  = PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr);
  ierr  = PetscMalloc((N+1)*sizeof(PetscInt),&owner);CHKERRQ(ierr); /* see note*/
  j = 0;
  for (i=0; i<N; i++) {
    /* j is only reset when the row list goes non-monotone, so a sorted rows[]
       makes the owner search a single forward sweep */
    if (lastidx > (idx = rows[i])) j = 0;
    lastidx = idx;
    for (; j<size; j++) {
      if (idx >= owners[j] && idx < owners[j+1]) {
        nprocs[2*j]++;
        nprocs[2*j+1] = 1;
        owner[i] = j;
#if defined(PETSC_DEBUG)
        found = PETSC_TRUE;
#endif
        break;
      }
    }
#if defined(PETSC_DEBUG)
    if (!found) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Index out of range");
    found = PETSC_FALSE;
#endif
  }
  nsends = 0;  for (i=0; i<size; i++) { nsends += nprocs[2*i+1];}

  /* inform other processors of number of messages and max length*/
  ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr);

  /* post receives: */
  ierr = PetscMalloc((nrecvs+1)*(nmax+1)*sizeof(PetscInt),&rvalues);CHKERRQ(ierr);
  ierr = PetscMalloc((nrecvs+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr);
  for (i=0; i<nrecvs; i++) {
    ierr = MPI_Irecv(rvalues+nmax*i,nmax,MPIU_INT,MPI_ANY_SOURCE,tag,comm,recv_waits+i);CHKERRQ(ierr);
  }

  /* do sends:
      1) starts[i] gives the starting index in svalues for stuff going to
         the ith processor
  */
  ierr = PetscMalloc((N+1)*sizeof(PetscInt),&svalues);CHKERRQ(ierr);
  ierr = PetscMalloc((nsends+1)*sizeof(MPI_Request),&send_waits);CHKERRQ(ierr);
  ierr = PetscMalloc((size+1)*sizeof(PetscInt),&starts);CHKERRQ(ierr);
  starts[0] = 0;
  for (i=1; i<size; i++) { starts[i] = starts[i-1] + nprocs[2*i-2];}
  for (i=0; i<N; i++) {
    svalues[starts[owner[i]]++] = rows[i];
  }

  /* starts[] was consumed as a cursor above; rebuild it for the sends */
  starts[0] = 0;
  for (i=1; i<size+1; i++) { starts[i] = starts[i-1] + nprocs[2*i-2];}
  count = 0;
  for (i=0; i<size; i++) {
    if (nprocs[2*i+1]) {
      ierr = MPI_Isend(svalues+starts[i],nprocs[2*i],MPIU_INT,i,tag,comm,send_waits+count++);CHKERRQ(ierr);
    }
  }
  ierr = PetscFree(starts);CHKERRQ(ierr);

  base = owners[rank];

  /* wait on receives */
  ierr   = PetscMalloc(2*(nrecvs+1)*sizeof(PetscInt),&lens);CHKERRQ(ierr);
  source = lens + nrecvs;
  count  = nrecvs; slen = 0;
  while (count) {
    ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
    /* unpack receives into our local space */
    ierr = MPI_Get_count(&recv_status,MPIU_INT,&n);CHKERRQ(ierr);
    source[imdex] = recv_status.MPI_SOURCE;
    lens[imdex]   = n;
    slen += n;
    count--;
6531eb62cbbSBarry Smith } 654606d414cSSatish Balay ierr = PetscFree(recv_waits);CHKERRQ(ierr); 6551eb62cbbSBarry Smith 6561eb62cbbSBarry Smith /* move the data into the send scatter */ 657b1d57f15SBarry Smith ierr = PetscMalloc((slen+1)*sizeof(PetscInt),&lrows);CHKERRQ(ierr); 6581eb62cbbSBarry Smith count = 0; 6591eb62cbbSBarry Smith for (i=0; i<nrecvs; i++) { 6601eb62cbbSBarry Smith values = rvalues + i*nmax; 6611eb62cbbSBarry Smith for (j=0; j<lens[i]; j++) { 6621eb62cbbSBarry Smith lrows[count++] = values[j] - base; 6631eb62cbbSBarry Smith } 6641eb62cbbSBarry Smith } 665606d414cSSatish Balay ierr = PetscFree(rvalues);CHKERRQ(ierr); 666606d414cSSatish Balay ierr = PetscFree(lens);CHKERRQ(ierr); 667606d414cSSatish Balay ierr = PetscFree(owner);CHKERRQ(ierr); 668606d414cSSatish Balay ierr = PetscFree(nprocs);CHKERRQ(ierr); 6691eb62cbbSBarry Smith 6701eb62cbbSBarry Smith /* actually zap the local rows */ 6716eb55b6aSBarry Smith /* 6726eb55b6aSBarry Smith Zero the required rows. If the "diagonal block" of the matrix 673a8c7a070SBarry Smith is square and the user wishes to set the diagonal we use separate 6746eb55b6aSBarry Smith code so that MatSetValues() is not called for each diagonal allocating 6756eb55b6aSBarry Smith new memory, thus calling lots of mallocs and slowing things down. 
6766eb55b6aSBarry Smith 6776eb55b6aSBarry Smith */ 678e2d53e46SBarry Smith /* must zero l->B before l->A because the (diag) case below may put values into l->B*/ 679f4df32b1SMatthew Knepley ierr = MatZeroRows(l->B,slen,lrows,0.0);CHKERRQ(ierr); 680d0f46423SBarry Smith if ((diag != 0.0) && (l->A->rmap->N == l->A->cmap->N)) { 681f4df32b1SMatthew Knepley ierr = MatZeroRows(l->A,slen,lrows,diag);CHKERRQ(ierr); 682f4df32b1SMatthew Knepley } else if (diag != 0.0) { 683f4df32b1SMatthew Knepley ierr = MatZeroRows(l->A,slen,lrows,0.0);CHKERRQ(ierr); 684fa46199cSSatish Balay if (((Mat_SeqAIJ*)l->A->data)->nonew) { 68529bbc08cSBarry Smith SETERRQ(PETSC_ERR_SUP,"MatZeroRows() on rectangular matrices cannot be used with the Mat options\n\ 686512a5fc5SBarry Smith MAT_NEW_NONZERO_LOCATIONS,MAT_NEW_NONZERO_LOCATION_ERR,MAT_NEW_NONZERO_ALLOCATION_ERR"); 6876525c446SSatish Balay } 688e2d53e46SBarry Smith for (i = 0; i < slen; i++) { 689e2d53e46SBarry Smith row = lrows[i] + rstart; 690f4df32b1SMatthew Knepley ierr = MatSetValues(A,1,&row,1,&row,&diag,INSERT_VALUES);CHKERRQ(ierr); 691e2d53e46SBarry Smith } 692e2d53e46SBarry Smith ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 693e2d53e46SBarry Smith ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 6946eb55b6aSBarry Smith } else { 695f4df32b1SMatthew Knepley ierr = MatZeroRows(l->A,slen,lrows,0.0);CHKERRQ(ierr); 6966eb55b6aSBarry Smith } 697606d414cSSatish Balay ierr = PetscFree(lrows);CHKERRQ(ierr); 69872dacd9aSBarry Smith 6991eb62cbbSBarry Smith /* wait on sends */ 7001eb62cbbSBarry Smith if (nsends) { 701b0a32e0cSBarry Smith ierr = PetscMalloc(nsends*sizeof(MPI_Status),&send_status);CHKERRQ(ierr); 702ca161407SBarry Smith ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr); 703606d414cSSatish Balay ierr = PetscFree(send_status);CHKERRQ(ierr); 7041eb62cbbSBarry Smith } 705606d414cSSatish Balay ierr = PetscFree(send_waits);CHKERRQ(ierr); 706606d414cSSatish Balay ierr = 
PetscFree(svalues);CHKERRQ(ierr); 7071eb62cbbSBarry Smith 7083a40ed3dSBarry Smith PetscFunctionReturn(0); 7091eb62cbbSBarry Smith } 7101eb62cbbSBarry Smith 7114a2ae208SSatish Balay #undef __FUNCT__ 7124a2ae208SSatish Balay #define __FUNCT__ "MatMult_MPIAIJ" 713dfbe8321SBarry Smith PetscErrorCode MatMult_MPIAIJ(Mat A,Vec xx,Vec yy) 7141eb62cbbSBarry Smith { 715416022c9SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 716dfbe8321SBarry Smith PetscErrorCode ierr; 717b1d57f15SBarry Smith PetscInt nt; 718416022c9SBarry Smith 7193a40ed3dSBarry Smith PetscFunctionBegin; 720a2ce50c7SBarry Smith ierr = VecGetLocalSize(xx,&nt);CHKERRQ(ierr); 721d0f46423SBarry Smith if (nt != A->cmap->n) { 722d0f46423SBarry Smith SETERRQ2(PETSC_ERR_ARG_SIZ,"Incompatible partition of A (%D) and xx (%D)",A->cmap->n,nt); 723fbd6ef76SBarry Smith } 724ca9f406cSSatish Balay ierr = VecScatterBegin(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 725f830108cSBarry Smith ierr = (*a->A->ops->mult)(a->A,xx,yy);CHKERRQ(ierr); 726ca9f406cSSatish Balay ierr = VecScatterEnd(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 727f830108cSBarry Smith ierr = (*a->B->ops->multadd)(a->B,a->lvec,yy,yy);CHKERRQ(ierr); 7283a40ed3dSBarry Smith PetscFunctionReturn(0); 7291eb62cbbSBarry Smith } 7301eb62cbbSBarry Smith 7314a2ae208SSatish Balay #undef __FUNCT__ 732bd0c2dcbSBarry Smith #define __FUNCT__ "MatMultDiagonalBlock_MPIAIJ" 733bd0c2dcbSBarry Smith PetscErrorCode MatMultDiagonalBlock_MPIAIJ(Mat A,Vec bb,Vec xx) 734bd0c2dcbSBarry Smith { 735bd0c2dcbSBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 736bd0c2dcbSBarry Smith PetscErrorCode ierr; 737bd0c2dcbSBarry Smith 738bd0c2dcbSBarry Smith PetscFunctionBegin; 739bd0c2dcbSBarry Smith ierr = MatMultDiagonalBlock(a->A,bb,xx);CHKERRQ(ierr); 740bd0c2dcbSBarry Smith PetscFunctionReturn(0); 741bd0c2dcbSBarry Smith } 742bd0c2dcbSBarry Smith 743bd0c2dcbSBarry Smith #undef __FUNCT__ 7444a2ae208SSatish Balay #define __FUNCT__ 
"MatMultAdd_MPIAIJ" 745dfbe8321SBarry Smith PetscErrorCode MatMultAdd_MPIAIJ(Mat A,Vec xx,Vec yy,Vec zz) 746da3a660dSBarry Smith { 747416022c9SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 748dfbe8321SBarry Smith PetscErrorCode ierr; 7493a40ed3dSBarry Smith 7503a40ed3dSBarry Smith PetscFunctionBegin; 751ca9f406cSSatish Balay ierr = VecScatterBegin(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 752f830108cSBarry Smith ierr = (*a->A->ops->multadd)(a->A,xx,yy,zz);CHKERRQ(ierr); 753ca9f406cSSatish Balay ierr = VecScatterEnd(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 754f830108cSBarry Smith ierr = (*a->B->ops->multadd)(a->B,a->lvec,zz,zz);CHKERRQ(ierr); 7553a40ed3dSBarry Smith PetscFunctionReturn(0); 756da3a660dSBarry Smith } 757da3a660dSBarry Smith 7584a2ae208SSatish Balay #undef __FUNCT__ 7594a2ae208SSatish Balay #define __FUNCT__ "MatMultTranspose_MPIAIJ" 760dfbe8321SBarry Smith PetscErrorCode MatMultTranspose_MPIAIJ(Mat A,Vec xx,Vec yy) 761da3a660dSBarry Smith { 762416022c9SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 763dfbe8321SBarry Smith PetscErrorCode ierr; 764a5ff213dSBarry Smith PetscTruth merged; 765da3a660dSBarry Smith 7663a40ed3dSBarry Smith PetscFunctionBegin; 767a5ff213dSBarry Smith ierr = VecScatterGetMerged(a->Mvctx,&merged);CHKERRQ(ierr); 768da3a660dSBarry Smith /* do nondiagonal part */ 7697c922b88SBarry Smith ierr = (*a->B->ops->multtranspose)(a->B,xx,a->lvec);CHKERRQ(ierr); 770a5ff213dSBarry Smith if (!merged) { 771da3a660dSBarry Smith /* send it on its way */ 772ca9f406cSSatish Balay ierr = VecScatterBegin(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 773da3a660dSBarry Smith /* do local part */ 7747c922b88SBarry Smith ierr = (*a->A->ops->multtranspose)(a->A,xx,yy);CHKERRQ(ierr); 775da3a660dSBarry Smith /* receive remote parts: note this assumes the values are not actually */ 776a5ff213dSBarry Smith /* added in yy until the next line, */ 777ca9f406cSSatish Balay ierr = 
VecScatterEnd(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 778a5ff213dSBarry Smith } else { 779a5ff213dSBarry Smith /* do local part */ 780a5ff213dSBarry Smith ierr = (*a->A->ops->multtranspose)(a->A,xx,yy);CHKERRQ(ierr); 781a5ff213dSBarry Smith /* send it on its way */ 782ca9f406cSSatish Balay ierr = VecScatterBegin(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 783a5ff213dSBarry Smith /* values actually were received in the Begin() but we need to call this nop */ 784ca9f406cSSatish Balay ierr = VecScatterEnd(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 785a5ff213dSBarry Smith } 7863a40ed3dSBarry Smith PetscFunctionReturn(0); 787da3a660dSBarry Smith } 788da3a660dSBarry Smith 789cd0d46ebSvictorle EXTERN_C_BEGIN 790cd0d46ebSvictorle #undef __FUNCT__ 7915fbd3699SBarry Smith #define __FUNCT__ "MatIsTranspose_MPIAIJ" 79213c77408SMatthew Knepley PetscErrorCode PETSCMAT_DLLEXPORT MatIsTranspose_MPIAIJ(Mat Amat,Mat Bmat,PetscReal tol,PetscTruth *f) 793cd0d46ebSvictorle { 7944f423910Svictorle MPI_Comm comm; 795cd0d46ebSvictorle Mat_MPIAIJ *Aij = (Mat_MPIAIJ *) Amat->data, *Bij; 79666501d38Svictorle Mat Adia = Aij->A, Bdia, Aoff,Boff,*Aoffs,*Boffs; 797cd0d46ebSvictorle IS Me,Notme; 7986849ba73SBarry Smith PetscErrorCode ierr; 799b1d57f15SBarry Smith PetscInt M,N,first,last,*notme,i; 800b1d57f15SBarry Smith PetscMPIInt size; 801cd0d46ebSvictorle 802cd0d46ebSvictorle PetscFunctionBegin; 80342e5f5b4Svictorle 80442e5f5b4Svictorle /* Easy test: symmetric diagonal block */ 80566501d38Svictorle Bij = (Mat_MPIAIJ *) Bmat->data; Bdia = Bij->A; 8065485867bSBarry Smith ierr = MatIsTranspose(Adia,Bdia,tol,f);CHKERRQ(ierr); 807cd0d46ebSvictorle if (!*f) PetscFunctionReturn(0); 8084f423910Svictorle ierr = PetscObjectGetComm((PetscObject)Amat,&comm);CHKERRQ(ierr); 809b1d57f15SBarry Smith ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 810b1d57f15SBarry Smith if (size == 1) PetscFunctionReturn(0); 81142e5f5b4Svictorle 81242e5f5b4Svictorle 
/* Hard test: off-diagonal block. This takes a MatGetSubMatrix. */ 813cd0d46ebSvictorle ierr = MatGetSize(Amat,&M,&N);CHKERRQ(ierr); 814cd0d46ebSvictorle ierr = MatGetOwnershipRange(Amat,&first,&last);CHKERRQ(ierr); 815b1d57f15SBarry Smith ierr = PetscMalloc((N-last+first)*sizeof(PetscInt),¬me);CHKERRQ(ierr); 816cd0d46ebSvictorle for (i=0; i<first; i++) notme[i] = i; 817cd0d46ebSvictorle for (i=last; i<M; i++) notme[i-last+first] = i; 818268466fbSBarry Smith ierr = ISCreateGeneral(MPI_COMM_SELF,N-last+first,notme,&Notme);CHKERRQ(ierr); 819268466fbSBarry Smith ierr = ISCreateStride(MPI_COMM_SELF,last-first,first,1,&Me);CHKERRQ(ierr); 820268466fbSBarry Smith ierr = MatGetSubMatrices(Amat,1,&Me,&Notme,MAT_INITIAL_MATRIX,&Aoffs);CHKERRQ(ierr); 82166501d38Svictorle Aoff = Aoffs[0]; 822268466fbSBarry Smith ierr = MatGetSubMatrices(Bmat,1,&Notme,&Me,MAT_INITIAL_MATRIX,&Boffs);CHKERRQ(ierr); 82366501d38Svictorle Boff = Boffs[0]; 8245485867bSBarry Smith ierr = MatIsTranspose(Aoff,Boff,tol,f);CHKERRQ(ierr); 82566501d38Svictorle ierr = MatDestroyMatrices(1,&Aoffs);CHKERRQ(ierr); 82666501d38Svictorle ierr = MatDestroyMatrices(1,&Boffs);CHKERRQ(ierr); 82742e5f5b4Svictorle ierr = ISDestroy(Me);CHKERRQ(ierr); 82842e5f5b4Svictorle ierr = ISDestroy(Notme);CHKERRQ(ierr); 82942e5f5b4Svictorle 830cd0d46ebSvictorle PetscFunctionReturn(0); 831cd0d46ebSvictorle } 832cd0d46ebSvictorle EXTERN_C_END 833cd0d46ebSvictorle 8344a2ae208SSatish Balay #undef __FUNCT__ 8354a2ae208SSatish Balay #define __FUNCT__ "MatMultTransposeAdd_MPIAIJ" 836dfbe8321SBarry Smith PetscErrorCode MatMultTransposeAdd_MPIAIJ(Mat A,Vec xx,Vec yy,Vec zz) 837da3a660dSBarry Smith { 838416022c9SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 839dfbe8321SBarry Smith PetscErrorCode ierr; 840da3a660dSBarry Smith 8413a40ed3dSBarry Smith PetscFunctionBegin; 842da3a660dSBarry Smith /* do nondiagonal part */ 8437c922b88SBarry Smith ierr = (*a->B->ops->multtranspose)(a->B,xx,a->lvec);CHKERRQ(ierr); 844da3a660dSBarry Smith /* send 
it on its way */ 845ca9f406cSSatish Balay ierr = VecScatterBegin(a->Mvctx,a->lvec,zz,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 846da3a660dSBarry Smith /* do local part */ 8477c922b88SBarry Smith ierr = (*a->A->ops->multtransposeadd)(a->A,xx,yy,zz);CHKERRQ(ierr); 848a5ff213dSBarry Smith /* receive remote parts */ 849ca9f406cSSatish Balay ierr = VecScatterEnd(a->Mvctx,a->lvec,zz,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 8503a40ed3dSBarry Smith PetscFunctionReturn(0); 851da3a660dSBarry Smith } 852da3a660dSBarry Smith 8531eb62cbbSBarry Smith /* 8541eb62cbbSBarry Smith This only works correctly for square matrices where the subblock A->A is the 8551eb62cbbSBarry Smith diagonal block 8561eb62cbbSBarry Smith */ 8574a2ae208SSatish Balay #undef __FUNCT__ 8584a2ae208SSatish Balay #define __FUNCT__ "MatGetDiagonal_MPIAIJ" 859dfbe8321SBarry Smith PetscErrorCode MatGetDiagonal_MPIAIJ(Mat A,Vec v) 8601eb62cbbSBarry Smith { 861dfbe8321SBarry Smith PetscErrorCode ierr; 862416022c9SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 8633a40ed3dSBarry Smith 8643a40ed3dSBarry Smith PetscFunctionBegin; 865d0f46423SBarry Smith if (A->rmap->N != A->cmap->N) SETERRQ(PETSC_ERR_SUP,"Supports only square matrix where A->A is diag block"); 866d0f46423SBarry Smith if (A->rmap->rstart != A->cmap->rstart || A->rmap->rend != A->cmap->rend) { 86729bbc08cSBarry Smith SETERRQ(PETSC_ERR_ARG_SIZ,"row partition must equal col partition"); 8683a40ed3dSBarry Smith } 8693a40ed3dSBarry Smith ierr = MatGetDiagonal(a->A,v);CHKERRQ(ierr); 8703a40ed3dSBarry Smith PetscFunctionReturn(0); 8711eb62cbbSBarry Smith } 8721eb62cbbSBarry Smith 8734a2ae208SSatish Balay #undef __FUNCT__ 8744a2ae208SSatish Balay #define __FUNCT__ "MatScale_MPIAIJ" 875f4df32b1SMatthew Knepley PetscErrorCode MatScale_MPIAIJ(Mat A,PetscScalar aa) 876052efed2SBarry Smith { 877052efed2SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 878dfbe8321SBarry Smith PetscErrorCode ierr; 8793a40ed3dSBarry Smith 8803a40ed3dSBarry Smith 
  PetscFunctionBegin;
  ierr = MatScale(a->A,aa);CHKERRQ(ierr);
  ierr = MatScale(a->B,aa);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatDestroy_MPIAIJ"
/*
   Destroys the MPIAIJ data structure: the stash, both sequential blocks,
   the colmap (hash table or array depending on PETSC_USE_CTABLE), the
   garray column map, the scatter context and work vector, and finally
   resets the type name and composed function slots on the Mat object.
*/
PetscErrorCode MatDestroy_MPIAIJ(Mat mat)
{
  Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D",mat->rmap->N,mat->cmap->N);
#endif
  ierr = MatStashDestroy_Private(&mat->stash);CHKERRQ(ierr);
  if (aij->diag) {ierr = VecDestroy(aij->diag);CHKERRQ(ierr);}
  ierr = MatDestroy(aij->A);CHKERRQ(ierr);
  ierr = MatDestroy(aij->B);CHKERRQ(ierr);
#if defined (PETSC_USE_CTABLE)
  if (aij->colmap) {ierr = PetscTableDestroy(aij->colmap);CHKERRQ(ierr);}
#else
  ierr = PetscFree(aij->colmap);CHKERRQ(ierr);
#endif
  ierr = PetscFree(aij->garray);CHKERRQ(ierr);
  if (aij->lvec) {ierr = VecDestroy(aij->lvec);CHKERRQ(ierr);}
  if (aij->Mvctx) {ierr = VecScatterDestroy(aij->Mvctx);CHKERRQ(ierr);}
  ierr = PetscFree(aij->rowvalues);CHKERRQ(ierr);
  ierr = PetscFree(aij->ld);CHKERRQ(ierr);
  ierr = PetscFree(aij);CHKERRQ(ierr);

  /* detach the MPIAIJ-specific composed methods so the object can be retyped */
  ierr = PetscObjectChangeTypeName((PetscObject)mat,0);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatStoreValues_C","",PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatRetrieveValues_C","",PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatGetDiagonalBlock_C","",PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatIsTranspose_C","",PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAIJSetPreallocation_C","",PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAIJSetPreallocationCSR_C","",PETSC_NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatDiagonalScaleLocal_C","",PETSC_NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIAIJ_Binary"
/*
   Writes the parallel matrix to a binary viewer in PETSc's AIJ file
   format: header (cookie, M, N, total nz), row lengths, column indices,
   then values. Process 0 does all file writes; every other process sends
   its data to process 0 with point-to-point messages.
*/
PetscErrorCode MatView_MPIAIJ_Binary(Mat mat,PetscViewer viewer)
{
  Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
  Mat_SeqAIJ*    A = (Mat_SeqAIJ*)aij->A->data;
  Mat_SeqAIJ*    B = (Mat_SeqAIJ*)aij->B->data;
  PetscErrorCode ierr;
  PetscMPIInt    rank,size,tag = ((PetscObject)viewer)->tag;
  int            fd;
  PetscInt       nz,header[4],*row_lengths,*range=0,rlen,i;
  PetscInt       nzmax,*column_indices,j,k,col,*garray = aij->garray,cnt,cstart = mat->cmap->rstart,rnz;
  PetscScalar    *column_values;

  PetscFunctionBegin;
  ierr =
MPI_Comm_rank(((PetscObject)mat)->comm,&rank);CHKERRQ(ierr); 9407adad957SLisandro Dalcin ierr = MPI_Comm_size(((PetscObject)mat)->comm,&size);CHKERRQ(ierr); 9418e2fed03SBarry Smith nz = A->nz + B->nz; 942958c9bccSBarry Smith if (!rank) { 9438e2fed03SBarry Smith header[0] = MAT_FILE_COOKIE; 944d0f46423SBarry Smith header[1] = mat->rmap->N; 945d0f46423SBarry Smith header[2] = mat->cmap->N; 9467adad957SLisandro Dalcin ierr = MPI_Reduce(&nz,&header[3],1,MPIU_INT,MPI_SUM,0,((PetscObject)mat)->comm);CHKERRQ(ierr); 9478e2fed03SBarry Smith ierr = PetscViewerBinaryGetDescriptor(viewer,&fd);CHKERRQ(ierr); 9486f69ff64SBarry Smith ierr = PetscBinaryWrite(fd,header,4,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr); 9498e2fed03SBarry Smith /* get largest number of rows any processor has */ 950d0f46423SBarry Smith rlen = mat->rmap->n; 951d0f46423SBarry Smith range = mat->rmap->range; 9528e2fed03SBarry Smith for (i=1; i<size; i++) { 9538e2fed03SBarry Smith rlen = PetscMax(rlen,range[i+1] - range[i]); 9548e2fed03SBarry Smith } 9558e2fed03SBarry Smith } else { 9567adad957SLisandro Dalcin ierr = MPI_Reduce(&nz,0,1,MPIU_INT,MPI_SUM,0,((PetscObject)mat)->comm);CHKERRQ(ierr); 957d0f46423SBarry Smith rlen = mat->rmap->n; 9588e2fed03SBarry Smith } 9598e2fed03SBarry Smith 9608e2fed03SBarry Smith /* load up the local row counts */ 961b1d57f15SBarry Smith ierr = PetscMalloc((rlen+1)*sizeof(PetscInt),&row_lengths);CHKERRQ(ierr); 962d0f46423SBarry Smith for (i=0; i<mat->rmap->n; i++) { 9638e2fed03SBarry Smith row_lengths[i] = A->i[i+1] - A->i[i] + B->i[i+1] - B->i[i]; 9648e2fed03SBarry Smith } 9658e2fed03SBarry Smith 9668e2fed03SBarry Smith /* store the row lengths to the file */ 967958c9bccSBarry Smith if (!rank) { 9688e2fed03SBarry Smith MPI_Status status; 969d0f46423SBarry Smith ierr = PetscBinaryWrite(fd,row_lengths,mat->rmap->n,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr); 9708e2fed03SBarry Smith for (i=1; i<size; i++) { 9718e2fed03SBarry Smith rlen = range[i+1] - range[i]; 9727adad957SLisandro Dalcin ierr = 
MPI_Recv(row_lengths,rlen,MPIU_INT,i,tag,((PetscObject)mat)->comm,&status);CHKERRQ(ierr); 9736f69ff64SBarry Smith ierr = PetscBinaryWrite(fd,row_lengths,rlen,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr); 9748e2fed03SBarry Smith } 9758e2fed03SBarry Smith } else { 976d0f46423SBarry Smith ierr = MPI_Send(row_lengths,mat->rmap->n,MPIU_INT,0,tag,((PetscObject)mat)->comm);CHKERRQ(ierr); 9778e2fed03SBarry Smith } 9788e2fed03SBarry Smith ierr = PetscFree(row_lengths);CHKERRQ(ierr); 9798e2fed03SBarry Smith 9808e2fed03SBarry Smith /* load up the local column indices */ 9818e2fed03SBarry Smith nzmax = nz; /* )th processor needs space a largest processor needs */ 9827adad957SLisandro Dalcin ierr = MPI_Reduce(&nz,&nzmax,1,MPIU_INT,MPI_MAX,0,((PetscObject)mat)->comm);CHKERRQ(ierr); 983b1d57f15SBarry Smith ierr = PetscMalloc((nzmax+1)*sizeof(PetscInt),&column_indices);CHKERRQ(ierr); 9848e2fed03SBarry Smith cnt = 0; 985d0f46423SBarry Smith for (i=0; i<mat->rmap->n; i++) { 9868e2fed03SBarry Smith for (j=B->i[i]; j<B->i[i+1]; j++) { 9878e2fed03SBarry Smith if ( (col = garray[B->j[j]]) > cstart) break; 9888e2fed03SBarry Smith column_indices[cnt++] = col; 9898e2fed03SBarry Smith } 9908e2fed03SBarry Smith for (k=A->i[i]; k<A->i[i+1]; k++) { 9918e2fed03SBarry Smith column_indices[cnt++] = A->j[k] + cstart; 9928e2fed03SBarry Smith } 9938e2fed03SBarry Smith for (; j<B->i[i+1]; j++) { 9948e2fed03SBarry Smith column_indices[cnt++] = garray[B->j[j]]; 9958e2fed03SBarry Smith } 9968e2fed03SBarry Smith } 99777431f27SBarry Smith if (cnt != A->nz + B->nz) SETERRQ2(PETSC_ERR_LIB,"Internal PETSc error: cnt = %D nz = %D",cnt,A->nz+B->nz); 9988e2fed03SBarry Smith 9998e2fed03SBarry Smith /* store the column indices to the file */ 1000958c9bccSBarry Smith if (!rank) { 10018e2fed03SBarry Smith MPI_Status status; 10026f69ff64SBarry Smith ierr = PetscBinaryWrite(fd,column_indices,nz,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr); 10038e2fed03SBarry Smith for (i=1; i<size; i++) { 10047adad957SLisandro Dalcin ierr = 
MPI_Recv(&rnz,1,MPIU_INT,i,tag,((PetscObject)mat)->comm,&status);CHKERRQ(ierr); 100577431f27SBarry Smith if (rnz > nzmax) SETERRQ2(PETSC_ERR_LIB,"Internal PETSc error: nz = %D nzmax = %D",nz,nzmax); 10067adad957SLisandro Dalcin ierr = MPI_Recv(column_indices,rnz,MPIU_INT,i,tag,((PetscObject)mat)->comm,&status);CHKERRQ(ierr); 10076f69ff64SBarry Smith ierr = PetscBinaryWrite(fd,column_indices,rnz,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr); 10088e2fed03SBarry Smith } 10098e2fed03SBarry Smith } else { 10107adad957SLisandro Dalcin ierr = MPI_Send(&nz,1,MPIU_INT,0,tag,((PetscObject)mat)->comm);CHKERRQ(ierr); 10117adad957SLisandro Dalcin ierr = MPI_Send(column_indices,nz,MPIU_INT,0,tag,((PetscObject)mat)->comm);CHKERRQ(ierr); 10128e2fed03SBarry Smith } 10138e2fed03SBarry Smith ierr = PetscFree(column_indices);CHKERRQ(ierr); 10148e2fed03SBarry Smith 10158e2fed03SBarry Smith /* load up the local column values */ 10168e2fed03SBarry Smith ierr = PetscMalloc((nzmax+1)*sizeof(PetscScalar),&column_values);CHKERRQ(ierr); 10178e2fed03SBarry Smith cnt = 0; 1018d0f46423SBarry Smith for (i=0; i<mat->rmap->n; i++) { 10198e2fed03SBarry Smith for (j=B->i[i]; j<B->i[i+1]; j++) { 10208e2fed03SBarry Smith if ( garray[B->j[j]] > cstart) break; 10218e2fed03SBarry Smith column_values[cnt++] = B->a[j]; 10228e2fed03SBarry Smith } 10238e2fed03SBarry Smith for (k=A->i[i]; k<A->i[i+1]; k++) { 10248e2fed03SBarry Smith column_values[cnt++] = A->a[k]; 10258e2fed03SBarry Smith } 10268e2fed03SBarry Smith for (; j<B->i[i+1]; j++) { 10278e2fed03SBarry Smith column_values[cnt++] = B->a[j]; 10288e2fed03SBarry Smith } 10298e2fed03SBarry Smith } 103077431f27SBarry Smith if (cnt != A->nz + B->nz) SETERRQ2(PETSC_ERR_PLIB,"Internal PETSc error: cnt = %D nz = %D",cnt,A->nz+B->nz); 10318e2fed03SBarry Smith 10328e2fed03SBarry Smith /* store the column values to the file */ 1033958c9bccSBarry Smith if (!rank) { 10348e2fed03SBarry Smith MPI_Status status; 10356f69ff64SBarry Smith ierr = 
PetscBinaryWrite(fd,column_values,nz,PETSC_SCALAR,PETSC_TRUE);CHKERRQ(ierr); 10368e2fed03SBarry Smith for (i=1; i<size; i++) { 10377adad957SLisandro Dalcin ierr = MPI_Recv(&rnz,1,MPIU_INT,i,tag,((PetscObject)mat)->comm,&status);CHKERRQ(ierr); 103877431f27SBarry Smith if (rnz > nzmax) SETERRQ2(PETSC_ERR_LIB,"Internal PETSc error: nz = %D nzmax = %D",nz,nzmax); 10397adad957SLisandro Dalcin ierr = MPI_Recv(column_values,rnz,MPIU_SCALAR,i,tag,((PetscObject)mat)->comm,&status);CHKERRQ(ierr); 10406f69ff64SBarry Smith ierr = PetscBinaryWrite(fd,column_values,rnz,PETSC_SCALAR,PETSC_TRUE);CHKERRQ(ierr); 10418e2fed03SBarry Smith } 10428e2fed03SBarry Smith } else { 10437adad957SLisandro Dalcin ierr = MPI_Send(&nz,1,MPIU_INT,0,tag,((PetscObject)mat)->comm);CHKERRQ(ierr); 10447adad957SLisandro Dalcin ierr = MPI_Send(column_values,nz,MPIU_SCALAR,0,tag,((PetscObject)mat)->comm);CHKERRQ(ierr); 10458e2fed03SBarry Smith } 10468e2fed03SBarry Smith ierr = PetscFree(column_values);CHKERRQ(ierr); 10478e2fed03SBarry Smith PetscFunctionReturn(0); 10488e2fed03SBarry Smith } 10498e2fed03SBarry Smith 10508e2fed03SBarry Smith #undef __FUNCT__ 10514a2ae208SSatish Balay #define __FUNCT__ "MatView_MPIAIJ_ASCIIorDraworSocket" 1052dfbe8321SBarry Smith PetscErrorCode MatView_MPIAIJ_ASCIIorDraworSocket(Mat mat,PetscViewer viewer) 1053416022c9SBarry Smith { 105444a69424SLois Curfman McInnes Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 1055dfbe8321SBarry Smith PetscErrorCode ierr; 105632dcc486SBarry Smith PetscMPIInt rank = aij->rank,size = aij->size; 1057d38fa0fbSBarry Smith PetscTruth isdraw,iascii,isbinary; 1058b0a32e0cSBarry Smith PetscViewer sviewer; 1059f3ef73ceSBarry Smith PetscViewerFormat format; 1060416022c9SBarry Smith 10613a40ed3dSBarry Smith PetscFunctionBegin; 1062fb9695e5SSatish Balay ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_DRAW,&isdraw);CHKERRQ(ierr); 106332077d6dSBarry Smith ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr); 
10648e2fed03SBarry Smith ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_BINARY,&isbinary);CHKERRQ(ierr); 106532077d6dSBarry Smith if (iascii) { 1066b0a32e0cSBarry Smith ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr); 1067456192e2SBarry Smith if (format == PETSC_VIEWER_ASCII_INFO_DETAIL) { 10684e220ebcSLois Curfman McInnes MatInfo info; 1069923f20ffSKris Buschelman PetscTruth inodes; 1070923f20ffSKris Buschelman 10717adad957SLisandro Dalcin ierr = MPI_Comm_rank(((PetscObject)mat)->comm,&rank);CHKERRQ(ierr); 1072888f2ed8SSatish Balay ierr = MatGetInfo(mat,MAT_LOCAL,&info);CHKERRQ(ierr); 1073923f20ffSKris Buschelman ierr = MatInodeGetInodeSizes(aij->A,PETSC_NULL,(PetscInt **)&inodes,PETSC_NULL);CHKERRQ(ierr); 1074923f20ffSKris Buschelman if (!inodes) { 107577431f27SBarry Smith ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] Local rows %D nz %D nz alloced %D mem %D, not using I-node routines\n", 1076d0f46423SBarry Smith rank,mat->rmap->n,(PetscInt)info.nz_used,(PetscInt)info.nz_allocated,(PetscInt)info.memory);CHKERRQ(ierr); 10776831982aSBarry Smith } else { 107877431f27SBarry Smith ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] Local rows %D nz %D nz alloced %D mem %D, using I-node routines\n", 1079d0f46423SBarry Smith rank,mat->rmap->n,(PetscInt)info.nz_used,(PetscInt)info.nz_allocated,(PetscInt)info.memory);CHKERRQ(ierr); 10806831982aSBarry Smith } 1081888f2ed8SSatish Balay ierr = MatGetInfo(aij->A,MAT_LOCAL,&info);CHKERRQ(ierr); 108277431f27SBarry Smith ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] on-diagonal part: nz %D \n",rank,(PetscInt)info.nz_used);CHKERRQ(ierr); 1083888f2ed8SSatish Balay ierr = MatGetInfo(aij->B,MAT_LOCAL,&info);CHKERRQ(ierr); 108477431f27SBarry Smith ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] off-diagonal part: nz %D \n",rank,(PetscInt)info.nz_used);CHKERRQ(ierr); 1085b0a32e0cSBarry Smith ierr = PetscViewerFlush(viewer);CHKERRQ(ierr); 108607d81ca4SBarry Smith ierr = 
PetscViewerASCIIPrintf(viewer,"Information on VecScatter used in matrix-vector product: \n");CHKERRQ(ierr); 1087a40aa06bSLois Curfman McInnes ierr = VecScatterView(aij->Mvctx,viewer);CHKERRQ(ierr); 10883a40ed3dSBarry Smith PetscFunctionReturn(0); 1089fb9695e5SSatish Balay } else if (format == PETSC_VIEWER_ASCII_INFO) { 1090923f20ffSKris Buschelman PetscInt inodecount,inodelimit,*inodes; 1091923f20ffSKris Buschelman ierr = MatInodeGetInodeSizes(aij->A,&inodecount,&inodes,&inodelimit);CHKERRQ(ierr); 1092923f20ffSKris Buschelman if (inodes) { 1093923f20ffSKris Buschelman ierr = PetscViewerASCIIPrintf(viewer,"using I-node (on process 0) routines: found %D nodes, limit used is %D\n",inodecount,inodelimit);CHKERRQ(ierr); 1094d38fa0fbSBarry Smith } else { 1095d38fa0fbSBarry Smith ierr = PetscViewerASCIIPrintf(viewer,"not using I-node (on process 0) routines\n");CHKERRQ(ierr); 1096d38fa0fbSBarry Smith } 10973a40ed3dSBarry Smith PetscFunctionReturn(0); 10984aedb280SBarry Smith } else if (format == PETSC_VIEWER_ASCII_FACTOR_INFO) { 10994aedb280SBarry Smith PetscFunctionReturn(0); 110008480c60SBarry Smith } 11018e2fed03SBarry Smith } else if (isbinary) { 11028e2fed03SBarry Smith if (size == 1) { 11037adad957SLisandro Dalcin ierr = PetscObjectSetName((PetscObject)aij->A,((PetscObject)mat)->name);CHKERRQ(ierr); 11048e2fed03SBarry Smith ierr = MatView(aij->A,viewer);CHKERRQ(ierr); 11058e2fed03SBarry Smith } else { 11068e2fed03SBarry Smith ierr = MatView_MPIAIJ_Binary(mat,viewer);CHKERRQ(ierr); 11078e2fed03SBarry Smith } 11088e2fed03SBarry Smith PetscFunctionReturn(0); 11090f5bd95cSBarry Smith } else if (isdraw) { 1110b0a32e0cSBarry Smith PetscDraw draw; 111119bcc07fSBarry Smith PetscTruth isnull; 1112b0a32e0cSBarry Smith ierr = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr); 1113b0a32e0cSBarry Smith ierr = PetscDrawIsNull(draw,&isnull);CHKERRQ(ierr); if (isnull) PetscFunctionReturn(0); 111419bcc07fSBarry Smith } 111519bcc07fSBarry Smith 111617699dbbSLois Curfman McInnes if 
(size == 1) { 11177adad957SLisandro Dalcin ierr = PetscObjectSetName((PetscObject)aij->A,((PetscObject)mat)->name);CHKERRQ(ierr); 111878b31e54SBarry Smith ierr = MatView(aij->A,viewer);CHKERRQ(ierr); 11193a40ed3dSBarry Smith } else { 112095373324SBarry Smith /* assemble the entire matrix onto first processor. */ 112195373324SBarry Smith Mat A; 1122ec8511deSBarry Smith Mat_SeqAIJ *Aloc; 1123d0f46423SBarry Smith PetscInt M = mat->rmap->N,N = mat->cmap->N,m,*ai,*aj,row,*cols,i,*ct; 1124dd6ea824SBarry Smith MatScalar *a; 11252ee70a88SLois Curfman McInnes 112632a366e4SMatthew Knepley if (mat->rmap->N > 1024) { 112790d69ab7SBarry Smith PetscTruth flg = PETSC_FALSE; 112832a366e4SMatthew Knepley 11290c235cafSBarry Smith ierr = PetscOptionsGetTruth(((PetscObject) mat)->prefix, "-mat_ascii_output_large", &flg,PETSC_NULL);CHKERRQ(ierr); 113032a366e4SMatthew Knepley if (!flg) { 113190d69ab7SBarry Smith SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"ASCII matrix output not allowed for matrices with more than 1024 rows, use binary format instead.\nYou can override this restriction using -mat_ascii_output_large."); 113232a366e4SMatthew Knepley } 113332a366e4SMatthew Knepley } 11340805154bSBarry Smith 11357adad957SLisandro Dalcin ierr = MatCreate(((PetscObject)mat)->comm,&A);CHKERRQ(ierr); 113617699dbbSLois Curfman McInnes if (!rank) { 1137f69a0ea3SMatthew Knepley ierr = MatSetSizes(A,M,N,M,N);CHKERRQ(ierr); 11383a40ed3dSBarry Smith } else { 1139f69a0ea3SMatthew Knepley ierr = MatSetSizes(A,0,0,M,N);CHKERRQ(ierr); 114095373324SBarry Smith } 1141f204ca49SKris Buschelman /* This is just a temporary matrix, so explicitly using MATMPIAIJ is probably best */ 1142f204ca49SKris Buschelman ierr = MatSetType(A,MATMPIAIJ);CHKERRQ(ierr); 1143f204ca49SKris Buschelman ierr = MatMPIAIJSetPreallocation(A,0,PETSC_NULL,0,PETSC_NULL);CHKERRQ(ierr); 114452e6d16bSBarry Smith ierr = PetscLogObjectParent(mat,A);CHKERRQ(ierr); 1145416022c9SBarry Smith 114695373324SBarry Smith /* copy over the A part */ 
1147ec8511deSBarry Smith Aloc = (Mat_SeqAIJ*)aij->A->data; 1148d0f46423SBarry Smith m = aij->A->rmap->n; ai = Aloc->i; aj = Aloc->j; a = Aloc->a; 1149d0f46423SBarry Smith row = mat->rmap->rstart; 1150d0f46423SBarry Smith for (i=0; i<ai[m]; i++) {aj[i] += mat->cmap->rstart ;} 115195373324SBarry Smith for (i=0; i<m; i++) { 1152416022c9SBarry Smith ierr = MatSetValues(A,1,&row,ai[i+1]-ai[i],aj,a,INSERT_VALUES);CHKERRQ(ierr); 115395373324SBarry Smith row++; a += ai[i+1]-ai[i]; aj += ai[i+1]-ai[i]; 115495373324SBarry Smith } 11552ee70a88SLois Curfman McInnes aj = Aloc->j; 1156d0f46423SBarry Smith for (i=0; i<ai[m]; i++) {aj[i] -= mat->cmap->rstart;} 115795373324SBarry Smith 115895373324SBarry Smith /* copy over the B part */ 1159ec8511deSBarry Smith Aloc = (Mat_SeqAIJ*)aij->B->data; 1160d0f46423SBarry Smith m = aij->B->rmap->n; ai = Aloc->i; aj = Aloc->j; a = Aloc->a; 1161d0f46423SBarry Smith row = mat->rmap->rstart; 1162b1d57f15SBarry Smith ierr = PetscMalloc((ai[m]+1)*sizeof(PetscInt),&cols);CHKERRQ(ierr); 1163b0a32e0cSBarry Smith ct = cols; 1164bfec09a0SHong Zhang for (i=0; i<ai[m]; i++) {cols[i] = aij->garray[aj[i]];} 116595373324SBarry Smith for (i=0; i<m; i++) { 1166416022c9SBarry Smith ierr = MatSetValues(A,1,&row,ai[i+1]-ai[i],cols,a,INSERT_VALUES);CHKERRQ(ierr); 116795373324SBarry Smith row++; a += ai[i+1]-ai[i]; cols += ai[i+1]-ai[i]; 116895373324SBarry Smith } 1169606d414cSSatish Balay ierr = PetscFree(ct);CHKERRQ(ierr); 11706d4a8577SBarry Smith ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 11716d4a8577SBarry Smith ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 117255843e3eSBarry Smith /* 117355843e3eSBarry Smith Everyone has to call to draw the matrix since the graphics waits are 1174b0a32e0cSBarry Smith synchronized across all processors that share the PetscDraw object 117555843e3eSBarry Smith */ 1176b0a32e0cSBarry Smith ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr); 1177e03a110bSBarry Smith if (!rank) { 
11787adad957SLisandro Dalcin ierr = PetscObjectSetName((PetscObject)((Mat_MPIAIJ*)(A->data))->A,((PetscObject)mat)->name);CHKERRQ(ierr); 11796831982aSBarry Smith ierr = MatView(((Mat_MPIAIJ*)(A->data))->A,sviewer);CHKERRQ(ierr); 118095373324SBarry Smith } 1181b0a32e0cSBarry Smith ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr); 118278b31e54SBarry Smith ierr = MatDestroy(A);CHKERRQ(ierr); 118395373324SBarry Smith } 11843a40ed3dSBarry Smith PetscFunctionReturn(0); 11851eb62cbbSBarry Smith } 11861eb62cbbSBarry Smith 11874a2ae208SSatish Balay #undef __FUNCT__ 11884a2ae208SSatish Balay #define __FUNCT__ "MatView_MPIAIJ" 1189dfbe8321SBarry Smith PetscErrorCode MatView_MPIAIJ(Mat mat,PetscViewer viewer) 1190416022c9SBarry Smith { 1191dfbe8321SBarry Smith PetscErrorCode ierr; 119232077d6dSBarry Smith PetscTruth iascii,isdraw,issocket,isbinary; 1193416022c9SBarry Smith 11943a40ed3dSBarry Smith PetscFunctionBegin; 119532077d6dSBarry Smith ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr); 1196fb9695e5SSatish Balay ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_DRAW,&isdraw);CHKERRQ(ierr); 1197fb9695e5SSatish Balay ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_BINARY,&isbinary);CHKERRQ(ierr); 1198b0a32e0cSBarry Smith ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_SOCKET,&issocket);CHKERRQ(ierr); 119932077d6dSBarry Smith if (iascii || isdraw || isbinary || issocket) { 12007b2a1423SBarry Smith ierr = MatView_MPIAIJ_ASCIIorDraworSocket(mat,viewer);CHKERRQ(ierr); 12015cd90555SBarry Smith } else { 120279a5c55eSBarry Smith SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported by MPIAIJ matrices",((PetscObject)viewer)->type_name); 1203416022c9SBarry Smith } 12043a40ed3dSBarry Smith PetscFunctionReturn(0); 1205416022c9SBarry Smith } 1206416022c9SBarry Smith 12074a2ae208SSatish Balay #undef __FUNCT__ 120841f059aeSBarry Smith #define __FUNCT__ "MatSOR_MPIAIJ" 120941f059aeSBarry Smith PetscErrorCode 
MatSOR_MPIAIJ(Mat matin,Vec bb,PetscReal omega,MatSORType flag,PetscReal fshift,PetscInt its,PetscInt lits,Vec xx) 12108a729477SBarry Smith { 121144a69424SLois Curfman McInnes Mat_MPIAIJ *mat = (Mat_MPIAIJ*)matin->data; 1212dfbe8321SBarry Smith PetscErrorCode ierr; 12136987fefcSBarry Smith Vec bb1 = 0; 1214bd0c2dcbSBarry Smith PetscTruth hasop; 12158a729477SBarry Smith 12163a40ed3dSBarry Smith PetscFunctionBegin; 121785911e72SJed Brown if (its > 1 || ~flag & SOR_ZERO_INITIAL_GUESS || flag & SOR_EISENSTAT) { 121885911e72SJed Brown ierr = VecDuplicate(bb,&bb1);CHKERRQ(ierr); 121985911e72SJed Brown } 12202798e883SHong Zhang 1221a2b30743SBarry Smith if (flag == SOR_APPLY_UPPER) { 122241f059aeSBarry Smith ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr); 1223a2b30743SBarry Smith PetscFunctionReturn(0); 1224a2b30743SBarry Smith } 1225a2b30743SBarry Smith 1226c16cb8f2SBarry Smith if ((flag & SOR_LOCAL_SYMMETRIC_SWEEP) == SOR_LOCAL_SYMMETRIC_SWEEP){ 1227da3a660dSBarry Smith if (flag & SOR_ZERO_INITIAL_GUESS) { 122841f059aeSBarry Smith ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr); 12292798e883SHong Zhang its--; 1230da3a660dSBarry Smith } 12312798e883SHong Zhang 12322798e883SHong Zhang while (its--) { 1233ca9f406cSSatish Balay ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1234ca9f406cSSatish Balay ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 12352798e883SHong Zhang 1236c14dc6b6SHong Zhang /* update rhs: bb1 = bb - B*x */ 1237efb30889SBarry Smith ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr); 1238c14dc6b6SHong Zhang ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr); 12392798e883SHong Zhang 1240c14dc6b6SHong Zhang /* local sweep */ 124141f059aeSBarry Smith ierr = (*mat->A->ops->sor)(mat->A,bb1,omega,SOR_SYMMETRIC_SWEEP,fshift,lits,1,xx);CHKERRQ(ierr); 12422798e883SHong Zhang } 12433a40ed3dSBarry Smith 
} else if (flag & SOR_LOCAL_FORWARD_SWEEP){ 1244da3a660dSBarry Smith if (flag & SOR_ZERO_INITIAL_GUESS) { 124541f059aeSBarry Smith ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr); 12462798e883SHong Zhang its--; 1247da3a660dSBarry Smith } 12482798e883SHong Zhang while (its--) { 1249ca9f406cSSatish Balay ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1250ca9f406cSSatish Balay ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 12512798e883SHong Zhang 1252c14dc6b6SHong Zhang /* update rhs: bb1 = bb - B*x */ 1253efb30889SBarry Smith ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr); 1254c14dc6b6SHong Zhang ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr); 1255c14dc6b6SHong Zhang 1256c14dc6b6SHong Zhang /* local sweep */ 125741f059aeSBarry Smith ierr = (*mat->A->ops->sor)(mat->A,bb1,omega,SOR_FORWARD_SWEEP,fshift,lits,1,xx);CHKERRQ(ierr); 12582798e883SHong Zhang } 12593a40ed3dSBarry Smith } else if (flag & SOR_LOCAL_BACKWARD_SWEEP){ 1260da3a660dSBarry Smith if (flag & SOR_ZERO_INITIAL_GUESS) { 126141f059aeSBarry Smith ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr); 12622798e883SHong Zhang its--; 1263da3a660dSBarry Smith } 12642798e883SHong Zhang while (its--) { 1265ca9f406cSSatish Balay ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1266ca9f406cSSatish Balay ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 12672798e883SHong Zhang 1268c14dc6b6SHong Zhang /* update rhs: bb1 = bb - B*x */ 1269efb30889SBarry Smith ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr); 1270c14dc6b6SHong Zhang ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr); 12712798e883SHong Zhang 1272c14dc6b6SHong Zhang /* local sweep */ 127341f059aeSBarry Smith ierr = 
(*mat->A->ops->sor)(mat->A,bb1,omega,SOR_BACKWARD_SWEEP,fshift,lits,1,xx);CHKERRQ(ierr); 12742798e883SHong Zhang } 1275a7420bb7SBarry Smith } else if (flag & SOR_EISENSTAT) { 1276a7420bb7SBarry Smith Vec xx1; 1277a7420bb7SBarry Smith 1278a7420bb7SBarry Smith ierr = VecDuplicate(bb,&xx1);CHKERRQ(ierr); 127941f059aeSBarry Smith ierr = (*mat->A->ops->sor)(mat->A,bb,omega,(MatSORType)(SOR_ZERO_INITIAL_GUESS | SOR_LOCAL_BACKWARD_SWEEP),fshift,lits,1,xx);CHKERRQ(ierr); 1280a7420bb7SBarry Smith 1281a7420bb7SBarry Smith ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1282a7420bb7SBarry Smith ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1283a7420bb7SBarry Smith if (!mat->diag) { 1284a7420bb7SBarry Smith ierr = MatGetVecs(matin,&mat->diag,PETSC_NULL);CHKERRQ(ierr); 1285a7420bb7SBarry Smith ierr = MatGetDiagonal(matin,mat->diag);CHKERRQ(ierr); 1286a7420bb7SBarry Smith } 1287bd0c2dcbSBarry Smith ierr = MatHasOperation(matin,MATOP_MULT_DIAGONAL_BLOCK,&hasop);CHKERRQ(ierr); 1288bd0c2dcbSBarry Smith if (hasop) { 1289bd0c2dcbSBarry Smith ierr = MatMultDiagonalBlock(matin,xx,bb1);CHKERRQ(ierr); 1290bd0c2dcbSBarry Smith } else { 1291a7420bb7SBarry Smith ierr = VecPointwiseMult(bb1,mat->diag,xx);CHKERRQ(ierr); 1292bd0c2dcbSBarry Smith } 1293887ee2caSBarry Smith ierr = VecAYPX(bb1,(omega-2.0)/omega,bb);CHKERRQ(ierr); 1294887ee2caSBarry Smith 1295a7420bb7SBarry Smith ierr = MatMultAdd(mat->B,mat->lvec,bb1,bb1);CHKERRQ(ierr); 1296a7420bb7SBarry Smith 1297a7420bb7SBarry Smith /* local sweep */ 129841f059aeSBarry Smith ierr = (*mat->A->ops->sor)(mat->A,bb1,omega,(MatSORType)(SOR_ZERO_INITIAL_GUESS | SOR_LOCAL_FORWARD_SWEEP),fshift,lits,1,xx1);CHKERRQ(ierr); 1299a7420bb7SBarry Smith ierr = VecAXPY(xx,1.0,xx1);CHKERRQ(ierr); 1300a7420bb7SBarry Smith ierr = VecDestroy(xx1);CHKERRQ(ierr); 13013a40ed3dSBarry Smith } else { 130229bbc08cSBarry Smith SETERRQ(PETSC_ERR_SUP,"Parallel SOR not supported"); 
1303c16cb8f2SBarry Smith } 1304c14dc6b6SHong Zhang 13056987fefcSBarry Smith if (bb1) {ierr = VecDestroy(bb1);CHKERRQ(ierr);} 13063a40ed3dSBarry Smith PetscFunctionReturn(0); 13078a729477SBarry Smith } 1308a66be287SLois Curfman McInnes 13094a2ae208SSatish Balay #undef __FUNCT__ 131042e855d1Svictor #define __FUNCT__ "MatPermute_MPIAIJ" 131142e855d1Svictor PetscErrorCode MatPermute_MPIAIJ(Mat A,IS rowp,IS colp,Mat *B) 131242e855d1Svictor { 131342e855d1Svictor MPI_Comm comm,pcomm; 13145d0c19d7SBarry Smith PetscInt first,local_size,nrows; 13155d0c19d7SBarry Smith const PetscInt *rows; 1316dbf0e21dSBarry Smith PetscMPIInt size; 131742e855d1Svictor IS crowp,growp,irowp,lrowp,lcolp,icolp; 131842e855d1Svictor PetscErrorCode ierr; 131942e855d1Svictor 132042e855d1Svictor PetscFunctionBegin; 132142e855d1Svictor ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr); 132242e855d1Svictor /* make a collective version of 'rowp' */ 132342e855d1Svictor ierr = PetscObjectGetComm((PetscObject)rowp,&pcomm);CHKERRQ(ierr); 132442e855d1Svictor if (pcomm==comm) { 132542e855d1Svictor crowp = rowp; 132642e855d1Svictor } else { 132742e855d1Svictor ierr = ISGetSize(rowp,&nrows);CHKERRQ(ierr); 132842e855d1Svictor ierr = ISGetIndices(rowp,&rows);CHKERRQ(ierr); 132942e855d1Svictor ierr = ISCreateGeneral(comm,nrows,rows,&crowp);CHKERRQ(ierr); 133042e855d1Svictor ierr = ISRestoreIndices(rowp,&rows);CHKERRQ(ierr); 133142e855d1Svictor } 133242e855d1Svictor /* collect the global row permutation and invert it */ 133342e855d1Svictor ierr = ISAllGather(crowp,&growp);CHKERRQ(ierr); 133442e855d1Svictor ierr = ISSetPermutation(growp);CHKERRQ(ierr); 133542e855d1Svictor if (pcomm!=comm) { 133642e855d1Svictor ierr = ISDestroy(crowp);CHKERRQ(ierr); 133742e855d1Svictor } 133842e855d1Svictor ierr = ISInvertPermutation(growp,PETSC_DECIDE,&irowp);CHKERRQ(ierr); 133942e855d1Svictor /* get the local target indices */ 134042e855d1Svictor ierr = MatGetOwnershipRange(A,&first,PETSC_NULL);CHKERRQ(ierr); 
134142e855d1Svictor ierr = MatGetLocalSize(A,&local_size,PETSC_NULL);CHKERRQ(ierr); 134242e855d1Svictor ierr = ISGetIndices(irowp,&rows);CHKERRQ(ierr); 134342e855d1Svictor ierr = ISCreateGeneral(MPI_COMM_SELF,local_size,rows+first,&lrowp);CHKERRQ(ierr); 134442e855d1Svictor ierr = ISRestoreIndices(irowp,&rows);CHKERRQ(ierr); 134542e855d1Svictor ierr = ISDestroy(irowp);CHKERRQ(ierr); 134642e855d1Svictor /* the column permutation is so much easier; 134742e855d1Svictor make a local version of 'colp' and invert it */ 134842e855d1Svictor ierr = PetscObjectGetComm((PetscObject)colp,&pcomm);CHKERRQ(ierr); 1349dbf0e21dSBarry Smith ierr = MPI_Comm_size(pcomm,&size);CHKERRQ(ierr); 1350dbf0e21dSBarry Smith if (size==1) { 135142e855d1Svictor lcolp = colp; 135242e855d1Svictor } else { 135342e855d1Svictor ierr = ISGetSize(colp,&nrows);CHKERRQ(ierr); 135442e855d1Svictor ierr = ISGetIndices(colp,&rows);CHKERRQ(ierr); 135542e855d1Svictor ierr = ISCreateGeneral(MPI_COMM_SELF,nrows,rows,&lcolp);CHKERRQ(ierr); 135642e855d1Svictor } 1357dbf0e21dSBarry Smith ierr = ISSetPermutation(lcolp);CHKERRQ(ierr); 135842e855d1Svictor ierr = ISInvertPermutation(lcolp,PETSC_DECIDE,&icolp);CHKERRQ(ierr); 13594aa3045dSJed Brown ierr = ISSetPermutation(icolp);CHKERRQ(ierr); 1360dbf0e21dSBarry Smith if (size>1) { 136142e855d1Svictor ierr = ISRestoreIndices(colp,&rows);CHKERRQ(ierr); 136242e855d1Svictor ierr = ISDestroy(lcolp);CHKERRQ(ierr); 136342e855d1Svictor } 136442e855d1Svictor /* now we just get the submatrix */ 13654aa3045dSJed Brown ierr = MatGetSubMatrix_MPIAIJ_Private(A,lrowp,icolp,local_size,MAT_INITIAL_MATRIX,B);CHKERRQ(ierr); 136642e855d1Svictor /* clean up */ 136742e855d1Svictor ierr = ISDestroy(lrowp);CHKERRQ(ierr); 136842e855d1Svictor ierr = ISDestroy(icolp);CHKERRQ(ierr); 136942e855d1Svictor PetscFunctionReturn(0); 137042e855d1Svictor } 137142e855d1Svictor 137242e855d1Svictor #undef __FUNCT__ 13734a2ae208SSatish Balay #define __FUNCT__ "MatGetInfo_MPIAIJ" 1374dfbe8321SBarry Smith 
PetscErrorCode MatGetInfo_MPIAIJ(Mat matin,MatInfoType flag,MatInfo *info) 1375a66be287SLois Curfman McInnes { 1376a66be287SLois Curfman McInnes Mat_MPIAIJ *mat = (Mat_MPIAIJ*)matin->data; 1377a66be287SLois Curfman McInnes Mat A = mat->A,B = mat->B; 1378dfbe8321SBarry Smith PetscErrorCode ierr; 1379329f5518SBarry Smith PetscReal isend[5],irecv[5]; 1380a66be287SLois Curfman McInnes 13813a40ed3dSBarry Smith PetscFunctionBegin; 13824e220ebcSLois Curfman McInnes info->block_size = 1.0; 13834e220ebcSLois Curfman McInnes ierr = MatGetInfo(A,MAT_LOCAL,info);CHKERRQ(ierr); 13844e220ebcSLois Curfman McInnes isend[0] = info->nz_used; isend[1] = info->nz_allocated; isend[2] = info->nz_unneeded; 13854e220ebcSLois Curfman McInnes isend[3] = info->memory; isend[4] = info->mallocs; 13864e220ebcSLois Curfman McInnes ierr = MatGetInfo(B,MAT_LOCAL,info);CHKERRQ(ierr); 13874e220ebcSLois Curfman McInnes isend[0] += info->nz_used; isend[1] += info->nz_allocated; isend[2] += info->nz_unneeded; 13884e220ebcSLois Curfman McInnes isend[3] += info->memory; isend[4] += info->mallocs; 1389a66be287SLois Curfman McInnes if (flag == MAT_LOCAL) { 13904e220ebcSLois Curfman McInnes info->nz_used = isend[0]; 13914e220ebcSLois Curfman McInnes info->nz_allocated = isend[1]; 13924e220ebcSLois Curfman McInnes info->nz_unneeded = isend[2]; 13934e220ebcSLois Curfman McInnes info->memory = isend[3]; 13944e220ebcSLois Curfman McInnes info->mallocs = isend[4]; 1395a66be287SLois Curfman McInnes } else if (flag == MAT_GLOBAL_MAX) { 13967adad957SLisandro Dalcin ierr = MPI_Allreduce(isend,irecv,5,MPIU_REAL,MPI_MAX,((PetscObject)matin)->comm);CHKERRQ(ierr); 13974e220ebcSLois Curfman McInnes info->nz_used = irecv[0]; 13984e220ebcSLois Curfman McInnes info->nz_allocated = irecv[1]; 13994e220ebcSLois Curfman McInnes info->nz_unneeded = irecv[2]; 14004e220ebcSLois Curfman McInnes info->memory = irecv[3]; 14014e220ebcSLois Curfman McInnes info->mallocs = irecv[4]; 1402a66be287SLois Curfman McInnes } else if (flag == 
MAT_GLOBAL_SUM) { 14037adad957SLisandro Dalcin ierr = MPI_Allreduce(isend,irecv,5,MPIU_REAL,MPI_SUM,((PetscObject)matin)->comm);CHKERRQ(ierr); 14044e220ebcSLois Curfman McInnes info->nz_used = irecv[0]; 14054e220ebcSLois Curfman McInnes info->nz_allocated = irecv[1]; 14064e220ebcSLois Curfman McInnes info->nz_unneeded = irecv[2]; 14074e220ebcSLois Curfman McInnes info->memory = irecv[3]; 14084e220ebcSLois Curfman McInnes info->mallocs = irecv[4]; 1409a66be287SLois Curfman McInnes } 14104e220ebcSLois Curfman McInnes info->fill_ratio_given = 0; /* no parallel LU/ILU/Cholesky */ 14114e220ebcSLois Curfman McInnes info->fill_ratio_needed = 0; 14124e220ebcSLois Curfman McInnes info->factor_mallocs = 0; 14134e220ebcSLois Curfman McInnes 14143a40ed3dSBarry Smith PetscFunctionReturn(0); 1415a66be287SLois Curfman McInnes } 1416a66be287SLois Curfman McInnes 14174a2ae208SSatish Balay #undef __FUNCT__ 14184a2ae208SSatish Balay #define __FUNCT__ "MatSetOption_MPIAIJ" 14194e0d8c25SBarry Smith PetscErrorCode MatSetOption_MPIAIJ(Mat A,MatOption op,PetscTruth flg) 1420c74985f6SBarry Smith { 1421c0bbcb79SLois Curfman McInnes Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 1422dfbe8321SBarry Smith PetscErrorCode ierr; 1423c74985f6SBarry Smith 14243a40ed3dSBarry Smith PetscFunctionBegin; 142512c028f9SKris Buschelman switch (op) { 1426512a5fc5SBarry Smith case MAT_NEW_NONZERO_LOCATIONS: 142712c028f9SKris Buschelman case MAT_NEW_NONZERO_ALLOCATION_ERR: 142828b2fa4aSMatthew Knepley case MAT_UNUSED_NONZERO_LOCATION_ERR: 1429a9817697SBarry Smith case MAT_KEEP_NONZERO_PATTERN: 143012c028f9SKris Buschelman case MAT_NEW_NONZERO_LOCATION_ERR: 143112c028f9SKris Buschelman case MAT_USE_INODES: 143212c028f9SKris Buschelman case MAT_IGNORE_ZERO_ENTRIES: 14334e0d8c25SBarry Smith ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr); 14344e0d8c25SBarry Smith ierr = MatSetOption(a->B,op,flg);CHKERRQ(ierr); 143512c028f9SKris Buschelman break; 143612c028f9SKris Buschelman case MAT_ROW_ORIENTED: 14374e0d8c25SBarry Smith 
a->roworiented = flg; 14384e0d8c25SBarry Smith ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr); 14394e0d8c25SBarry Smith ierr = MatSetOption(a->B,op,flg);CHKERRQ(ierr); 144012c028f9SKris Buschelman break; 14414e0d8c25SBarry Smith case MAT_NEW_DIAGONALS: 1442290bbb0aSBarry Smith ierr = PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr); 144312c028f9SKris Buschelman break; 144412c028f9SKris Buschelman case MAT_IGNORE_OFF_PROC_ENTRIES: 14457c922b88SBarry Smith a->donotstash = PETSC_TRUE; 144612c028f9SKris Buschelman break; 144777e54ba9SKris Buschelman case MAT_SYMMETRIC: 14484e0d8c25SBarry Smith ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr); 144925f421beSHong Zhang break; 145077e54ba9SKris Buschelman case MAT_STRUCTURALLY_SYMMETRIC: 1451*eeffb40dSHong Zhang ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr); 1452*eeffb40dSHong Zhang break; 1453bf108f30SBarry Smith case MAT_HERMITIAN: 1454*eeffb40dSHong Zhang ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr); 1455*eeffb40dSHong Zhang break; 1456bf108f30SBarry Smith case MAT_SYMMETRY_ETERNAL: 14574e0d8c25SBarry Smith ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr); 145877e54ba9SKris Buschelman break; 145912c028f9SKris Buschelman default: 1460ad86a440SBarry Smith SETERRQ1(PETSC_ERR_SUP,"unknown option %d",op); 14613a40ed3dSBarry Smith } 14623a40ed3dSBarry Smith PetscFunctionReturn(0); 1463c74985f6SBarry Smith } 1464c74985f6SBarry Smith 14654a2ae208SSatish Balay #undef __FUNCT__ 14664a2ae208SSatish Balay #define __FUNCT__ "MatGetRow_MPIAIJ" 1467b1d57f15SBarry Smith PetscErrorCode MatGetRow_MPIAIJ(Mat matin,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v) 146839e00950SLois Curfman McInnes { 1469154123eaSLois Curfman McInnes Mat_MPIAIJ *mat = (Mat_MPIAIJ*)matin->data; 147087828ca2SBarry Smith PetscScalar *vworkA,*vworkB,**pvA,**pvB,*v_p; 14716849ba73SBarry Smith PetscErrorCode ierr; 1472d0f46423SBarry Smith PetscInt i,*cworkA,*cworkB,**pcA,**pcB,cstart = matin->cmap->rstart; 1473d0f46423SBarry Smith PetscInt 
nztot,nzA,nzB,lrow,rstart = matin->rmap->rstart,rend = matin->rmap->rend; 1474b1d57f15SBarry Smith PetscInt *cmap,*idx_p; 147539e00950SLois Curfman McInnes 14763a40ed3dSBarry Smith PetscFunctionBegin; 1477abc0a331SBarry Smith if (mat->getrowactive) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Already active"); 14787a0afa10SBarry Smith mat->getrowactive = PETSC_TRUE; 14797a0afa10SBarry Smith 148070f0671dSBarry Smith if (!mat->rowvalues && (idx || v)) { 14817a0afa10SBarry Smith /* 14827a0afa10SBarry Smith allocate enough space to hold information from the longest row. 14837a0afa10SBarry Smith */ 14847a0afa10SBarry Smith Mat_SeqAIJ *Aa = (Mat_SeqAIJ*)mat->A->data,*Ba = (Mat_SeqAIJ*)mat->B->data; 1485b1d57f15SBarry Smith PetscInt max = 1,tmp; 1486d0f46423SBarry Smith for (i=0; i<matin->rmap->n; i++) { 14877a0afa10SBarry Smith tmp = Aa->i[i+1] - Aa->i[i] + Ba->i[i+1] - Ba->i[i]; 14887a0afa10SBarry Smith if (max < tmp) { max = tmp; } 14897a0afa10SBarry Smith } 1490b1d57f15SBarry Smith ierr = PetscMalloc(max*(sizeof(PetscInt)+sizeof(PetscScalar)),&mat->rowvalues);CHKERRQ(ierr); 1491b1d57f15SBarry Smith mat->rowindices = (PetscInt*)(mat->rowvalues + max); 14927a0afa10SBarry Smith } 14937a0afa10SBarry Smith 149429bbc08cSBarry Smith if (row < rstart || row >= rend) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Only local rows") 1495abc0e9e4SLois Curfman McInnes lrow = row - rstart; 149639e00950SLois Curfman McInnes 1497154123eaSLois Curfman McInnes pvA = &vworkA; pcA = &cworkA; pvB = &vworkB; pcB = &cworkB; 1498154123eaSLois Curfman McInnes if (!v) {pvA = 0; pvB = 0;} 1499154123eaSLois Curfman McInnes if (!idx) {pcA = 0; if (!v) pcB = 0;} 1500f830108cSBarry Smith ierr = (*mat->A->ops->getrow)(mat->A,lrow,&nzA,pcA,pvA);CHKERRQ(ierr); 1501f830108cSBarry Smith ierr = (*mat->B->ops->getrow)(mat->B,lrow,&nzB,pcB,pvB);CHKERRQ(ierr); 1502154123eaSLois Curfman McInnes nztot = nzA + nzB; 1503154123eaSLois Curfman McInnes 150470f0671dSBarry Smith cmap = mat->garray; 1505154123eaSLois Curfman McInnes if (v 
|| idx) { 1506154123eaSLois Curfman McInnes if (nztot) { 1507154123eaSLois Curfman McInnes /* Sort by increasing column numbers, assuming A and B already sorted */ 1508b1d57f15SBarry Smith PetscInt imark = -1; 1509154123eaSLois Curfman McInnes if (v) { 151070f0671dSBarry Smith *v = v_p = mat->rowvalues; 151139e00950SLois Curfman McInnes for (i=0; i<nzB; i++) { 151270f0671dSBarry Smith if (cmap[cworkB[i]] < cstart) v_p[i] = vworkB[i]; 1513154123eaSLois Curfman McInnes else break; 1514154123eaSLois Curfman McInnes } 1515154123eaSLois Curfman McInnes imark = i; 151670f0671dSBarry Smith for (i=0; i<nzA; i++) v_p[imark+i] = vworkA[i]; 151770f0671dSBarry Smith for (i=imark; i<nzB; i++) v_p[nzA+i] = vworkB[i]; 1518154123eaSLois Curfman McInnes } 1519154123eaSLois Curfman McInnes if (idx) { 152070f0671dSBarry Smith *idx = idx_p = mat->rowindices; 152170f0671dSBarry Smith if (imark > -1) { 152270f0671dSBarry Smith for (i=0; i<imark; i++) { 152370f0671dSBarry Smith idx_p[i] = cmap[cworkB[i]]; 152470f0671dSBarry Smith } 152570f0671dSBarry Smith } else { 1526154123eaSLois Curfman McInnes for (i=0; i<nzB; i++) { 152770f0671dSBarry Smith if (cmap[cworkB[i]] < cstart) idx_p[i] = cmap[cworkB[i]]; 1528154123eaSLois Curfman McInnes else break; 1529154123eaSLois Curfman McInnes } 1530154123eaSLois Curfman McInnes imark = i; 153170f0671dSBarry Smith } 153270f0671dSBarry Smith for (i=0; i<nzA; i++) idx_p[imark+i] = cstart + cworkA[i]; 153370f0671dSBarry Smith for (i=imark; i<nzB; i++) idx_p[nzA+i] = cmap[cworkB[i]]; 153439e00950SLois Curfman McInnes } 15353f97c4b0SBarry Smith } else { 15361ca473b0SSatish Balay if (idx) *idx = 0; 15371ca473b0SSatish Balay if (v) *v = 0; 15381ca473b0SSatish Balay } 1539154123eaSLois Curfman McInnes } 154039e00950SLois Curfman McInnes *nz = nztot; 1541f830108cSBarry Smith ierr = (*mat->A->ops->restorerow)(mat->A,lrow,&nzA,pcA,pvA);CHKERRQ(ierr); 1542f830108cSBarry Smith ierr = (*mat->B->ops->restorerow)(mat->B,lrow,&nzB,pcB,pvB);CHKERRQ(ierr); 
15433a40ed3dSBarry Smith PetscFunctionReturn(0); 154439e00950SLois Curfman McInnes } 154539e00950SLois Curfman McInnes 15464a2ae208SSatish Balay #undef __FUNCT__ 15474a2ae208SSatish Balay #define __FUNCT__ "MatRestoreRow_MPIAIJ" 1548b1d57f15SBarry Smith PetscErrorCode MatRestoreRow_MPIAIJ(Mat mat,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v) 154939e00950SLois Curfman McInnes { 15507a0afa10SBarry Smith Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 15513a40ed3dSBarry Smith 15523a40ed3dSBarry Smith PetscFunctionBegin; 1553abc0a331SBarry Smith if (!aij->getrowactive) { 1554abc0a331SBarry Smith SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"MatGetRow() must be called first"); 15557a0afa10SBarry Smith } 15567a0afa10SBarry Smith aij->getrowactive = PETSC_FALSE; 15573a40ed3dSBarry Smith PetscFunctionReturn(0); 155839e00950SLois Curfman McInnes } 155939e00950SLois Curfman McInnes 15604a2ae208SSatish Balay #undef __FUNCT__ 15614a2ae208SSatish Balay #define __FUNCT__ "MatNorm_MPIAIJ" 1562dfbe8321SBarry Smith PetscErrorCode MatNorm_MPIAIJ(Mat mat,NormType type,PetscReal *norm) 1563855ac2c5SLois Curfman McInnes { 1564855ac2c5SLois Curfman McInnes Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 1565ec8511deSBarry Smith Mat_SeqAIJ *amat = (Mat_SeqAIJ*)aij->A->data,*bmat = (Mat_SeqAIJ*)aij->B->data; 1566dfbe8321SBarry Smith PetscErrorCode ierr; 1567d0f46423SBarry Smith PetscInt i,j,cstart = mat->cmap->rstart; 1568329f5518SBarry Smith PetscReal sum = 0.0; 1569a77337e4SBarry Smith MatScalar *v; 157004ca555eSLois Curfman McInnes 15713a40ed3dSBarry Smith PetscFunctionBegin; 157217699dbbSLois Curfman McInnes if (aij->size == 1) { 157314183eadSLois Curfman McInnes ierr = MatNorm(aij->A,type,norm);CHKERRQ(ierr); 157437fa93a5SLois Curfman McInnes } else { 157504ca555eSLois Curfman McInnes if (type == NORM_FROBENIUS) { 157604ca555eSLois Curfman McInnes v = amat->a; 157704ca555eSLois Curfman McInnes for (i=0; i<amat->nz; i++) { 1578aa482453SBarry Smith #if defined(PETSC_USE_COMPLEX) 1579329f5518SBarry 
Smith sum += PetscRealPart(PetscConj(*v)*(*v)); v++; 158004ca555eSLois Curfman McInnes #else 158104ca555eSLois Curfman McInnes sum += (*v)*(*v); v++; 158204ca555eSLois Curfman McInnes #endif 158304ca555eSLois Curfman McInnes } 158404ca555eSLois Curfman McInnes v = bmat->a; 158504ca555eSLois Curfman McInnes for (i=0; i<bmat->nz; i++) { 1586aa482453SBarry Smith #if defined(PETSC_USE_COMPLEX) 1587329f5518SBarry Smith sum += PetscRealPart(PetscConj(*v)*(*v)); v++; 158804ca555eSLois Curfman McInnes #else 158904ca555eSLois Curfman McInnes sum += (*v)*(*v); v++; 159004ca555eSLois Curfman McInnes #endif 159104ca555eSLois Curfman McInnes } 15927adad957SLisandro Dalcin ierr = MPI_Allreduce(&sum,norm,1,MPIU_REAL,MPI_SUM,((PetscObject)mat)->comm);CHKERRQ(ierr); 159304ca555eSLois Curfman McInnes *norm = sqrt(*norm); 15943a40ed3dSBarry Smith } else if (type == NORM_1) { /* max column norm */ 1595329f5518SBarry Smith PetscReal *tmp,*tmp2; 1596b1d57f15SBarry Smith PetscInt *jj,*garray = aij->garray; 1597d0f46423SBarry Smith ierr = PetscMalloc((mat->cmap->N+1)*sizeof(PetscReal),&tmp);CHKERRQ(ierr); 1598d0f46423SBarry Smith ierr = PetscMalloc((mat->cmap->N+1)*sizeof(PetscReal),&tmp2);CHKERRQ(ierr); 1599d0f46423SBarry Smith ierr = PetscMemzero(tmp,mat->cmap->N*sizeof(PetscReal));CHKERRQ(ierr); 160004ca555eSLois Curfman McInnes *norm = 0.0; 160104ca555eSLois Curfman McInnes v = amat->a; jj = amat->j; 160204ca555eSLois Curfman McInnes for (j=0; j<amat->nz; j++) { 1603bfec09a0SHong Zhang tmp[cstart + *jj++ ] += PetscAbsScalar(*v); v++; 160404ca555eSLois Curfman McInnes } 160504ca555eSLois Curfman McInnes v = bmat->a; jj = bmat->j; 160604ca555eSLois Curfman McInnes for (j=0; j<bmat->nz; j++) { 1607bfec09a0SHong Zhang tmp[garray[*jj++]] += PetscAbsScalar(*v); v++; 160804ca555eSLois Curfman McInnes } 1609d0f46423SBarry Smith ierr = MPI_Allreduce(tmp,tmp2,mat->cmap->N,MPIU_REAL,MPI_SUM,((PetscObject)mat)->comm);CHKERRQ(ierr); 1610d0f46423SBarry Smith for (j=0; j<mat->cmap->N; j++) { 
        if (tmp2[j] > *norm) *norm = tmp2[j];
      }
      ierr = PetscFree(tmp);CHKERRQ(ierr);
      ierr = PetscFree(tmp2);CHKERRQ(ierr);
    } else if (type == NORM_INFINITY) { /* max row norm */
      PetscReal ntemp = 0.0;
      /* each local row spans the diagonal (amat) and off-diagonal (bmat)
         blocks; sum the absolute values of both parts for the full row sum */
      for (j=0; j<aij->A->rmap->n; j++) {
        v = amat->a + amat->i[j];
        sum = 0.0;
        for (i=0; i<amat->i[j+1]-amat->i[j]; i++) {
          sum += PetscAbsScalar(*v); v++;
        }
        v = bmat->a + bmat->i[j];
        for (i=0; i<bmat->i[j+1]-bmat->i[j]; i++) {
          sum += PetscAbsScalar(*v); v++;
        }
        if (sum > ntemp) ntemp = sum;
      }
      /* infinity norm is the maximum row sum over all processes */
      ierr = MPI_Allreduce(&ntemp,norm,1,MPIU_REAL,MPI_MAX,((PetscObject)mat)->comm);CHKERRQ(ierr);
    } else {
      SETERRQ(PETSC_ERR_SUP,"No support for two norm");
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatTranspose_MPIAIJ"
/*
   MatTranspose_MPIAIJ - forms the transpose of an MPIAIJ matrix by inserting
   each local row of A as a column of B with MatSetValues().

   reuse == MAT_INITIAL_MATRIX: a new matrix is created; d_nnz preallocation is
   computed locally and o_nnz is approximated by d_nnz to avoid communication.
   reuse == MAT_REUSE_MATRIX with A == *matout: in-place transpose, only
   allowed for square matrices.
*/
PetscErrorCode MatTranspose_MPIAIJ(Mat A,MatReuse reuse,Mat *matout)
{
  Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
  Mat_SeqAIJ     *Aloc=(Mat_SeqAIJ*)a->A->data,*Bloc=(Mat_SeqAIJ*)a->B->data;
  PetscErrorCode ierr;
  PetscInt       M = A->rmap->N,N = A->cmap->N,ma,na,mb,*ai,*aj,*bi,*bj,row,*cols,*cols_tmp,i,*d_nnz;
  PetscInt       cstart=A->cmap->rstart,ncol;
  Mat            B;
  MatScalar      *array;

  PetscFunctionBegin;
  if (reuse == MAT_REUSE_MATRIX && A == *matout && M != N) SETERRQ(PETSC_ERR_ARG_SIZ,"Square matrix only for in-place");

  ma = A->rmap->n; na = A->cmap->n; mb = a->B->rmap->n;
  ai = Aloc->i; aj = Aloc->j;
  bi = Bloc->i; bj = Bloc->j;
  if (reuse == MAT_INITIAL_MATRIX || *matout == A) {
    /* compute d_nnz for preallocation; o_nnz is approximated by d_nnz to avoid communication */
    ierr = PetscMalloc((1+na)*sizeof(PetscInt),&d_nnz);CHKERRQ(ierr);
    ierr = PetscMemzero(d_nnz,(1+na)*sizeof(PetscInt));CHKERRQ(ierr);
    for (i=0; i<ai[ma]; i++){
      d_nnz[aj[i]] ++;
      aj[i] += cstart; /* global col index to be used by MatSetValues() */
    }

    ierr = MatCreate(((PetscObject)A)->comm,&B);CHKERRQ(ierr);
    ierr = MatSetSizes(B,A->cmap->n,A->rmap->n,N,M);CHKERRQ(ierr);
    ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr);
    ierr = MatMPIAIJSetPreallocation(B,0,d_nnz,0,d_nnz);CHKERRQ(ierr);
    ierr = PetscFree(d_nnz);CHKERRQ(ierr);
  } else {
    /* NOTE(review): in this reuse path aj is NOT shifted to global indices
       (the shift above only runs in the other branch); presumably callers
       only reuse with *matout == A -- verify */
    B = *matout;
  }

  /* copy over the A part: the i-th local row of A is inserted as a column of
     B (ncol row indices aj, one column index row) */
  array = Aloc->a;
  row = A->rmap->rstart;
  for (i=0; i<ma; i++) {
    ncol = ai[i+1]-ai[i];
    ierr = MatSetValues(B,ncol,aj,1,&row,array,INSERT_VALUES);CHKERRQ(ierr);
    row++; array += ncol; aj += ncol;
  }
  aj = Aloc->j;
  for (i=0; i<ai[ma]; i++) aj[i] -= cstart; /* resume local col index */

  /* copy over the B part; garray maps the compressed local column indices of
     the off-diagonal block back to global indices */
  ierr = PetscMalloc(bi[mb]*sizeof(PetscInt),&cols);CHKERRQ(ierr);
  ierr = PetscMemzero(cols,bi[mb]*sizeof(PetscInt));CHKERRQ(ierr);
  array = Bloc->a;
  row = A->rmap->rstart;
  for (i=0; i<bi[mb]; i++) {cols[i] = a->garray[bj[i]];}
  cols_tmp = cols;
  for (i=0; i<mb; i++) {
    ncol = bi[i+1]-bi[i];
    ierr = MatSetValues(B,ncol,cols_tmp,1,&row,array,INSERT_VALUES);CHKERRQ(ierr);
    row++; array += ncol; cols_tmp += ncol;
  }
  ierr = PetscFree(cols);CHKERRQ(ierr);

  ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  if (reuse == MAT_INITIAL_MATRIX || *matout != A) {
    *matout = B;
  } else {
    /* in-place: replace A's header with the assembled transpose */
    ierr = MatHeaderCopy(A,B);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatDiagonalScale_MPIAIJ"
/* MatDiagonalScale_MPIAIJ - computes mat = diag(ll)*mat*diag(rr), overlapping
   the scatter of rr to ghost points with the left scaling */
PetscErrorCode MatDiagonalScale_MPIAIJ(Mat mat,Vec ll,Vec rr)
{
  Mat_MPIAIJ     *aij = (Mat_MPIAIJ*)mat->data;
  Mat            a = 
aij->A,b = aij->B;
  PetscErrorCode ierr;
  PetscInt       s1,s2,s3;

  PetscFunctionBegin;
  ierr = MatGetLocalSize(mat,&s2,&s3);CHKERRQ(ierr);
  if (rr) {
    ierr = VecGetLocalSize(rr,&s1);CHKERRQ(ierr);
    if (s1!=s3) SETERRQ(PETSC_ERR_ARG_SIZ,"right vector non-conforming local size");
    /* Overlap communication with computation. */
    ierr = VecScatterBegin(aij->Mvctx,rr,aij->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  }
  if (ll) {
    ierr = VecGetLocalSize(ll,&s1);CHKERRQ(ierr);
    if (s1!=s2) SETERRQ(PETSC_ERR_ARG_SIZ,"left vector non-conforming local size");
    /* left-scale the off-diagonal block while the scatter is in flight */
    ierr = (*b->ops->diagonalscale)(b,ll,0);CHKERRQ(ierr);
  }
  /* scale the diagonal block */
  ierr = (*a->ops->diagonalscale)(a,ll,rr);CHKERRQ(ierr);

  if (rr) {
    /* Do a scatter end and then right scale the off-diagonal block */
    ierr = VecScatterEnd(aij->Mvctx,rr,aij->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
    ierr = (*b->ops->diagonalscale)(b,0,aij->lvec);CHKERRQ(ierr);
  }

  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetBlockSize_MPIAIJ"
/* MatSetBlockSize_MPIAIJ - sets the block size on both the diagonal (a->A)
   and off-diagonal (a->B) sequential blocks */
PetscErrorCode MatSetBlockSize_MPIAIJ(Mat A,PetscInt bs)
{
  Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatSetBlockSize(a->A,bs);CHKERRQ(ierr);
  ierr = MatSetBlockSize(a->B,bs);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetUnfactored_MPIAIJ"
/* MatSetUnfactored_MPIAIJ - resets the factored state; only the diagonal
   block a->A is reset here (a->B is not touched) */
PetscErrorCode MatSetUnfactored_MPIAIJ(Mat A)
{
  Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatSetUnfactored(a->A);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatEqual_MPIAIJ"
/* MatEqual_MPIAIJ - two MPIAIJ matrices are equal iff both local blocks are
   equal on every process; local results are combined with a logical-AND
   reduction so all ranks return the same flag */
PetscErrorCode MatEqual_MPIAIJ(Mat A,Mat B,PetscTruth *flag)
{
  Mat_MPIAIJ     *matB = (Mat_MPIAIJ*)B->data,*matA = (Mat_MPIAIJ*)A->data;
  Mat            a,b,c,d;
  PetscTruth     flg;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  a = matA->A; b = matA->B;
  c = matB->A; d = matB->B;

  ierr = MatEqual(a,c,&flg);CHKERRQ(ierr);
  if (flg) {
    /* only compare the off-diagonal blocks if the diagonal ones match */
    ierr = MatEqual(b,d,&flg);CHKERRQ(ierr);
  }
  ierr = MPI_Allreduce(&flg,flag,1,MPI_INT,MPI_LAND,((PetscObject)A)->comm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatCopy_MPIAIJ"
PetscErrorCode MatCopy_MPIAIJ(Mat A,Mat B,MatStructure str)
{
  PetscErrorCode ierr;
  Mat_MPIAIJ     *a = (Mat_MPIAIJ *)A->data;
  Mat_MPIAIJ     *b = (Mat_MPIAIJ *)B->data;

  PetscFunctionBegin;
  /* If the two matrices don't have the same copy implementation, they aren't compatible for fast copy. */
  if ((str != SAME_NONZERO_PATTERN) || (A->ops->copy != B->ops->copy)) {
    /* because of the column compression in the off-processor part of the matrix a->B,
       the number of columns in a->B and b->B may be different, hence we cannot call
       the MatCopy() directly on the two parts. If need be, we can provide a more
       efficient copy than the MatCopy_Basic() by first uncompressing the a->B matrices
       then copying the submatrices */
    ierr = MatCopy_Basic(A,B,str);CHKERRQ(ierr);
  } else {
    ierr = MatCopy(a->A,b->A,str);CHKERRQ(ierr);
    ierr = MatCopy(a->B,b->B,str);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetUpPreallocation_MPIAIJ"
/* MatSetUpPreallocation_MPIAIJ - installs default preallocation when the user
   supplied none */
PetscErrorCode MatSetUpPreallocation_MPIAIJ(Mat A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatMPIAIJSetPreallocation(A,PETSC_DEFAULT,0,PETSC_DEFAULT,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#include "petscblaslapack.h"
#undef __FUNCT__
#define __FUNCT__ "MatAXPY_MPIAIJ"
/*
   MatAXPY_MPIAIJ - computes Y = a*X + Y.

   SAME_NONZERO_PATTERN:   the nonzero value arrays line up entry-for-entry,
                           so one BLAS axpy is applied per block.
   SUBSET_NONZERO_PATTERN: X's pattern is a subset of Y's; a mapping xtoy from
                           X's entries into Y's value array is built (and
                           cached on Y's off-diagonal block) and used to
                           scatter-add.
   otherwise:              falls back to the general MatAXPY_Basic().
*/
PetscErrorCode MatAXPY_MPIAIJ(Mat Y,PetscScalar a,Mat X,MatStructure str)
{
  PetscErrorCode ierr;
  PetscInt       i;
  Mat_MPIAIJ     *xx = (Mat_MPIAIJ *)X->data,*yy = (Mat_MPIAIJ *)Y->data;
  PetscBLASInt   bnz,one=1;
  Mat_SeqAIJ     *x,*y;

  PetscFunctionBegin;
  if (str == SAME_NONZERO_PATTERN) {
    PetscScalar alpha = a;
    x = (Mat_SeqAIJ *)xx->A->data;
    y = (Mat_SeqAIJ *)yy->A->data;
    bnz = PetscBLASIntCast(x->nz);
    BLASaxpy_(&bnz,&alpha,x->a,&one,y->a,&one);
    x = (Mat_SeqAIJ *)xx->B->data;
    y = (Mat_SeqAIJ *)yy->B->data;
    bnz = PetscBLASIntCast(x->nz);
    BLASaxpy_(&bnz,&alpha,x->a,&one,y->a,&one);
  } else if (str == SUBSET_NONZERO_PATTERN) {
    ierr = MatAXPY_SeqAIJ(yy->A,a,xx->A,str);CHKERRQ(ierr);

    x = (Mat_SeqAIJ *)xx->B->data;
    y = (Mat_SeqAIJ *)yy->B->data;
    /* drop a cached mapping that was built against a different X */
    if (y->xtoy && y->XtoY != xx->B) {
      ierr = PetscFree(y->xtoy);CHKERRQ(ierr);
      ierr = MatDestroy(y->XtoY);CHKERRQ(ierr);
    }
    if (!y->xtoy) { /* get xtoy */
      ierr = MatAXPYGetxtoy_Private(xx->B->rmap->n,x->i,x->j,xx->garray,y->i,y->j,yy->garray,&y->xtoy);CHKERRQ(ierr);
      y->XtoY = xx->B;
      /* keep a reference so the cached mapping stays valid while X lives */
      ierr = PetscObjectReference((PetscObject)xx->B);CHKERRQ(ierr);
    }
    for (i=0; i<x->nz; i++) y->a[y->xtoy[i]] += a*(x->a[i]);
  } else {
    ierr = MatAXPY_Basic(Y,a,X,str);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

EXTERN PetscErrorCode PETSCMAT_DLLEXPORT MatConjugate_SeqAIJ(Mat);

#undef __FUNCT__
#define __FUNCT__ "MatConjugate_MPIAIJ"
/* MatConjugate_MPIAIJ - complex-conjugates both local blocks; a no-op when
   PETSc is built with real scalars */
PetscErrorCode PETSCMAT_DLLEXPORT MatConjugate_MPIAIJ(Mat mat)
{
#if defined(PETSC_USE_COMPLEX)
  PetscErrorCode ierr;
  Mat_MPIAIJ     *aij = (Mat_MPIAIJ *)mat->data;

  PetscFunctionBegin;
  ierr = MatConjugate_SeqAIJ(aij->A);CHKERRQ(ierr);
  ierr = MatConjugate_SeqAIJ(aij->B);CHKERRQ(ierr);
#else
  PetscFunctionBegin;
#endif
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatRealPart_MPIAIJ"
/* MatRealPart_MPIAIJ - replaces the matrix entries by their real parts,
   block by block */
PetscErrorCode MatRealPart_MPIAIJ(Mat A)
{
  Mat_MPIAIJ     *a = (Mat_MPIAIJ*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatRealPart(a->A);CHKERRQ(ierr);
  ierr = MatRealPart(a->B);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
189899cafbc1SBarry Smith #define __FUNCT__ "MatImaginaryPart_MPIAIJ" 189999cafbc1SBarry Smith PetscErrorCode MatImaginaryPart_MPIAIJ(Mat A) 190099cafbc1SBarry Smith { 190199cafbc1SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 190299cafbc1SBarry Smith PetscErrorCode ierr; 190399cafbc1SBarry Smith 190499cafbc1SBarry Smith PetscFunctionBegin; 190599cafbc1SBarry Smith ierr = MatImaginaryPart(a->A);CHKERRQ(ierr); 190699cafbc1SBarry Smith ierr = MatImaginaryPart(a->B);CHKERRQ(ierr); 190799cafbc1SBarry Smith PetscFunctionReturn(0); 190899cafbc1SBarry Smith } 190999cafbc1SBarry Smith 1910103bf8bdSMatthew Knepley #ifdef PETSC_HAVE_PBGL 1911103bf8bdSMatthew Knepley 1912103bf8bdSMatthew Knepley #include <boost/parallel/mpi/bsp_process_group.hpp> 1913a2c909beSMatthew Knepley #include <boost/graph/distributed/ilu_default_graph.hpp> 1914a2c909beSMatthew Knepley #include <boost/graph/distributed/ilu_0_block.hpp> 1915a2c909beSMatthew Knepley #include <boost/graph/distributed/ilu_preconditioner.hpp> 1916103bf8bdSMatthew Knepley #include <boost/graph/distributed/petsc/interface.hpp> 1917a2c909beSMatthew Knepley #include <boost/multi_array.hpp> 1918d0f46423SBarry Smith #include <boost/parallel/distributed_property_map->hpp> 1919103bf8bdSMatthew Knepley 1920103bf8bdSMatthew Knepley #undef __FUNCT__ 1921103bf8bdSMatthew Knepley #define __FUNCT__ "MatILUFactorSymbolic_MPIAIJ" 1922103bf8bdSMatthew Knepley /* 1923103bf8bdSMatthew Knepley This uses the parallel ILU factorization of Peter Gottschling <pgottsch@osl.iu.edu> 1924103bf8bdSMatthew Knepley */ 19250481f469SBarry Smith PetscErrorCode MatILUFactorSymbolic_MPIAIJ(Mat fact,Mat A, IS isrow, IS iscol, const MatFactorInfo *info) 1926103bf8bdSMatthew Knepley { 1927a2c909beSMatthew Knepley namespace petsc = boost::distributed::petsc; 1928a2c909beSMatthew Knepley 1929a2c909beSMatthew Knepley namespace graph_dist = boost::graph::distributed; 1930a2c909beSMatthew Knepley using boost::graph::distributed::ilu_default::process_group_type; 
1931a2c909beSMatthew Knepley using boost::graph::ilu_permuted; 1932a2c909beSMatthew Knepley 1933103bf8bdSMatthew Knepley PetscTruth row_identity, col_identity; 1934776b82aeSLisandro Dalcin PetscContainer c; 1935103bf8bdSMatthew Knepley PetscInt m, n, M, N; 1936103bf8bdSMatthew Knepley PetscErrorCode ierr; 1937103bf8bdSMatthew Knepley 1938103bf8bdSMatthew Knepley PetscFunctionBegin; 1939103bf8bdSMatthew Knepley if (info->levels != 0) SETERRQ(PETSC_ERR_SUP,"Only levels = 0 supported for parallel ilu"); 1940103bf8bdSMatthew Knepley ierr = ISIdentity(isrow, &row_identity);CHKERRQ(ierr); 1941103bf8bdSMatthew Knepley ierr = ISIdentity(iscol, &col_identity);CHKERRQ(ierr); 1942103bf8bdSMatthew Knepley if (!row_identity || !col_identity) { 1943103bf8bdSMatthew Knepley SETERRQ(PETSC_ERR_ARG_WRONG,"Row and column permutations must be identity for parallel ILU"); 1944103bf8bdSMatthew Knepley } 1945103bf8bdSMatthew Knepley 1946103bf8bdSMatthew Knepley process_group_type pg; 1947a2c909beSMatthew Knepley typedef graph_dist::ilu_default::ilu_level_graph_type lgraph_type; 1948a2c909beSMatthew Knepley lgraph_type* lgraph_p = new lgraph_type(petsc::num_global_vertices(A), pg, petsc::matrix_distribution(A, pg)); 1949a2c909beSMatthew Knepley lgraph_type& level_graph = *lgraph_p; 1950a2c909beSMatthew Knepley graph_dist::ilu_default::graph_type& graph(level_graph.graph); 1951a2c909beSMatthew Knepley 1952103bf8bdSMatthew Knepley petsc::read_matrix(A, graph, get(boost::edge_weight, graph)); 1953a2c909beSMatthew Knepley ilu_permuted(level_graph); 1954103bf8bdSMatthew Knepley 1955103bf8bdSMatthew Knepley /* put together the new matrix */ 19567adad957SLisandro Dalcin ierr = MatCreate(((PetscObject)A)->comm, fact);CHKERRQ(ierr); 1957103bf8bdSMatthew Knepley ierr = MatGetLocalSize(A, &m, &n);CHKERRQ(ierr); 1958103bf8bdSMatthew Knepley ierr = MatGetSize(A, &M, &N);CHKERRQ(ierr); 1959719d5645SBarry Smith ierr = MatSetSizes(fact, m, n, M, N);CHKERRQ(ierr); 1960719d5645SBarry Smith ierr = 
MatSetType(fact, ((PetscObject)A)->type_name);CHKERRQ(ierr); 1961719d5645SBarry Smith ierr = MatAssemblyBegin(fact, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1962719d5645SBarry Smith ierr = MatAssemblyEnd(fact, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1963103bf8bdSMatthew Knepley 19647adad957SLisandro Dalcin ierr = PetscContainerCreate(((PetscObject)A)->comm, &c); 1965776b82aeSLisandro Dalcin ierr = PetscContainerSetPointer(c, lgraph_p); 1966719d5645SBarry Smith ierr = PetscObjectCompose((PetscObject) (fact), "graph", (PetscObject) c); 1967103bf8bdSMatthew Knepley PetscFunctionReturn(0); 1968103bf8bdSMatthew Knepley } 1969103bf8bdSMatthew Knepley 1970103bf8bdSMatthew Knepley #undef __FUNCT__ 1971103bf8bdSMatthew Knepley #define __FUNCT__ "MatLUFactorNumeric_MPIAIJ" 19720481f469SBarry Smith PetscErrorCode MatLUFactorNumeric_MPIAIJ(Mat B,Mat A, const MatFactorInfo *info) 1973103bf8bdSMatthew Knepley { 1974103bf8bdSMatthew Knepley PetscFunctionBegin; 1975103bf8bdSMatthew Knepley PetscFunctionReturn(0); 1976103bf8bdSMatthew Knepley } 1977103bf8bdSMatthew Knepley 1978103bf8bdSMatthew Knepley #undef __FUNCT__ 1979103bf8bdSMatthew Knepley #define __FUNCT__ "MatSolve_MPIAIJ" 1980103bf8bdSMatthew Knepley /* 1981103bf8bdSMatthew Knepley This uses the parallel ILU factorization of Peter Gottschling <pgottsch@osl.iu.edu> 1982103bf8bdSMatthew Knepley */ 1983103bf8bdSMatthew Knepley PetscErrorCode MatSolve_MPIAIJ(Mat A, Vec b, Vec x) 1984103bf8bdSMatthew Knepley { 1985a2c909beSMatthew Knepley namespace graph_dist = boost::graph::distributed; 1986a2c909beSMatthew Knepley 1987a2c909beSMatthew Knepley typedef graph_dist::ilu_default::ilu_level_graph_type lgraph_type; 1988a2c909beSMatthew Knepley lgraph_type* lgraph_p; 1989776b82aeSLisandro Dalcin PetscContainer c; 1990103bf8bdSMatthew Knepley PetscErrorCode ierr; 1991103bf8bdSMatthew Knepley 1992103bf8bdSMatthew Knepley PetscFunctionBegin; 1993103bf8bdSMatthew Knepley ierr = PetscObjectQuery((PetscObject) A, "graph", (PetscObject *) 
&c);CHKERRQ(ierr); 1994776b82aeSLisandro Dalcin ierr = PetscContainerGetPointer(c, (void **) &lgraph_p);CHKERRQ(ierr); 1995103bf8bdSMatthew Knepley ierr = VecCopy(b, x);CHKERRQ(ierr); 1996a2c909beSMatthew Knepley 1997a2c909beSMatthew Knepley PetscScalar* array_x; 1998a2c909beSMatthew Knepley ierr = VecGetArray(x, &array_x);CHKERRQ(ierr); 1999a2c909beSMatthew Knepley PetscInt sx; 2000a2c909beSMatthew Knepley ierr = VecGetSize(x, &sx);CHKERRQ(ierr); 2001a2c909beSMatthew Knepley 2002a2c909beSMatthew Knepley PetscScalar* array_b; 2003a2c909beSMatthew Knepley ierr = VecGetArray(b, &array_b);CHKERRQ(ierr); 2004a2c909beSMatthew Knepley PetscInt sb; 2005a2c909beSMatthew Knepley ierr = VecGetSize(b, &sb);CHKERRQ(ierr); 2006a2c909beSMatthew Knepley 2007a2c909beSMatthew Knepley lgraph_type& level_graph = *lgraph_p; 2008a2c909beSMatthew Knepley graph_dist::ilu_default::graph_type& graph(level_graph.graph); 2009a2c909beSMatthew Knepley 2010a2c909beSMatthew Knepley typedef boost::multi_array_ref<PetscScalar, 1> array_ref_type; 2011a2c909beSMatthew Knepley array_ref_type ref_b(array_b, boost::extents[num_vertices(graph)]), 2012a2c909beSMatthew Knepley ref_x(array_x, boost::extents[num_vertices(graph)]); 2013a2c909beSMatthew Knepley 2014a2c909beSMatthew Knepley typedef boost::iterator_property_map<array_ref_type::iterator, 2015a2c909beSMatthew Knepley boost::property_map<graph_dist::ilu_default::graph_type, boost::vertex_index_t>::type> gvector_type; 2016a2c909beSMatthew Knepley gvector_type vector_b(ref_b.begin(), get(boost::vertex_index, graph)), 2017a2c909beSMatthew Knepley vector_x(ref_x.begin(), get(boost::vertex_index, graph)); 2018a2c909beSMatthew Knepley 2019a2c909beSMatthew Knepley ilu_set_solve(*lgraph_p, vector_b, vector_x); 2020a2c909beSMatthew Knepley 2021103bf8bdSMatthew Knepley PetscFunctionReturn(0); 2022103bf8bdSMatthew Knepley } 2023103bf8bdSMatthew Knepley #endif 2024103bf8bdSMatthew Knepley 202569db28dcSHong Zhang typedef struct { /* used by 
MatGetRedundantMatrix() for reusing matredundant */ 202669db28dcSHong Zhang PetscInt nzlocal,nsends,nrecvs; 2027aa5bb8c0SSatish Balay PetscMPIInt *send_rank; 2028aa5bb8c0SSatish Balay PetscInt *sbuf_nz,*sbuf_j,**rbuf_j; 202969db28dcSHong Zhang PetscScalar *sbuf_a,**rbuf_a; 203069db28dcSHong Zhang PetscErrorCode (*MatDestroy)(Mat); 203169db28dcSHong Zhang } Mat_Redundant; 203269db28dcSHong Zhang 203369db28dcSHong Zhang #undef __FUNCT__ 203469db28dcSHong Zhang #define __FUNCT__ "PetscContainerDestroy_MatRedundant" 203569db28dcSHong Zhang PetscErrorCode PetscContainerDestroy_MatRedundant(void *ptr) 203669db28dcSHong Zhang { 203769db28dcSHong Zhang PetscErrorCode ierr; 203869db28dcSHong Zhang Mat_Redundant *redund=(Mat_Redundant*)ptr; 203969db28dcSHong Zhang PetscInt i; 204069db28dcSHong Zhang 204169db28dcSHong Zhang PetscFunctionBegin; 204269db28dcSHong Zhang ierr = PetscFree(redund->send_rank);CHKERRQ(ierr); 204369db28dcSHong Zhang ierr = PetscFree(redund->sbuf_j);CHKERRQ(ierr); 204469db28dcSHong Zhang ierr = PetscFree(redund->sbuf_a);CHKERRQ(ierr); 204569db28dcSHong Zhang for (i=0; i<redund->nrecvs; i++){ 204669db28dcSHong Zhang ierr = PetscFree(redund->rbuf_j[i]);CHKERRQ(ierr); 204769db28dcSHong Zhang ierr = PetscFree(redund->rbuf_a[i]);CHKERRQ(ierr); 204869db28dcSHong Zhang } 204969db28dcSHong Zhang ierr = PetscFree3(redund->sbuf_nz,redund->rbuf_j,redund->rbuf_a);CHKERRQ(ierr); 205069db28dcSHong Zhang ierr = PetscFree(redund);CHKERRQ(ierr); 205169db28dcSHong Zhang PetscFunctionReturn(0); 205269db28dcSHong Zhang } 205369db28dcSHong Zhang 205469db28dcSHong Zhang #undef __FUNCT__ 205569db28dcSHong Zhang #define __FUNCT__ "MatDestroy_MatRedundant" 205669db28dcSHong Zhang PetscErrorCode MatDestroy_MatRedundant(Mat A) 205769db28dcSHong Zhang { 205869db28dcSHong Zhang PetscErrorCode ierr; 205969db28dcSHong Zhang PetscContainer container; 206069db28dcSHong Zhang Mat_Redundant *redund=PETSC_NULL; 206169db28dcSHong Zhang 206269db28dcSHong Zhang PetscFunctionBegin; 
206369db28dcSHong Zhang ierr = PetscObjectQuery((PetscObject)A,"Mat_Redundant",(PetscObject *)&container);CHKERRQ(ierr); 206469db28dcSHong Zhang if (container) { 206569db28dcSHong Zhang ierr = PetscContainerGetPointer(container,(void **)&redund);CHKERRQ(ierr); 206669db28dcSHong Zhang } else { 206769db28dcSHong Zhang SETERRQ(PETSC_ERR_PLIB,"Container does not exit"); 206869db28dcSHong Zhang } 206969db28dcSHong Zhang A->ops->destroy = redund->MatDestroy; 207069db28dcSHong Zhang ierr = PetscObjectCompose((PetscObject)A,"Mat_Redundant",0);CHKERRQ(ierr); 207169db28dcSHong Zhang ierr = (*A->ops->destroy)(A);CHKERRQ(ierr); 207269db28dcSHong Zhang ierr = PetscContainerDestroy(container);CHKERRQ(ierr); 207369db28dcSHong Zhang PetscFunctionReturn(0); 207469db28dcSHong Zhang } 207569db28dcSHong Zhang 207669db28dcSHong Zhang #undef __FUNCT__ 207769db28dcSHong Zhang #define __FUNCT__ "MatGetRedundantMatrix_MPIAIJ" 207869db28dcSHong Zhang PetscErrorCode MatGetRedundantMatrix_MPIAIJ(Mat mat,PetscInt nsubcomm,MPI_Comm subcomm,PetscInt mlocal_sub,MatReuse reuse,Mat *matredundant) 207969db28dcSHong Zhang { 208069db28dcSHong Zhang PetscMPIInt rank,size; 20817adad957SLisandro Dalcin MPI_Comm comm=((PetscObject)mat)->comm; 208269db28dcSHong Zhang PetscErrorCode ierr; 208369db28dcSHong Zhang PetscInt nsends=0,nrecvs=0,i,rownz_max=0; 208469db28dcSHong Zhang PetscMPIInt *send_rank=PETSC_NULL,*recv_rank=PETSC_NULL; 2085d0f46423SBarry Smith PetscInt *rowrange=mat->rmap->range; 208669db28dcSHong Zhang Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 208769db28dcSHong Zhang Mat A=aij->A,B=aij->B,C=*matredundant; 208869db28dcSHong Zhang Mat_SeqAIJ *a=(Mat_SeqAIJ*)A->data,*b=(Mat_SeqAIJ*)B->data; 208969db28dcSHong Zhang PetscScalar *sbuf_a; 209069db28dcSHong Zhang PetscInt nzlocal=a->nz+b->nz; 2091d0f46423SBarry Smith PetscInt j,cstart=mat->cmap->rstart,cend=mat->cmap->rend,row,nzA,nzB,ncols,*cworkA,*cworkB; 2092d0f46423SBarry Smith PetscInt 
rstart=mat->rmap->rstart,rend=mat->rmap->rend,*bmap=aij->garray,M,N; 209369db28dcSHong Zhang PetscInt *cols,ctmp,lwrite,*rptr,l,*sbuf_j; 2094a77337e4SBarry Smith MatScalar *aworkA,*aworkB; 2095a77337e4SBarry Smith PetscScalar *vals; 209669db28dcSHong Zhang PetscMPIInt tag1,tag2,tag3,imdex; 209769db28dcSHong Zhang MPI_Request *s_waits1=PETSC_NULL,*s_waits2=PETSC_NULL,*s_waits3=PETSC_NULL, 209869db28dcSHong Zhang *r_waits1=PETSC_NULL,*r_waits2=PETSC_NULL,*r_waits3=PETSC_NULL; 209969db28dcSHong Zhang MPI_Status recv_status,*send_status; 210069db28dcSHong Zhang PetscInt *sbuf_nz=PETSC_NULL,*rbuf_nz=PETSC_NULL,count; 210169db28dcSHong Zhang PetscInt **rbuf_j=PETSC_NULL; 210269db28dcSHong Zhang PetscScalar **rbuf_a=PETSC_NULL; 210369db28dcSHong Zhang Mat_Redundant *redund=PETSC_NULL; 210469db28dcSHong Zhang PetscContainer container; 210569db28dcSHong Zhang 210669db28dcSHong Zhang PetscFunctionBegin; 210769db28dcSHong Zhang ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 210869db28dcSHong Zhang ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 210969db28dcSHong Zhang 211069db28dcSHong Zhang if (reuse == MAT_REUSE_MATRIX) { 211169db28dcSHong Zhang ierr = MatGetSize(C,&M,&N);CHKERRQ(ierr); 2112d0f46423SBarry Smith if (M != N || M != mat->rmap->N) SETERRQ(PETSC_ERR_ARG_SIZ,"Cannot reuse matrix. Wrong global size"); 211369db28dcSHong Zhang ierr = MatGetLocalSize(C,&M,&N);CHKERRQ(ierr); 211469db28dcSHong Zhang if (M != N || M != mlocal_sub) SETERRQ(PETSC_ERR_ARG_SIZ,"Cannot reuse matrix. 
Wrong local size"); 211569db28dcSHong Zhang ierr = PetscObjectQuery((PetscObject)C,"Mat_Redundant",(PetscObject *)&container);CHKERRQ(ierr); 211669db28dcSHong Zhang if (container) { 211769db28dcSHong Zhang ierr = PetscContainerGetPointer(container,(void **)&redund);CHKERRQ(ierr); 211869db28dcSHong Zhang } else { 211969db28dcSHong Zhang SETERRQ(PETSC_ERR_PLIB,"Container does not exit"); 212069db28dcSHong Zhang } 212169db28dcSHong Zhang if (nzlocal != redund->nzlocal) SETERRQ(PETSC_ERR_ARG_SIZ,"Cannot reuse matrix. Wrong nzlocal"); 212269db28dcSHong Zhang 212369db28dcSHong Zhang nsends = redund->nsends; 212469db28dcSHong Zhang nrecvs = redund->nrecvs; 212569db28dcSHong Zhang send_rank = redund->send_rank; recv_rank = send_rank + size; 212669db28dcSHong Zhang sbuf_nz = redund->sbuf_nz; rbuf_nz = sbuf_nz + nsends; 212769db28dcSHong Zhang sbuf_j = redund->sbuf_j; 212869db28dcSHong Zhang sbuf_a = redund->sbuf_a; 212969db28dcSHong Zhang rbuf_j = redund->rbuf_j; 213069db28dcSHong Zhang rbuf_a = redund->rbuf_a; 213169db28dcSHong Zhang } 213269db28dcSHong Zhang 213369db28dcSHong Zhang if (reuse == MAT_INITIAL_MATRIX){ 213469db28dcSHong Zhang PetscMPIInt subrank,subsize; 213569db28dcSHong Zhang PetscInt nleftover,np_subcomm; 213669db28dcSHong Zhang /* get the destination processors' id send_rank, nsends and nrecvs */ 213769db28dcSHong Zhang ierr = MPI_Comm_rank(subcomm,&subrank);CHKERRQ(ierr); 213869db28dcSHong Zhang ierr = MPI_Comm_size(subcomm,&subsize);CHKERRQ(ierr); 213969db28dcSHong Zhang ierr = PetscMalloc((2*size+1)*sizeof(PetscMPIInt),&send_rank); 214069db28dcSHong Zhang recv_rank = send_rank + size; 214169db28dcSHong Zhang np_subcomm = size/nsubcomm; 214269db28dcSHong Zhang nleftover = size - nsubcomm*np_subcomm; 214369db28dcSHong Zhang nsends = 0; nrecvs = 0; 214469db28dcSHong Zhang for (i=0; i<size; i++){ /* i=rank*/ 214569db28dcSHong Zhang if (subrank == i/nsubcomm && rank != i){ /* my_subrank == other's subrank */ 214669db28dcSHong Zhang send_rank[nsends] = i; 
nsends++; 214769db28dcSHong Zhang recv_rank[nrecvs++] = i; 214869db28dcSHong Zhang } 214969db28dcSHong Zhang } 215069db28dcSHong Zhang if (rank >= size - nleftover){/* this proc is a leftover processor */ 215169db28dcSHong Zhang i = size-nleftover-1; 215269db28dcSHong Zhang j = 0; 215369db28dcSHong Zhang while (j < nsubcomm - nleftover){ 215469db28dcSHong Zhang send_rank[nsends++] = i; 215569db28dcSHong Zhang i--; j++; 215669db28dcSHong Zhang } 215769db28dcSHong Zhang } 215869db28dcSHong Zhang 215969db28dcSHong Zhang if (nleftover && subsize == size/nsubcomm && subrank==subsize-1){ /* this proc recvs from leftover processors */ 216069db28dcSHong Zhang for (i=0; i<nleftover; i++){ 216169db28dcSHong Zhang recv_rank[nrecvs++] = size-nleftover+i; 216269db28dcSHong Zhang } 216369db28dcSHong Zhang } 216469db28dcSHong Zhang 216569db28dcSHong Zhang /* allocate sbuf_j, sbuf_a */ 216669db28dcSHong Zhang i = nzlocal + rowrange[rank+1] - rowrange[rank] + 2; 216769db28dcSHong Zhang ierr = PetscMalloc(i*sizeof(PetscInt),&sbuf_j);CHKERRQ(ierr); 216869db28dcSHong Zhang ierr = PetscMalloc((nzlocal+1)*sizeof(PetscScalar),&sbuf_a);CHKERRQ(ierr); 216969db28dcSHong Zhang } /* endof if (reuse == MAT_INITIAL_MATRIX) */ 217069db28dcSHong Zhang 217169db28dcSHong Zhang /* copy mat's local entries into the buffers */ 217269db28dcSHong Zhang if (reuse == MAT_INITIAL_MATRIX){ 217369db28dcSHong Zhang rownz_max = 0; 217469db28dcSHong Zhang rptr = sbuf_j; 217569db28dcSHong Zhang cols = sbuf_j + rend-rstart + 1; 217669db28dcSHong Zhang vals = sbuf_a; 217769db28dcSHong Zhang rptr[0] = 0; 217869db28dcSHong Zhang for (i=0; i<rend-rstart; i++){ 217969db28dcSHong Zhang row = i + rstart; 218069db28dcSHong Zhang nzA = a->i[i+1] - a->i[i]; nzB = b->i[i+1] - b->i[i]; 218169db28dcSHong Zhang ncols = nzA + nzB; 218269db28dcSHong Zhang cworkA = a->j + a->i[i]; cworkB = b->j + b->i[i]; 218369db28dcSHong Zhang aworkA = a->a + a->i[i]; aworkB = b->a + b->i[i]; 218469db28dcSHong Zhang /* load the column indices 
for this row into cols */ 218569db28dcSHong Zhang lwrite = 0; 218669db28dcSHong Zhang for (l=0; l<nzB; l++) { 218769db28dcSHong Zhang if ((ctmp = bmap[cworkB[l]]) < cstart){ 218869db28dcSHong Zhang vals[lwrite] = aworkB[l]; 218969db28dcSHong Zhang cols[lwrite++] = ctmp; 219069db28dcSHong Zhang } 219169db28dcSHong Zhang } 219269db28dcSHong Zhang for (l=0; l<nzA; l++){ 219369db28dcSHong Zhang vals[lwrite] = aworkA[l]; 219469db28dcSHong Zhang cols[lwrite++] = cstart + cworkA[l]; 219569db28dcSHong Zhang } 219669db28dcSHong Zhang for (l=0; l<nzB; l++) { 219769db28dcSHong Zhang if ((ctmp = bmap[cworkB[l]]) >= cend){ 219869db28dcSHong Zhang vals[lwrite] = aworkB[l]; 219969db28dcSHong Zhang cols[lwrite++] = ctmp; 220069db28dcSHong Zhang } 220169db28dcSHong Zhang } 220269db28dcSHong Zhang vals += ncols; 220369db28dcSHong Zhang cols += ncols; 220469db28dcSHong Zhang rptr[i+1] = rptr[i] + ncols; 220569db28dcSHong Zhang if (rownz_max < ncols) rownz_max = ncols; 220669db28dcSHong Zhang } 220769db28dcSHong Zhang if (rptr[rend-rstart] != a->nz + b->nz) SETERRQ4(1, "rptr[%d] %d != %d + %d",rend-rstart,rptr[rend-rstart+1],a->nz,b->nz); 220869db28dcSHong Zhang } else { /* only copy matrix values into sbuf_a */ 220969db28dcSHong Zhang rptr = sbuf_j; 221069db28dcSHong Zhang vals = sbuf_a; 221169db28dcSHong Zhang rptr[0] = 0; 221269db28dcSHong Zhang for (i=0; i<rend-rstart; i++){ 221369db28dcSHong Zhang row = i + rstart; 221469db28dcSHong Zhang nzA = a->i[i+1] - a->i[i]; nzB = b->i[i+1] - b->i[i]; 221569db28dcSHong Zhang ncols = nzA + nzB; 221669db28dcSHong Zhang cworkA = a->j + a->i[i]; cworkB = b->j + b->i[i]; 221769db28dcSHong Zhang aworkA = a->a + a->i[i]; aworkB = b->a + b->i[i]; 221869db28dcSHong Zhang lwrite = 0; 221969db28dcSHong Zhang for (l=0; l<nzB; l++) { 222069db28dcSHong Zhang if ((ctmp = bmap[cworkB[l]]) < cstart) vals[lwrite++] = aworkB[l]; 222169db28dcSHong Zhang } 222269db28dcSHong Zhang for (l=0; l<nzA; l++) vals[lwrite++] = aworkA[l]; 222369db28dcSHong Zhang for 
(l=0; l<nzB; l++) { 222469db28dcSHong Zhang if ((ctmp = bmap[cworkB[l]]) >= cend) vals[lwrite++] = aworkB[l]; 222569db28dcSHong Zhang } 222669db28dcSHong Zhang vals += ncols; 222769db28dcSHong Zhang rptr[i+1] = rptr[i] + ncols; 222869db28dcSHong Zhang } 222969db28dcSHong Zhang } /* endof if (reuse == MAT_INITIAL_MATRIX) */ 223069db28dcSHong Zhang 223169db28dcSHong Zhang /* send nzlocal to others, and recv other's nzlocal */ 223269db28dcSHong Zhang /*--------------------------------------------------*/ 223369db28dcSHong Zhang if (reuse == MAT_INITIAL_MATRIX){ 223469db28dcSHong Zhang ierr = PetscMalloc2(3*(nsends + nrecvs)+1,MPI_Request,&s_waits3,nsends+1,MPI_Status,&send_status);CHKERRQ(ierr); 223569db28dcSHong Zhang s_waits2 = s_waits3 + nsends; 223669db28dcSHong Zhang s_waits1 = s_waits2 + nsends; 223769db28dcSHong Zhang r_waits1 = s_waits1 + nsends; 223869db28dcSHong Zhang r_waits2 = r_waits1 + nrecvs; 223969db28dcSHong Zhang r_waits3 = r_waits2 + nrecvs; 224069db28dcSHong Zhang } else { 224169db28dcSHong Zhang ierr = PetscMalloc2(nsends + nrecvs +1,MPI_Request,&s_waits3,nsends+1,MPI_Status,&send_status);CHKERRQ(ierr); 224269db28dcSHong Zhang r_waits3 = s_waits3 + nsends; 224369db28dcSHong Zhang } 224469db28dcSHong Zhang 224569db28dcSHong Zhang ierr = PetscObjectGetNewTag((PetscObject)mat,&tag3);CHKERRQ(ierr); 224669db28dcSHong Zhang if (reuse == MAT_INITIAL_MATRIX){ 224769db28dcSHong Zhang /* get new tags to keep the communication clean */ 224869db28dcSHong Zhang ierr = PetscObjectGetNewTag((PetscObject)mat,&tag1);CHKERRQ(ierr); 224969db28dcSHong Zhang ierr = PetscObjectGetNewTag((PetscObject)mat,&tag2);CHKERRQ(ierr); 225069db28dcSHong Zhang ierr = PetscMalloc3(nsends+nrecvs+1,PetscInt,&sbuf_nz,nrecvs,PetscInt*,&rbuf_j,nrecvs,PetscScalar*,&rbuf_a);CHKERRQ(ierr); 225169db28dcSHong Zhang rbuf_nz = sbuf_nz + nsends; 225269db28dcSHong Zhang 225369db28dcSHong Zhang /* post receives of other's nzlocal */ 225469db28dcSHong Zhang for (i=0; i<nrecvs; i++){ 
225569db28dcSHong Zhang ierr = MPI_Irecv(rbuf_nz+i,1,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,r_waits1+i);CHKERRQ(ierr); 225669db28dcSHong Zhang } 225769db28dcSHong Zhang /* send nzlocal to others */ 225869db28dcSHong Zhang for (i=0; i<nsends; i++){ 225969db28dcSHong Zhang sbuf_nz[i] = nzlocal; 226069db28dcSHong Zhang ierr = MPI_Isend(sbuf_nz+i,1,MPIU_INT,send_rank[i],tag1,comm,s_waits1+i);CHKERRQ(ierr); 226169db28dcSHong Zhang } 226269db28dcSHong Zhang /* wait on receives of nzlocal; allocate space for rbuf_j, rbuf_a */ 226369db28dcSHong Zhang count = nrecvs; 226469db28dcSHong Zhang while (count) { 226569db28dcSHong Zhang ierr = MPI_Waitany(nrecvs,r_waits1,&imdex,&recv_status);CHKERRQ(ierr); 226669db28dcSHong Zhang recv_rank[imdex] = recv_status.MPI_SOURCE; 226769db28dcSHong Zhang /* allocate rbuf_a and rbuf_j; then post receives of rbuf_j */ 226869db28dcSHong Zhang ierr = PetscMalloc((rbuf_nz[imdex]+1)*sizeof(PetscScalar),&rbuf_a[imdex]);CHKERRQ(ierr); 226969db28dcSHong Zhang 227069db28dcSHong Zhang i = rowrange[recv_status.MPI_SOURCE+1] - rowrange[recv_status.MPI_SOURCE]; /* number of expected mat->i */ 227169db28dcSHong Zhang rbuf_nz[imdex] += i + 2; 227269db28dcSHong Zhang ierr = PetscMalloc(rbuf_nz[imdex]*sizeof(PetscInt),&rbuf_j[imdex]);CHKERRQ(ierr); 227369db28dcSHong Zhang ierr = MPI_Irecv(rbuf_j[imdex],rbuf_nz[imdex],MPIU_INT,recv_status.MPI_SOURCE,tag2,comm,r_waits2+imdex);CHKERRQ(ierr); 227469db28dcSHong Zhang count--; 227569db28dcSHong Zhang } 227669db28dcSHong Zhang /* wait on sends of nzlocal */ 227769db28dcSHong Zhang if (nsends) {ierr = MPI_Waitall(nsends,s_waits1,send_status);CHKERRQ(ierr);} 227869db28dcSHong Zhang /* send mat->i,j to others, and recv from other's */ 227969db28dcSHong Zhang /*------------------------------------------------*/ 228069db28dcSHong Zhang for (i=0; i<nsends; i++){ 228169db28dcSHong Zhang j = nzlocal + rowrange[rank+1] - rowrange[rank] + 1; 228269db28dcSHong Zhang ierr = 
MPI_Isend(sbuf_j,j,MPIU_INT,send_rank[i],tag2,comm,s_waits2+i);CHKERRQ(ierr); 228369db28dcSHong Zhang } 228469db28dcSHong Zhang /* wait on receives of mat->i,j */ 228569db28dcSHong Zhang /*------------------------------*/ 228669db28dcSHong Zhang count = nrecvs; 228769db28dcSHong Zhang while (count) { 228869db28dcSHong Zhang ierr = MPI_Waitany(nrecvs,r_waits2,&imdex,&recv_status);CHKERRQ(ierr); 228969db28dcSHong Zhang if (recv_rank[imdex] != recv_status.MPI_SOURCE) SETERRQ2(1, "recv_rank %d != MPI_SOURCE %d",recv_rank[imdex],recv_status.MPI_SOURCE); 229069db28dcSHong Zhang count--; 229169db28dcSHong Zhang } 229269db28dcSHong Zhang /* wait on sends of mat->i,j */ 229369db28dcSHong Zhang /*---------------------------*/ 229469db28dcSHong Zhang if (nsends) { 229569db28dcSHong Zhang ierr = MPI_Waitall(nsends,s_waits2,send_status);CHKERRQ(ierr); 229669db28dcSHong Zhang } 229769db28dcSHong Zhang } /* endof if (reuse == MAT_INITIAL_MATRIX) */ 229869db28dcSHong Zhang 229969db28dcSHong Zhang /* post receives, send and receive mat->a */ 230069db28dcSHong Zhang /*----------------------------------------*/ 230169db28dcSHong Zhang for (imdex=0; imdex<nrecvs; imdex++) { 230269db28dcSHong Zhang ierr = MPI_Irecv(rbuf_a[imdex],rbuf_nz[imdex],MPIU_SCALAR,recv_rank[imdex],tag3,comm,r_waits3+imdex);CHKERRQ(ierr); 230369db28dcSHong Zhang } 230469db28dcSHong Zhang for (i=0; i<nsends; i++){ 230569db28dcSHong Zhang ierr = MPI_Isend(sbuf_a,nzlocal,MPIU_SCALAR,send_rank[i],tag3,comm,s_waits3+i);CHKERRQ(ierr); 230669db28dcSHong Zhang } 230769db28dcSHong Zhang count = nrecvs; 230869db28dcSHong Zhang while (count) { 230969db28dcSHong Zhang ierr = MPI_Waitany(nrecvs,r_waits3,&imdex,&recv_status);CHKERRQ(ierr); 231069db28dcSHong Zhang if (recv_rank[imdex] != recv_status.MPI_SOURCE) SETERRQ2(1, "recv_rank %d != MPI_SOURCE %d",recv_rank[imdex],recv_status.MPI_SOURCE); 231169db28dcSHong Zhang count--; 231269db28dcSHong Zhang } 231369db28dcSHong Zhang if (nsends) { 231469db28dcSHong Zhang ierr = 
MPI_Waitall(nsends,s_waits3,send_status);CHKERRQ(ierr); 231569db28dcSHong Zhang } 231669db28dcSHong Zhang 231769db28dcSHong Zhang ierr = PetscFree2(s_waits3,send_status);CHKERRQ(ierr); 231869db28dcSHong Zhang 231969db28dcSHong Zhang /* create redundant matrix */ 232069db28dcSHong Zhang /*-------------------------*/ 232169db28dcSHong Zhang if (reuse == MAT_INITIAL_MATRIX){ 232269db28dcSHong Zhang /* compute rownz_max for preallocation */ 232369db28dcSHong Zhang for (imdex=0; imdex<nrecvs; imdex++){ 232469db28dcSHong Zhang j = rowrange[recv_rank[imdex]+1] - rowrange[recv_rank[imdex]]; 232569db28dcSHong Zhang rptr = rbuf_j[imdex]; 232669db28dcSHong Zhang for (i=0; i<j; i++){ 232769db28dcSHong Zhang ncols = rptr[i+1] - rptr[i]; 232869db28dcSHong Zhang if (rownz_max < ncols) rownz_max = ncols; 232969db28dcSHong Zhang } 233069db28dcSHong Zhang } 233169db28dcSHong Zhang 233269db28dcSHong Zhang ierr = MatCreate(subcomm,&C);CHKERRQ(ierr); 233369db28dcSHong Zhang ierr = MatSetSizes(C,mlocal_sub,mlocal_sub,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr); 233469db28dcSHong Zhang ierr = MatSetFromOptions(C);CHKERRQ(ierr); 233569db28dcSHong Zhang ierr = MatSeqAIJSetPreallocation(C,rownz_max,PETSC_NULL);CHKERRQ(ierr); 233669db28dcSHong Zhang ierr = MatMPIAIJSetPreallocation(C,rownz_max,PETSC_NULL,rownz_max,PETSC_NULL);CHKERRQ(ierr); 233769db28dcSHong Zhang } else { 233869db28dcSHong Zhang C = *matredundant; 233969db28dcSHong Zhang } 234069db28dcSHong Zhang 234169db28dcSHong Zhang /* insert local matrix entries */ 234269db28dcSHong Zhang rptr = sbuf_j; 234369db28dcSHong Zhang cols = sbuf_j + rend-rstart + 1; 234469db28dcSHong Zhang vals = sbuf_a; 234569db28dcSHong Zhang for (i=0; i<rend-rstart; i++){ 234669db28dcSHong Zhang row = i + rstart; 234769db28dcSHong Zhang ncols = rptr[i+1] - rptr[i]; 234869db28dcSHong Zhang ierr = MatSetValues(C,1,&row,ncols,cols,vals,INSERT_VALUES);CHKERRQ(ierr); 234969db28dcSHong Zhang vals += ncols; 235069db28dcSHong Zhang cols += ncols; 235169db28dcSHong 
Zhang } 235269db28dcSHong Zhang /* insert received matrix entries */ 235369db28dcSHong Zhang for (imdex=0; imdex<nrecvs; imdex++){ 235469db28dcSHong Zhang rstart = rowrange[recv_rank[imdex]]; 235569db28dcSHong Zhang rend = rowrange[recv_rank[imdex]+1]; 235669db28dcSHong Zhang rptr = rbuf_j[imdex]; 235769db28dcSHong Zhang cols = rbuf_j[imdex] + rend-rstart + 1; 235869db28dcSHong Zhang vals = rbuf_a[imdex]; 235969db28dcSHong Zhang for (i=0; i<rend-rstart; i++){ 236069db28dcSHong Zhang row = i + rstart; 236169db28dcSHong Zhang ncols = rptr[i+1] - rptr[i]; 236269db28dcSHong Zhang ierr = MatSetValues(C,1,&row,ncols,cols,vals,INSERT_VALUES);CHKERRQ(ierr); 236369db28dcSHong Zhang vals += ncols; 236469db28dcSHong Zhang cols += ncols; 236569db28dcSHong Zhang } 236669db28dcSHong Zhang } 236769db28dcSHong Zhang ierr = MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 236869db28dcSHong Zhang ierr = MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 236969db28dcSHong Zhang ierr = MatGetSize(C,&M,&N);CHKERRQ(ierr); 2370d0f46423SBarry Smith if (M != mat->rmap->N || N != mat->cmap->N) SETERRQ2(PETSC_ERR_ARG_INCOMP,"redundant mat size %d != input mat size %d",M,mat->rmap->N); 237169db28dcSHong Zhang if (reuse == MAT_INITIAL_MATRIX){ 237269db28dcSHong Zhang PetscContainer container; 237369db28dcSHong Zhang *matredundant = C; 237469db28dcSHong Zhang /* create a supporting struct and attach it to C for reuse */ 237538f2d2fdSLisandro Dalcin ierr = PetscNewLog(C,Mat_Redundant,&redund);CHKERRQ(ierr); 237669db28dcSHong Zhang ierr = PetscContainerCreate(PETSC_COMM_SELF,&container);CHKERRQ(ierr); 237769db28dcSHong Zhang ierr = PetscContainerSetPointer(container,redund);CHKERRQ(ierr); 237869db28dcSHong Zhang ierr = PetscObjectCompose((PetscObject)C,"Mat_Redundant",(PetscObject)container);CHKERRQ(ierr); 237969db28dcSHong Zhang ierr = PetscContainerSetUserDestroy(container,PetscContainerDestroy_MatRedundant);CHKERRQ(ierr); 238069db28dcSHong Zhang 238169db28dcSHong Zhang redund->nzlocal = 
nzlocal; 238269db28dcSHong Zhang redund->nsends = nsends; 238369db28dcSHong Zhang redund->nrecvs = nrecvs; 238469db28dcSHong Zhang redund->send_rank = send_rank; 238569db28dcSHong Zhang redund->sbuf_nz = sbuf_nz; 238669db28dcSHong Zhang redund->sbuf_j = sbuf_j; 238769db28dcSHong Zhang redund->sbuf_a = sbuf_a; 238869db28dcSHong Zhang redund->rbuf_j = rbuf_j; 238969db28dcSHong Zhang redund->rbuf_a = rbuf_a; 239069db28dcSHong Zhang 239169db28dcSHong Zhang redund->MatDestroy = C->ops->destroy; 239269db28dcSHong Zhang C->ops->destroy = MatDestroy_MatRedundant; 239369db28dcSHong Zhang } 239469db28dcSHong Zhang PetscFunctionReturn(0); 239569db28dcSHong Zhang } 239669db28dcSHong Zhang 239703bc72f1SMatthew Knepley #undef __FUNCT__ 2398c91732d9SHong Zhang #define __FUNCT__ "MatGetRowMaxAbs_MPIAIJ" 2399c91732d9SHong Zhang PetscErrorCode MatGetRowMaxAbs_MPIAIJ(Mat A, Vec v, PetscInt idx[]) 2400c91732d9SHong Zhang { 2401c91732d9SHong Zhang Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 2402c91732d9SHong Zhang PetscErrorCode ierr; 2403c91732d9SHong Zhang PetscInt i,*idxb = 0; 2404c91732d9SHong Zhang PetscScalar *va,*vb; 2405c91732d9SHong Zhang Vec vtmp; 2406c91732d9SHong Zhang 2407c91732d9SHong Zhang PetscFunctionBegin; 2408c91732d9SHong Zhang ierr = MatGetRowMaxAbs(a->A,v,idx);CHKERRQ(ierr); 2409c91732d9SHong Zhang ierr = VecGetArray(v,&va);CHKERRQ(ierr); 2410c91732d9SHong Zhang if (idx) { 2411192daf7cSBarry Smith for (i=0; i<A->rmap->n; i++) { 2412d0f46423SBarry Smith if (PetscAbsScalar(va[i])) idx[i] += A->cmap->rstart; 2413c91732d9SHong Zhang } 2414c91732d9SHong Zhang } 2415c91732d9SHong Zhang 2416d0f46423SBarry Smith ierr = VecCreateSeq(PETSC_COMM_SELF,A->rmap->n,&vtmp);CHKERRQ(ierr); 2417c91732d9SHong Zhang if (idx) { 2418d0f46423SBarry Smith ierr = PetscMalloc(A->rmap->n*sizeof(PetscInt),&idxb);CHKERRQ(ierr); 2419c91732d9SHong Zhang } 2420c91732d9SHong Zhang ierr = MatGetRowMaxAbs(a->B,vtmp,idxb);CHKERRQ(ierr); 2421c91732d9SHong Zhang ierr = VecGetArray(vtmp,&vb);CHKERRQ(ierr); 
2422c91732d9SHong Zhang 2423d0f46423SBarry Smith for (i=0; i<A->rmap->n; i++){ 2424c91732d9SHong Zhang if (PetscAbsScalar(va[i]) < PetscAbsScalar(vb[i])) { 2425c91732d9SHong Zhang va[i] = vb[i]; 2426c91732d9SHong Zhang if (idx) idx[i] = a->garray[idxb[i]]; 2427c91732d9SHong Zhang } 2428c91732d9SHong Zhang } 2429c91732d9SHong Zhang 2430c91732d9SHong Zhang ierr = VecRestoreArray(v,&va);CHKERRQ(ierr); 2431c91732d9SHong Zhang ierr = VecRestoreArray(vtmp,&vb);CHKERRQ(ierr); 2432c91732d9SHong Zhang if (idxb) { 2433c91732d9SHong Zhang ierr = PetscFree(idxb);CHKERRQ(ierr); 2434c91732d9SHong Zhang } 2435c91732d9SHong Zhang ierr = VecDestroy(vtmp);CHKERRQ(ierr); 2436c91732d9SHong Zhang PetscFunctionReturn(0); 2437c91732d9SHong Zhang } 2438c91732d9SHong Zhang 2439c91732d9SHong Zhang #undef __FUNCT__ 2440c87e5d42SMatthew Knepley #define __FUNCT__ "MatGetRowMinAbs_MPIAIJ" 2441c87e5d42SMatthew Knepley PetscErrorCode MatGetRowMinAbs_MPIAIJ(Mat A, Vec v, PetscInt idx[]) 2442c87e5d42SMatthew Knepley { 2443c87e5d42SMatthew Knepley Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 2444c87e5d42SMatthew Knepley PetscErrorCode ierr; 2445c87e5d42SMatthew Knepley PetscInt i,*idxb = 0; 2446c87e5d42SMatthew Knepley PetscScalar *va,*vb; 2447c87e5d42SMatthew Knepley Vec vtmp; 2448c87e5d42SMatthew Knepley 2449c87e5d42SMatthew Knepley PetscFunctionBegin; 2450c87e5d42SMatthew Knepley ierr = MatGetRowMinAbs(a->A,v,idx);CHKERRQ(ierr); 2451c87e5d42SMatthew Knepley ierr = VecGetArray(v,&va);CHKERRQ(ierr); 2452c87e5d42SMatthew Knepley if (idx) { 2453c87e5d42SMatthew Knepley for (i=0; i<A->cmap->n; i++) { 2454c87e5d42SMatthew Knepley if (PetscAbsScalar(va[i])) idx[i] += A->cmap->rstart; 2455c87e5d42SMatthew Knepley } 2456c87e5d42SMatthew Knepley } 2457c87e5d42SMatthew Knepley 2458c87e5d42SMatthew Knepley ierr = VecCreateSeq(PETSC_COMM_SELF,A->rmap->n,&vtmp);CHKERRQ(ierr); 2459c87e5d42SMatthew Knepley if (idx) { 2460c87e5d42SMatthew Knepley ierr = PetscMalloc(A->rmap->n*sizeof(PetscInt),&idxb);CHKERRQ(ierr); 
2461c87e5d42SMatthew Knepley } 2462c87e5d42SMatthew Knepley ierr = MatGetRowMinAbs(a->B,vtmp,idxb);CHKERRQ(ierr); 2463c87e5d42SMatthew Knepley ierr = VecGetArray(vtmp,&vb);CHKERRQ(ierr); 2464c87e5d42SMatthew Knepley 2465c87e5d42SMatthew Knepley for (i=0; i<A->rmap->n; i++){ 2466c87e5d42SMatthew Knepley if (PetscAbsScalar(va[i]) > PetscAbsScalar(vb[i])) { 2467c87e5d42SMatthew Knepley va[i] = vb[i]; 2468c87e5d42SMatthew Knepley if (idx) idx[i] = a->garray[idxb[i]]; 2469c87e5d42SMatthew Knepley } 2470c87e5d42SMatthew Knepley } 2471c87e5d42SMatthew Knepley 2472c87e5d42SMatthew Knepley ierr = VecRestoreArray(v,&va);CHKERRQ(ierr); 2473c87e5d42SMatthew Knepley ierr = VecRestoreArray(vtmp,&vb);CHKERRQ(ierr); 2474c87e5d42SMatthew Knepley if (idxb) { 2475c87e5d42SMatthew Knepley ierr = PetscFree(idxb);CHKERRQ(ierr); 2476c87e5d42SMatthew Knepley } 2477c87e5d42SMatthew Knepley ierr = VecDestroy(vtmp);CHKERRQ(ierr); 2478c87e5d42SMatthew Knepley PetscFunctionReturn(0); 2479c87e5d42SMatthew Knepley } 2480c87e5d42SMatthew Knepley 2481c87e5d42SMatthew Knepley #undef __FUNCT__ 248203bc72f1SMatthew Knepley #define __FUNCT__ "MatGetRowMin_MPIAIJ" 248303bc72f1SMatthew Knepley PetscErrorCode MatGetRowMin_MPIAIJ(Mat A, Vec v, PetscInt idx[]) 248403bc72f1SMatthew Knepley { 248503bc72f1SMatthew Knepley Mat_MPIAIJ *mat = (Mat_MPIAIJ *) A->data; 2486d0f46423SBarry Smith PetscInt n = A->rmap->n; 2487d0f46423SBarry Smith PetscInt cstart = A->cmap->rstart; 248803bc72f1SMatthew Knepley PetscInt *cmap = mat->garray; 248903bc72f1SMatthew Knepley PetscInt *diagIdx, *offdiagIdx; 249003bc72f1SMatthew Knepley Vec diagV, offdiagV; 249103bc72f1SMatthew Knepley PetscScalar *a, *diagA, *offdiagA; 249203bc72f1SMatthew Knepley PetscInt r; 249303bc72f1SMatthew Knepley PetscErrorCode ierr; 249403bc72f1SMatthew Knepley 249503bc72f1SMatthew Knepley PetscFunctionBegin; 249603bc72f1SMatthew Knepley ierr = PetscMalloc2(n,PetscInt,&diagIdx,n,PetscInt,&offdiagIdx);CHKERRQ(ierr); 2497e64afeacSLisandro Dalcin ierr = 
VecCreateSeq(((PetscObject)A)->comm, n, &diagV);CHKERRQ(ierr); 2498e64afeacSLisandro Dalcin ierr = VecCreateSeq(((PetscObject)A)->comm, n, &offdiagV);CHKERRQ(ierr); 249903bc72f1SMatthew Knepley ierr = MatGetRowMin(mat->A, diagV, diagIdx);CHKERRQ(ierr); 250003bc72f1SMatthew Knepley ierr = MatGetRowMin(mat->B, offdiagV, offdiagIdx);CHKERRQ(ierr); 250103bc72f1SMatthew Knepley ierr = VecGetArray(v, &a);CHKERRQ(ierr); 250203bc72f1SMatthew Knepley ierr = VecGetArray(diagV, &diagA);CHKERRQ(ierr); 250303bc72f1SMatthew Knepley ierr = VecGetArray(offdiagV, &offdiagA);CHKERRQ(ierr); 250403bc72f1SMatthew Knepley for(r = 0; r < n; ++r) { 2505028cd4eaSSatish Balay if (PetscAbsScalar(diagA[r]) <= PetscAbsScalar(offdiagA[r])) { 250603bc72f1SMatthew Knepley a[r] = diagA[r]; 250703bc72f1SMatthew Knepley idx[r] = cstart + diagIdx[r]; 250803bc72f1SMatthew Knepley } else { 250903bc72f1SMatthew Knepley a[r] = offdiagA[r]; 251003bc72f1SMatthew Knepley idx[r] = cmap[offdiagIdx[r]]; 251103bc72f1SMatthew Knepley } 251203bc72f1SMatthew Knepley } 251303bc72f1SMatthew Knepley ierr = VecRestoreArray(v, &a);CHKERRQ(ierr); 251403bc72f1SMatthew Knepley ierr = VecRestoreArray(diagV, &diagA);CHKERRQ(ierr); 251503bc72f1SMatthew Knepley ierr = VecRestoreArray(offdiagV, &offdiagA);CHKERRQ(ierr); 251603bc72f1SMatthew Knepley ierr = VecDestroy(diagV);CHKERRQ(ierr); 251703bc72f1SMatthew Knepley ierr = VecDestroy(offdiagV);CHKERRQ(ierr); 251803bc72f1SMatthew Knepley ierr = PetscFree2(diagIdx, offdiagIdx);CHKERRQ(ierr); 251903bc72f1SMatthew Knepley PetscFunctionReturn(0); 252003bc72f1SMatthew Knepley } 252103bc72f1SMatthew Knepley 25225494a064SHong Zhang #undef __FUNCT__ 2523c87e5d42SMatthew Knepley #define __FUNCT__ "MatGetRowMax_MPIAIJ" 2524c87e5d42SMatthew Knepley PetscErrorCode MatGetRowMax_MPIAIJ(Mat A, Vec v, PetscInt idx[]) 2525c87e5d42SMatthew Knepley { 2526c87e5d42SMatthew Knepley Mat_MPIAIJ *mat = (Mat_MPIAIJ *) A->data; 2527c87e5d42SMatthew Knepley PetscInt n = A->rmap->n; 2528c87e5d42SMatthew 
Knepley PetscInt cstart = A->cmap->rstart; 2529c87e5d42SMatthew Knepley PetscInt *cmap = mat->garray; 2530c87e5d42SMatthew Knepley PetscInt *diagIdx, *offdiagIdx; 2531c87e5d42SMatthew Knepley Vec diagV, offdiagV; 2532c87e5d42SMatthew Knepley PetscScalar *a, *diagA, *offdiagA; 2533c87e5d42SMatthew Knepley PetscInt r; 2534c87e5d42SMatthew Knepley PetscErrorCode ierr; 2535c87e5d42SMatthew Knepley 2536c87e5d42SMatthew Knepley PetscFunctionBegin; 2537c87e5d42SMatthew Knepley ierr = PetscMalloc2(n,PetscInt,&diagIdx,n,PetscInt,&offdiagIdx);CHKERRQ(ierr); 2538c87e5d42SMatthew Knepley ierr = VecCreateSeq(((PetscObject)A)->comm, n, &diagV);CHKERRQ(ierr); 2539c87e5d42SMatthew Knepley ierr = VecCreateSeq(((PetscObject)A)->comm, n, &offdiagV);CHKERRQ(ierr); 2540c87e5d42SMatthew Knepley ierr = MatGetRowMax(mat->A, diagV, diagIdx);CHKERRQ(ierr); 2541c87e5d42SMatthew Knepley ierr = MatGetRowMax(mat->B, offdiagV, offdiagIdx);CHKERRQ(ierr); 2542c87e5d42SMatthew Knepley ierr = VecGetArray(v, &a);CHKERRQ(ierr); 2543c87e5d42SMatthew Knepley ierr = VecGetArray(diagV, &diagA);CHKERRQ(ierr); 2544c87e5d42SMatthew Knepley ierr = VecGetArray(offdiagV, &offdiagA);CHKERRQ(ierr); 2545c87e5d42SMatthew Knepley for(r = 0; r < n; ++r) { 2546c87e5d42SMatthew Knepley if (PetscAbsScalar(diagA[r]) >= PetscAbsScalar(offdiagA[r])) { 2547c87e5d42SMatthew Knepley a[r] = diagA[r]; 2548c87e5d42SMatthew Knepley idx[r] = cstart + diagIdx[r]; 2549c87e5d42SMatthew Knepley } else { 2550c87e5d42SMatthew Knepley a[r] = offdiagA[r]; 2551c87e5d42SMatthew Knepley idx[r] = cmap[offdiagIdx[r]]; 2552c87e5d42SMatthew Knepley } 2553c87e5d42SMatthew Knepley } 2554c87e5d42SMatthew Knepley ierr = VecRestoreArray(v, &a);CHKERRQ(ierr); 2555c87e5d42SMatthew Knepley ierr = VecRestoreArray(diagV, &diagA);CHKERRQ(ierr); 2556c87e5d42SMatthew Knepley ierr = VecRestoreArray(offdiagV, &offdiagA);CHKERRQ(ierr); 2557c87e5d42SMatthew Knepley ierr = VecDestroy(diagV);CHKERRQ(ierr); 2558c87e5d42SMatthew Knepley ierr = 
VecDestroy(offdiagV);CHKERRQ(ierr); 2559c87e5d42SMatthew Knepley ierr = PetscFree2(diagIdx, offdiagIdx);CHKERRQ(ierr); 2560c87e5d42SMatthew Knepley PetscFunctionReturn(0); 2561c87e5d42SMatthew Knepley } 2562c87e5d42SMatthew Knepley 2563c87e5d42SMatthew Knepley #undef __FUNCT__ 2564829201f2SHong Zhang #define __FUNCT__ "MatGetSeqNonzerostructure_MPIAIJ" 2565f6d58c54SBarry Smith PetscErrorCode MatGetSeqNonzerostructure_MPIAIJ(Mat mat,Mat *newmat) 25665494a064SHong Zhang { 25675494a064SHong Zhang PetscErrorCode ierr; 2568f6d58c54SBarry Smith Mat *dummy; 25695494a064SHong Zhang 25705494a064SHong Zhang PetscFunctionBegin; 2571f6d58c54SBarry Smith ierr = MatGetSubMatrix_MPIAIJ_All(mat,MAT_DO_NOT_GET_VALUES,MAT_INITIAL_MATRIX,&dummy);CHKERRQ(ierr); 2572f6d58c54SBarry Smith *newmat = *dummy; 2573f6d58c54SBarry Smith ierr = PetscFree(dummy);CHKERRQ(ierr); 25745494a064SHong Zhang PetscFunctionReturn(0); 25755494a064SHong Zhang } 25765494a064SHong Zhang 25773acb8795SBarry Smith extern PetscErrorCode PETSCMAT_DLLEXPORT MatFDColoringApply_AIJ(Mat,MatFDColoring,Vec,MatStructure*,void*); 25788a729477SBarry Smith /* -------------------------------------------------------------------*/ 2579cda55fadSBarry Smith static struct _MatOps MatOps_Values = {MatSetValues_MPIAIJ, 2580cda55fadSBarry Smith MatGetRow_MPIAIJ, 2581cda55fadSBarry Smith MatRestoreRow_MPIAIJ, 2582cda55fadSBarry Smith MatMult_MPIAIJ, 258397304618SKris Buschelman /* 4*/ MatMultAdd_MPIAIJ, 25847c922b88SBarry Smith MatMultTranspose_MPIAIJ, 25857c922b88SBarry Smith MatMultTransposeAdd_MPIAIJ, 2586103bf8bdSMatthew Knepley #ifdef PETSC_HAVE_PBGL 2587103bf8bdSMatthew Knepley MatSolve_MPIAIJ, 2588103bf8bdSMatthew Knepley #else 2589cda55fadSBarry Smith 0, 2590103bf8bdSMatthew Knepley #endif 2591cda55fadSBarry Smith 0, 2592cda55fadSBarry Smith 0, 259397304618SKris Buschelman /*10*/ 0, 2594cda55fadSBarry Smith 0, 2595cda55fadSBarry Smith 0, 259641f059aeSBarry Smith MatSOR_MPIAIJ, 2597b7c46309SBarry Smith MatTranspose_MPIAIJ, 
259897304618SKris Buschelman /*15*/ MatGetInfo_MPIAIJ, 2599cda55fadSBarry Smith MatEqual_MPIAIJ, 2600cda55fadSBarry Smith MatGetDiagonal_MPIAIJ, 2601cda55fadSBarry Smith MatDiagonalScale_MPIAIJ, 2602cda55fadSBarry Smith MatNorm_MPIAIJ, 260397304618SKris Buschelman /*20*/ MatAssemblyBegin_MPIAIJ, 2604cda55fadSBarry Smith MatAssemblyEnd_MPIAIJ, 2605cda55fadSBarry Smith MatSetOption_MPIAIJ, 2606cda55fadSBarry Smith MatZeroEntries_MPIAIJ, 2607d519adbfSMatthew Knepley /*24*/ MatZeroRows_MPIAIJ, 2608cda55fadSBarry Smith 0, 2609103bf8bdSMatthew Knepley #ifdef PETSC_HAVE_PBGL 2610719d5645SBarry Smith 0, 2611103bf8bdSMatthew Knepley #else 2612cda55fadSBarry Smith 0, 2613103bf8bdSMatthew Knepley #endif 2614cda55fadSBarry Smith 0, 2615cda55fadSBarry Smith 0, 2616d519adbfSMatthew Knepley /*29*/ MatSetUpPreallocation_MPIAIJ, 2617103bf8bdSMatthew Knepley #ifdef PETSC_HAVE_PBGL 2618719d5645SBarry Smith 0, 2619103bf8bdSMatthew Knepley #else 2620cda55fadSBarry Smith 0, 2621103bf8bdSMatthew Knepley #endif 2622cda55fadSBarry Smith 0, 2623cda55fadSBarry Smith 0, 2624cda55fadSBarry Smith 0, 2625d519adbfSMatthew Knepley /*34*/ MatDuplicate_MPIAIJ, 2626cda55fadSBarry Smith 0, 2627cda55fadSBarry Smith 0, 2628cda55fadSBarry Smith 0, 2629cda55fadSBarry Smith 0, 2630d519adbfSMatthew Knepley /*39*/ MatAXPY_MPIAIJ, 2631cda55fadSBarry Smith MatGetSubMatrices_MPIAIJ, 2632cda55fadSBarry Smith MatIncreaseOverlap_MPIAIJ, 2633cda55fadSBarry Smith MatGetValues_MPIAIJ, 2634cb5b572fSBarry Smith MatCopy_MPIAIJ, 2635d519adbfSMatthew Knepley /*44*/ MatGetRowMax_MPIAIJ, 2636cda55fadSBarry Smith MatScale_MPIAIJ, 2637cda55fadSBarry Smith 0, 2638cda55fadSBarry Smith 0, 2639cda55fadSBarry Smith 0, 2640d519adbfSMatthew Knepley /*49*/ MatSetBlockSize_MPIAIJ, 2641cda55fadSBarry Smith 0, 2642cda55fadSBarry Smith 0, 2643cda55fadSBarry Smith 0, 2644cda55fadSBarry Smith 0, 2645d519adbfSMatthew Knepley /*54*/ MatFDColoringCreate_MPIAIJ, 2646cda55fadSBarry Smith 0, 2647cda55fadSBarry Smith MatSetUnfactored_MPIAIJ, 
264842e855d1Svictor MatPermute_MPIAIJ, 2649cda55fadSBarry Smith 0, 2650d519adbfSMatthew Knepley /*59*/ MatGetSubMatrix_MPIAIJ, 2651e03a110bSBarry Smith MatDestroy_MPIAIJ, 2652e03a110bSBarry Smith MatView_MPIAIJ, 2653357abbc8SBarry Smith 0, 2654a2243be0SBarry Smith 0, 2655d519adbfSMatthew Knepley /*64*/ 0, 2656a2243be0SBarry Smith 0, 2657a2243be0SBarry Smith 0, 2658a2243be0SBarry Smith 0, 2659a2243be0SBarry Smith 0, 2660d519adbfSMatthew Knepley /*69*/ MatGetRowMaxAbs_MPIAIJ, 2661c87e5d42SMatthew Knepley MatGetRowMinAbs_MPIAIJ, 2662a2243be0SBarry Smith 0, 2663a2243be0SBarry Smith MatSetColoring_MPIAIJ, 2664dcf5cc72SBarry Smith #if defined(PETSC_HAVE_ADIC) 2665779c1a83SBarry Smith MatSetValuesAdic_MPIAIJ, 2666dcf5cc72SBarry Smith #else 2667dcf5cc72SBarry Smith 0, 2668dcf5cc72SBarry Smith #endif 266997304618SKris Buschelman MatSetValuesAdifor_MPIAIJ, 26703acb8795SBarry Smith /*75*/ MatFDColoringApply_AIJ, 267197304618SKris Buschelman 0, 267297304618SKris Buschelman 0, 267397304618SKris Buschelman 0, 267497304618SKris Buschelman 0, 267597304618SKris Buschelman /*80*/ 0, 267697304618SKris Buschelman 0, 267797304618SKris Buschelman 0, 2678d519adbfSMatthew Knepley /*83*/ MatLoad_MPIAIJ, 26796284ec50SHong Zhang 0, 26806284ec50SHong Zhang 0, 26816284ec50SHong Zhang 0, 26826284ec50SHong Zhang 0, 2683865e5f61SKris Buschelman 0, 2684d519adbfSMatthew Knepley /*89*/ MatMatMult_MPIAIJ_MPIAIJ, 268526be0446SHong Zhang MatMatMultSymbolic_MPIAIJ_MPIAIJ, 268626be0446SHong Zhang MatMatMultNumeric_MPIAIJ_MPIAIJ, 26877a7894deSKris Buschelman MatPtAP_Basic, 26887a7894deSKris Buschelman MatPtAPSymbolic_MPIAIJ, 2689d519adbfSMatthew Knepley /*94*/ MatPtAPNumeric_MPIAIJ, 26907a7894deSKris Buschelman 0, 26917a7894deSKris Buschelman 0, 26927a7894deSKris Buschelman 0, 26937a7894deSKris Buschelman 0, 2694d519adbfSMatthew Knepley /*99*/ 0, 2695865e5f61SKris Buschelman MatPtAPSymbolic_MPIAIJ_MPIAIJ, 26967a7894deSKris Buschelman MatPtAPNumeric_MPIAIJ_MPIAIJ, 26972fd7e33dSBarry Smith 
MatConjugate_MPIAIJ, 26982fd7e33dSBarry Smith 0, 2699d519adbfSMatthew Knepley /*104*/MatSetValuesRow_MPIAIJ, 270099cafbc1SBarry Smith MatRealPart_MPIAIJ, 270169db28dcSHong Zhang MatImaginaryPart_MPIAIJ, 270269db28dcSHong Zhang 0, 270369db28dcSHong Zhang 0, 2704d519adbfSMatthew Knepley /*109*/0, 270503bc72f1SMatthew Knepley MatGetRedundantMatrix_MPIAIJ, 27065494a064SHong Zhang MatGetRowMin_MPIAIJ, 27075494a064SHong Zhang 0, 27085494a064SHong Zhang 0, 2709bd0c2dcbSBarry Smith /*114*/MatGetSeqNonzerostructure_MPIAIJ, 2710bd0c2dcbSBarry Smith 0, 2711bd0c2dcbSBarry Smith 0, 2712bd0c2dcbSBarry Smith 0, 2713bd0c2dcbSBarry Smith 0, 2714bd0c2dcbSBarry Smith 0 2715bd0c2dcbSBarry Smith }; 271636ce4990SBarry Smith 27172e8a6d31SBarry Smith /* ----------------------------------------------------------------------------------------*/ 27182e8a6d31SBarry Smith 2719fb2e594dSBarry Smith EXTERN_C_BEGIN 27204a2ae208SSatish Balay #undef __FUNCT__ 27214a2ae208SSatish Balay #define __FUNCT__ "MatStoreValues_MPIAIJ" 2722be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatStoreValues_MPIAIJ(Mat mat) 27232e8a6d31SBarry Smith { 27242e8a6d31SBarry Smith Mat_MPIAIJ *aij = (Mat_MPIAIJ *)mat->data; 2725dfbe8321SBarry Smith PetscErrorCode ierr; 27262e8a6d31SBarry Smith 27272e8a6d31SBarry Smith PetscFunctionBegin; 27282e8a6d31SBarry Smith ierr = MatStoreValues(aij->A);CHKERRQ(ierr); 27292e8a6d31SBarry Smith ierr = MatStoreValues(aij->B);CHKERRQ(ierr); 27302e8a6d31SBarry Smith PetscFunctionReturn(0); 27312e8a6d31SBarry Smith } 2732fb2e594dSBarry Smith EXTERN_C_END 27332e8a6d31SBarry Smith 2734fb2e594dSBarry Smith EXTERN_C_BEGIN 27354a2ae208SSatish Balay #undef __FUNCT__ 27364a2ae208SSatish Balay #define __FUNCT__ "MatRetrieveValues_MPIAIJ" 2737be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatRetrieveValues_MPIAIJ(Mat mat) 27382e8a6d31SBarry Smith { 27392e8a6d31SBarry Smith Mat_MPIAIJ *aij = (Mat_MPIAIJ *)mat->data; 2740dfbe8321SBarry Smith PetscErrorCode ierr; 
27412e8a6d31SBarry Smith 27422e8a6d31SBarry Smith PetscFunctionBegin; 27432e8a6d31SBarry Smith ierr = MatRetrieveValues(aij->A);CHKERRQ(ierr); 27442e8a6d31SBarry Smith ierr = MatRetrieveValues(aij->B);CHKERRQ(ierr); 27452e8a6d31SBarry Smith PetscFunctionReturn(0); 27462e8a6d31SBarry Smith } 2747fb2e594dSBarry Smith EXTERN_C_END 27488a729477SBarry Smith 2749e090d566SSatish Balay #include "petscpc.h" 275027508adbSBarry Smith EXTERN_C_BEGIN 27514a2ae208SSatish Balay #undef __FUNCT__ 2752a23d5eceSKris Buschelman #define __FUNCT__ "MatMPIAIJSetPreallocation_MPIAIJ" 2753be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatMPIAIJSetPreallocation_MPIAIJ(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[]) 2754a23d5eceSKris Buschelman { 2755a23d5eceSKris Buschelman Mat_MPIAIJ *b; 2756dfbe8321SBarry Smith PetscErrorCode ierr; 2757b1d57f15SBarry Smith PetscInt i; 2758a23d5eceSKris Buschelman 2759a23d5eceSKris Buschelman PetscFunctionBegin; 2760a23d5eceSKris Buschelman if (d_nz == PETSC_DEFAULT || d_nz == PETSC_DECIDE) d_nz = 5; 2761a23d5eceSKris Buschelman if (o_nz == PETSC_DEFAULT || o_nz == PETSC_DECIDE) o_nz = 2; 276277431f27SBarry Smith if (d_nz < 0) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"d_nz cannot be less than 0: value %D",d_nz); 276377431f27SBarry Smith if (o_nz < 0) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"o_nz cannot be less than 0: value %D",o_nz); 2764899cda47SBarry Smith 276526283091SBarry Smith ierr = PetscLayoutSetBlockSize(B->rmap,1);CHKERRQ(ierr); 276626283091SBarry Smith ierr = PetscLayoutSetBlockSize(B->cmap,1);CHKERRQ(ierr); 276726283091SBarry Smith ierr = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr); 276826283091SBarry Smith ierr = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr); 2769a23d5eceSKris Buschelman if (d_nnz) { 2770d0f46423SBarry Smith for (i=0; i<B->rmap->n; i++) { 277177431f27SBarry Smith if (d_nnz[i] < 0) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"d_nnz cannot be less than 0: local row %D value %D",i,d_nnz[i]); 2772a23d5eceSKris 
Buschelman } 2773a23d5eceSKris Buschelman } 2774a23d5eceSKris Buschelman if (o_nnz) { 2775d0f46423SBarry Smith for (i=0; i<B->rmap->n; i++) { 277677431f27SBarry Smith if (o_nnz[i] < 0) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"o_nnz cannot be less than 0: local row %D value %D",i,o_nnz[i]); 2777a23d5eceSKris Buschelman } 2778a23d5eceSKris Buschelman } 2779a23d5eceSKris Buschelman b = (Mat_MPIAIJ*)B->data; 2780899cda47SBarry Smith 2781526dfc15SBarry Smith if (!B->preallocated) { 2782899cda47SBarry Smith /* Explicitly create 2 MATSEQAIJ matrices. */ 2783899cda47SBarry Smith ierr = MatCreate(PETSC_COMM_SELF,&b->A);CHKERRQ(ierr); 2784d0f46423SBarry Smith ierr = MatSetSizes(b->A,B->rmap->n,B->cmap->n,B->rmap->n,B->cmap->n);CHKERRQ(ierr); 2785899cda47SBarry Smith ierr = MatSetType(b->A,MATSEQAIJ);CHKERRQ(ierr); 2786899cda47SBarry Smith ierr = PetscLogObjectParent(B,b->A);CHKERRQ(ierr); 2787899cda47SBarry Smith ierr = MatCreate(PETSC_COMM_SELF,&b->B);CHKERRQ(ierr); 2788d0f46423SBarry Smith ierr = MatSetSizes(b->B,B->rmap->n,B->cmap->N,B->rmap->n,B->cmap->N);CHKERRQ(ierr); 2789899cda47SBarry Smith ierr = MatSetType(b->B,MATSEQAIJ);CHKERRQ(ierr); 2790899cda47SBarry Smith ierr = PetscLogObjectParent(B,b->B);CHKERRQ(ierr); 2791526dfc15SBarry Smith } 2792899cda47SBarry Smith 2793c60e587dSKris Buschelman ierr = MatSeqAIJSetPreallocation(b->A,d_nz,d_nnz);CHKERRQ(ierr); 2794c60e587dSKris Buschelman ierr = MatSeqAIJSetPreallocation(b->B,o_nz,o_nnz);CHKERRQ(ierr); 2795526dfc15SBarry Smith B->preallocated = PETSC_TRUE; 2796a23d5eceSKris Buschelman PetscFunctionReturn(0); 2797a23d5eceSKris Buschelman } 2798a23d5eceSKris Buschelman EXTERN_C_END 2799a23d5eceSKris Buschelman 28004a2ae208SSatish Balay #undef __FUNCT__ 28014a2ae208SSatish Balay #define __FUNCT__ "MatDuplicate_MPIAIJ" 2802dfbe8321SBarry Smith PetscErrorCode MatDuplicate_MPIAIJ(Mat matin,MatDuplicateOption cpvalues,Mat *newmat) 2803d6dfbf8fSBarry Smith { 2804d6dfbf8fSBarry Smith Mat mat; 2805416022c9SBarry Smith Mat_MPIAIJ 
*a,*oldmat = (Mat_MPIAIJ*)matin->data; 2806dfbe8321SBarry Smith PetscErrorCode ierr; 2807d6dfbf8fSBarry Smith 28083a40ed3dSBarry Smith PetscFunctionBegin; 2809416022c9SBarry Smith *newmat = 0; 28107adad957SLisandro Dalcin ierr = MatCreate(((PetscObject)matin)->comm,&mat);CHKERRQ(ierr); 2811d0f46423SBarry Smith ierr = MatSetSizes(mat,matin->rmap->n,matin->cmap->n,matin->rmap->N,matin->cmap->N);CHKERRQ(ierr); 28127adad957SLisandro Dalcin ierr = MatSetType(mat,((PetscObject)matin)->type_name);CHKERRQ(ierr); 28131d5dac46SHong Zhang ierr = PetscMemcpy(mat->ops,matin->ops,sizeof(struct _MatOps));CHKERRQ(ierr); 2814273d9f13SBarry Smith a = (Mat_MPIAIJ*)mat->data; 2815e1b6402fSHong Zhang 2816d6dfbf8fSBarry Smith mat->factor = matin->factor; 2817d0f46423SBarry Smith mat->rmap->bs = matin->rmap->bs; 2818c456f294SBarry Smith mat->assembled = PETSC_TRUE; 2819e7641de0SSatish Balay mat->insertmode = NOT_SET_VALUES; 2820273d9f13SBarry Smith mat->preallocated = PETSC_TRUE; 2821d6dfbf8fSBarry Smith 282217699dbbSLois Curfman McInnes a->size = oldmat->size; 282317699dbbSLois Curfman McInnes a->rank = oldmat->rank; 2824e7641de0SSatish Balay a->donotstash = oldmat->donotstash; 2825e7641de0SSatish Balay a->roworiented = oldmat->roworiented; 2826e7641de0SSatish Balay a->rowindices = 0; 2827bcd2baecSBarry Smith a->rowvalues = 0; 2828bcd2baecSBarry Smith a->getrowactive = PETSC_FALSE; 2829d6dfbf8fSBarry Smith 283026283091SBarry Smith ierr = PetscLayoutCopy(matin->rmap,&mat->rmap);CHKERRQ(ierr); 283126283091SBarry Smith ierr = PetscLayoutCopy(matin->cmap,&mat->cmap);CHKERRQ(ierr); 2832899cda47SBarry Smith 28337adad957SLisandro Dalcin ierr = MatStashCreate_Private(((PetscObject)matin)->comm,1,&mat->stash);CHKERRQ(ierr); 28342ee70a88SLois Curfman McInnes if (oldmat->colmap) { 2835aa482453SBarry Smith #if defined (PETSC_USE_CTABLE) 28360f5bd95cSBarry Smith ierr = PetscTableCreateCopy(oldmat->colmap,&a->colmap);CHKERRQ(ierr); 2837b1fc9764SSatish Balay #else 2838d0f46423SBarry Smith ierr = 
PetscMalloc((mat->cmap->N)*sizeof(PetscInt),&a->colmap);CHKERRQ(ierr); 2839d0f46423SBarry Smith ierr = PetscLogObjectMemory(mat,(mat->cmap->N)*sizeof(PetscInt));CHKERRQ(ierr); 2840d0f46423SBarry Smith ierr = PetscMemcpy(a->colmap,oldmat->colmap,(mat->cmap->N)*sizeof(PetscInt));CHKERRQ(ierr); 2841b1fc9764SSatish Balay #endif 2842416022c9SBarry Smith } else a->colmap = 0; 28433f41c07dSBarry Smith if (oldmat->garray) { 2844b1d57f15SBarry Smith PetscInt len; 2845d0f46423SBarry Smith len = oldmat->B->cmap->n; 2846b1d57f15SBarry Smith ierr = PetscMalloc((len+1)*sizeof(PetscInt),&a->garray);CHKERRQ(ierr); 284752e6d16bSBarry Smith ierr = PetscLogObjectMemory(mat,len*sizeof(PetscInt));CHKERRQ(ierr); 2848b1d57f15SBarry Smith if (len) { ierr = PetscMemcpy(a->garray,oldmat->garray,len*sizeof(PetscInt));CHKERRQ(ierr); } 2849416022c9SBarry Smith } else a->garray = 0; 2850d6dfbf8fSBarry Smith 2851416022c9SBarry Smith ierr = VecDuplicate(oldmat->lvec,&a->lvec);CHKERRQ(ierr); 285252e6d16bSBarry Smith ierr = PetscLogObjectParent(mat,a->lvec);CHKERRQ(ierr); 2853a56f8943SBarry Smith ierr = VecScatterCopy(oldmat->Mvctx,&a->Mvctx);CHKERRQ(ierr); 285452e6d16bSBarry Smith ierr = PetscLogObjectParent(mat,a->Mvctx);CHKERRQ(ierr); 28552e8a6d31SBarry Smith ierr = MatDuplicate(oldmat->A,cpvalues,&a->A);CHKERRQ(ierr); 285652e6d16bSBarry Smith ierr = PetscLogObjectParent(mat,a->A);CHKERRQ(ierr); 28572e8a6d31SBarry Smith ierr = MatDuplicate(oldmat->B,cpvalues,&a->B);CHKERRQ(ierr); 285852e6d16bSBarry Smith ierr = PetscLogObjectParent(mat,a->B);CHKERRQ(ierr); 28597adad957SLisandro Dalcin ierr = PetscFListDuplicate(((PetscObject)matin)->qlist,&((PetscObject)mat)->qlist);CHKERRQ(ierr); 28608a729477SBarry Smith *newmat = mat; 28613a40ed3dSBarry Smith PetscFunctionReturn(0); 28628a729477SBarry Smith } 2863416022c9SBarry Smith 2864e090d566SSatish Balay #include "petscsys.h" 2865416022c9SBarry Smith 28664a2ae208SSatish Balay #undef __FUNCT__ 28674a2ae208SSatish Balay #define __FUNCT__ "MatLoad_MPIAIJ" 
2868a313700dSBarry Smith PetscErrorCode MatLoad_MPIAIJ(PetscViewer viewer, const MatType type,Mat *newmat) 2869416022c9SBarry Smith { 2870d65a2f8fSBarry Smith Mat A; 287187828ca2SBarry Smith PetscScalar *vals,*svals; 287219bcc07fSBarry Smith MPI_Comm comm = ((PetscObject)viewer)->comm; 2873416022c9SBarry Smith MPI_Status status; 28746849ba73SBarry Smith PetscErrorCode ierr; 287513980483SBarry Smith PetscMPIInt rank,size,tag = ((PetscObject)viewer)->tag,mpicnt,mpimaxnz; 28767e042019SMatthew Knepley PetscInt i,nz,j,rstart,rend,mmax,maxnz = 0; 2877b1d57f15SBarry Smith PetscInt header[4],*rowlengths = 0,M,N,m,*cols; 2878910ba992SMatthew Knepley PetscInt *ourlens = PETSC_NULL,*procsnz = PETSC_NULL,*offlens = PETSC_NULL,jj,*mycols,*smycols; 2879dc231df0SBarry Smith PetscInt cend,cstart,n,*rowners; 2880b1d57f15SBarry Smith int fd; 2881416022c9SBarry Smith 28823a40ed3dSBarry Smith PetscFunctionBegin; 28831dab6e02SBarry Smith ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 28841dab6e02SBarry Smith ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 288517699dbbSLois Curfman McInnes if (!rank) { 2886b0a32e0cSBarry Smith ierr = PetscViewerBinaryGetDescriptor(viewer,&fd);CHKERRQ(ierr); 28870752156aSBarry Smith ierr = PetscBinaryRead(fd,(char *)header,4,PETSC_INT);CHKERRQ(ierr); 2888552e946dSBarry Smith if (header[0] != MAT_FILE_COOKIE) SETERRQ(PETSC_ERR_FILE_UNEXPECTED,"not matrix object"); 28896c5fab8fSBarry Smith } 28906c5fab8fSBarry Smith 2891b1d57f15SBarry Smith ierr = MPI_Bcast(header+1,3,MPIU_INT,0,comm);CHKERRQ(ierr); 2892416022c9SBarry Smith M = header[1]; N = header[2]; 2893416022c9SBarry Smith /* determine ownership of all rows */ 289429cdbbc8SSatish Balay m = M/size + ((M % size) > rank); 2895dc231df0SBarry Smith ierr = PetscMalloc((size+1)*sizeof(PetscInt),&rowners);CHKERRQ(ierr); 2896dc231df0SBarry Smith ierr = MPI_Allgather(&m,1,MPIU_INT,rowners+1,1,MPIU_INT,comm);CHKERRQ(ierr); 2897167e7480SBarry Smith 2898167e7480SBarry Smith /* First process needs enough room for 
process with most rows */ 2899167e7480SBarry Smith if (!rank) { 2900167e7480SBarry Smith mmax = rowners[1]; 2901167e7480SBarry Smith for (i=2; i<size; i++) { 2902167e7480SBarry Smith mmax = PetscMax(mmax,rowners[i]); 2903167e7480SBarry Smith } 2904167e7480SBarry Smith } else mmax = m; 2905167e7480SBarry Smith 2906416022c9SBarry Smith rowners[0] = 0; 290717699dbbSLois Curfman McInnes for (i=2; i<=size; i++) { 2908416022c9SBarry Smith rowners[i] += rowners[i-1]; 2909416022c9SBarry Smith } 291017699dbbSLois Curfman McInnes rstart = rowners[rank]; 291117699dbbSLois Curfman McInnes rend = rowners[rank+1]; 2912416022c9SBarry Smith 2913416022c9SBarry Smith /* distribute row lengths to all processors */ 2914167e7480SBarry Smith ierr = PetscMalloc2(mmax,PetscInt,&ourlens,mmax,PetscInt,&offlens);CHKERRQ(ierr); 291517699dbbSLois Curfman McInnes if (!rank) { 2916dc231df0SBarry Smith ierr = PetscBinaryRead(fd,ourlens,m,PETSC_INT);CHKERRQ(ierr); 2917dc231df0SBarry Smith ierr = PetscMalloc(m*sizeof(PetscInt),&rowlengths);CHKERRQ(ierr); 2918b1d57f15SBarry Smith ierr = PetscMalloc(size*sizeof(PetscInt),&procsnz);CHKERRQ(ierr); 2919b1d57f15SBarry Smith ierr = PetscMemzero(procsnz,size*sizeof(PetscInt));CHKERRQ(ierr); 2920dc231df0SBarry Smith for (j=0; j<m; j++) { 2921dc231df0SBarry Smith procsnz[0] += ourlens[j]; 2922dc231df0SBarry Smith } 2923dc231df0SBarry Smith for (i=1; i<size; i++) { 2924dc231df0SBarry Smith ierr = PetscBinaryRead(fd,rowlengths,rowners[i+1]-rowners[i],PETSC_INT);CHKERRQ(ierr); 2925dc231df0SBarry Smith /* calculate the number of nonzeros on each processor */ 2926dc231df0SBarry Smith for (j=0; j<rowners[i+1]-rowners[i]; j++) { 2927416022c9SBarry Smith procsnz[i] += rowlengths[j]; 2928416022c9SBarry Smith } 292913980483SBarry Smith mpicnt = PetscMPIIntCast(rowners[i+1]-rowners[i]); 293013980483SBarry Smith ierr = MPI_Send(rowlengths,mpicnt,MPIU_INT,i,tag,comm);CHKERRQ(ierr); 2931416022c9SBarry Smith } 2932606d414cSSatish Balay ierr = 
PetscFree(rowlengths);CHKERRQ(ierr); 2933dc231df0SBarry Smith } else { 293413980483SBarry Smith mpicnt = PetscMPIIntCast(m);CHKERRQ(ierr); 293513980483SBarry Smith ierr = MPI_Recv(ourlens,mpicnt,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr); 2936dc231df0SBarry Smith } 2937416022c9SBarry Smith 2938dc231df0SBarry Smith if (!rank) { 2939416022c9SBarry Smith /* determine max buffer needed and allocate it */ 2940416022c9SBarry Smith maxnz = 0; 29418a8e0b3aSBarry Smith for (i=0; i<size; i++) { 29420452661fSBarry Smith maxnz = PetscMax(maxnz,procsnz[i]); 2943416022c9SBarry Smith } 2944b1d57f15SBarry Smith ierr = PetscMalloc(maxnz*sizeof(PetscInt),&cols);CHKERRQ(ierr); 2945416022c9SBarry Smith 2946416022c9SBarry Smith /* read in my part of the matrix column indices */ 2947416022c9SBarry Smith nz = procsnz[0]; 2948b1d57f15SBarry Smith ierr = PetscMalloc(nz*sizeof(PetscInt),&mycols);CHKERRQ(ierr); 29490752156aSBarry Smith ierr = PetscBinaryRead(fd,mycols,nz,PETSC_INT);CHKERRQ(ierr); 2950d65a2f8fSBarry Smith 2951d65a2f8fSBarry Smith /* read in every one elses and ship off */ 295217699dbbSLois Curfman McInnes for (i=1; i<size; i++) { 2953d65a2f8fSBarry Smith nz = procsnz[i]; 29540752156aSBarry Smith ierr = PetscBinaryRead(fd,cols,nz,PETSC_INT);CHKERRQ(ierr); 295513980483SBarry Smith mpicnt = PetscMPIIntCast(nz); 295613980483SBarry Smith ierr = MPI_Send(cols,mpicnt,MPIU_INT,i,tag,comm);CHKERRQ(ierr); 2957d65a2f8fSBarry Smith } 2958606d414cSSatish Balay ierr = PetscFree(cols);CHKERRQ(ierr); 29593a40ed3dSBarry Smith } else { 2960416022c9SBarry Smith /* determine buffer space needed for message */ 2961416022c9SBarry Smith nz = 0; 2962416022c9SBarry Smith for (i=0; i<m; i++) { 2963416022c9SBarry Smith nz += ourlens[i]; 2964416022c9SBarry Smith } 2965dc231df0SBarry Smith ierr = PetscMalloc(nz*sizeof(PetscInt),&mycols);CHKERRQ(ierr); 2966416022c9SBarry Smith 2967416022c9SBarry Smith /* receive message of column indices*/ 296813980483SBarry Smith mpicnt = 
PetscMPIIntCast(nz);CHKERRQ(ierr); 296913980483SBarry Smith ierr = MPI_Recv(mycols,mpicnt,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr); 297013980483SBarry Smith ierr = MPI_Get_count(&status,MPIU_INT,&mpimaxnz);CHKERRQ(ierr); 29717c533972SBarry Smith if (mpimaxnz == MPI_UNDEFINED) {SETERRQ1(PETSC_ERR_LIB,"MPI_Get_count() returned MPI_UNDEFINED, expected %d",mpicnt);} 297213980483SBarry Smith else if (mpimaxnz < 0) {SETERRQ2(PETSC_ERR_LIB,"MPI_Get_count() returned impossible negative value %d, expected %d",mpimaxnz,mpicnt);} 297313980483SBarry Smith else if (mpimaxnz != mpicnt) {SETERRQ2(PETSC_ERR_FILE_UNEXPECTED,"something is wrong with file: expected %d received %d",mpicnt,mpimaxnz);} 2974416022c9SBarry Smith } 2975416022c9SBarry Smith 2976b362ba68SBarry Smith /* determine column ownership if matrix is not square */ 2977b362ba68SBarry Smith if (N != M) { 2978b362ba68SBarry Smith n = N/size + ((N % size) > rank); 2979b1d57f15SBarry Smith ierr = MPI_Scan(&n,&cend,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr); 2980b362ba68SBarry Smith cstart = cend - n; 2981b362ba68SBarry Smith } else { 2982b362ba68SBarry Smith cstart = rstart; 2983b362ba68SBarry Smith cend = rend; 2984fb2e594dSBarry Smith n = cend - cstart; 2985b362ba68SBarry Smith } 2986b362ba68SBarry Smith 2987416022c9SBarry Smith /* loop over local rows, determining number of off diagonal entries */ 2988b1d57f15SBarry Smith ierr = PetscMemzero(offlens,m*sizeof(PetscInt));CHKERRQ(ierr); 2989416022c9SBarry Smith jj = 0; 2990416022c9SBarry Smith for (i=0; i<m; i++) { 2991416022c9SBarry Smith for (j=0; j<ourlens[i]; j++) { 2992b362ba68SBarry Smith if (mycols[jj] < cstart || mycols[jj] >= cend) offlens[i]++; 2993416022c9SBarry Smith jj++; 2994416022c9SBarry Smith } 2995416022c9SBarry Smith } 2996d65a2f8fSBarry Smith 2997d65a2f8fSBarry Smith /* create our matrix */ 2998416022c9SBarry Smith for (i=0; i<m; i++) { 2999416022c9SBarry Smith ourlens[i] -= offlens[i]; 3000416022c9SBarry Smith } 3001f69a0ea3SMatthew Knepley ierr = 
MatCreate(comm,&A);CHKERRQ(ierr); 3002f69a0ea3SMatthew Knepley ierr = MatSetSizes(A,m,n,M,N);CHKERRQ(ierr); 3003d10c748bSKris Buschelman ierr = MatSetType(A,type);CHKERRQ(ierr); 3004d10c748bSKris Buschelman ierr = MatMPIAIJSetPreallocation(A,0,ourlens,0,offlens);CHKERRQ(ierr); 3005d10c748bSKris Buschelman 3006d65a2f8fSBarry Smith for (i=0; i<m; i++) { 3007d65a2f8fSBarry Smith ourlens[i] += offlens[i]; 3008d65a2f8fSBarry Smith } 3009416022c9SBarry Smith 301017699dbbSLois Curfman McInnes if (!rank) { 3011906b51c7SHong Zhang ierr = PetscMalloc((maxnz+1)*sizeof(PetscScalar),&vals);CHKERRQ(ierr); 3012416022c9SBarry Smith 3013416022c9SBarry Smith /* read in my part of the matrix numerical values */ 3014416022c9SBarry Smith nz = procsnz[0]; 30150752156aSBarry Smith ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr); 3016d65a2f8fSBarry Smith 3017d65a2f8fSBarry Smith /* insert into matrix */ 3018d65a2f8fSBarry Smith jj = rstart; 3019d65a2f8fSBarry Smith smycols = mycols; 3020d65a2f8fSBarry Smith svals = vals; 3021d65a2f8fSBarry Smith for (i=0; i<m; i++) { 3022dc231df0SBarry Smith ierr = MatSetValues_MPIAIJ(A,1,&jj,ourlens[i],smycols,svals,INSERT_VALUES);CHKERRQ(ierr); 3023d65a2f8fSBarry Smith smycols += ourlens[i]; 3024d65a2f8fSBarry Smith svals += ourlens[i]; 3025d65a2f8fSBarry Smith jj++; 3026416022c9SBarry Smith } 3027416022c9SBarry Smith 3028d65a2f8fSBarry Smith /* read in other processors and ship out */ 302917699dbbSLois Curfman McInnes for (i=1; i<size; i++) { 3030416022c9SBarry Smith nz = procsnz[i]; 30310752156aSBarry Smith ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr); 303213980483SBarry Smith mpicnt = PetscMPIIntCast(nz); 303313980483SBarry Smith ierr = MPI_Send(vals,mpicnt,MPIU_SCALAR,i,((PetscObject)A)->tag,comm);CHKERRQ(ierr); 3034416022c9SBarry Smith } 3035606d414cSSatish Balay ierr = PetscFree(procsnz);CHKERRQ(ierr); 30363a40ed3dSBarry Smith } else { 3037d65a2f8fSBarry Smith /* receive numeric values */ 303887828ca2SBarry Smith 
ierr = PetscMalloc((nz+1)*sizeof(PetscScalar),&vals);CHKERRQ(ierr); 3039416022c9SBarry Smith 3040d65a2f8fSBarry Smith /* receive message of values*/ 304113980483SBarry Smith mpicnt = PetscMPIIntCast(nz); 304213980483SBarry Smith ierr = MPI_Recv(vals,mpicnt,MPIU_SCALAR,0,((PetscObject)A)->tag,comm,&status);CHKERRQ(ierr); 304313980483SBarry Smith ierr = MPI_Get_count(&status,MPIU_SCALAR,&mpimaxnz);CHKERRQ(ierr); 30447c533972SBarry Smith if (mpimaxnz == MPI_UNDEFINED) {SETERRQ1(PETSC_ERR_LIB,"MPI_Get_count() returned MPI_UNDEFINED, expected %d",mpicnt);} 304513980483SBarry Smith else if (mpimaxnz < 0) {SETERRQ2(PETSC_ERR_LIB,"MPI_Get_count() returned impossible negative value %d, expected %d",mpimaxnz,mpicnt);} 304613980483SBarry Smith else if (mpimaxnz != mpicnt) {SETERRQ2(PETSC_ERR_FILE_UNEXPECTED,"something is wrong with file: expected %d received %d",mpicnt,mpimaxnz);} 3047d65a2f8fSBarry Smith 3048d65a2f8fSBarry Smith /* insert into matrix */ 3049d65a2f8fSBarry Smith jj = rstart; 3050d65a2f8fSBarry Smith smycols = mycols; 3051d65a2f8fSBarry Smith svals = vals; 3052d65a2f8fSBarry Smith for (i=0; i<m; i++) { 3053dc231df0SBarry Smith ierr = MatSetValues_MPIAIJ(A,1,&jj,ourlens[i],smycols,svals,INSERT_VALUES);CHKERRQ(ierr); 3054d65a2f8fSBarry Smith smycols += ourlens[i]; 3055d65a2f8fSBarry Smith svals += ourlens[i]; 3056d65a2f8fSBarry Smith jj++; 3057d65a2f8fSBarry Smith } 3058d65a2f8fSBarry Smith } 3059dc231df0SBarry Smith ierr = PetscFree2(ourlens,offlens);CHKERRQ(ierr); 3060606d414cSSatish Balay ierr = PetscFree(vals);CHKERRQ(ierr); 3061606d414cSSatish Balay ierr = PetscFree(mycols);CHKERRQ(ierr); 3062606d414cSSatish Balay ierr = PetscFree(rowners);CHKERRQ(ierr); 3063d65a2f8fSBarry Smith 30646d4a8577SBarry Smith ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 30656d4a8577SBarry Smith ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 3066d10c748bSKris Buschelman *newmat = A; 30673a40ed3dSBarry Smith PetscFunctionReturn(0); 3068416022c9SBarry 
Smith } 3069a0ff6018SBarry Smith 30704a2ae208SSatish Balay #undef __FUNCT__ 30714a2ae208SSatish Balay #define __FUNCT__ "MatGetSubMatrix_MPIAIJ" 30724aa3045dSJed Brown PetscErrorCode MatGetSubMatrix_MPIAIJ(Mat mat,IS isrow,IS iscol,MatReuse call,Mat *newmat) 30734aa3045dSJed Brown { 30744aa3045dSJed Brown PetscErrorCode ierr; 30754aa3045dSJed Brown IS iscol_local; 30764aa3045dSJed Brown PetscInt csize; 30774aa3045dSJed Brown 30784aa3045dSJed Brown PetscFunctionBegin; 30794aa3045dSJed Brown ierr = ISGetLocalSize(iscol,&csize);CHKERRQ(ierr); 3080b79d0421SJed Brown if (call == MAT_REUSE_MATRIX) { 3081b79d0421SJed Brown ierr = PetscObjectQuery((PetscObject)*newmat,"ISAllGather",(PetscObject*)&iscol_local);CHKERRQ(ierr); 3082b79d0421SJed Brown if (!iscol_local) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Submatrix passed in was not used before, cannot reuse"); 3083b79d0421SJed Brown } else { 30844aa3045dSJed Brown ierr = ISAllGather(iscol,&iscol_local);CHKERRQ(ierr); 3085b79d0421SJed Brown } 30864aa3045dSJed Brown ierr = MatGetSubMatrix_MPIAIJ_Private(mat,isrow,iscol_local,csize,call,newmat);CHKERRQ(ierr); 3087b79d0421SJed Brown if (call == MAT_INITIAL_MATRIX) { 3088b79d0421SJed Brown ierr = PetscObjectCompose((PetscObject)*newmat,"ISAllGather",(PetscObject)iscol_local);CHKERRQ(ierr); 30894aa3045dSJed Brown ierr = ISDestroy(iscol_local);CHKERRQ(ierr); 3090b79d0421SJed Brown } 30914aa3045dSJed Brown PetscFunctionReturn(0); 30924aa3045dSJed Brown } 30934aa3045dSJed Brown 30944aa3045dSJed Brown #undef __FUNCT__ 30954aa3045dSJed Brown #define __FUNCT__ "MatGetSubMatrix_MPIAIJ_Private" 3096a0ff6018SBarry Smith /* 309729da9460SBarry Smith Not great since it makes two copies of the submatrix, first an SeqAIJ 309829da9460SBarry Smith in local and then by concatenating the local matrices the end result. 
309929da9460SBarry Smith Writing it directly would be much like MatGetSubMatrices_MPIAIJ() 31004aa3045dSJed Brown 31014aa3045dSJed Brown Note: This requires a sequential iscol with all indices. 3102a0ff6018SBarry Smith */ 31034aa3045dSJed Brown PetscErrorCode MatGetSubMatrix_MPIAIJ_Private(Mat mat,IS isrow,IS iscol,PetscInt csize,MatReuse call,Mat *newmat) 3104a0ff6018SBarry Smith { 3105dfbe8321SBarry Smith PetscErrorCode ierr; 310632dcc486SBarry Smith PetscMPIInt rank,size; 3107b1d57f15SBarry Smith PetscInt i,m,n,rstart,row,rend,nz,*cwork,j; 3108b1d57f15SBarry Smith PetscInt *ii,*jj,nlocal,*dlens,*olens,dlen,olen,jend,mglobal; 3109fee21e36SBarry Smith Mat *local,M,Mreuse; 3110a77337e4SBarry Smith MatScalar *vwork,*aa; 31117adad957SLisandro Dalcin MPI_Comm comm = ((PetscObject)mat)->comm; 311200e6dbe6SBarry Smith Mat_SeqAIJ *aij; 31137e2c5f70SBarry Smith 3114a0ff6018SBarry Smith 3115a0ff6018SBarry Smith PetscFunctionBegin; 31161dab6e02SBarry Smith ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 31171dab6e02SBarry Smith ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 311800e6dbe6SBarry Smith 3119fee21e36SBarry Smith if (call == MAT_REUSE_MATRIX) { 3120fee21e36SBarry Smith ierr = PetscObjectQuery((PetscObject)*newmat,"SubMatrix",(PetscObject *)&Mreuse);CHKERRQ(ierr); 3121e005ede5SBarry Smith if (!Mreuse) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Submatrix passed in was not used before, cannot reuse"); 3122fee21e36SBarry Smith local = &Mreuse; 3123fee21e36SBarry Smith ierr = MatGetSubMatrices(mat,1,&isrow,&iscol,MAT_REUSE_MATRIX,&local);CHKERRQ(ierr); 3124fee21e36SBarry Smith } else { 3125a0ff6018SBarry Smith ierr = MatGetSubMatrices(mat,1,&isrow,&iscol,MAT_INITIAL_MATRIX,&local);CHKERRQ(ierr); 3126fee21e36SBarry Smith Mreuse = *local; 3127606d414cSSatish Balay ierr = PetscFree(local);CHKERRQ(ierr); 3128fee21e36SBarry Smith } 3129a0ff6018SBarry Smith 3130a0ff6018SBarry Smith /* 3131a0ff6018SBarry Smith m - number of local rows 3132a0ff6018SBarry Smith n - number of columns 
(same on all processors) 3133a0ff6018SBarry Smith rstart - first row in new global matrix generated 3134a0ff6018SBarry Smith */ 3135fee21e36SBarry Smith ierr = MatGetSize(Mreuse,&m,&n);CHKERRQ(ierr); 3136a0ff6018SBarry Smith if (call == MAT_INITIAL_MATRIX) { 3137fee21e36SBarry Smith aij = (Mat_SeqAIJ*)(Mreuse)->data; 313800e6dbe6SBarry Smith ii = aij->i; 313900e6dbe6SBarry Smith jj = aij->j; 314000e6dbe6SBarry Smith 3141a0ff6018SBarry Smith /* 314200e6dbe6SBarry Smith Determine the number of non-zeros in the diagonal and off-diagonal 314300e6dbe6SBarry Smith portions of the matrix in order to do correct preallocation 3144a0ff6018SBarry Smith */ 314500e6dbe6SBarry Smith 314600e6dbe6SBarry Smith /* first get start and end of "diagonal" columns */ 31476a6a5d1dSBarry Smith if (csize == PETSC_DECIDE) { 3148ab50ec6bSBarry Smith ierr = ISGetSize(isrow,&mglobal);CHKERRQ(ierr); 3149ab50ec6bSBarry Smith if (mglobal == n) { /* square matrix */ 3150e2c4fddaSBarry Smith nlocal = m; 31516a6a5d1dSBarry Smith } else { 3152ab50ec6bSBarry Smith nlocal = n/size + ((n % size) > rank); 3153ab50ec6bSBarry Smith } 3154ab50ec6bSBarry Smith } else { 31556a6a5d1dSBarry Smith nlocal = csize; 31566a6a5d1dSBarry Smith } 3157b1d57f15SBarry Smith ierr = MPI_Scan(&nlocal,&rend,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr); 315800e6dbe6SBarry Smith rstart = rend - nlocal; 31596a6a5d1dSBarry Smith if (rank == size - 1 && rend != n) { 316077431f27SBarry Smith SETERRQ2(PETSC_ERR_ARG_SIZ,"Local column sizes %D do not add up to total number of columns %D",rend,n); 31616a6a5d1dSBarry Smith } 316200e6dbe6SBarry Smith 316300e6dbe6SBarry Smith /* next, compute all the lengths */ 3164b1d57f15SBarry Smith ierr = PetscMalloc((2*m+1)*sizeof(PetscInt),&dlens);CHKERRQ(ierr); 316500e6dbe6SBarry Smith olens = dlens + m; 316600e6dbe6SBarry Smith for (i=0; i<m; i++) { 316700e6dbe6SBarry Smith jend = ii[i+1] - ii[i]; 316800e6dbe6SBarry Smith olen = 0; 316900e6dbe6SBarry Smith dlen = 0; 317000e6dbe6SBarry Smith for (j=0; 
j<jend; j++) { 317100e6dbe6SBarry Smith if (*jj < rstart || *jj >= rend) olen++; 317200e6dbe6SBarry Smith else dlen++; 317300e6dbe6SBarry Smith jj++; 317400e6dbe6SBarry Smith } 317500e6dbe6SBarry Smith olens[i] = olen; 317600e6dbe6SBarry Smith dlens[i] = dlen; 317700e6dbe6SBarry Smith } 3178f69a0ea3SMatthew Knepley ierr = MatCreate(comm,&M);CHKERRQ(ierr); 3179f69a0ea3SMatthew Knepley ierr = MatSetSizes(M,m,nlocal,PETSC_DECIDE,n);CHKERRQ(ierr); 31807adad957SLisandro Dalcin ierr = MatSetType(M,((PetscObject)mat)->type_name);CHKERRQ(ierr); 3181e2d9671bSKris Buschelman ierr = MatMPIAIJSetPreallocation(M,0,dlens,0,olens);CHKERRQ(ierr); 3182606d414cSSatish Balay ierr = PetscFree(dlens);CHKERRQ(ierr); 3183a0ff6018SBarry Smith } else { 3184b1d57f15SBarry Smith PetscInt ml,nl; 3185a0ff6018SBarry Smith 3186a0ff6018SBarry Smith M = *newmat; 3187a0ff6018SBarry Smith ierr = MatGetLocalSize(M,&ml,&nl);CHKERRQ(ierr); 318829bbc08cSBarry Smith if (ml != m) SETERRQ(PETSC_ERR_ARG_SIZ,"Previous matrix must be same size/layout as request"); 3189a0ff6018SBarry Smith ierr = MatZeroEntries(M);CHKERRQ(ierr); 3190c48de900SBarry Smith /* 3191c48de900SBarry Smith The next two lines are needed so we may call MatSetValues_MPIAIJ() below directly, 3192c48de900SBarry Smith rather than the slower MatSetValues(). 
3193c48de900SBarry Smith */ 3194c48de900SBarry Smith M->was_assembled = PETSC_TRUE; 3195c48de900SBarry Smith M->assembled = PETSC_FALSE; 3196a0ff6018SBarry Smith } 3197a0ff6018SBarry Smith ierr = MatGetOwnershipRange(M,&rstart,&rend);CHKERRQ(ierr); 3198fee21e36SBarry Smith aij = (Mat_SeqAIJ*)(Mreuse)->data; 319900e6dbe6SBarry Smith ii = aij->i; 320000e6dbe6SBarry Smith jj = aij->j; 320100e6dbe6SBarry Smith aa = aij->a; 3202a0ff6018SBarry Smith for (i=0; i<m; i++) { 3203a0ff6018SBarry Smith row = rstart + i; 320400e6dbe6SBarry Smith nz = ii[i+1] - ii[i]; 320500e6dbe6SBarry Smith cwork = jj; jj += nz; 320600e6dbe6SBarry Smith vwork = aa; aa += nz; 32078c638d02SBarry Smith ierr = MatSetValues_MPIAIJ(M,1,&row,nz,cwork,vwork,INSERT_VALUES);CHKERRQ(ierr); 3208a0ff6018SBarry Smith } 3209a0ff6018SBarry Smith 3210a0ff6018SBarry Smith ierr = MatAssemblyBegin(M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 3211a0ff6018SBarry Smith ierr = MatAssemblyEnd(M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 3212a0ff6018SBarry Smith *newmat = M; 3213fee21e36SBarry Smith 3214fee21e36SBarry Smith /* save submatrix used in processor for next request */ 3215fee21e36SBarry Smith if (call == MAT_INITIAL_MATRIX) { 3216fee21e36SBarry Smith ierr = PetscObjectCompose((PetscObject)M,"SubMatrix",(PetscObject)Mreuse);CHKERRQ(ierr); 3217fee21e36SBarry Smith ierr = PetscObjectDereference((PetscObject)Mreuse);CHKERRQ(ierr); 3218fee21e36SBarry Smith } 3219fee21e36SBarry Smith 3220a0ff6018SBarry Smith PetscFunctionReturn(0); 3221a0ff6018SBarry Smith } 3222273d9f13SBarry Smith 3223e2e86b8fSSatish Balay EXTERN_C_BEGIN 32244a2ae208SSatish Balay #undef __FUNCT__ 3225ccd8e176SBarry Smith #define __FUNCT__ "MatMPIAIJSetPreallocationCSR_MPIAIJ" 3226b7940d39SSatish Balay PetscErrorCode PETSCMAT_DLLEXPORT MatMPIAIJSetPreallocationCSR_MPIAIJ(Mat B,const PetscInt Ii[],const PetscInt J[],const PetscScalar v[]) 3227ccd8e176SBarry Smith { 3228899cda47SBarry Smith PetscInt m,cstart, cend,j,nnz,i,d; 3229899cda47SBarry Smith PetscInt 
*d_nnz,*o_nnz,nnz_max = 0,rstart,ii; 3230ccd8e176SBarry Smith const PetscInt *JJ; 3231ccd8e176SBarry Smith PetscScalar *values; 3232ccd8e176SBarry Smith PetscErrorCode ierr; 3233ccd8e176SBarry Smith 3234ccd8e176SBarry Smith PetscFunctionBegin; 3235b7940d39SSatish Balay if (Ii[0]) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"Ii[0] must be 0 it is %D",Ii[0]); 3236899cda47SBarry Smith 323726283091SBarry Smith ierr = PetscLayoutSetBlockSize(B->rmap,1);CHKERRQ(ierr); 323826283091SBarry Smith ierr = PetscLayoutSetBlockSize(B->cmap,1);CHKERRQ(ierr); 323926283091SBarry Smith ierr = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr); 324026283091SBarry Smith ierr = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr); 3241d0f46423SBarry Smith m = B->rmap->n; 3242d0f46423SBarry Smith cstart = B->cmap->rstart; 3243d0f46423SBarry Smith cend = B->cmap->rend; 3244d0f46423SBarry Smith rstart = B->rmap->rstart; 3245899cda47SBarry Smith 3246ccd8e176SBarry Smith ierr = PetscMalloc((2*m+1)*sizeof(PetscInt),&d_nnz);CHKERRQ(ierr); 3247ccd8e176SBarry Smith o_nnz = d_nnz + m; 3248ccd8e176SBarry Smith 3249ecc77c7aSBarry Smith #if defined(PETSC_USE_DEBUGGING) 3250ecc77c7aSBarry Smith for (i=0; i<m; i++) { 3251ecc77c7aSBarry Smith nnz = Ii[i+1]- Ii[i]; 3252ecc77c7aSBarry Smith JJ = J + Ii[i]; 3253ecc77c7aSBarry Smith if (nnz < 0) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Local row %D has a negative %D number of columns",i,nnz); 3254ecc77c7aSBarry Smith if (nnz && (JJ[0] < 0)) SETERRRQ1(PETSC_ERR_ARG_WRONGSTATE,"Row %D starts with negative column index",i,j); 3255d0f46423SBarry Smith if (nnz && (JJ[nnz-1] >= B->cmap->N) SETERRRQ3(PETSC_ERR_ARG_WRONGSTATE,"Row %D ends with too large a column index %D (max allowed %D)",i,JJ[nnz-1],B->cmap->N); 3256ecc77c7aSBarry Smith for (j=1; j<nnz; j++) { 3257ecc77c7aSBarry Smith if (JJ[i] <= JJ[i-1]) SETERRRQ(PETSC_ERR_ARG_WRONGSTATE,"Row %D has unsorted column index at %D location in column indices",i,j); 3258ecc77c7aSBarry Smith } 3259ecc77c7aSBarry Smith } 3260ecc77c7aSBarry Smith #endif 
3261ecc77c7aSBarry Smith 3262ccd8e176SBarry Smith for (i=0; i<m; i++) { 3263b7940d39SSatish Balay nnz = Ii[i+1]- Ii[i]; 3264b7940d39SSatish Balay JJ = J + Ii[i]; 3265ccd8e176SBarry Smith nnz_max = PetscMax(nnz_max,nnz); 3266ccd8e176SBarry Smith for (j=0; j<nnz; j++) { 3267ccd8e176SBarry Smith if (*JJ >= cstart) break; 3268ccd8e176SBarry Smith JJ++; 3269ccd8e176SBarry Smith } 3270ccd8e176SBarry Smith d = 0; 3271ccd8e176SBarry Smith for (; j<nnz; j++) { 3272ccd8e176SBarry Smith if (*JJ++ >= cend) break; 3273ccd8e176SBarry Smith d++; 3274ccd8e176SBarry Smith } 3275ccd8e176SBarry Smith d_nnz[i] = d; 3276ccd8e176SBarry Smith o_nnz[i] = nnz - d; 3277ccd8e176SBarry Smith } 3278ccd8e176SBarry Smith ierr = MatMPIAIJSetPreallocation(B,0,d_nnz,0,o_nnz);CHKERRQ(ierr); 3279ccd8e176SBarry Smith ierr = PetscFree(d_nnz);CHKERRQ(ierr); 3280ccd8e176SBarry Smith 3281ccd8e176SBarry Smith if (v) values = (PetscScalar*)v; 3282ccd8e176SBarry Smith else { 3283ccd8e176SBarry Smith ierr = PetscMalloc((nnz_max+1)*sizeof(PetscScalar),&values);CHKERRQ(ierr); 3284ccd8e176SBarry Smith ierr = PetscMemzero(values,nnz_max*sizeof(PetscScalar));CHKERRQ(ierr); 3285ccd8e176SBarry Smith } 3286ccd8e176SBarry Smith 3287ccd8e176SBarry Smith for (i=0; i<m; i++) { 3288ccd8e176SBarry Smith ii = i + rstart; 3289b7940d39SSatish Balay nnz = Ii[i+1]- Ii[i]; 3290b7940d39SSatish Balay ierr = MatSetValues_MPIAIJ(B,1,&ii,nnz,J+Ii[i],values+(v ? 
Ii[i] : 0),INSERT_VALUES);CHKERRQ(ierr); 3291ccd8e176SBarry Smith } 3292ccd8e176SBarry Smith ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 3293ccd8e176SBarry Smith ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 3294ccd8e176SBarry Smith 3295ccd8e176SBarry Smith if (!v) { 3296ccd8e176SBarry Smith ierr = PetscFree(values);CHKERRQ(ierr); 3297ccd8e176SBarry Smith } 3298ccd8e176SBarry Smith PetscFunctionReturn(0); 3299ccd8e176SBarry Smith } 3300e2e86b8fSSatish Balay EXTERN_C_END 3301ccd8e176SBarry Smith 3302ccd8e176SBarry Smith #undef __FUNCT__ 3303ccd8e176SBarry Smith #define __FUNCT__ "MatMPIAIJSetPreallocationCSR" 33041eea217eSSatish Balay /*@ 3305ccd8e176SBarry Smith MatMPIAIJSetPreallocationCSR - Allocates memory for a sparse parallel matrix in AIJ format 3306ccd8e176SBarry Smith (the default parallel PETSc format). 3307ccd8e176SBarry Smith 3308ccd8e176SBarry Smith Collective on MPI_Comm 3309ccd8e176SBarry Smith 3310ccd8e176SBarry Smith Input Parameters: 3311a1661176SMatthew Knepley + B - the matrix 3312ccd8e176SBarry Smith . i - the indices into j for the start of each local row (starts with zero) 3313ccd8e176SBarry Smith . j - the column indices for each local row (starts with zero) these must be sorted for each row 3314ccd8e176SBarry Smith - v - optional values in the matrix 3315ccd8e176SBarry Smith 3316ccd8e176SBarry Smith Level: developer 3317ccd8e176SBarry Smith 331812251496SSatish Balay Notes: 331912251496SSatish Balay The i, j, and a arrays ARE copied by this routine into the internal format used by PETSc; 332012251496SSatish Balay thus you CANNOT change the matrix entries by changing the values of a[] after you have 332112251496SSatish Balay called this routine. Use MatCreateMPIAIJWithSplitArrays() to avoid needing to copy the arrays. 332212251496SSatish Balay 332312251496SSatish Balay The i and j indices are 0 based, and i indices are indices corresponding to the local j array. 
332412251496SSatish Balay 332512251496SSatish Balay The format which is used for the sparse matrix input, is equivalent to a 332612251496SSatish Balay row-major ordering.. i.e for the following matrix, the input data expected is 332712251496SSatish Balay as shown: 332812251496SSatish Balay 332912251496SSatish Balay 1 0 0 333012251496SSatish Balay 2 0 3 P0 333112251496SSatish Balay ------- 333212251496SSatish Balay 4 5 6 P1 333312251496SSatish Balay 333412251496SSatish Balay Process0 [P0]: rows_owned=[0,1] 333512251496SSatish Balay i = {0,1,3} [size = nrow+1 = 2+1] 333612251496SSatish Balay j = {0,0,2} [size = nz = 6] 333712251496SSatish Balay v = {1,2,3} [size = nz = 6] 333812251496SSatish Balay 333912251496SSatish Balay Process1 [P1]: rows_owned=[2] 334012251496SSatish Balay i = {0,3} [size = nrow+1 = 1+1] 334112251496SSatish Balay j = {0,1,2} [size = nz = 6] 334212251496SSatish Balay v = {4,5,6} [size = nz = 6] 334312251496SSatish Balay 3344ecc77c7aSBarry Smith The column indices for each row MUST be sorted. 
33452fb0ec9aSBarry Smith 3346ccd8e176SBarry Smith .keywords: matrix, aij, compressed row, sparse, parallel 3347ccd8e176SBarry Smith 33482fb0ec9aSBarry Smith .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIAIJSetPreallocation(), MatCreateMPIAIJ(), MPIAIJ, 33498d7a6e47SBarry Smith MatCreateSeqAIJWithArrays(), MatCreateMPIAIJWithSplitArrays() 3350ccd8e176SBarry Smith @*/ 3351be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatMPIAIJSetPreallocationCSR(Mat B,const PetscInt i[],const PetscInt j[], const PetscScalar v[]) 3352ccd8e176SBarry Smith { 3353ccd8e176SBarry Smith PetscErrorCode ierr,(*f)(Mat,const PetscInt[],const PetscInt[],const PetscScalar[]); 3354ccd8e176SBarry Smith 3355ccd8e176SBarry Smith PetscFunctionBegin; 3356ccd8e176SBarry Smith ierr = PetscObjectQueryFunction((PetscObject)B,"MatMPIAIJSetPreallocationCSR_C",(void (**)(void))&f);CHKERRQ(ierr); 3357ccd8e176SBarry Smith if (f) { 3358ccd8e176SBarry Smith ierr = (*f)(B,i,j,v);CHKERRQ(ierr); 3359ccd8e176SBarry Smith } 3360ccd8e176SBarry Smith PetscFunctionReturn(0); 3361ccd8e176SBarry Smith } 3362ccd8e176SBarry Smith 3363ccd8e176SBarry Smith #undef __FUNCT__ 33644a2ae208SSatish Balay #define __FUNCT__ "MatMPIAIJSetPreallocation" 3365273d9f13SBarry Smith /*@C 3366ccd8e176SBarry Smith MatMPIAIJSetPreallocation - Preallocates memory for a sparse parallel matrix in AIJ format 3367273d9f13SBarry Smith (the default parallel PETSc format). For good matrix assembly performance 3368273d9f13SBarry Smith the user should preallocate the matrix storage by setting the parameters 3369273d9f13SBarry Smith d_nz (or d_nnz) and o_nz (or o_nnz). By setting these parameters accurately, 3370273d9f13SBarry Smith performance can be increased by more than a factor of 50. 3371273d9f13SBarry Smith 3372273d9f13SBarry Smith Collective on MPI_Comm 3373273d9f13SBarry Smith 3374273d9f13SBarry Smith Input Parameters: 3375273d9f13SBarry Smith + A - the matrix 3376273d9f13SBarry Smith . 
d_nz - number of nonzeros per row in DIAGONAL portion of local submatrix 3377273d9f13SBarry Smith (same value is used for all local rows) 3378273d9f13SBarry Smith . d_nnz - array containing the number of nonzeros in the various rows of the 3379273d9f13SBarry Smith DIAGONAL portion of the local submatrix (possibly different for each row) 3380273d9f13SBarry Smith or PETSC_NULL, if d_nz is used to specify the nonzero structure. 3381273d9f13SBarry Smith The size of this array is equal to the number of local rows, i.e 'm'. 3382273d9f13SBarry Smith You must leave room for the diagonal entry even if it is zero. 3383273d9f13SBarry Smith . o_nz - number of nonzeros per row in the OFF-DIAGONAL portion of local 3384273d9f13SBarry Smith submatrix (same value is used for all local rows). 3385273d9f13SBarry Smith - o_nnz - array containing the number of nonzeros in the various rows of the 3386273d9f13SBarry Smith OFF-DIAGONAL portion of the local submatrix (possibly different for 3387273d9f13SBarry Smith each row) or PETSC_NULL, if o_nz is used to specify the nonzero 3388273d9f13SBarry Smith structure. The size of this array is equal to the number 3389273d9f13SBarry Smith of local rows, i.e 'm'. 3390273d9f13SBarry Smith 339149a6f317SBarry Smith If the *_nnz parameter is given then the *_nz parameter is ignored 339249a6f317SBarry Smith 3393273d9f13SBarry Smith The AIJ format (also called the Yale sparse matrix format or 3394ccd8e176SBarry Smith compressed row storage (CSR)), is fully compatible with standard Fortran 77 3395ccd8e176SBarry Smith storage. The stored row and column indices begin with zero. See the users manual for details. 3396273d9f13SBarry Smith 3397273d9f13SBarry Smith The parallel matrix is partitioned such that the first m0 rows belong to 3398273d9f13SBarry Smith process 0, the next m1 rows belong to process 1, the next m2 rows belong 3399273d9f13SBarry Smith to process 2 etc.. where m0,m1,m2... are the input parameter 'm'. 
3400273d9f13SBarry Smith 3401273d9f13SBarry Smith The DIAGONAL portion of the local submatrix of a processor can be defined 3402273d9f13SBarry Smith as the submatrix which is obtained by extraction the part corresponding 3403273d9f13SBarry Smith to the rows r1-r2 and columns r1-r2 of the global matrix, where r1 is the 3404273d9f13SBarry Smith first row that belongs to the processor, and r2 is the last row belonging 3405273d9f13SBarry Smith to the this processor. This is a square mxm matrix. The remaining portion 3406273d9f13SBarry Smith of the local submatrix (mxN) constitute the OFF-DIAGONAL portion. 3407273d9f13SBarry Smith 3408273d9f13SBarry Smith If o_nnz, d_nnz are specified, then o_nz, and d_nz are ignored. 3409273d9f13SBarry Smith 3410aa95bbe8SBarry Smith You can call MatGetInfo() to get information on how effective the preallocation was; 3411aa95bbe8SBarry Smith for example the fields mallocs,nz_allocated,nz_used,nz_unneeded; 3412aa95bbe8SBarry Smith You can also run with the option -info and look for messages with the string 3413aa95bbe8SBarry Smith malloc in them to see if additional memory allocation was needed. 3414aa95bbe8SBarry Smith 3415273d9f13SBarry Smith Example usage: 3416273d9f13SBarry Smith 3417273d9f13SBarry Smith Consider the following 8x8 matrix with 34 non-zero values, that is 3418273d9f13SBarry Smith assembled across 3 processors. Lets assume that proc0 owns 3 rows, 3419273d9f13SBarry Smith proc1 owns 3 rows, proc2 owns 2 rows. 
This division can be shown 3420273d9f13SBarry Smith as follows: 3421273d9f13SBarry Smith 3422273d9f13SBarry Smith .vb 3423273d9f13SBarry Smith 1 2 0 | 0 3 0 | 0 4 3424273d9f13SBarry Smith Proc0 0 5 6 | 7 0 0 | 8 0 3425273d9f13SBarry Smith 9 0 10 | 11 0 0 | 12 0 3426273d9f13SBarry Smith ------------------------------------- 3427273d9f13SBarry Smith 13 0 14 | 15 16 17 | 0 0 3428273d9f13SBarry Smith Proc1 0 18 0 | 19 20 21 | 0 0 3429273d9f13SBarry Smith 0 0 0 | 22 23 0 | 24 0 3430273d9f13SBarry Smith ------------------------------------- 3431273d9f13SBarry Smith Proc2 25 26 27 | 0 0 28 | 29 0 3432273d9f13SBarry Smith 30 0 0 | 31 32 33 | 0 34 3433273d9f13SBarry Smith .ve 3434273d9f13SBarry Smith 3435273d9f13SBarry Smith This can be represented as a collection of submatrices as: 3436273d9f13SBarry Smith 3437273d9f13SBarry Smith .vb 3438273d9f13SBarry Smith A B C 3439273d9f13SBarry Smith D E F 3440273d9f13SBarry Smith G H I 3441273d9f13SBarry Smith .ve 3442273d9f13SBarry Smith 3443273d9f13SBarry Smith Where the submatrices A,B,C are owned by proc0, D,E,F are 3444273d9f13SBarry Smith owned by proc1, G,H,I are owned by proc2. 3445273d9f13SBarry Smith 3446273d9f13SBarry Smith The 'm' parameters for proc0,proc1,proc2 are 3,3,2 respectively. 3447273d9f13SBarry Smith The 'n' parameters for proc0,proc1,proc2 are 3,3,2 respectively. 3448273d9f13SBarry Smith The 'M','N' parameters are 8,8, and have the same values on all procs. 3449273d9f13SBarry Smith 3450273d9f13SBarry Smith The DIAGONAL submatrices corresponding to proc0,proc1,proc2 are 3451273d9f13SBarry Smith submatrices [A], [E], [I] respectively. The OFF-DIAGONAL submatrices 3452273d9f13SBarry Smith corresponding to proc0,proc1,proc2 are [BC], [DF], [GH] respectively. 3453273d9f13SBarry Smith Internally, each processor stores the DIAGONAL part, and the OFF-DIAGONAL 3454273d9f13SBarry Smith part as SeqAIJ matrices. for eg: proc1 will store [E] as a SeqAIJ 3455273d9f13SBarry Smith matrix, ans [DF] as another SeqAIJ matrix. 
3456273d9f13SBarry Smith 3457273d9f13SBarry Smith When d_nz, o_nz parameters are specified, d_nz storage elements are 3458273d9f13SBarry Smith allocated for every row of the local diagonal submatrix, and o_nz 3459273d9f13SBarry Smith storage locations are allocated for every row of the OFF-DIAGONAL submat. 3460273d9f13SBarry Smith One way to choose d_nz and o_nz is to use the max nonzeros per local 3461273d9f13SBarry Smith rows for each of the local DIAGONAL, and the OFF-DIAGONAL submatrices. 3462273d9f13SBarry Smith In this case, the values of d_nz,o_nz are: 3463273d9f13SBarry Smith .vb 3464273d9f13SBarry Smith proc0 : dnz = 2, o_nz = 2 3465273d9f13SBarry Smith proc1 : dnz = 3, o_nz = 2 3466273d9f13SBarry Smith proc2 : dnz = 1, o_nz = 4 3467273d9f13SBarry Smith .ve 3468273d9f13SBarry Smith We are allocating m*(d_nz+o_nz) storage locations for every proc. This 3469273d9f13SBarry Smith translates to 3*(2+2)=12 for proc0, 3*(3+2)=15 for proc1, 2*(1+4)=10 3470273d9f13SBarry Smith for proc2. i.e we are using 12+15+10=37 storage locations to store 3471273d9f13SBarry Smith 34 values. 3472273d9f13SBarry Smith 3473273d9f13SBarry Smith When d_nnz, o_nnz parameters are specified, the storage is specified 3474273d9f13SBarry Smith for every row, corresponding to both DIAGONAL and OFF-DIAGONAL submatrices. 3475273d9f13SBarry Smith In the above case the values for d_nnz,o_nnz are: 3476273d9f13SBarry Smith .vb 3477273d9f13SBarry Smith proc0: d_nnz = [2,2,2] and o_nnz = [2,2,2] 3478273d9f13SBarry Smith proc1: d_nnz = [3,3,2] and o_nnz = [2,1,1] 3479273d9f13SBarry Smith proc2: d_nnz = [1,1] and o_nnz = [4,4] 3480273d9f13SBarry Smith .ve 3481273d9f13SBarry Smith Here the space allocated is sum of all the above values i.e 34, and 3482273d9f13SBarry Smith hence pre-allocation is perfect. 
3483273d9f13SBarry Smith 3484273d9f13SBarry Smith Level: intermediate 3485273d9f13SBarry Smith 3486273d9f13SBarry Smith .keywords: matrix, aij, compressed row, sparse, parallel 3487273d9f13SBarry Smith 3488ccd8e176SBarry Smith .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatCreateMPIAIJ(), MatMPIAIJSetPreallocationCSR(), 3489aa95bbe8SBarry Smith MPIAIJ, MatGetInfo() 3490273d9f13SBarry Smith @*/ 3491be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatMPIAIJSetPreallocation(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[]) 3492273d9f13SBarry Smith { 3493b1d57f15SBarry Smith PetscErrorCode ierr,(*f)(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]); 3494273d9f13SBarry Smith 3495273d9f13SBarry Smith PetscFunctionBegin; 3496a23d5eceSKris Buschelman ierr = PetscObjectQueryFunction((PetscObject)B,"MatMPIAIJSetPreallocation_C",(void (**)(void))&f);CHKERRQ(ierr); 3497a23d5eceSKris Buschelman if (f) { 3498a23d5eceSKris Buschelman ierr = (*f)(B,d_nz,d_nnz,o_nz,o_nnz);CHKERRQ(ierr); 3499273d9f13SBarry Smith } 3500273d9f13SBarry Smith PetscFunctionReturn(0); 3501273d9f13SBarry Smith } 3502273d9f13SBarry Smith 35034a2ae208SSatish Balay #undef __FUNCT__ 35042fb0ec9aSBarry Smith #define __FUNCT__ "MatCreateMPIAIJWithArrays" 350558d36128SBarry Smith /*@ 35062fb0ec9aSBarry Smith MatCreateMPIAIJWithArrays - creates a MPI AIJ matrix using arrays that contain in standard 35072fb0ec9aSBarry Smith CSR format the local rows. 35082fb0ec9aSBarry Smith 35092fb0ec9aSBarry Smith Collective on MPI_Comm 35102fb0ec9aSBarry Smith 35112fb0ec9aSBarry Smith Input Parameters: 35122fb0ec9aSBarry Smith + comm - MPI communicator 35132fb0ec9aSBarry Smith . m - number of local rows (Cannot be PETSC_DECIDE) 35142fb0ec9aSBarry Smith . n - This value should be the same as the local size used in creating the 35152fb0ec9aSBarry Smith x vector for the matrix-vector product y = Ax. 
(or PETSC_DECIDE to have 35162fb0ec9aSBarry Smith calculated if N is given) For square matrices n is almost always m. 35172fb0ec9aSBarry Smith . M - number of global rows (or PETSC_DETERMINE to have calculated if m is given) 35182fb0ec9aSBarry Smith . N - number of global columns (or PETSC_DETERMINE to have calculated if n is given) 35192fb0ec9aSBarry Smith . i - row indices 35202fb0ec9aSBarry Smith . j - column indices 35212fb0ec9aSBarry Smith - a - matrix values 35222fb0ec9aSBarry Smith 35232fb0ec9aSBarry Smith Output Parameter: 35242fb0ec9aSBarry Smith . mat - the matrix 352503bfb495SBarry Smith 35262fb0ec9aSBarry Smith Level: intermediate 35272fb0ec9aSBarry Smith 35282fb0ec9aSBarry Smith Notes: 35292fb0ec9aSBarry Smith The i, j, and a arrays ARE copied by this routine into the internal format used by PETSc; 35302fb0ec9aSBarry Smith thus you CANNOT change the matrix entries by changing the values of a[] after you have 35318d7a6e47SBarry Smith called this routine. Use MatCreateMPIAIJWithSplitArrays() to avoid needing to copy the arrays. 35322fb0ec9aSBarry Smith 353312251496SSatish Balay The i and j indices are 0 based, and i indices are indices corresponding to the local j array. 353412251496SSatish Balay 353512251496SSatish Balay The format which is used for the sparse matrix input, is equivalent to a 353612251496SSatish Balay row-major ordering.. 
i.e for the following matrix, the input data expected is 353712251496SSatish Balay as shown: 353812251496SSatish Balay 353912251496SSatish Balay 1 0 0 354012251496SSatish Balay 2 0 3 P0 354112251496SSatish Balay ------- 354212251496SSatish Balay 4 5 6 P1 354312251496SSatish Balay 354412251496SSatish Balay Process0 [P0]: rows_owned=[0,1] 354512251496SSatish Balay i = {0,1,3} [size = nrow+1 = 2+1] 354612251496SSatish Balay j = {0,0,2} [size = nz = 6] 354712251496SSatish Balay v = {1,2,3} [size = nz = 6] 354812251496SSatish Balay 354912251496SSatish Balay Process1 [P1]: rows_owned=[2] 355012251496SSatish Balay i = {0,3} [size = nrow+1 = 1+1] 355112251496SSatish Balay j = {0,1,2} [size = nz = 6] 355212251496SSatish Balay v = {4,5,6} [size = nz = 6] 35532fb0ec9aSBarry Smith 35542fb0ec9aSBarry Smith .keywords: matrix, aij, compressed row, sparse, parallel 35552fb0ec9aSBarry Smith 35562fb0ec9aSBarry Smith .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIAIJSetPreallocation(), MatMPIAIJSetPreallocationCSR(), 35578d7a6e47SBarry Smith MPIAIJ, MatCreateMPIAIJ(), MatCreateMPIAIJWithSplitArrays() 35582fb0ec9aSBarry Smith @*/ 355982b90586SSatish Balay PetscErrorCode PETSCMAT_DLLEXPORT MatCreateMPIAIJWithArrays(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt i[],const PetscInt j[],const PetscScalar a[],Mat *mat) 35602fb0ec9aSBarry Smith { 35612fb0ec9aSBarry Smith PetscErrorCode ierr; 35622fb0ec9aSBarry Smith 35632fb0ec9aSBarry Smith PetscFunctionBegin; 35642fb0ec9aSBarry Smith if (i[0]) { 35652fb0ec9aSBarry Smith SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"i (row indices) must start with 0"); 35662fb0ec9aSBarry Smith } 35672fb0ec9aSBarry Smith if (m < 0) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"local number of rows (m) cannot be PETSC_DECIDE, or negative"); 35682fb0ec9aSBarry Smith ierr = MatCreate(comm,mat);CHKERRQ(ierr); 3569d4146a68SBarry Smith ierr = MatSetSizes(*mat,m,n,M,N);CHKERRQ(ierr); 35702fb0ec9aSBarry Smith ierr = 
MatSetType(*mat,MATMPIAIJ);CHKERRQ(ierr); 35712fb0ec9aSBarry Smith ierr = MatMPIAIJSetPreallocationCSR(*mat,i,j,a);CHKERRQ(ierr); 35722fb0ec9aSBarry Smith PetscFunctionReturn(0); 35732fb0ec9aSBarry Smith } 35742fb0ec9aSBarry Smith 35752fb0ec9aSBarry Smith #undef __FUNCT__ 35764a2ae208SSatish Balay #define __FUNCT__ "MatCreateMPIAIJ" 3577273d9f13SBarry Smith /*@C 3578273d9f13SBarry Smith MatCreateMPIAIJ - Creates a sparse parallel matrix in AIJ format 3579273d9f13SBarry Smith (the default parallel PETSc format). For good matrix assembly performance 3580273d9f13SBarry Smith the user should preallocate the matrix storage by setting the parameters 3581273d9f13SBarry Smith d_nz (or d_nnz) and o_nz (or o_nnz). By setting these parameters accurately, 3582273d9f13SBarry Smith performance can be increased by more than a factor of 50. 3583273d9f13SBarry Smith 3584273d9f13SBarry Smith Collective on MPI_Comm 3585273d9f13SBarry Smith 3586273d9f13SBarry Smith Input Parameters: 3587273d9f13SBarry Smith + comm - MPI communicator 3588273d9f13SBarry Smith . m - number of local rows (or PETSC_DECIDE to have calculated if M is given) 3589273d9f13SBarry Smith This value should be the same as the local size used in creating the 3590273d9f13SBarry Smith y vector for the matrix-vector product y = Ax. 3591273d9f13SBarry Smith . n - This value should be the same as the local size used in creating the 3592273d9f13SBarry Smith x vector for the matrix-vector product y = Ax. (or PETSC_DECIDE to have 3593273d9f13SBarry Smith calculated if N is given) For square matrices n is almost always m. 3594273d9f13SBarry Smith . M - number of global rows (or PETSC_DETERMINE to have calculated if m is given) 3595273d9f13SBarry Smith . N - number of global columns (or PETSC_DETERMINE to have calculated if n is given) 3596273d9f13SBarry Smith . d_nz - number of nonzeros per row in DIAGONAL portion of local submatrix 3597273d9f13SBarry Smith (same value is used for all local rows) 3598273d9f13SBarry Smith . 
d_nnz - array containing the number of nonzeros in the various rows of the 3599273d9f13SBarry Smith DIAGONAL portion of the local submatrix (possibly different for each row) 3600273d9f13SBarry Smith or PETSC_NULL, if d_nz is used to specify the nonzero structure. 3601273d9f13SBarry Smith The size of this array is equal to the number of local rows, i.e 'm'. 3602273d9f13SBarry Smith You must leave room for the diagonal entry even if it is zero. 3603273d9f13SBarry Smith . o_nz - number of nonzeros per row in the OFF-DIAGONAL portion of local 3604273d9f13SBarry Smith submatrix (same value is used for all local rows). 3605273d9f13SBarry Smith - o_nnz - array containing the number of nonzeros in the various rows of the 3606273d9f13SBarry Smith OFF-DIAGONAL portion of the local submatrix (possibly different for 3607273d9f13SBarry Smith each row) or PETSC_NULL, if o_nz is used to specify the nonzero 3608273d9f13SBarry Smith structure. The size of this array is equal to the number 3609273d9f13SBarry Smith of local rows, i.e 'm'. 3610273d9f13SBarry Smith 3611273d9f13SBarry Smith Output Parameter: 3612273d9f13SBarry Smith . A - the matrix 3613273d9f13SBarry Smith 3614175b88e8SBarry Smith It is recommended that one use the MatCreate(), MatSetType() and/or MatSetFromOptions(), 3615ae1d86c5SBarry Smith MatXXXXSetPreallocation() paradigm instead of this routine directly. 3616175b88e8SBarry Smith [MatXXXXSetPreallocation() is, for example, MatSeqAIJSetPreallocation] 3617175b88e8SBarry Smith 3618273d9f13SBarry Smith Notes: 361949a6f317SBarry Smith If the *_nnz parameter is given then the *_nz parameter is ignored 362049a6f317SBarry Smith 3621273d9f13SBarry Smith m,n,M,N parameters specify the size of the matrix, and its partitioning across 3622273d9f13SBarry Smith processors, while d_nz,d_nnz,o_nz,o_nnz parameters specify the approximate 3623273d9f13SBarry Smith storage requirements for this matrix. 
3624273d9f13SBarry Smith 3625273d9f13SBarry Smith If PETSC_DECIDE or PETSC_DETERMINE is used for a particular argument on one 3626273d9f13SBarry Smith processor then it must be used on all processors that share the object for 3627273d9f13SBarry Smith that argument. 3628273d9f13SBarry Smith 3629273d9f13SBarry Smith The user MUST specify either the local or global matrix dimensions 3630273d9f13SBarry Smith (possibly both). 3631273d9f13SBarry Smith 363233a7c187SSatish Balay The parallel matrix is partitioned across processors such that the 363333a7c187SSatish Balay first m0 rows belong to process 0, the next m1 rows belong to 363433a7c187SSatish Balay process 1, the next m2 rows belong to process 2 etc.. where 363533a7c187SSatish Balay m0,m1,m2,.. are the input parameter 'm'. i.e each processor stores 363633a7c187SSatish Balay values corresponding to [m x N] submatrix. 3637273d9f13SBarry Smith 363833a7c187SSatish Balay The columns are logically partitioned with the n0 columns belonging 363933a7c187SSatish Balay to 0th partition, the next n1 columns belonging to the next 364033a7c187SSatish Balay partition etc.. where n0,n1,n2... are the input parameter 'n'. 364133a7c187SSatish Balay 364233a7c187SSatish Balay The DIAGONAL portion of the local submatrix on any given processor 364333a7c187SSatish Balay is the submatrix corresponding to the rows and columns m,n 364433a7c187SSatish Balay corresponding to the given processor. i.e diagonal matrix on 364533a7c187SSatish Balay process 0 is [m0 x n0], diagonal matrix on process 1 is [m1 x n1] 364633a7c187SSatish Balay etc. The remaining portion of the local submatrix [m x (N-n)] 364733a7c187SSatish Balay constitute the OFF-DIAGONAL portion. The example below better 364833a7c187SSatish Balay illustrates this concept. 
364933a7c187SSatish Balay 365033a7c187SSatish Balay For a square global matrix we define each processor's diagonal portion 365133a7c187SSatish Balay to be its local rows and the corresponding columns (a square submatrix); 365233a7c187SSatish Balay each processor's off-diagonal portion encompasses the remainder of the 365333a7c187SSatish Balay local matrix (a rectangular submatrix). 3654273d9f13SBarry Smith 3655273d9f13SBarry Smith If o_nnz, d_nnz are specified, then o_nz, and d_nz are ignored. 3656273d9f13SBarry Smith 365797d05335SKris Buschelman When calling this routine with a single process communicator, a matrix of 365897d05335SKris Buschelman type SEQAIJ is returned. If a matrix of type MPIAIJ is desired for this 365997d05335SKris Buschelman type of communicator, use the construction mechanism: 366078102f6cSMatthew Knepley MatCreate(...,&A); MatSetType(A,MATMPIAIJ); MatSetSizes(A, m,n,M,N); MatMPIAIJSetPreallocation(A,...); 366197d05335SKris Buschelman 3662273d9f13SBarry Smith By default, this format uses inodes (identical nodes) when possible. 3663273d9f13SBarry Smith We search for consecutive rows with the same nonzero structure, thereby 3664273d9f13SBarry Smith reusing matrix information to achieve increased efficiency. 3665273d9f13SBarry Smith 3666273d9f13SBarry Smith Options Database Keys: 3667923f20ffSKris Buschelman + -mat_no_inode - Do not use inodes 3668923f20ffSKris Buschelman . -mat_inode_limit <limit> - Sets inode limit (max limit=5) 3669273d9f13SBarry Smith - -mat_aij_oneindex - Internally use indexing starting at 1 3670273d9f13SBarry Smith rather than 0. Note that when calling MatSetValues(), 3671273d9f13SBarry Smith the user still MUST index entries starting at 0! 3672273d9f13SBarry Smith 3673273d9f13SBarry Smith 3674273d9f13SBarry Smith Example usage: 3675273d9f13SBarry Smith 3676273d9f13SBarry Smith Consider the following 8x8 matrix with 34 non-zero values, that is 3677273d9f13SBarry Smith assembled across 3 processors. 
Lets assume that proc0 owns 3 rows, 3678273d9f13SBarry Smith proc1 owns 3 rows, proc2 owns 2 rows. This division can be shown 3679273d9f13SBarry Smith as follows: 3680273d9f13SBarry Smith 3681273d9f13SBarry Smith .vb 3682273d9f13SBarry Smith 1 2 0 | 0 3 0 | 0 4 3683273d9f13SBarry Smith Proc0 0 5 6 | 7 0 0 | 8 0 3684273d9f13SBarry Smith 9 0 10 | 11 0 0 | 12 0 3685273d9f13SBarry Smith ------------------------------------- 3686273d9f13SBarry Smith 13 0 14 | 15 16 17 | 0 0 3687273d9f13SBarry Smith Proc1 0 18 0 | 19 20 21 | 0 0 3688273d9f13SBarry Smith 0 0 0 | 22 23 0 | 24 0 3689273d9f13SBarry Smith ------------------------------------- 3690273d9f13SBarry Smith Proc2 25 26 27 | 0 0 28 | 29 0 3691273d9f13SBarry Smith 30 0 0 | 31 32 33 | 0 34 3692273d9f13SBarry Smith .ve 3693273d9f13SBarry Smith 3694273d9f13SBarry Smith This can be represented as a collection of submatrices as: 3695273d9f13SBarry Smith 3696273d9f13SBarry Smith .vb 3697273d9f13SBarry Smith A B C 3698273d9f13SBarry Smith D E F 3699273d9f13SBarry Smith G H I 3700273d9f13SBarry Smith .ve 3701273d9f13SBarry Smith 3702273d9f13SBarry Smith Where the submatrices A,B,C are owned by proc0, D,E,F are 3703273d9f13SBarry Smith owned by proc1, G,H,I are owned by proc2. 3704273d9f13SBarry Smith 3705273d9f13SBarry Smith The 'm' parameters for proc0,proc1,proc2 are 3,3,2 respectively. 3706273d9f13SBarry Smith The 'n' parameters for proc0,proc1,proc2 are 3,3,2 respectively. 3707273d9f13SBarry Smith The 'M','N' parameters are 8,8, and have the same values on all procs. 3708273d9f13SBarry Smith 3709273d9f13SBarry Smith The DIAGONAL submatrices corresponding to proc0,proc1,proc2 are 3710273d9f13SBarry Smith submatrices [A], [E], [I] respectively. The OFF-DIAGONAL submatrices 3711273d9f13SBarry Smith corresponding to proc0,proc1,proc2 are [BC], [DF], [GH] respectively. 3712273d9f13SBarry Smith Internally, each processor stores the DIAGONAL part, and the OFF-DIAGONAL 3713273d9f13SBarry Smith part as SeqAIJ matrices. 
for eg: proc1 will store [E] as a SeqAIJ 3714273d9f13SBarry Smith matrix, and [DF] as another SeqAIJ matrix. 3715273d9f13SBarry Smith 3716273d9f13SBarry Smith When d_nz, o_nz parameters are specified, d_nz storage elements are 3717273d9f13SBarry Smith allocated for every row of the local diagonal submatrix, and o_nz 3718273d9f13SBarry Smith storage locations are allocated for every row of the OFF-DIAGONAL submat. 3719273d9f13SBarry Smith One way to choose d_nz and o_nz is to use the max nonzeros per local 3720273d9f13SBarry Smith rows for each of the local DIAGONAL, and the OFF-DIAGONAL submatrices. 3721273d9f13SBarry Smith In this case, the values of d_nz,o_nz are: 3722273d9f13SBarry Smith .vb 3723273d9f13SBarry Smith proc0 : dnz = 2, o_nz = 2 3724273d9f13SBarry Smith proc1 : dnz = 3, o_nz = 2 3725273d9f13SBarry Smith proc2 : dnz = 1, o_nz = 4 3726273d9f13SBarry Smith .ve 3727273d9f13SBarry Smith We are allocating m*(d_nz+o_nz) storage locations for every proc. This 3728273d9f13SBarry Smith translates to 3*(2+2)=12 for proc0, 3*(3+2)=15 for proc1, 2*(1+4)=10 3729273d9f13SBarry Smith for proc2. i.e we are using 12+15+10=37 storage locations to store 3730273d9f13SBarry Smith 34 values. 3731273d9f13SBarry Smith 3732273d9f13SBarry Smith When d_nnz, o_nnz parameters are specified, the storage is specified 3733273d9f13SBarry Smith for every row, corresponding to both DIAGONAL and OFF-DIAGONAL submatrices. 3734273d9f13SBarry Smith In the above case the values for d_nnz,o_nnz are: 3735273d9f13SBarry Smith .vb 3736273d9f13SBarry Smith proc0: d_nnz = [2,2,2] and o_nnz = [2,2,2] 3737273d9f13SBarry Smith proc1: d_nnz = [3,3,2] and o_nnz = [2,1,1] 3738273d9f13SBarry Smith proc2: d_nnz = [1,1] and o_nnz = [4,4] 3739273d9f13SBarry Smith .ve 3740273d9f13SBarry Smith Here the space allocated is sum of all the above values i.e 34, and 3741273d9f13SBarry Smith hence pre-allocation is perfect. 
3742273d9f13SBarry Smith 3743273d9f13SBarry Smith Level: intermediate 3744273d9f13SBarry Smith 3745273d9f13SBarry Smith .keywords: matrix, aij, compressed row, sparse, parallel 3746273d9f13SBarry Smith 3747ccd8e176SBarry Smith .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIAIJSetPreallocation(), MatMPIAIJSetPreallocationCSR(), 37482fb0ec9aSBarry Smith MPIAIJ, MatCreateMPIAIJWithArrays() 3749273d9f13SBarry Smith @*/ 3750be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatCreateMPIAIJ(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[],Mat *A) 3751273d9f13SBarry Smith { 37526849ba73SBarry Smith PetscErrorCode ierr; 3753b1d57f15SBarry Smith PetscMPIInt size; 3754273d9f13SBarry Smith 3755273d9f13SBarry Smith PetscFunctionBegin; 3756f69a0ea3SMatthew Knepley ierr = MatCreate(comm,A);CHKERRQ(ierr); 3757f69a0ea3SMatthew Knepley ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr); 3758273d9f13SBarry Smith ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 3759273d9f13SBarry Smith if (size > 1) { 3760273d9f13SBarry Smith ierr = MatSetType(*A,MATMPIAIJ);CHKERRQ(ierr); 3761273d9f13SBarry Smith ierr = MatMPIAIJSetPreallocation(*A,d_nz,d_nnz,o_nz,o_nnz);CHKERRQ(ierr); 3762273d9f13SBarry Smith } else { 3763273d9f13SBarry Smith ierr = MatSetType(*A,MATSEQAIJ);CHKERRQ(ierr); 3764273d9f13SBarry Smith ierr = MatSeqAIJSetPreallocation(*A,d_nz,d_nnz);CHKERRQ(ierr); 3765273d9f13SBarry Smith } 3766273d9f13SBarry Smith PetscFunctionReturn(0); 3767273d9f13SBarry Smith } 3768195d93cdSBarry Smith 37694a2ae208SSatish Balay #undef __FUNCT__ 37704a2ae208SSatish Balay #define __FUNCT__ "MatMPIAIJGetSeqAIJ" 3771be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatMPIAIJGetSeqAIJ(Mat A,Mat *Ad,Mat *Ao,PetscInt *colmap[]) 3772195d93cdSBarry Smith { 3773195d93cdSBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ *)A->data; 3774b1d57f15SBarry Smith 3775195d93cdSBarry Smith PetscFunctionBegin; 
3776195d93cdSBarry Smith *Ad = a->A; 3777195d93cdSBarry Smith *Ao = a->B; 3778195d93cdSBarry Smith *colmap = a->garray; 3779195d93cdSBarry Smith PetscFunctionReturn(0); 3780195d93cdSBarry Smith } 3781a2243be0SBarry Smith 3782a2243be0SBarry Smith #undef __FUNCT__ 3783a2243be0SBarry Smith #define __FUNCT__ "MatSetColoring_MPIAIJ" 3784dfbe8321SBarry Smith PetscErrorCode MatSetColoring_MPIAIJ(Mat A,ISColoring coloring) 3785a2243be0SBarry Smith { 3786dfbe8321SBarry Smith PetscErrorCode ierr; 3787b1d57f15SBarry Smith PetscInt i; 3788a2243be0SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 3789a2243be0SBarry Smith 3790a2243be0SBarry Smith PetscFunctionBegin; 37918ee2e534SBarry Smith if (coloring->ctype == IS_COLORING_GLOBAL) { 379208b6dcc0SBarry Smith ISColoringValue *allcolors,*colors; 3793a2243be0SBarry Smith ISColoring ocoloring; 3794a2243be0SBarry Smith 3795a2243be0SBarry Smith /* set coloring for diagonal portion */ 3796a2243be0SBarry Smith ierr = MatSetColoring_SeqAIJ(a->A,coloring);CHKERRQ(ierr); 3797a2243be0SBarry Smith 3798a2243be0SBarry Smith /* set coloring for off-diagonal portion */ 37997adad957SLisandro Dalcin ierr = ISAllGatherColors(((PetscObject)A)->comm,coloring->n,coloring->colors,PETSC_NULL,&allcolors);CHKERRQ(ierr); 3800d0f46423SBarry Smith ierr = PetscMalloc((a->B->cmap->n+1)*sizeof(ISColoringValue),&colors);CHKERRQ(ierr); 3801d0f46423SBarry Smith for (i=0; i<a->B->cmap->n; i++) { 3802a2243be0SBarry Smith colors[i] = allcolors[a->garray[i]]; 3803a2243be0SBarry Smith } 3804a2243be0SBarry Smith ierr = PetscFree(allcolors);CHKERRQ(ierr); 3805d0f46423SBarry Smith ierr = ISColoringCreate(MPI_COMM_SELF,coloring->n,a->B->cmap->n,colors,&ocoloring);CHKERRQ(ierr); 3806a2243be0SBarry Smith ierr = MatSetColoring_SeqAIJ(a->B,ocoloring);CHKERRQ(ierr); 3807a2243be0SBarry Smith ierr = ISColoringDestroy(ocoloring);CHKERRQ(ierr); 3808a2243be0SBarry Smith } else if (coloring->ctype == IS_COLORING_GHOSTED) { 380908b6dcc0SBarry Smith ISColoringValue *colors; 
3810b1d57f15SBarry Smith PetscInt *larray; 3811a2243be0SBarry Smith ISColoring ocoloring; 3812a2243be0SBarry Smith 3813a2243be0SBarry Smith /* set coloring for diagonal portion */ 3814d0f46423SBarry Smith ierr = PetscMalloc((a->A->cmap->n+1)*sizeof(PetscInt),&larray);CHKERRQ(ierr); 3815d0f46423SBarry Smith for (i=0; i<a->A->cmap->n; i++) { 3816d0f46423SBarry Smith larray[i] = i + A->cmap->rstart; 3817a2243be0SBarry Smith } 3818d0f46423SBarry Smith ierr = ISGlobalToLocalMappingApply(A->mapping,IS_GTOLM_MASK,a->A->cmap->n,larray,PETSC_NULL,larray);CHKERRQ(ierr); 3819d0f46423SBarry Smith ierr = PetscMalloc((a->A->cmap->n+1)*sizeof(ISColoringValue),&colors);CHKERRQ(ierr); 3820d0f46423SBarry Smith for (i=0; i<a->A->cmap->n; i++) { 3821a2243be0SBarry Smith colors[i] = coloring->colors[larray[i]]; 3822a2243be0SBarry Smith } 3823a2243be0SBarry Smith ierr = PetscFree(larray);CHKERRQ(ierr); 3824d0f46423SBarry Smith ierr = ISColoringCreate(PETSC_COMM_SELF,coloring->n,a->A->cmap->n,colors,&ocoloring);CHKERRQ(ierr); 3825a2243be0SBarry Smith ierr = MatSetColoring_SeqAIJ(a->A,ocoloring);CHKERRQ(ierr); 3826a2243be0SBarry Smith ierr = ISColoringDestroy(ocoloring);CHKERRQ(ierr); 3827a2243be0SBarry Smith 3828a2243be0SBarry Smith /* set coloring for off-diagonal portion */ 3829d0f46423SBarry Smith ierr = PetscMalloc((a->B->cmap->n+1)*sizeof(PetscInt),&larray);CHKERRQ(ierr); 3830d0f46423SBarry Smith ierr = ISGlobalToLocalMappingApply(A->mapping,IS_GTOLM_MASK,a->B->cmap->n,a->garray,PETSC_NULL,larray);CHKERRQ(ierr); 3831d0f46423SBarry Smith ierr = PetscMalloc((a->B->cmap->n+1)*sizeof(ISColoringValue),&colors);CHKERRQ(ierr); 3832d0f46423SBarry Smith for (i=0; i<a->B->cmap->n; i++) { 3833a2243be0SBarry Smith colors[i] = coloring->colors[larray[i]]; 3834a2243be0SBarry Smith } 3835a2243be0SBarry Smith ierr = PetscFree(larray);CHKERRQ(ierr); 3836d0f46423SBarry Smith ierr = ISColoringCreate(MPI_COMM_SELF,coloring->n,a->B->cmap->n,colors,&ocoloring);CHKERRQ(ierr); 3837a2243be0SBarry Smith ierr 
= MatSetColoring_SeqAIJ(a->B,ocoloring);CHKERRQ(ierr); 3838a2243be0SBarry Smith ierr = ISColoringDestroy(ocoloring);CHKERRQ(ierr); 3839a2243be0SBarry Smith } else { 384077431f27SBarry Smith SETERRQ1(PETSC_ERR_SUP,"No support ISColoringType %d",(int)coloring->ctype); 3841a2243be0SBarry Smith } 3842a2243be0SBarry Smith 3843a2243be0SBarry Smith PetscFunctionReturn(0); 3844a2243be0SBarry Smith } 3845a2243be0SBarry Smith 3846dcf5cc72SBarry Smith #if defined(PETSC_HAVE_ADIC) 3847a2243be0SBarry Smith #undef __FUNCT__ 3848779c1a83SBarry Smith #define __FUNCT__ "MatSetValuesAdic_MPIAIJ" 3849dfbe8321SBarry Smith PetscErrorCode MatSetValuesAdic_MPIAIJ(Mat A,void *advalues) 3850a2243be0SBarry Smith { 3851a2243be0SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 3852dfbe8321SBarry Smith PetscErrorCode ierr; 3853a2243be0SBarry Smith 3854a2243be0SBarry Smith PetscFunctionBegin; 3855779c1a83SBarry Smith ierr = MatSetValuesAdic_SeqAIJ(a->A,advalues);CHKERRQ(ierr); 3856779c1a83SBarry Smith ierr = MatSetValuesAdic_SeqAIJ(a->B,advalues);CHKERRQ(ierr); 3857779c1a83SBarry Smith PetscFunctionReturn(0); 3858779c1a83SBarry Smith } 3859dcf5cc72SBarry Smith #endif 3860779c1a83SBarry Smith 3861779c1a83SBarry Smith #undef __FUNCT__ 3862779c1a83SBarry Smith #define __FUNCT__ "MatSetValuesAdifor_MPIAIJ" 3863b1d57f15SBarry Smith PetscErrorCode MatSetValuesAdifor_MPIAIJ(Mat A,PetscInt nl,void *advalues) 3864779c1a83SBarry Smith { 3865779c1a83SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 3866dfbe8321SBarry Smith PetscErrorCode ierr; 3867779c1a83SBarry Smith 3868779c1a83SBarry Smith PetscFunctionBegin; 3869779c1a83SBarry Smith ierr = MatSetValuesAdifor_SeqAIJ(a->A,nl,advalues);CHKERRQ(ierr); 3870779c1a83SBarry Smith ierr = MatSetValuesAdifor_SeqAIJ(a->B,nl,advalues);CHKERRQ(ierr); 3871a2243be0SBarry Smith PetscFunctionReturn(0); 3872a2243be0SBarry Smith } 3873c5d6d63eSBarry Smith 3874c5d6d63eSBarry Smith #undef __FUNCT__ 387551dd7536SBarry Smith #define __FUNCT__ "MatMerge" 3876bc08b0f1SBarry 
Smith /*@ 387751dd7536SBarry Smith MatMerge - Creates a single large PETSc matrix by concatenating sequential 387851dd7536SBarry Smith matrices from each processor 3879c5d6d63eSBarry Smith 3880c5d6d63eSBarry Smith Collective on MPI_Comm 3881c5d6d63eSBarry Smith 3882c5d6d63eSBarry Smith Input Parameters: 388351dd7536SBarry Smith + comm - the communicator the parallel matrix will live on 3884d6bb3c2dSHong Zhang . inmat - the input sequential matrices 38850e36024fSHong Zhang . n - number of local columns (or PETSC_DECIDE) 3886d6bb3c2dSHong Zhang - scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX 388751dd7536SBarry Smith 388751dd7536SBarry Smith Output Parameter: 388951dd7536SBarry Smith . outmat - the parallel matrix generated 3890c5d6d63eSBarry Smith 38917e25d530SSatish Balay Level: advanced 38927e25d530SSatish Balay 3893f08fae4eSHong Zhang Notes: The number of columns of the matrix in EACH processor MUST be the same. 3894c5d6d63eSBarry Smith 3895c5d6d63eSBarry Smith @*/ 3896be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatMerge(MPI_Comm comm,Mat inmat,PetscInt n,MatReuse scall,Mat *outmat) 3897c5d6d63eSBarry Smith { 3898dfbe8321SBarry Smith PetscErrorCode ierr; 3899b7940d39SSatish Balay PetscInt m,N,i,rstart,nnz,Ii,*dnz,*onz; 3900ba8c8a56SBarry Smith PetscInt *indx; 3901ba8c8a56SBarry Smith PetscScalar *values; 3902c5d6d63eSBarry Smith 3903c5d6d63eSBarry Smith PetscFunctionBegin; 39040e36024fSHong Zhang ierr = MatGetSize(inmat,&m,&N);CHKERRQ(ierr); 3905d6bb3c2dSHong Zhang if (scall == MAT_INITIAL_MATRIX){ 3906d6bb3c2dSHong Zhang /* count nonzeros in each row, for diagonal and off diagonal portion of matrix */ 39070e36024fSHong Zhang if (n == PETSC_DECIDE){ 3908357abbc8SBarry Smith ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr); 39090e36024fSHong Zhang } 3910357abbc8SBarry Smith ierr = MPI_Scan(&m, &rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr); 3911357abbc8SBarry Smith rstart -= m; 3912d6bb3c2dSHong Zhang 3913d6bb3c2dSHong Zhang ierr = 
MatPreallocateInitialize(comm,m,n,dnz,onz);CHKERRQ(ierr); 3914d6bb3c2dSHong Zhang for (i=0;i<m;i++) { 3915ba8c8a56SBarry Smith ierr = MatGetRow_SeqAIJ(inmat,i,&nnz,&indx,PETSC_NULL);CHKERRQ(ierr); 3916d6bb3c2dSHong Zhang ierr = MatPreallocateSet(i+rstart,nnz,indx,dnz,onz);CHKERRQ(ierr); 3917ba8c8a56SBarry Smith ierr = MatRestoreRow_SeqAIJ(inmat,i,&nnz,&indx,PETSC_NULL);CHKERRQ(ierr); 3918d6bb3c2dSHong Zhang } 3919d6bb3c2dSHong Zhang /* This routine will ONLY return MPIAIJ type matrix */ 3920f69a0ea3SMatthew Knepley ierr = MatCreate(comm,outmat);CHKERRQ(ierr); 3921f69a0ea3SMatthew Knepley ierr = MatSetSizes(*outmat,m,n,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr); 3922d6bb3c2dSHong Zhang ierr = MatSetType(*outmat,MATMPIAIJ);CHKERRQ(ierr); 3923d6bb3c2dSHong Zhang ierr = MatMPIAIJSetPreallocation(*outmat,0,dnz,0,onz);CHKERRQ(ierr); 3924d6bb3c2dSHong Zhang ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr); 3925d6bb3c2dSHong Zhang 3926d6bb3c2dSHong Zhang } else if (scall == MAT_REUSE_MATRIX){ 3927d6bb3c2dSHong Zhang ierr = MatGetOwnershipRange(*outmat,&rstart,PETSC_NULL);CHKERRQ(ierr); 3928d6bb3c2dSHong Zhang } else { 392977431f27SBarry Smith SETERRQ1(PETSC_ERR_ARG_WRONG,"Invalid MatReuse %d",(int)scall); 3930d6bb3c2dSHong Zhang } 3931d6bb3c2dSHong Zhang 3932d6bb3c2dSHong Zhang for (i=0;i<m;i++) { 3933ba8c8a56SBarry Smith ierr = MatGetRow_SeqAIJ(inmat,i,&nnz,&indx,&values);CHKERRQ(ierr); 3934b7940d39SSatish Balay Ii = i + rstart; 3935b7940d39SSatish Balay ierr = MatSetValues(*outmat,1,&Ii,nnz,indx,values,INSERT_VALUES);CHKERRQ(ierr); 3936ba8c8a56SBarry Smith ierr = MatRestoreRow_SeqAIJ(inmat,i,&nnz,&indx,&values);CHKERRQ(ierr); 3937d6bb3c2dSHong Zhang } 3938d6bb3c2dSHong Zhang ierr = MatDestroy(inmat);CHKERRQ(ierr); 3939d6bb3c2dSHong Zhang ierr = MatAssemblyBegin(*outmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 3940d6bb3c2dSHong Zhang ierr = MatAssemblyEnd(*outmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 394151dd7536SBarry Smith 3942c5d6d63eSBarry Smith PetscFunctionReturn(0); 
3943c5d6d63eSBarry Smith } 3944c5d6d63eSBarry Smith 3945c5d6d63eSBarry Smith #undef __FUNCT__ 3946c5d6d63eSBarry Smith #define __FUNCT__ "MatFileSplit" 3947dfbe8321SBarry Smith PetscErrorCode MatFileSplit(Mat A,char *outfile) 3948c5d6d63eSBarry Smith { 3949dfbe8321SBarry Smith PetscErrorCode ierr; 395032dcc486SBarry Smith PetscMPIInt rank; 3951b1d57f15SBarry Smith PetscInt m,N,i,rstart,nnz; 3952de4209c5SBarry Smith size_t len; 3953b1d57f15SBarry Smith const PetscInt *indx; 3954c5d6d63eSBarry Smith PetscViewer out; 3955c5d6d63eSBarry Smith char *name; 3956c5d6d63eSBarry Smith Mat B; 3957b3cc6726SBarry Smith const PetscScalar *values; 3958c5d6d63eSBarry Smith 3959c5d6d63eSBarry Smith PetscFunctionBegin; 3960c5d6d63eSBarry Smith ierr = MatGetLocalSize(A,&m,0);CHKERRQ(ierr); 3961c5d6d63eSBarry Smith ierr = MatGetSize(A,0,&N);CHKERRQ(ierr); 3962f204ca49SKris Buschelman /* Should this be the type of the diagonal block of A? */ 3963f69a0ea3SMatthew Knepley ierr = MatCreate(PETSC_COMM_SELF,&B);CHKERRQ(ierr); 3964f69a0ea3SMatthew Knepley ierr = MatSetSizes(B,m,N,m,N);CHKERRQ(ierr); 3965f204ca49SKris Buschelman ierr = MatSetType(B,MATSEQAIJ);CHKERRQ(ierr); 3966f204ca49SKris Buschelman ierr = MatSeqAIJSetPreallocation(B,0,PETSC_NULL);CHKERRQ(ierr); 3967c5d6d63eSBarry Smith ierr = MatGetOwnershipRange(A,&rstart,0);CHKERRQ(ierr); 3968c5d6d63eSBarry Smith for (i=0;i<m;i++) { 3969c5d6d63eSBarry Smith ierr = MatGetRow(A,i+rstart,&nnz,&indx,&values);CHKERRQ(ierr); 3970c5d6d63eSBarry Smith ierr = MatSetValues(B,1,&i,nnz,indx,values,INSERT_VALUES);CHKERRQ(ierr); 3971c5d6d63eSBarry Smith ierr = MatRestoreRow(A,i+rstart,&nnz,&indx,&values);CHKERRQ(ierr); 3972c5d6d63eSBarry Smith } 3973c5d6d63eSBarry Smith ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 3974c5d6d63eSBarry Smith ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 3975c5d6d63eSBarry Smith 39767adad957SLisandro Dalcin ierr = MPI_Comm_rank(((PetscObject)A)->comm,&rank);CHKERRQ(ierr); 3977c5d6d63eSBarry 
Smith ierr = PetscStrlen(outfile,&len);CHKERRQ(ierr); 3978c5d6d63eSBarry Smith ierr = PetscMalloc((len+5)*sizeof(char),&name);CHKERRQ(ierr); 3979c5d6d63eSBarry Smith sprintf(name,"%s.%d",outfile,rank); 3980852598b0SBarry Smith ierr = PetscViewerBinaryOpen(PETSC_COMM_SELF,name,FILE_MODE_APPEND,&out);CHKERRQ(ierr); 3981c5d6d63eSBarry Smith ierr = PetscFree(name); 3982c5d6d63eSBarry Smith ierr = MatView(B,out);CHKERRQ(ierr); 3983c5d6d63eSBarry Smith ierr = PetscViewerDestroy(out);CHKERRQ(ierr); 3984c5d6d63eSBarry Smith ierr = MatDestroy(B);CHKERRQ(ierr); 3985c5d6d63eSBarry Smith PetscFunctionReturn(0); 3986c5d6d63eSBarry Smith } 3987e5f2cdd8SHong Zhang 398851a7d1a8SHong Zhang EXTERN PetscErrorCode MatDestroy_MPIAIJ(Mat); 398951a7d1a8SHong Zhang #undef __FUNCT__ 399051a7d1a8SHong Zhang #define __FUNCT__ "MatDestroy_MPIAIJ_SeqsToMPI" 3991be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatDestroy_MPIAIJ_SeqsToMPI(Mat A) 399251a7d1a8SHong Zhang { 399351a7d1a8SHong Zhang PetscErrorCode ierr; 3994671beff6SHong Zhang Mat_Merge_SeqsToMPI *merge; 3995776b82aeSLisandro Dalcin PetscContainer container; 399651a7d1a8SHong Zhang 399751a7d1a8SHong Zhang PetscFunctionBegin; 3998671beff6SHong Zhang ierr = PetscObjectQuery((PetscObject)A,"MatMergeSeqsToMPI",(PetscObject *)&container);CHKERRQ(ierr); 3999671beff6SHong Zhang if (container) { 4000776b82aeSLisandro Dalcin ierr = PetscContainerGetPointer(container,(void **)&merge);CHKERRQ(ierr); 400151a7d1a8SHong Zhang ierr = PetscFree(merge->id_r);CHKERRQ(ierr); 40023e06a4e6SHong Zhang ierr = PetscFree(merge->len_s);CHKERRQ(ierr); 40033e06a4e6SHong Zhang ierr = PetscFree(merge->len_r);CHKERRQ(ierr); 400451a7d1a8SHong Zhang ierr = PetscFree(merge->bi);CHKERRQ(ierr); 400551a7d1a8SHong Zhang ierr = PetscFree(merge->bj);CHKERRQ(ierr); 400602c68681SHong Zhang ierr = PetscFree(merge->buf_ri);CHKERRQ(ierr); 400702c68681SHong Zhang ierr = PetscFree(merge->buf_rj);CHKERRQ(ierr); 400805b42c5fSBarry Smith ierr = 
PetscFree(merge->coi);CHKERRQ(ierr); 400905b42c5fSBarry Smith ierr = PetscFree(merge->coj);CHKERRQ(ierr); 401005b42c5fSBarry Smith ierr = PetscFree(merge->owners_co);CHKERRQ(ierr); 401126283091SBarry Smith ierr = PetscLayoutDestroy(merge->rowmap);CHKERRQ(ierr); 4012671beff6SHong Zhang 4013776b82aeSLisandro Dalcin ierr = PetscContainerDestroy(container);CHKERRQ(ierr); 4014671beff6SHong Zhang ierr = PetscObjectCompose((PetscObject)A,"MatMergeSeqsToMPI",0);CHKERRQ(ierr); 4015671beff6SHong Zhang } 401651a7d1a8SHong Zhang ierr = PetscFree(merge);CHKERRQ(ierr); 401751a7d1a8SHong Zhang 401851a7d1a8SHong Zhang ierr = MatDestroy_MPIAIJ(A);CHKERRQ(ierr); 401951a7d1a8SHong Zhang PetscFunctionReturn(0); 402051a7d1a8SHong Zhang } 402151a7d1a8SHong Zhang 40227c4f633dSBarry Smith #include "../src/mat/utils/freespace.h" 4023be0fcf8dSHong Zhang #include "petscbt.h" 40244ebed01fSBarry Smith 4025e5f2cdd8SHong Zhang #undef __FUNCT__ 402638f152feSBarry Smith #define __FUNCT__ "MatMerge_SeqsToMPINumeric" 4027e5f2cdd8SHong Zhang /*@C 4028f08fae4eSHong Zhang MatMerge_SeqsToMPI - Creates a MPIAIJ matrix by adding sequential 4029e5f2cdd8SHong Zhang matrices from each processor 4030e5f2cdd8SHong Zhang 4031e5f2cdd8SHong Zhang Collective on MPI_Comm 4032e5f2cdd8SHong Zhang 4033e5f2cdd8SHong Zhang Input Parameters: 4034e5f2cdd8SHong Zhang + comm - the communicators the parallel matrix will live on 4035f08fae4eSHong Zhang . seqmat - the input sequential matrices 40360e36024fSHong Zhang . m - number of local rows (or PETSC_DECIDE) 40370e36024fSHong Zhang . n - number of local columns (or PETSC_DECIDE) 4038e5f2cdd8SHong Zhang - scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX 4039e5f2cdd8SHong Zhang 4040e5f2cdd8SHong Zhang Output Parameter: 4041f08fae4eSHong Zhang . 
mpimat - the parallel matrix generated 4042e5f2cdd8SHong Zhang 4043e5f2cdd8SHong Zhang Level: advanced 4044e5f2cdd8SHong Zhang 4045affca5deSHong Zhang Notes: 4046affca5deSHong Zhang The dimensions of the sequential matrix in each processor MUST be the same. 4047affca5deSHong Zhang The input seqmat is included into the container "Mat_Merge_SeqsToMPI", and will be 4048affca5deSHong Zhang destroyed when mpimat is destroyed. Call PetscObjectQuery() to access seqmat. 4049e5f2cdd8SHong Zhang @*/ 4050be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatMerge_SeqsToMPINumeric(Mat seqmat,Mat mpimat) 405155d1abb9SHong Zhang { 405255d1abb9SHong Zhang PetscErrorCode ierr; 40537adad957SLisandro Dalcin MPI_Comm comm=((PetscObject)mpimat)->comm; 405455d1abb9SHong Zhang Mat_SeqAIJ *a=(Mat_SeqAIJ*)seqmat->data; 4055b1d57f15SBarry Smith PetscMPIInt size,rank,taga,*len_s; 4056d0f46423SBarry Smith PetscInt N=mpimat->cmap->N,i,j,*owners,*ai=a->i,*aj=a->j; 4057b1d57f15SBarry Smith PetscInt proc,m; 4058b1d57f15SBarry Smith PetscInt **buf_ri,**buf_rj; 4059b1d57f15SBarry Smith PetscInt k,anzi,*bj_i,*bi,*bj,arow,bnzi,nextaj; 4060b1d57f15SBarry Smith PetscInt nrows,**buf_ri_k,**nextrow,**nextai; 406155d1abb9SHong Zhang MPI_Request *s_waits,*r_waits; 406255d1abb9SHong Zhang MPI_Status *status; 4063a77337e4SBarry Smith MatScalar *aa=a->a; 4064dd6ea824SBarry Smith MatScalar **abuf_r,*ba_i; 406555d1abb9SHong Zhang Mat_Merge_SeqsToMPI *merge; 4066776b82aeSLisandro Dalcin PetscContainer container; 406755d1abb9SHong Zhang 406855d1abb9SHong Zhang PetscFunctionBegin; 40694ebed01fSBarry Smith ierr = PetscLogEventBegin(MAT_Seqstompinum,seqmat,0,0,0);CHKERRQ(ierr); 40703c2c1871SHong Zhang 407155d1abb9SHong Zhang ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 407255d1abb9SHong Zhang ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 407355d1abb9SHong Zhang 407455d1abb9SHong Zhang ierr = PetscObjectQuery((PetscObject)mpimat,"MatMergeSeqsToMPI",(PetscObject *)&container);CHKERRQ(ierr); 
407555d1abb9SHong Zhang if (container) { 4076776b82aeSLisandro Dalcin ierr = PetscContainerGetPointer(container,(void **)&merge);CHKERRQ(ierr); 407755d1abb9SHong Zhang } 407855d1abb9SHong Zhang bi = merge->bi; 407955d1abb9SHong Zhang bj = merge->bj; 408055d1abb9SHong Zhang buf_ri = merge->buf_ri; 408155d1abb9SHong Zhang buf_rj = merge->buf_rj; 408255d1abb9SHong Zhang 408355d1abb9SHong Zhang ierr = PetscMalloc(size*sizeof(MPI_Status),&status);CHKERRQ(ierr); 40847a2fc3feSBarry Smith owners = merge->rowmap->range; 408555d1abb9SHong Zhang len_s = merge->len_s; 408655d1abb9SHong Zhang 408755d1abb9SHong Zhang /* send and recv matrix values */ 408855d1abb9SHong Zhang /*-----------------------------*/ 4089357abbc8SBarry Smith ierr = PetscObjectGetNewTag((PetscObject)mpimat,&taga);CHKERRQ(ierr); 409055d1abb9SHong Zhang ierr = PetscPostIrecvScalar(comm,taga,merge->nrecv,merge->id_r,merge->len_r,&abuf_r,&r_waits);CHKERRQ(ierr); 409155d1abb9SHong Zhang 409255d1abb9SHong Zhang ierr = PetscMalloc((merge->nsend+1)*sizeof(MPI_Request),&s_waits);CHKERRQ(ierr); 409355d1abb9SHong Zhang for (proc=0,k=0; proc<size; proc++){ 409455d1abb9SHong Zhang if (!len_s[proc]) continue; 409555d1abb9SHong Zhang i = owners[proc]; 409655d1abb9SHong Zhang ierr = MPI_Isend(aa+ai[i],len_s[proc],MPIU_MATSCALAR,proc,taga,comm,s_waits+k);CHKERRQ(ierr); 409755d1abb9SHong Zhang k++; 409855d1abb9SHong Zhang } 409955d1abb9SHong Zhang 41000c468ba9SBarry Smith if (merge->nrecv) {ierr = MPI_Waitall(merge->nrecv,r_waits,status);CHKERRQ(ierr);} 41010c468ba9SBarry Smith if (merge->nsend) {ierr = MPI_Waitall(merge->nsend,s_waits,status);CHKERRQ(ierr);} 410255d1abb9SHong Zhang ierr = PetscFree(status);CHKERRQ(ierr); 410355d1abb9SHong Zhang 410455d1abb9SHong Zhang ierr = PetscFree(s_waits);CHKERRQ(ierr); 410555d1abb9SHong Zhang ierr = PetscFree(r_waits);CHKERRQ(ierr); 410655d1abb9SHong Zhang 410755d1abb9SHong Zhang /* insert mat values of mpimat */ 410855d1abb9SHong Zhang /*----------------------------*/ 
4109a77337e4SBarry Smith ierr = PetscMalloc(N*sizeof(PetscScalar),&ba_i);CHKERRQ(ierr); 4110b1d57f15SBarry Smith ierr = PetscMalloc((3*merge->nrecv+1)*sizeof(PetscInt**),&buf_ri_k);CHKERRQ(ierr); 411155d1abb9SHong Zhang nextrow = buf_ri_k + merge->nrecv; 411255d1abb9SHong Zhang nextai = nextrow + merge->nrecv; 411355d1abb9SHong Zhang 411455d1abb9SHong Zhang for (k=0; k<merge->nrecv; k++){ 411555d1abb9SHong Zhang buf_ri_k[k] = buf_ri[k]; /* beginning of k-th recved i-structure */ 411655d1abb9SHong Zhang nrows = *(buf_ri_k[k]); 411755d1abb9SHong Zhang nextrow[k] = buf_ri_k[k]+1; /* next row number of k-th recved i-structure */ 411855d1abb9SHong Zhang nextai[k] = buf_ri_k[k] + (nrows + 1);/* poins to the next i-structure of k-th recved i-structure */ 411955d1abb9SHong Zhang } 412055d1abb9SHong Zhang 412155d1abb9SHong Zhang /* set values of ba */ 41227a2fc3feSBarry Smith m = merge->rowmap->n; 412355d1abb9SHong Zhang for (i=0; i<m; i++) { 412455d1abb9SHong Zhang arow = owners[rank] + i; 412555d1abb9SHong Zhang bj_i = bj+bi[i]; /* col indices of the i-th row of mpimat */ 412655d1abb9SHong Zhang bnzi = bi[i+1] - bi[i]; 4127a77337e4SBarry Smith ierr = PetscMemzero(ba_i,bnzi*sizeof(PetscScalar));CHKERRQ(ierr); 412855d1abb9SHong Zhang 412955d1abb9SHong Zhang /* add local non-zero vals of this proc's seqmat into ba */ 413055d1abb9SHong Zhang anzi = ai[arow+1] - ai[arow]; 413155d1abb9SHong Zhang aj = a->j + ai[arow]; 413255d1abb9SHong Zhang aa = a->a + ai[arow]; 413355d1abb9SHong Zhang nextaj = 0; 413455d1abb9SHong Zhang for (j=0; nextaj<anzi; j++){ 413555d1abb9SHong Zhang if (*(bj_i + j) == aj[nextaj]){ /* bcol == acol */ 413655d1abb9SHong Zhang ba_i[j] += aa[nextaj++]; 413755d1abb9SHong Zhang } 413855d1abb9SHong Zhang } 413955d1abb9SHong Zhang 414055d1abb9SHong Zhang /* add received vals into ba */ 414155d1abb9SHong Zhang for (k=0; k<merge->nrecv; k++){ /* k-th received message */ 414255d1abb9SHong Zhang /* i-th row */ 414355d1abb9SHong Zhang if (i == *nextrow[k]) { 
414455d1abb9SHong Zhang anzi = *(nextai[k]+1) - *nextai[k]; 414555d1abb9SHong Zhang aj = buf_rj[k] + *(nextai[k]); 414655d1abb9SHong Zhang aa = abuf_r[k] + *(nextai[k]); 414755d1abb9SHong Zhang nextaj = 0; 414855d1abb9SHong Zhang for (j=0; nextaj<anzi; j++){ 414955d1abb9SHong Zhang if (*(bj_i + j) == aj[nextaj]){ /* bcol == acol */ 415055d1abb9SHong Zhang ba_i[j] += aa[nextaj++]; 415155d1abb9SHong Zhang } 415255d1abb9SHong Zhang } 415355d1abb9SHong Zhang nextrow[k]++; nextai[k]++; 415455d1abb9SHong Zhang } 415555d1abb9SHong Zhang } 415655d1abb9SHong Zhang ierr = MatSetValues(mpimat,1,&arow,bnzi,bj_i,ba_i,INSERT_VALUES);CHKERRQ(ierr); 415755d1abb9SHong Zhang } 415855d1abb9SHong Zhang ierr = MatAssemblyBegin(mpimat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 415955d1abb9SHong Zhang ierr = MatAssemblyEnd(mpimat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 416055d1abb9SHong Zhang 416155d1abb9SHong Zhang ierr = PetscFree(abuf_r);CHKERRQ(ierr); 416255d1abb9SHong Zhang ierr = PetscFree(ba_i);CHKERRQ(ierr); 416355d1abb9SHong Zhang ierr = PetscFree(buf_ri_k);CHKERRQ(ierr); 41644ebed01fSBarry Smith ierr = PetscLogEventEnd(MAT_Seqstompinum,seqmat,0,0,0);CHKERRQ(ierr); 416555d1abb9SHong Zhang PetscFunctionReturn(0); 416655d1abb9SHong Zhang } 416738f152feSBarry Smith 416838f152feSBarry Smith #undef __FUNCT__ 416938f152feSBarry Smith #define __FUNCT__ "MatMerge_SeqsToMPISymbolic" 4170be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatMerge_SeqsToMPISymbolic(MPI_Comm comm,Mat seqmat,PetscInt m,PetscInt n,Mat *mpimat) 4171e5f2cdd8SHong Zhang { 4172f08fae4eSHong Zhang PetscErrorCode ierr; 417355a3bba9SHong Zhang Mat B_mpi; 4174c2234fe3SHong Zhang Mat_SeqAIJ *a=(Mat_SeqAIJ*)seqmat->data; 4175b1d57f15SBarry Smith PetscMPIInt size,rank,tagi,tagj,*len_s,*len_si,*len_ri; 4176b1d57f15SBarry Smith PetscInt **buf_rj,**buf_ri,**buf_ri_k; 4177d0f46423SBarry Smith PetscInt M=seqmat->rmap->n,N=seqmat->cmap->n,i,*owners,*ai=a->i,*aj=a->j; 4178b1d57f15SBarry Smith PetscInt len,proc,*dnz,*onz; 
4179b1d57f15SBarry Smith PetscInt k,anzi,*bi,*bj,*lnk,nlnk,arow,bnzi,nspacedouble=0; 4180b1d57f15SBarry Smith PetscInt nrows,*buf_s,*buf_si,*buf_si_i,**nextrow,**nextai; 418155d1abb9SHong Zhang MPI_Request *si_waits,*sj_waits,*ri_waits,*rj_waits; 418258cb9c82SHong Zhang MPI_Status *status; 4183a1a86e44SBarry Smith PetscFreeSpaceList free_space=PETSC_NULL,current_space=PETSC_NULL; 4184be0fcf8dSHong Zhang PetscBT lnkbt; 418551a7d1a8SHong Zhang Mat_Merge_SeqsToMPI *merge; 4186776b82aeSLisandro Dalcin PetscContainer container; 418702c68681SHong Zhang 4188e5f2cdd8SHong Zhang PetscFunctionBegin; 41894ebed01fSBarry Smith ierr = PetscLogEventBegin(MAT_Seqstompisym,seqmat,0,0,0);CHKERRQ(ierr); 41903c2c1871SHong Zhang 419138f152feSBarry Smith /* make sure it is a PETSc comm */ 419238f152feSBarry Smith ierr = PetscCommDuplicate(comm,&comm,PETSC_NULL);CHKERRQ(ierr); 4193e5f2cdd8SHong Zhang ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 4194e5f2cdd8SHong Zhang ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 419555d1abb9SHong Zhang 419651a7d1a8SHong Zhang ierr = PetscNew(Mat_Merge_SeqsToMPI,&merge);CHKERRQ(ierr); 4197c2234fe3SHong Zhang ierr = PetscMalloc(size*sizeof(MPI_Status),&status);CHKERRQ(ierr); 4198e5f2cdd8SHong Zhang 41996abd8857SHong Zhang /* determine row ownership */ 4200f08fae4eSHong Zhang /*---------------------------------------------------------*/ 420126283091SBarry Smith ierr = PetscLayoutCreate(comm,&merge->rowmap);CHKERRQ(ierr); 420226283091SBarry Smith ierr = PetscLayoutSetLocalSize(merge->rowmap,m);CHKERRQ(ierr); 420326283091SBarry Smith ierr = PetscLayoutSetSize(merge->rowmap,M);CHKERRQ(ierr); 420426283091SBarry Smith ierr = PetscLayoutSetBlockSize(merge->rowmap,1);CHKERRQ(ierr); 420526283091SBarry Smith ierr = PetscLayoutSetUp(merge->rowmap);CHKERRQ(ierr); 4206b1d57f15SBarry Smith ierr = PetscMalloc(size*sizeof(PetscMPIInt),&len_si);CHKERRQ(ierr); 4207b1d57f15SBarry Smith ierr = PetscMalloc(size*sizeof(PetscMPIInt),&merge->len_s);CHKERRQ(ierr); 
420855d1abb9SHong Zhang 42097a2fc3feSBarry Smith m = merge->rowmap->n; 42107a2fc3feSBarry Smith M = merge->rowmap->N; 42117a2fc3feSBarry Smith owners = merge->rowmap->range; 42126abd8857SHong Zhang 42136abd8857SHong Zhang /* determine the number of messages to send, their lengths */ 42146abd8857SHong Zhang /*---------------------------------------------------------*/ 42153e06a4e6SHong Zhang len_s = merge->len_s; 421651a7d1a8SHong Zhang 42172257cef7SHong Zhang len = 0; /* length of buf_si[] */ 4218c2234fe3SHong Zhang merge->nsend = 0; 4219409913e3SHong Zhang for (proc=0; proc<size; proc++){ 42202257cef7SHong Zhang len_si[proc] = 0; 42213e06a4e6SHong Zhang if (proc == rank){ 42226abd8857SHong Zhang len_s[proc] = 0; 42233e06a4e6SHong Zhang } else { 422402c68681SHong Zhang len_si[proc] = owners[proc+1] - owners[proc] + 1; 42253e06a4e6SHong Zhang len_s[proc] = ai[owners[proc+1]] - ai[owners[proc]]; /* num of rows to be sent to [proc] */ 42263e06a4e6SHong Zhang } 42273e06a4e6SHong Zhang if (len_s[proc]) { 4228c2234fe3SHong Zhang merge->nsend++; 42292257cef7SHong Zhang nrows = 0; 42302257cef7SHong Zhang for (i=owners[proc]; i<owners[proc+1]; i++){ 42312257cef7SHong Zhang if (ai[i+1] > ai[i]) nrows++; 42322257cef7SHong Zhang } 42332257cef7SHong Zhang len_si[proc] = 2*(nrows+1); 42342257cef7SHong Zhang len += len_si[proc]; 4235409913e3SHong Zhang } 423658cb9c82SHong Zhang } 4237409913e3SHong Zhang 42382257cef7SHong Zhang /* determine the number and length of messages to receive for ij-structure */ 42392257cef7SHong Zhang /*-------------------------------------------------------------------------*/ 424051a7d1a8SHong Zhang ierr = PetscGatherNumberOfMessages(comm,PETSC_NULL,len_s,&merge->nrecv);CHKERRQ(ierr); 424155d1abb9SHong Zhang ierr = PetscGatherMessageLengths2(comm,merge->nsend,merge->nrecv,len_s,len_si,&merge->id_r,&merge->len_r,&len_ri);CHKERRQ(ierr); 4242671beff6SHong Zhang 42433e06a4e6SHong Zhang /* post the Irecv of j-structure */ 42443e06a4e6SHong Zhang 
/*-------------------------------*/ 42452c72b5baSSatish Balay ierr = PetscCommGetNewTag(comm,&tagj);CHKERRQ(ierr); 42463e06a4e6SHong Zhang ierr = PetscPostIrecvInt(comm,tagj,merge->nrecv,merge->id_r,merge->len_r,&buf_rj,&rj_waits);CHKERRQ(ierr); 424702c68681SHong Zhang 42483e06a4e6SHong Zhang /* post the Isend of j-structure */ 4249affca5deSHong Zhang /*--------------------------------*/ 42502257cef7SHong Zhang ierr = PetscMalloc((2*merge->nsend+1)*sizeof(MPI_Request),&si_waits);CHKERRQ(ierr); 425102c68681SHong Zhang sj_waits = si_waits + merge->nsend; 42523e06a4e6SHong Zhang 42532257cef7SHong Zhang for (proc=0, k=0; proc<size; proc++){ 4254409913e3SHong Zhang if (!len_s[proc]) continue; 425502c68681SHong Zhang i = owners[proc]; 4256b1d57f15SBarry Smith ierr = MPI_Isend(aj+ai[i],len_s[proc],MPIU_INT,proc,tagj,comm,sj_waits+k);CHKERRQ(ierr); 425751a7d1a8SHong Zhang k++; 425851a7d1a8SHong Zhang } 425951a7d1a8SHong Zhang 42603e06a4e6SHong Zhang /* receives and sends of j-structure are complete */ 42613e06a4e6SHong Zhang /*------------------------------------------------*/ 42620c468ba9SBarry Smith if (merge->nrecv) {ierr = MPI_Waitall(merge->nrecv,rj_waits,status);CHKERRQ(ierr);} 42630c468ba9SBarry Smith if (merge->nsend) {ierr = MPI_Waitall(merge->nsend,sj_waits,status);CHKERRQ(ierr);} 426402c68681SHong Zhang 426502c68681SHong Zhang /* send and recv i-structure */ 426602c68681SHong Zhang /*---------------------------*/ 42672c72b5baSSatish Balay ierr = PetscCommGetNewTag(comm,&tagi);CHKERRQ(ierr); 426802c68681SHong Zhang ierr = PetscPostIrecvInt(comm,tagi,merge->nrecv,merge->id_r,len_ri,&buf_ri,&ri_waits);CHKERRQ(ierr); 426902c68681SHong Zhang 4270b1d57f15SBarry Smith ierr = PetscMalloc((len+1)*sizeof(PetscInt),&buf_s);CHKERRQ(ierr); 42713e06a4e6SHong Zhang buf_si = buf_s; /* points to the beginning of k-th msg to be sent */ 42722257cef7SHong Zhang for (proc=0,k=0; proc<size; proc++){ 427302c68681SHong Zhang if (!len_s[proc]) continue; 42743e06a4e6SHong Zhang /* form 
outgoing message for i-structure: 42753e06a4e6SHong Zhang buf_si[0]: nrows to be sent 42763e06a4e6SHong Zhang [1:nrows]: row index (global) 42773e06a4e6SHong Zhang [nrows+1:2*nrows+1]: i-structure index 42783e06a4e6SHong Zhang */ 42793e06a4e6SHong Zhang /*-------------------------------------------*/ 42802257cef7SHong Zhang nrows = len_si[proc]/2 - 1; 42813e06a4e6SHong Zhang buf_si_i = buf_si + nrows+1; 42823e06a4e6SHong Zhang buf_si[0] = nrows; 42833e06a4e6SHong Zhang buf_si_i[0] = 0; 42843e06a4e6SHong Zhang nrows = 0; 42853e06a4e6SHong Zhang for (i=owners[proc]; i<owners[proc+1]; i++){ 42863e06a4e6SHong Zhang anzi = ai[i+1] - ai[i]; 42873e06a4e6SHong Zhang if (anzi) { 42883e06a4e6SHong Zhang buf_si_i[nrows+1] = buf_si_i[nrows] + anzi; /* i-structure */ 42893e06a4e6SHong Zhang buf_si[nrows+1] = i-owners[proc]; /* local row index */ 42903e06a4e6SHong Zhang nrows++; 42913e06a4e6SHong Zhang } 42923e06a4e6SHong Zhang } 4293b1d57f15SBarry Smith ierr = MPI_Isend(buf_si,len_si[proc],MPIU_INT,proc,tagi,comm,si_waits+k);CHKERRQ(ierr); 429402c68681SHong Zhang k++; 42952257cef7SHong Zhang buf_si += len_si[proc]; 429602c68681SHong Zhang } 42972257cef7SHong Zhang 42980c468ba9SBarry Smith if (merge->nrecv) {ierr = MPI_Waitall(merge->nrecv,ri_waits,status);CHKERRQ(ierr);} 42990c468ba9SBarry Smith if (merge->nsend) {ierr = MPI_Waitall(merge->nsend,si_waits,status);CHKERRQ(ierr);} 430002c68681SHong Zhang 4301ae15b995SBarry Smith ierr = PetscInfo2(seqmat,"nsend: %D, nrecv: %D\n",merge->nsend,merge->nrecv);CHKERRQ(ierr); 43023e06a4e6SHong Zhang for (i=0; i<merge->nrecv; i++){ 4303ae15b995SBarry Smith ierr = PetscInfo3(seqmat,"recv len_ri=%D, len_rj=%D from [%D]\n",len_ri[i],merge->len_r[i],merge->id_r[i]);CHKERRQ(ierr); 43043e06a4e6SHong Zhang } 43053e06a4e6SHong Zhang 43063e06a4e6SHong Zhang ierr = PetscFree(len_si);CHKERRQ(ierr); 430702c68681SHong Zhang ierr = PetscFree(len_ri);CHKERRQ(ierr); 430802c68681SHong Zhang ierr = PetscFree(rj_waits);CHKERRQ(ierr); 43093e06a4e6SHong Zhang 
ierr = PetscFree(si_waits);CHKERRQ(ierr); 43102257cef7SHong Zhang ierr = PetscFree(ri_waits);CHKERRQ(ierr); 43113e06a4e6SHong Zhang ierr = PetscFree(buf_s);CHKERRQ(ierr); 4312bcc1bcd5SHong Zhang ierr = PetscFree(status);CHKERRQ(ierr); 431358cb9c82SHong Zhang 4314bcc1bcd5SHong Zhang /* compute a local seq matrix in each processor */ 4315bcc1bcd5SHong Zhang /*----------------------------------------------*/ 431658cb9c82SHong Zhang /* allocate bi array and free space for accumulating nonzero column info */ 4317b1d57f15SBarry Smith ierr = PetscMalloc((m+1)*sizeof(PetscInt),&bi);CHKERRQ(ierr); 431858cb9c82SHong Zhang bi[0] = 0; 431958cb9c82SHong Zhang 4320be0fcf8dSHong Zhang /* create and initialize a linked list */ 4321be0fcf8dSHong Zhang nlnk = N+1; 4322be0fcf8dSHong Zhang ierr = PetscLLCreate(N,N,nlnk,lnk,lnkbt);CHKERRQ(ierr); 432358cb9c82SHong Zhang 4324bcc1bcd5SHong Zhang /* initial FreeSpace size is 2*(num of local nnz(seqmat)) */ 432558cb9c82SHong Zhang len = 0; 4326bcc1bcd5SHong Zhang len = ai[owners[rank+1]] - ai[owners[rank]]; 4327a1a86e44SBarry Smith ierr = PetscFreeSpaceGet((PetscInt)(2*len+1),&free_space);CHKERRQ(ierr); 432858cb9c82SHong Zhang current_space = free_space; 432958cb9c82SHong Zhang 4330bcc1bcd5SHong Zhang /* determine symbolic info for each local row */ 4331b1d57f15SBarry Smith ierr = PetscMalloc((3*merge->nrecv+1)*sizeof(PetscInt**),&buf_ri_k);CHKERRQ(ierr); 43323e06a4e6SHong Zhang nextrow = buf_ri_k + merge->nrecv; 43333e06a4e6SHong Zhang nextai = nextrow + merge->nrecv; 43343e06a4e6SHong Zhang for (k=0; k<merge->nrecv; k++){ 43352257cef7SHong Zhang buf_ri_k[k] = buf_ri[k]; /* beginning of k-th recved i-structure */ 43363e06a4e6SHong Zhang nrows = *buf_ri_k[k]; 43373e06a4e6SHong Zhang nextrow[k] = buf_ri_k[k] + 1; /* next row number of k-th recved i-structure */ 43382257cef7SHong Zhang nextai[k] = buf_ri_k[k] + (nrows + 1);/* poins to the next i-structure of k-th recved i-structure */ 43393e06a4e6SHong Zhang } 43402257cef7SHong Zhang 
4341bcc1bcd5SHong Zhang ierr = MatPreallocateInitialize(comm,m,n,dnz,onz);CHKERRQ(ierr); 4342bcc1bcd5SHong Zhang len = 0; 434358cb9c82SHong Zhang for (i=0;i<m;i++) { 434458cb9c82SHong Zhang bnzi = 0; 434558cb9c82SHong Zhang /* add local non-zero cols of this proc's seqmat into lnk */ 434658cb9c82SHong Zhang arow = owners[rank] + i; 434758cb9c82SHong Zhang anzi = ai[arow+1] - ai[arow]; 434858cb9c82SHong Zhang aj = a->j + ai[arow]; 4349be0fcf8dSHong Zhang ierr = PetscLLAdd(anzi,aj,N,nlnk,lnk,lnkbt);CHKERRQ(ierr); 435058cb9c82SHong Zhang bnzi += nlnk; 435158cb9c82SHong Zhang /* add received col data into lnk */ 435251a7d1a8SHong Zhang for (k=0; k<merge->nrecv; k++){ /* k-th received message */ 435355d1abb9SHong Zhang if (i == *nextrow[k]) { /* i-th row */ 43543e06a4e6SHong Zhang anzi = *(nextai[k]+1) - *nextai[k]; 43553e06a4e6SHong Zhang aj = buf_rj[k] + *nextai[k]; 43563e06a4e6SHong Zhang ierr = PetscLLAdd(anzi,aj,N,nlnk,lnk,lnkbt);CHKERRQ(ierr); 43573e06a4e6SHong Zhang bnzi += nlnk; 43583e06a4e6SHong Zhang nextrow[k]++; nextai[k]++; 43593e06a4e6SHong Zhang } 436058cb9c82SHong Zhang } 4361bcc1bcd5SHong Zhang if (len < bnzi) len = bnzi; /* =max(bnzi) */ 436258cb9c82SHong Zhang 436358cb9c82SHong Zhang /* if free space is not available, make more free space */ 436458cb9c82SHong Zhang if (current_space->local_remaining<bnzi) { 43654238b7adSHong Zhang ierr = PetscFreeSpaceGet(bnzi+current_space->total_array_size,¤t_space);CHKERRQ(ierr); 436658cb9c82SHong Zhang nspacedouble++; 436758cb9c82SHong Zhang } 436858cb9c82SHong Zhang /* copy data into free space, then initialize lnk */ 4369be0fcf8dSHong Zhang ierr = PetscLLClean(N,N,bnzi,lnk,current_space->array,lnkbt);CHKERRQ(ierr); 4370bcc1bcd5SHong Zhang ierr = MatPreallocateSet(i+owners[rank],bnzi,current_space->array,dnz,onz);CHKERRQ(ierr); 4371bcc1bcd5SHong Zhang 437258cb9c82SHong Zhang current_space->array += bnzi; 437358cb9c82SHong Zhang current_space->local_used += bnzi; 437458cb9c82SHong Zhang 
current_space->local_remaining -= bnzi; 437558cb9c82SHong Zhang 437658cb9c82SHong Zhang bi[i+1] = bi[i] + bnzi; 437758cb9c82SHong Zhang } 4378bcc1bcd5SHong Zhang 4379bcc1bcd5SHong Zhang ierr = PetscFree(buf_ri_k);CHKERRQ(ierr); 4380bcc1bcd5SHong Zhang 4381b1d57f15SBarry Smith ierr = PetscMalloc((bi[m]+1)*sizeof(PetscInt),&bj);CHKERRQ(ierr); 4382a1a86e44SBarry Smith ierr = PetscFreeSpaceContiguous(&free_space,bj);CHKERRQ(ierr); 4383be0fcf8dSHong Zhang ierr = PetscLLDestroy(lnk,lnkbt);CHKERRQ(ierr); 4384409913e3SHong Zhang 4385bcc1bcd5SHong Zhang /* create symbolic parallel matrix B_mpi */ 4386bcc1bcd5SHong Zhang /*---------------------------------------*/ 4387f69a0ea3SMatthew Knepley ierr = MatCreate(comm,&B_mpi);CHKERRQ(ierr); 438854b84b50SHong Zhang if (n==PETSC_DECIDE) { 4389f69a0ea3SMatthew Knepley ierr = MatSetSizes(B_mpi,m,n,PETSC_DETERMINE,N);CHKERRQ(ierr); 439054b84b50SHong Zhang } else { 4391f69a0ea3SMatthew Knepley ierr = MatSetSizes(B_mpi,m,n,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr); 439254b84b50SHong Zhang } 4393bcc1bcd5SHong Zhang ierr = MatSetType(B_mpi,MATMPIAIJ);CHKERRQ(ierr); 4394bcc1bcd5SHong Zhang ierr = MatMPIAIJSetPreallocation(B_mpi,0,dnz,0,onz);CHKERRQ(ierr); 4395bcc1bcd5SHong Zhang ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr); 439658cb9c82SHong Zhang 43976abd8857SHong Zhang /* B_mpi is not ready for use - assembly will be done by MatMerge_SeqsToMPINumeric() */ 43986abd8857SHong Zhang B_mpi->assembled = PETSC_FALSE; 4399affca5deSHong Zhang B_mpi->ops->destroy = MatDestroy_MPIAIJ_SeqsToMPI; 4400affca5deSHong Zhang merge->bi = bi; 4401affca5deSHong Zhang merge->bj = bj; 440202c68681SHong Zhang merge->buf_ri = buf_ri; 440302c68681SHong Zhang merge->buf_rj = buf_rj; 4404de0260b3SHong Zhang merge->coi = PETSC_NULL; 4405de0260b3SHong Zhang merge->coj = PETSC_NULL; 4406de0260b3SHong Zhang merge->owners_co = PETSC_NULL; 4407affca5deSHong Zhang 4408affca5deSHong Zhang /* attach the supporting struct to B_mpi for reuse */ 
4409776b82aeSLisandro Dalcin ierr = PetscContainerCreate(PETSC_COMM_SELF,&container);CHKERRQ(ierr); 4410776b82aeSLisandro Dalcin ierr = PetscContainerSetPointer(container,merge);CHKERRQ(ierr); 4411affca5deSHong Zhang ierr = PetscObjectCompose((PetscObject)B_mpi,"MatMergeSeqsToMPI",(PetscObject)container);CHKERRQ(ierr); 4412affca5deSHong Zhang *mpimat = B_mpi; 441338f152feSBarry Smith 441438f152feSBarry Smith ierr = PetscCommDestroy(&comm);CHKERRQ(ierr); 44154ebed01fSBarry Smith ierr = PetscLogEventEnd(MAT_Seqstompisym,seqmat,0,0,0);CHKERRQ(ierr); 4416e5f2cdd8SHong Zhang PetscFunctionReturn(0); 4417e5f2cdd8SHong Zhang } 441825616d81SHong Zhang 441938f152feSBarry Smith #undef __FUNCT__ 442038f152feSBarry Smith #define __FUNCT__ "MatMerge_SeqsToMPI" 4421be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatMerge_SeqsToMPI(MPI_Comm comm,Mat seqmat,PetscInt m,PetscInt n,MatReuse scall,Mat *mpimat) 442255d1abb9SHong Zhang { 442355d1abb9SHong Zhang PetscErrorCode ierr; 442455d1abb9SHong Zhang 442555d1abb9SHong Zhang PetscFunctionBegin; 44264ebed01fSBarry Smith ierr = PetscLogEventBegin(MAT_Seqstompi,seqmat,0,0,0);CHKERRQ(ierr); 442755d1abb9SHong Zhang if (scall == MAT_INITIAL_MATRIX){ 442855d1abb9SHong Zhang ierr = MatMerge_SeqsToMPISymbolic(comm,seqmat,m,n,mpimat);CHKERRQ(ierr); 442955d1abb9SHong Zhang } 443055d1abb9SHong Zhang ierr = MatMerge_SeqsToMPINumeric(seqmat,*mpimat);CHKERRQ(ierr); 44314ebed01fSBarry Smith ierr = PetscLogEventEnd(MAT_Seqstompi,seqmat,0,0,0);CHKERRQ(ierr); 443255d1abb9SHong Zhang PetscFunctionReturn(0); 443355d1abb9SHong Zhang } 44344ebed01fSBarry Smith 443525616d81SHong Zhang #undef __FUNCT__ 443625616d81SHong Zhang #define __FUNCT__ "MatGetLocalMat" 4437bc08b0f1SBarry Smith /*@ 443832fba14fSHong Zhang MatGetLocalMat - Creates a SeqAIJ matrix by taking all its local rows 443925616d81SHong Zhang 444032fba14fSHong Zhang Not Collective 444125616d81SHong Zhang 444225616d81SHong Zhang Input Parameters: 444325616d81SHong Zhang + A - the matrix 
444425616d81SHong Zhang . scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX 444525616d81SHong Zhang 444625616d81SHong Zhang Output Parameter: 444725616d81SHong Zhang . A_loc - the local sequential matrix generated 444825616d81SHong Zhang 444925616d81SHong Zhang Level: developer 445025616d81SHong Zhang 445125616d81SHong Zhang @*/ 4452be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatGetLocalMat(Mat A,MatReuse scall,Mat *A_loc) 445325616d81SHong Zhang { 445425616d81SHong Zhang PetscErrorCode ierr; 445501b7ae99SHong Zhang Mat_MPIAIJ *mpimat=(Mat_MPIAIJ*)A->data; 445601b7ae99SHong Zhang Mat_SeqAIJ *mat,*a=(Mat_SeqAIJ*)(mpimat->A)->data,*b=(Mat_SeqAIJ*)(mpimat->B)->data; 445701b7ae99SHong Zhang PetscInt *ai=a->i,*aj=a->j,*bi=b->i,*bj=b->j,*cmap=mpimat->garray; 4458a77337e4SBarry Smith MatScalar *aa=a->a,*ba=b->a,*cam; 4459a77337e4SBarry Smith PetscScalar *ca; 4460d0f46423SBarry Smith PetscInt am=A->rmap->n,i,j,k,cstart=A->cmap->rstart; 44615a7d977cSHong Zhang PetscInt *ci,*cj,col,ncols_d,ncols_o,jo; 446225616d81SHong Zhang 446325616d81SHong Zhang PetscFunctionBegin; 44644ebed01fSBarry Smith ierr = PetscLogEventBegin(MAT_Getlocalmat,A,0,0,0);CHKERRQ(ierr); 446501b7ae99SHong Zhang if (scall == MAT_INITIAL_MATRIX){ 4466dea91ad1SHong Zhang ierr = PetscMalloc((1+am)*sizeof(PetscInt),&ci);CHKERRQ(ierr); 4467dea91ad1SHong Zhang ci[0] = 0; 446801b7ae99SHong Zhang for (i=0; i<am; i++){ 4469dea91ad1SHong Zhang ci[i+1] = ci[i] + (ai[i+1] - ai[i]) + (bi[i+1] - bi[i]); 447001b7ae99SHong Zhang } 4471dea91ad1SHong Zhang ierr = PetscMalloc((1+ci[am])*sizeof(PetscInt),&cj);CHKERRQ(ierr); 4472dea91ad1SHong Zhang ierr = PetscMalloc((1+ci[am])*sizeof(PetscScalar),&ca);CHKERRQ(ierr); 4473dea91ad1SHong Zhang k = 0; 447401b7ae99SHong Zhang for (i=0; i<am; i++) { 44755a7d977cSHong Zhang ncols_o = bi[i+1] - bi[i]; 44765a7d977cSHong Zhang ncols_d = ai[i+1] - ai[i]; 447701b7ae99SHong Zhang /* off-diagonal portion of A */ 44785a7d977cSHong Zhang for (jo=0; jo<ncols_o; jo++) { 
44795a7d977cSHong Zhang col = cmap[*bj]; 44805a7d977cSHong Zhang if (col >= cstart) break; 44815a7d977cSHong Zhang cj[k] = col; bj++; 44825a7d977cSHong Zhang ca[k++] = *ba++; 44835a7d977cSHong Zhang } 44845a7d977cSHong Zhang /* diagonal portion of A */ 44855a7d977cSHong Zhang for (j=0; j<ncols_d; j++) { 44865a7d977cSHong Zhang cj[k] = cstart + *aj++; 44875a7d977cSHong Zhang ca[k++] = *aa++; 44885a7d977cSHong Zhang } 44895a7d977cSHong Zhang /* off-diagonal portion of A */ 44905a7d977cSHong Zhang for (j=jo; j<ncols_o; j++) { 44915a7d977cSHong Zhang cj[k] = cmap[*bj++]; 44925a7d977cSHong Zhang ca[k++] = *ba++; 44935a7d977cSHong Zhang } 449425616d81SHong Zhang } 4495dea91ad1SHong Zhang /* put together the new matrix */ 4496d0f46423SBarry Smith ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,am,A->cmap->N,ci,cj,ca,A_loc);CHKERRQ(ierr); 4497dea91ad1SHong Zhang /* MatCreateSeqAIJWithArrays flags matrix so PETSc doesn't free the user's arrays. */ 4498dea91ad1SHong Zhang /* Since these are PETSc arrays, change flags to free them as necessary. 
*/ 4499dea91ad1SHong Zhang mat = (Mat_SeqAIJ*)(*A_loc)->data; 4500e6b907acSBarry Smith mat->free_a = PETSC_TRUE; 4501e6b907acSBarry Smith mat->free_ij = PETSC_TRUE; 4502dea91ad1SHong Zhang mat->nonew = 0; 45035a7d977cSHong Zhang } else if (scall == MAT_REUSE_MATRIX){ 45045a7d977cSHong Zhang mat=(Mat_SeqAIJ*)(*A_loc)->data; 4505a77337e4SBarry Smith ci = mat->i; cj = mat->j; cam = mat->a; 45065a7d977cSHong Zhang for (i=0; i<am; i++) { 45075a7d977cSHong Zhang /* off-diagonal portion of A */ 45085a7d977cSHong Zhang ncols_o = bi[i+1] - bi[i]; 45095a7d977cSHong Zhang for (jo=0; jo<ncols_o; jo++) { 45105a7d977cSHong Zhang col = cmap[*bj]; 45115a7d977cSHong Zhang if (col >= cstart) break; 4512a77337e4SBarry Smith *cam++ = *ba++; bj++; 45135a7d977cSHong Zhang } 45145a7d977cSHong Zhang /* diagonal portion of A */ 4515ecc9b87dSHong Zhang ncols_d = ai[i+1] - ai[i]; 4516a77337e4SBarry Smith for (j=0; j<ncols_d; j++) *cam++ = *aa++; 45175a7d977cSHong Zhang /* off-diagonal portion of A */ 4518f33d1a9aSHong Zhang for (j=jo; j<ncols_o; j++) { 4519a77337e4SBarry Smith *cam++ = *ba++; bj++; 4520f33d1a9aSHong Zhang } 45215a7d977cSHong Zhang } 45225a7d977cSHong Zhang } else { 45235a7d977cSHong Zhang SETERRQ1(PETSC_ERR_ARG_WRONG,"Invalid MatReuse %d",(int)scall); 452425616d81SHong Zhang } 452501b7ae99SHong Zhang 45264ebed01fSBarry Smith ierr = PetscLogEventEnd(MAT_Getlocalmat,A,0,0,0);CHKERRQ(ierr); 452725616d81SHong Zhang PetscFunctionReturn(0); 452825616d81SHong Zhang } 452925616d81SHong Zhang 453032fba14fSHong Zhang #undef __FUNCT__ 453132fba14fSHong Zhang #define __FUNCT__ "MatGetLocalMatCondensed" 453232fba14fSHong Zhang /*@C 453332fba14fSHong Zhang MatGetLocalMatCondensed - Creates a SeqAIJ matrix by taking all its local rows and NON-ZERO columns 453432fba14fSHong Zhang 453532fba14fSHong Zhang Not Collective 453632fba14fSHong Zhang 453732fba14fSHong Zhang Input Parameters: 453832fba14fSHong Zhang + A - the matrix 453932fba14fSHong Zhang . 
scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX 454032fba14fSHong Zhang - row, col - index sets of rows and columns to extract (or PETSC_NULL) 454132fba14fSHong Zhang 454232fba14fSHong Zhang Output Parameter: 454332fba14fSHong Zhang . A_loc - the local sequential matrix generated 454432fba14fSHong Zhang 454532fba14fSHong Zhang Level: developer 454632fba14fSHong Zhang 454732fba14fSHong Zhang @*/ 4548be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatGetLocalMatCondensed(Mat A,MatReuse scall,IS *row,IS *col,Mat *A_loc) 454932fba14fSHong Zhang { 455032fba14fSHong Zhang Mat_MPIAIJ *a=(Mat_MPIAIJ*)A->data; 455132fba14fSHong Zhang PetscErrorCode ierr; 455232fba14fSHong Zhang PetscInt i,start,end,ncols,nzA,nzB,*cmap,imark,*idx; 455332fba14fSHong Zhang IS isrowa,iscola; 455432fba14fSHong Zhang Mat *aloc; 455532fba14fSHong Zhang 455632fba14fSHong Zhang PetscFunctionBegin; 45574ebed01fSBarry Smith ierr = PetscLogEventBegin(MAT_Getlocalmatcondensed,A,0,0,0);CHKERRQ(ierr); 455832fba14fSHong Zhang if (!row){ 4559d0f46423SBarry Smith start = A->rmap->rstart; end = A->rmap->rend; 456032fba14fSHong Zhang ierr = ISCreateStride(PETSC_COMM_SELF,end-start,start,1,&isrowa);CHKERRQ(ierr); 456132fba14fSHong Zhang } else { 456232fba14fSHong Zhang isrowa = *row; 456332fba14fSHong Zhang } 456432fba14fSHong Zhang if (!col){ 4565d0f46423SBarry Smith start = A->cmap->rstart; 456632fba14fSHong Zhang cmap = a->garray; 4567d0f46423SBarry Smith nzA = a->A->cmap->n; 4568d0f46423SBarry Smith nzB = a->B->cmap->n; 456932fba14fSHong Zhang ierr = PetscMalloc((nzA+nzB)*sizeof(PetscInt), &idx);CHKERRQ(ierr); 457032fba14fSHong Zhang ncols = 0; 457132fba14fSHong Zhang for (i=0; i<nzB; i++) { 457232fba14fSHong Zhang if (cmap[i] < start) idx[ncols++] = cmap[i]; 457332fba14fSHong Zhang else break; 457432fba14fSHong Zhang } 457532fba14fSHong Zhang imark = i; 457632fba14fSHong Zhang for (i=0; i<nzA; i++) idx[ncols++] = start + i; 457732fba14fSHong Zhang for (i=imark; i<nzB; i++) idx[ncols++] = 
cmap[i]; 457832fba14fSHong Zhang ierr = ISCreateGeneral(PETSC_COMM_SELF,ncols,idx,&iscola);CHKERRQ(ierr); 457932fba14fSHong Zhang ierr = PetscFree(idx);CHKERRQ(ierr); 458032fba14fSHong Zhang } else { 458132fba14fSHong Zhang iscola = *col; 458232fba14fSHong Zhang } 458332fba14fSHong Zhang if (scall != MAT_INITIAL_MATRIX){ 458432fba14fSHong Zhang ierr = PetscMalloc(sizeof(Mat),&aloc);CHKERRQ(ierr); 458532fba14fSHong Zhang aloc[0] = *A_loc; 458632fba14fSHong Zhang } 458732fba14fSHong Zhang ierr = MatGetSubMatrices(A,1,&isrowa,&iscola,scall,&aloc);CHKERRQ(ierr); 458832fba14fSHong Zhang *A_loc = aloc[0]; 458932fba14fSHong Zhang ierr = PetscFree(aloc);CHKERRQ(ierr); 459032fba14fSHong Zhang if (!row){ 459132fba14fSHong Zhang ierr = ISDestroy(isrowa);CHKERRQ(ierr); 459232fba14fSHong Zhang } 459332fba14fSHong Zhang if (!col){ 459432fba14fSHong Zhang ierr = ISDestroy(iscola);CHKERRQ(ierr); 459532fba14fSHong Zhang } 45964ebed01fSBarry Smith ierr = PetscLogEventEnd(MAT_Getlocalmatcondensed,A,0,0,0);CHKERRQ(ierr); 459732fba14fSHong Zhang PetscFunctionReturn(0); 459832fba14fSHong Zhang } 459932fba14fSHong Zhang 460025616d81SHong Zhang #undef __FUNCT__ 460125616d81SHong Zhang #define __FUNCT__ "MatGetBrowsOfAcols" 460225616d81SHong Zhang /*@C 460332fba14fSHong Zhang MatGetBrowsOfAcols - Creates a SeqAIJ matrix by taking rows of B that equal to nonzero columns of local A 460425616d81SHong Zhang 460525616d81SHong Zhang Collective on Mat 460625616d81SHong Zhang 460725616d81SHong Zhang Input Parameters: 4608e240928fSHong Zhang + A,B - the matrices in mpiaij format 460925616d81SHong Zhang . scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX 461025616d81SHong Zhang - rowb, colb - index sets of rows and columns of B to extract (or PETSC_NULL) 461125616d81SHong Zhang 461225616d81SHong Zhang Output Parameter: 461325616d81SHong Zhang + rowb, colb - index sets of rows and columns of B to extract 4614d0f46423SBarry Smith . 
brstart - row index of B_seq from which next B->rmap->n rows are taken from B's local rows 461525616d81SHong Zhang - B_seq - the sequential matrix generated 461625616d81SHong Zhang 461725616d81SHong Zhang Level: developer 461825616d81SHong Zhang 461925616d81SHong Zhang @*/ 4620be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatGetBrowsOfAcols(Mat A,Mat B,MatReuse scall,IS *rowb,IS *colb,PetscInt *brstart,Mat *B_seq) 462125616d81SHong Zhang { 4622899cda47SBarry Smith Mat_MPIAIJ *a=(Mat_MPIAIJ*)A->data; 462325616d81SHong Zhang PetscErrorCode ierr; 4624b1d57f15SBarry Smith PetscInt *idx,i,start,ncols,nzA,nzB,*cmap,imark; 462525616d81SHong Zhang IS isrowb,iscolb; 462625616d81SHong Zhang Mat *bseq; 462725616d81SHong Zhang 462825616d81SHong Zhang PetscFunctionBegin; 4629d0f46423SBarry Smith if (A->cmap->rstart != B->rmap->rstart || A->cmap->rend != B->rmap->rend){ 4630d0f46423SBarry Smith SETERRQ4(PETSC_ERR_ARG_SIZ,"Matrix local dimensions are incompatible, (%D, %D) != (%D,%D)",A->cmap->rstart,A->cmap->rend,B->rmap->rstart,B->rmap->rend); 463125616d81SHong Zhang } 46324ebed01fSBarry Smith ierr = PetscLogEventBegin(MAT_GetBrowsOfAcols,A,B,0,0);CHKERRQ(ierr); 463325616d81SHong Zhang 463425616d81SHong Zhang if (scall == MAT_INITIAL_MATRIX){ 4635d0f46423SBarry Smith start = A->cmap->rstart; 463625616d81SHong Zhang cmap = a->garray; 4637d0f46423SBarry Smith nzA = a->A->cmap->n; 4638d0f46423SBarry Smith nzB = a->B->cmap->n; 4639b1d57f15SBarry Smith ierr = PetscMalloc((nzA+nzB)*sizeof(PetscInt), &idx);CHKERRQ(ierr); 464025616d81SHong Zhang ncols = 0; 46410390132cSHong Zhang for (i=0; i<nzB; i++) { /* row < local row index */ 464225616d81SHong Zhang if (cmap[i] < start) idx[ncols++] = cmap[i]; 464325616d81SHong Zhang else break; 464425616d81SHong Zhang } 464525616d81SHong Zhang imark = i; 46460390132cSHong Zhang for (i=0; i<nzA; i++) idx[ncols++] = start + i; /* local rows */ 46470390132cSHong Zhang for (i=imark; i<nzB; i++) idx[ncols++] = cmap[i]; /* row > local row 
index */ 464825616d81SHong Zhang ierr = ISCreateGeneral(PETSC_COMM_SELF,ncols,idx,&isrowb);CHKERRQ(ierr); 464925616d81SHong Zhang ierr = PetscFree(idx);CHKERRQ(ierr); 465025616d81SHong Zhang *brstart = imark; 4651d0f46423SBarry Smith ierr = ISCreateStride(PETSC_COMM_SELF,B->cmap->N,0,1,&iscolb);CHKERRQ(ierr); 465225616d81SHong Zhang } else { 465325616d81SHong Zhang if (!rowb || !colb) SETERRQ(PETSC_ERR_SUP,"IS rowb and colb must be provided for MAT_REUSE_MATRIX"); 465425616d81SHong Zhang isrowb = *rowb; iscolb = *colb; 465525616d81SHong Zhang ierr = PetscMalloc(sizeof(Mat),&bseq);CHKERRQ(ierr); 465625616d81SHong Zhang bseq[0] = *B_seq; 465725616d81SHong Zhang } 465825616d81SHong Zhang ierr = MatGetSubMatrices(B,1,&isrowb,&iscolb,scall,&bseq);CHKERRQ(ierr); 465925616d81SHong Zhang *B_seq = bseq[0]; 466025616d81SHong Zhang ierr = PetscFree(bseq);CHKERRQ(ierr); 466125616d81SHong Zhang if (!rowb){ 466225616d81SHong Zhang ierr = ISDestroy(isrowb);CHKERRQ(ierr); 466325616d81SHong Zhang } else { 466425616d81SHong Zhang *rowb = isrowb; 466525616d81SHong Zhang } 466625616d81SHong Zhang if (!colb){ 466725616d81SHong Zhang ierr = ISDestroy(iscolb);CHKERRQ(ierr); 466825616d81SHong Zhang } else { 466925616d81SHong Zhang *colb = iscolb; 467025616d81SHong Zhang } 46714ebed01fSBarry Smith ierr = PetscLogEventEnd(MAT_GetBrowsOfAcols,A,B,0,0);CHKERRQ(ierr); 467225616d81SHong Zhang PetscFunctionReturn(0); 467325616d81SHong Zhang } 4674429d309bSHong Zhang 4675a61c8c0fSHong Zhang #undef __FUNCT__ 4676a61c8c0fSHong Zhang #define __FUNCT__ "MatGetBrowsOfAoCols" 4677429d309bSHong Zhang /*@C 4678429d309bSHong Zhang MatGetBrowsOfAoCols - Creates a SeqAIJ matrix by taking rows of B that equal to nonzero columns 467901b7ae99SHong Zhang of the OFF-DIAGONAL portion of local A 4680429d309bSHong Zhang 4681429d309bSHong Zhang Collective on Mat 4682429d309bSHong Zhang 4683429d309bSHong Zhang Input Parameters: 4684429d309bSHong Zhang + A,B - the matrices in mpiaij format 468587025532SHong Zhang . 
scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX 468687025532SHong Zhang . startsj - starting point in B's sending and receiving j-arrays, saved for MAT_REUSE (or PETSC_NULL) 468787025532SHong Zhang - bufa_ptr - array for sending matrix values, saved for MAT_REUSE (or PETSC_NULL) 4688429d309bSHong Zhang 4689429d309bSHong Zhang Output Parameter: 469087025532SHong Zhang + B_oth - the sequential matrix generated 4691429d309bSHong Zhang 4692429d309bSHong Zhang Level: developer 4693429d309bSHong Zhang 4694429d309bSHong Zhang @*/ 4695dd6ea824SBarry Smith PetscErrorCode PETSCMAT_DLLEXPORT MatGetBrowsOfAoCols(Mat A,Mat B,MatReuse scall,PetscInt **startsj,MatScalar **bufa_ptr,Mat *B_oth) 4696429d309bSHong Zhang { 4697a6b2eed2SHong Zhang VecScatter_MPI_General *gen_to,*gen_from; 4698429d309bSHong Zhang PetscErrorCode ierr; 4699899cda47SBarry Smith Mat_MPIAIJ *a=(Mat_MPIAIJ*)A->data; 470087025532SHong Zhang Mat_SeqAIJ *b_oth; 4701a6b2eed2SHong Zhang VecScatter ctx=a->Mvctx; 47027adad957SLisandro Dalcin MPI_Comm comm=((PetscObject)ctx)->comm; 47037adad957SLisandro Dalcin PetscMPIInt *rprocs,*sprocs,tag=((PetscObject)ctx)->tag,rank; 4704d0f46423SBarry Smith PetscInt *rowlen,*bufj,*bufJ,ncols,aBn=a->B->cmap->n,row,*b_othi,*b_othj; 4705dd6ea824SBarry Smith PetscScalar *rvalues,*svalues; 4706dd6ea824SBarry Smith MatScalar *b_otha,*bufa,*bufA; 4707e42f35eeSHong Zhang PetscInt i,j,k,l,ll,nrecvs,nsends,nrows,*srow,*rstarts,*rstartsj = 0,*sstarts,*sstartsj,len; 4708910ba992SMatthew Knepley MPI_Request *rwaits = PETSC_NULL,*swaits = PETSC_NULL; 470987025532SHong Zhang MPI_Status *sstatus,rstatus; 4710aa5bb8c0SSatish Balay PetscMPIInt jj; 4711e42f35eeSHong Zhang PetscInt *cols,sbs,rbs; 4712ba8c8a56SBarry Smith PetscScalar *vals; 4713429d309bSHong Zhang 4714429d309bSHong Zhang PetscFunctionBegin; 4715d0f46423SBarry Smith if (A->cmap->rstart != B->rmap->rstart || A->cmap->rend != B->rmap->rend){ 4716d0f46423SBarry Smith SETERRQ4(PETSC_ERR_ARG_SIZ,"Matrix local dimensions are 
incompatible, (%d, %d) != (%d,%d)",A->cmap->rstart,A->cmap->rend,B->rmap->rstart,B->rmap->rend); 4717429d309bSHong Zhang } 47184ebed01fSBarry Smith ierr = PetscLogEventBegin(MAT_GetBrowsOfAocols,A,B,0,0);CHKERRQ(ierr); 4719a6b2eed2SHong Zhang ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 4720a6b2eed2SHong Zhang 4721a6b2eed2SHong Zhang gen_to = (VecScatter_MPI_General*)ctx->todata; 4722a6b2eed2SHong Zhang gen_from = (VecScatter_MPI_General*)ctx->fromdata; 4723e42f35eeSHong Zhang rvalues = gen_from->values; /* holds the length of receiving row */ 4724e42f35eeSHong Zhang svalues = gen_to->values; /* holds the length of sending row */ 4725a6b2eed2SHong Zhang nrecvs = gen_from->n; 4726a6b2eed2SHong Zhang nsends = gen_to->n; 4727d7ee0231SBarry Smith 4728d7ee0231SBarry Smith ierr = PetscMalloc2(nrecvs,MPI_Request,&rwaits,nsends,MPI_Request,&swaits);CHKERRQ(ierr); 4729a6b2eed2SHong Zhang srow = gen_to->indices; /* local row index to be sent */ 4730a6b2eed2SHong Zhang sstarts = gen_to->starts; 4731a6b2eed2SHong Zhang sprocs = gen_to->procs; 4732a6b2eed2SHong Zhang sstatus = gen_to->sstatus; 4733e42f35eeSHong Zhang sbs = gen_to->bs; 4734e42f35eeSHong Zhang rstarts = gen_from->starts; 4735e42f35eeSHong Zhang rprocs = gen_from->procs; 4736e42f35eeSHong Zhang rbs = gen_from->bs; 4737429d309bSHong Zhang 4738dea91ad1SHong Zhang if (!startsj || !bufa_ptr) scall = MAT_INITIAL_MATRIX; 4739429d309bSHong Zhang if (scall == MAT_INITIAL_MATRIX){ 4740a6b2eed2SHong Zhang /* i-array */ 4741a6b2eed2SHong Zhang /*---------*/ 4742a6b2eed2SHong Zhang /* post receives */ 4743a6b2eed2SHong Zhang for (i=0; i<nrecvs; i++){ 4744e42f35eeSHong Zhang rowlen = (PetscInt*)rvalues + rstarts[i]*rbs; 4745e42f35eeSHong Zhang nrows = (rstarts[i+1]-rstarts[i])*rbs; /* num of indices to be received */ 474687025532SHong Zhang ierr = MPI_Irecv(rowlen,nrows,MPIU_INT,rprocs[i],tag,comm,rwaits+i);CHKERRQ(ierr); 4747429d309bSHong Zhang } 4748a6b2eed2SHong Zhang 4749a6b2eed2SHong Zhang /* pack the outgoing message 
*/ 475087025532SHong Zhang ierr = PetscMalloc((nsends+nrecvs+3)*sizeof(PetscInt),&sstartsj);CHKERRQ(ierr); 4751a6b2eed2SHong Zhang rstartsj = sstartsj + nsends +1; 4752a6b2eed2SHong Zhang sstartsj[0] = 0; rstartsj[0] = 0; 4753a6b2eed2SHong Zhang len = 0; /* total length of j or a array to be sent */ 4754a6b2eed2SHong Zhang k = 0; 4755a6b2eed2SHong Zhang for (i=0; i<nsends; i++){ 4756e42f35eeSHong Zhang rowlen = (PetscInt*)svalues + sstarts[i]*sbs; 4757e42f35eeSHong Zhang nrows = sstarts[i+1]-sstarts[i]; /* num of block rows */ 475887025532SHong Zhang for (j=0; j<nrows; j++) { 4759d0f46423SBarry Smith row = srow[k] + B->rmap->range[rank]; /* global row idx */ 4760e42f35eeSHong Zhang for (l=0; l<sbs; l++){ 4761e42f35eeSHong Zhang ierr = MatGetRow_MPIAIJ(B,row+l,&ncols,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr); /* rowlength */ 4762e42f35eeSHong Zhang rowlen[j*sbs+l] = ncols; 4763e42f35eeSHong Zhang len += ncols; 4764e42f35eeSHong Zhang ierr = MatRestoreRow_MPIAIJ(B,row+l,&ncols,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr); 4765e42f35eeSHong Zhang } 4766a6b2eed2SHong Zhang k++; 4767429d309bSHong Zhang } 4768e42f35eeSHong Zhang ierr = MPI_Isend(rowlen,nrows*sbs,MPIU_INT,sprocs[i],tag,comm,swaits+i);CHKERRQ(ierr); 4769dea91ad1SHong Zhang sstartsj[i+1] = len; /* starting point of (i+1)-th outgoing msg in bufj and bufa */ 4770429d309bSHong Zhang } 477187025532SHong Zhang /* recvs and sends of i-array are completed */ 477287025532SHong Zhang i = nrecvs; 477387025532SHong Zhang while (i--) { 4774aa5bb8c0SSatish Balay ierr = MPI_Waitany(nrecvs,rwaits,&jj,&rstatus);CHKERRQ(ierr); 477587025532SHong Zhang } 47760c468ba9SBarry Smith if (nsends) {ierr = MPI_Waitall(nsends,swaits,sstatus);CHKERRQ(ierr);} 4777e42f35eeSHong Zhang 4778a6b2eed2SHong Zhang /* allocate buffers for sending j and a arrays */ 4779a6b2eed2SHong Zhang ierr = PetscMalloc((len+1)*sizeof(PetscInt),&bufj);CHKERRQ(ierr); 4780a6b2eed2SHong Zhang ierr = PetscMalloc((len+1)*sizeof(PetscScalar),&bufa);CHKERRQ(ierr); 
4781a6b2eed2SHong Zhang 478287025532SHong Zhang /* create i-array of B_oth */ 478387025532SHong Zhang ierr = PetscMalloc((aBn+2)*sizeof(PetscInt),&b_othi);CHKERRQ(ierr); 478487025532SHong Zhang b_othi[0] = 0; 4785a6b2eed2SHong Zhang len = 0; /* total length of j or a array to be received */ 4786a6b2eed2SHong Zhang k = 0; 4787a6b2eed2SHong Zhang for (i=0; i<nrecvs; i++){ 4788fd0ff01cSHong Zhang rowlen = (PetscInt*)rvalues + rstarts[i]*rbs; 4789e42f35eeSHong Zhang nrows = rbs*(rstarts[i+1]-rstarts[i]); /* num of rows to be recieved */ 479087025532SHong Zhang for (j=0; j<nrows; j++) { 479187025532SHong Zhang b_othi[k+1] = b_othi[k] + rowlen[j]; 4792a6b2eed2SHong Zhang len += rowlen[j]; k++; 4793a6b2eed2SHong Zhang } 4794dea91ad1SHong Zhang rstartsj[i+1] = len; /* starting point of (i+1)-th incoming msg in bufj and bufa */ 4795a6b2eed2SHong Zhang } 4796a6b2eed2SHong Zhang 479787025532SHong Zhang /* allocate space for j and a arrrays of B_oth */ 479887025532SHong Zhang ierr = PetscMalloc((b_othi[aBn]+1)*sizeof(PetscInt),&b_othj);CHKERRQ(ierr); 4799dd6ea824SBarry Smith ierr = PetscMalloc((b_othi[aBn]+1)*sizeof(MatScalar),&b_otha);CHKERRQ(ierr); 4800a6b2eed2SHong Zhang 480187025532SHong Zhang /* j-array */ 480287025532SHong Zhang /*---------*/ 4803a6b2eed2SHong Zhang /* post receives of j-array */ 4804a6b2eed2SHong Zhang for (i=0; i<nrecvs; i++){ 480587025532SHong Zhang nrows = rstartsj[i+1]-rstartsj[i]; /* length of the msg received */ 480687025532SHong Zhang ierr = MPI_Irecv(b_othj+rstartsj[i],nrows,MPIU_INT,rprocs[i],tag,comm,rwaits+i);CHKERRQ(ierr); 4807a6b2eed2SHong Zhang } 4808e42f35eeSHong Zhang 4809e42f35eeSHong Zhang /* pack the outgoing message j-array */ 4810a6b2eed2SHong Zhang k = 0; 4811a6b2eed2SHong Zhang for (i=0; i<nsends; i++){ 4812e42f35eeSHong Zhang nrows = sstarts[i+1]-sstarts[i]; /* num of block rows */ 4813a6b2eed2SHong Zhang bufJ = bufj+sstartsj[i]; 481487025532SHong Zhang for (j=0; j<nrows; j++) { 4815d0f46423SBarry Smith row = srow[k++] + 
B->rmap->range[rank]; /* global row idx */ 4816e42f35eeSHong Zhang for (ll=0; ll<sbs; ll++){ 4817e42f35eeSHong Zhang ierr = MatGetRow_MPIAIJ(B,row+ll,&ncols,&cols,PETSC_NULL);CHKERRQ(ierr); 4818a6b2eed2SHong Zhang for (l=0; l<ncols; l++){ 4819a6b2eed2SHong Zhang *bufJ++ = cols[l]; 482087025532SHong Zhang } 4821e42f35eeSHong Zhang ierr = MatRestoreRow_MPIAIJ(B,row+ll,&ncols,&cols,PETSC_NULL);CHKERRQ(ierr); 4822e42f35eeSHong Zhang } 482387025532SHong Zhang } 482487025532SHong Zhang ierr = MPI_Isend(bufj+sstartsj[i],sstartsj[i+1]-sstartsj[i],MPIU_INT,sprocs[i],tag,comm,swaits+i);CHKERRQ(ierr); 482587025532SHong Zhang } 482687025532SHong Zhang 482787025532SHong Zhang /* recvs and sends of j-array are completed */ 482887025532SHong Zhang i = nrecvs; 482987025532SHong Zhang while (i--) { 4830aa5bb8c0SSatish Balay ierr = MPI_Waitany(nrecvs,rwaits,&jj,&rstatus);CHKERRQ(ierr); 483187025532SHong Zhang } 48320c468ba9SBarry Smith if (nsends) {ierr = MPI_Waitall(nsends,swaits,sstatus);CHKERRQ(ierr);} 483387025532SHong Zhang } else if (scall == MAT_REUSE_MATRIX){ 483487025532SHong Zhang sstartsj = *startsj; 483587025532SHong Zhang rstartsj = sstartsj + nsends +1; 483687025532SHong Zhang bufa = *bufa_ptr; 483787025532SHong Zhang b_oth = (Mat_SeqAIJ*)(*B_oth)->data; 483887025532SHong Zhang b_otha = b_oth->a; 483987025532SHong Zhang } else { 484087025532SHong Zhang SETERRQ(PETSC_ERR_ARG_WRONGSTATE, "Matrix P does not posses an object container"); 484187025532SHong Zhang } 484287025532SHong Zhang 484387025532SHong Zhang /* a-array */ 484487025532SHong Zhang /*---------*/ 484587025532SHong Zhang /* post receives of a-array */ 484687025532SHong Zhang for (i=0; i<nrecvs; i++){ 484787025532SHong Zhang nrows = rstartsj[i+1]-rstartsj[i]; /* length of the msg received */ 484887025532SHong Zhang ierr = MPI_Irecv(b_otha+rstartsj[i],nrows,MPIU_SCALAR,rprocs[i],tag,comm,rwaits+i);CHKERRQ(ierr); 484987025532SHong Zhang } 4850e42f35eeSHong Zhang 4851e42f35eeSHong Zhang /* pack the outgoing 
message a-array */ 485287025532SHong Zhang k = 0; 485387025532SHong Zhang for (i=0; i<nsends; i++){ 4854e42f35eeSHong Zhang nrows = sstarts[i+1]-sstarts[i]; /* num of block rows */ 485587025532SHong Zhang bufA = bufa+sstartsj[i]; 485687025532SHong Zhang for (j=0; j<nrows; j++) { 4857d0f46423SBarry Smith row = srow[k++] + B->rmap->range[rank]; /* global row idx */ 4858e42f35eeSHong Zhang for (ll=0; ll<sbs; ll++){ 4859e42f35eeSHong Zhang ierr = MatGetRow_MPIAIJ(B,row+ll,&ncols,PETSC_NULL,&vals);CHKERRQ(ierr); 486087025532SHong Zhang for (l=0; l<ncols; l++){ 4861a6b2eed2SHong Zhang *bufA++ = vals[l]; 4862a6b2eed2SHong Zhang } 4863e42f35eeSHong Zhang ierr = MatRestoreRow_MPIAIJ(B,row+ll,&ncols,PETSC_NULL,&vals);CHKERRQ(ierr); 4864e42f35eeSHong Zhang } 4865a6b2eed2SHong Zhang } 486687025532SHong Zhang ierr = MPI_Isend(bufa+sstartsj[i],sstartsj[i+1]-sstartsj[i],MPIU_SCALAR,sprocs[i],tag,comm,swaits+i);CHKERRQ(ierr); 4867a6b2eed2SHong Zhang } 486887025532SHong Zhang /* recvs and sends of a-array are completed */ 486987025532SHong Zhang i = nrecvs; 487087025532SHong Zhang while (i--) { 4871aa5bb8c0SSatish Balay ierr = MPI_Waitany(nrecvs,rwaits,&jj,&rstatus);CHKERRQ(ierr); 487287025532SHong Zhang } 48730c468ba9SBarry Smith if (nsends) {ierr = MPI_Waitall(nsends,swaits,sstatus);CHKERRQ(ierr);} 4874d7ee0231SBarry Smith ierr = PetscFree2(rwaits,swaits);CHKERRQ(ierr); 4875a6b2eed2SHong Zhang 487687025532SHong Zhang if (scall == MAT_INITIAL_MATRIX){ 4877a6b2eed2SHong Zhang /* put together the new matrix */ 4878d0f46423SBarry Smith ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,aBn,B->cmap->N,b_othi,b_othj,b_otha,B_oth);CHKERRQ(ierr); 4879a6b2eed2SHong Zhang 4880a6b2eed2SHong Zhang /* MatCreateSeqAIJWithArrays flags matrix so PETSc doesn't free the user's arrays. */ 4881a6b2eed2SHong Zhang /* Since these are PETSc arrays, change flags to free them as necessary. 
*/ 488287025532SHong Zhang b_oth = (Mat_SeqAIJ *)(*B_oth)->data; 4883e6b907acSBarry Smith b_oth->free_a = PETSC_TRUE; 4884e6b907acSBarry Smith b_oth->free_ij = PETSC_TRUE; 488587025532SHong Zhang b_oth->nonew = 0; 4886a6b2eed2SHong Zhang 4887a6b2eed2SHong Zhang ierr = PetscFree(bufj);CHKERRQ(ierr); 4888dea91ad1SHong Zhang if (!startsj || !bufa_ptr){ 4889dea91ad1SHong Zhang ierr = PetscFree(sstartsj);CHKERRQ(ierr); 4890dea91ad1SHong Zhang ierr = PetscFree(bufa_ptr);CHKERRQ(ierr); 4891dea91ad1SHong Zhang } else { 489287025532SHong Zhang *startsj = sstartsj; 489387025532SHong Zhang *bufa_ptr = bufa; 489487025532SHong Zhang } 4895dea91ad1SHong Zhang } 48964ebed01fSBarry Smith ierr = PetscLogEventEnd(MAT_GetBrowsOfAocols,A,B,0,0);CHKERRQ(ierr); 4897429d309bSHong Zhang PetscFunctionReturn(0); 4898429d309bSHong Zhang } 4899ccd8e176SBarry Smith 490043eb5e2fSMatthew Knepley #undef __FUNCT__ 490143eb5e2fSMatthew Knepley #define __FUNCT__ "MatGetCommunicationStructs" 490243eb5e2fSMatthew Knepley /*@C 490343eb5e2fSMatthew Knepley MatGetCommunicationStructs - Provides access to the communication structures used in matrix-vector multiplication. 490443eb5e2fSMatthew Knepley 490543eb5e2fSMatthew Knepley Not Collective 490643eb5e2fSMatthew Knepley 490743eb5e2fSMatthew Knepley Input Parameters: 490843eb5e2fSMatthew Knepley . A - The matrix in mpiaij format 490943eb5e2fSMatthew Knepley 491043eb5e2fSMatthew Knepley Output Parameter: 491143eb5e2fSMatthew Knepley + lvec - The local vector holding off-process values from the argument to a matrix-vector product 491243eb5e2fSMatthew Knepley . 
colmap - A map from global column index to local index into lvec 491343eb5e2fSMatthew Knepley - multScatter - A scatter from the argument of a matrix-vector product to lvec 491443eb5e2fSMatthew Knepley 491543eb5e2fSMatthew Knepley Level: developer 491643eb5e2fSMatthew Knepley 491743eb5e2fSMatthew Knepley @*/ 491843eb5e2fSMatthew Knepley #if defined (PETSC_USE_CTABLE) 491943eb5e2fSMatthew Knepley PetscErrorCode PETSCMAT_DLLEXPORT MatGetCommunicationStructs(Mat A, Vec *lvec, PetscTable *colmap, VecScatter *multScatter) 492043eb5e2fSMatthew Knepley #else 492143eb5e2fSMatthew Knepley PetscErrorCode PETSCMAT_DLLEXPORT MatGetCommunicationStructs(Mat A, Vec *lvec, PetscInt *colmap[], VecScatter *multScatter) 492243eb5e2fSMatthew Knepley #endif 492343eb5e2fSMatthew Knepley { 492443eb5e2fSMatthew Knepley Mat_MPIAIJ *a; 492543eb5e2fSMatthew Knepley 492643eb5e2fSMatthew Knepley PetscFunctionBegin; 492743eb5e2fSMatthew Knepley PetscValidHeaderSpecific(A, MAT_COOKIE, 1); 492843eb5e2fSMatthew Knepley PetscValidPointer(lvec, 2) 492943eb5e2fSMatthew Knepley PetscValidPointer(colmap, 3) 493043eb5e2fSMatthew Knepley PetscValidPointer(multScatter, 4) 493143eb5e2fSMatthew Knepley a = (Mat_MPIAIJ *) A->data; 493243eb5e2fSMatthew Knepley if (lvec) *lvec = a->lvec; 493343eb5e2fSMatthew Knepley if (colmap) *colmap = a->colmap; 493443eb5e2fSMatthew Knepley if (multScatter) *multScatter = a->Mvctx; 493543eb5e2fSMatthew Knepley PetscFunctionReturn(0); 493643eb5e2fSMatthew Knepley } 493743eb5e2fSMatthew Knepley 493817667f90SBarry Smith EXTERN_C_BEGIN 49398cf70c4bSSatish Balay extern PetscErrorCode PETSCMAT_DLLEXPORT MatConvert_MPIAIJ_MPICRL(Mat,const MatType,MatReuse,Mat*); 49408cf70c4bSSatish Balay extern PetscErrorCode PETSCMAT_DLLEXPORT MatConvert_MPIAIJ_MPICSRPERM(Mat,const MatType,MatReuse,Mat*); 494117667f90SBarry Smith EXTERN_C_END 494217667f90SBarry Smith 4943fc4dec0aSBarry Smith #undef __FUNCT__ 4944fc4dec0aSBarry Smith #define __FUNCT__ "MatMatMultNumeric_MPIDense_MPIAIJ" 
4945fc4dec0aSBarry Smith /* 4946fc4dec0aSBarry Smith Computes (B'*A')' since computing B*A directly is untenable 4947fc4dec0aSBarry Smith 4948fc4dec0aSBarry Smith n p p 4949fc4dec0aSBarry Smith ( ) ( ) ( ) 4950fc4dec0aSBarry Smith m ( A ) * n ( B ) = m ( C ) 4951fc4dec0aSBarry Smith ( ) ( ) ( ) 4952fc4dec0aSBarry Smith 4953fc4dec0aSBarry Smith */ 4954fc4dec0aSBarry Smith PetscErrorCode MatMatMultNumeric_MPIDense_MPIAIJ(Mat A,Mat B,Mat C) 4955fc4dec0aSBarry Smith { 4956fc4dec0aSBarry Smith PetscErrorCode ierr; 4957fc4dec0aSBarry Smith Mat At,Bt,Ct; 4958fc4dec0aSBarry Smith 4959fc4dec0aSBarry Smith PetscFunctionBegin; 4960fc4dec0aSBarry Smith ierr = MatTranspose(A,MAT_INITIAL_MATRIX,&At);CHKERRQ(ierr); 4961fc4dec0aSBarry Smith ierr = MatTranspose(B,MAT_INITIAL_MATRIX,&Bt);CHKERRQ(ierr); 4962fc4dec0aSBarry Smith ierr = MatMatMult(Bt,At,MAT_INITIAL_MATRIX,1.0,&Ct);CHKERRQ(ierr); 4963fc4dec0aSBarry Smith ierr = MatDestroy(At);CHKERRQ(ierr); 4964fc4dec0aSBarry Smith ierr = MatDestroy(Bt);CHKERRQ(ierr); 4965fc4dec0aSBarry Smith ierr = MatTranspose(Ct,MAT_REUSE_MATRIX,&C);CHKERRQ(ierr); 4966e5e4356aSBarry Smith ierr = MatDestroy(Ct);CHKERRQ(ierr); 4967fc4dec0aSBarry Smith PetscFunctionReturn(0); 4968fc4dec0aSBarry Smith } 4969fc4dec0aSBarry Smith 4970fc4dec0aSBarry Smith #undef __FUNCT__ 4971fc4dec0aSBarry Smith #define __FUNCT__ "MatMatMultSymbolic_MPIDense_MPIAIJ" 4972fc4dec0aSBarry Smith PetscErrorCode MatMatMultSymbolic_MPIDense_MPIAIJ(Mat A,Mat B,PetscReal fill,Mat *C) 4973fc4dec0aSBarry Smith { 4974fc4dec0aSBarry Smith PetscErrorCode ierr; 4975d0f46423SBarry Smith PetscInt m=A->rmap->n,n=B->cmap->n; 4976fc4dec0aSBarry Smith Mat Cmat; 4977fc4dec0aSBarry Smith 4978fc4dec0aSBarry Smith PetscFunctionBegin; 4979d0f46423SBarry Smith if (A->cmap->n != B->rmap->n) SETERRQ2(PETSC_ERR_ARG_SIZ,"A->cmap->n %d != B->rmap->n %d\n",A->cmap->n,B->rmap->n); 498039804f7cSBarry Smith ierr = MatCreate(((PetscObject)A)->comm,&Cmat);CHKERRQ(ierr); 4981fc4dec0aSBarry Smith ierr = 
MatSetSizes(Cmat,m,n,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr); 4982fc4dec0aSBarry Smith ierr = MatSetType(Cmat,MATMPIDENSE);CHKERRQ(ierr); 4983fc4dec0aSBarry Smith ierr = MatMPIDenseSetPreallocation(Cmat,PETSC_NULL);CHKERRQ(ierr); 498438556019SBarry Smith ierr = MatAssemblyBegin(Cmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 498538556019SBarry Smith ierr = MatAssemblyEnd(Cmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 4986fc4dec0aSBarry Smith *C = Cmat; 4987fc4dec0aSBarry Smith PetscFunctionReturn(0); 4988fc4dec0aSBarry Smith } 4989fc4dec0aSBarry Smith 4990fc4dec0aSBarry Smith /* ----------------------------------------------------------------*/ 4991fc4dec0aSBarry Smith #undef __FUNCT__ 4992fc4dec0aSBarry Smith #define __FUNCT__ "MatMatMult_MPIDense_MPIAIJ" 4993fc4dec0aSBarry Smith PetscErrorCode MatMatMult_MPIDense_MPIAIJ(Mat A,Mat B,MatReuse scall,PetscReal fill,Mat *C) 4994fc4dec0aSBarry Smith { 4995fc4dec0aSBarry Smith PetscErrorCode ierr; 4996fc4dec0aSBarry Smith 4997fc4dec0aSBarry Smith PetscFunctionBegin; 4998fc4dec0aSBarry Smith if (scall == MAT_INITIAL_MATRIX){ 4999fc4dec0aSBarry Smith ierr = MatMatMultSymbolic_MPIDense_MPIAIJ(A,B,fill,C);CHKERRQ(ierr); 5000fc4dec0aSBarry Smith } 5001fc4dec0aSBarry Smith ierr = MatMatMultNumeric_MPIDense_MPIAIJ(A,B,*C);CHKERRQ(ierr); 5002fc4dec0aSBarry Smith PetscFunctionReturn(0); 5003fc4dec0aSBarry Smith } 5004fc4dec0aSBarry Smith 50055c9eb25fSBarry Smith EXTERN_C_BEGIN 5006611f576cSBarry Smith #if defined(PETSC_HAVE_MUMPS) 50075c9eb25fSBarry Smith extern PetscErrorCode MatGetFactor_mpiaij_mumps(Mat,MatFactorType,Mat*); 5008611f576cSBarry Smith #endif 50093bf14a46SMatthew Knepley #if defined(PETSC_HAVE_PASTIX) 50103bf14a46SMatthew Knepley extern PetscErrorCode MatGetFactor_mpiaij_pastix(Mat,MatFactorType,Mat*); 50113bf14a46SMatthew Knepley #endif 5012611f576cSBarry Smith #if defined(PETSC_HAVE_SUPERLU_DIST) 50135c9eb25fSBarry Smith extern PetscErrorCode MatGetFactor_mpiaij_superlu_dist(Mat,MatFactorType,Mat*); 5014611f576cSBarry 
Smith #endif 5015611f576cSBarry Smith #if defined(PETSC_HAVE_SPOOLES) 50165c9eb25fSBarry Smith extern PetscErrorCode MatGetFactor_mpiaij_spooles(Mat,MatFactorType,Mat*); 5017611f576cSBarry Smith #endif 50185c9eb25fSBarry Smith EXTERN_C_END 50195c9eb25fSBarry Smith 5020ccd8e176SBarry Smith /*MC 5021ccd8e176SBarry Smith MATMPIAIJ - MATMPIAIJ = "mpiaij" - A matrix type to be used for parallel sparse matrices. 5022ccd8e176SBarry Smith 5023ccd8e176SBarry Smith Options Database Keys: 5024ccd8e176SBarry Smith . -mat_type mpiaij - sets the matrix type to "mpiaij" during a call to MatSetFromOptions() 5025ccd8e176SBarry Smith 5026ccd8e176SBarry Smith Level: beginner 5027ccd8e176SBarry Smith 5028175b88e8SBarry Smith .seealso: MatCreateMPIAIJ() 5029ccd8e176SBarry Smith M*/ 5030ccd8e176SBarry Smith 5031ccd8e176SBarry Smith EXTERN_C_BEGIN 5032ccd8e176SBarry Smith #undef __FUNCT__ 5033ccd8e176SBarry Smith #define __FUNCT__ "MatCreate_MPIAIJ" 5034be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatCreate_MPIAIJ(Mat B) 5035ccd8e176SBarry Smith { 5036ccd8e176SBarry Smith Mat_MPIAIJ *b; 5037ccd8e176SBarry Smith PetscErrorCode ierr; 5038ccd8e176SBarry Smith PetscMPIInt size; 5039ccd8e176SBarry Smith 5040ccd8e176SBarry Smith PetscFunctionBegin; 50417adad957SLisandro Dalcin ierr = MPI_Comm_size(((PetscObject)B)->comm,&size);CHKERRQ(ierr); 5042ccd8e176SBarry Smith 504338f2d2fdSLisandro Dalcin ierr = PetscNewLog(B,Mat_MPIAIJ,&b);CHKERRQ(ierr); 5044ccd8e176SBarry Smith B->data = (void*)b; 5045ccd8e176SBarry Smith ierr = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr); 5046d0f46423SBarry Smith B->rmap->bs = 1; 5047ccd8e176SBarry Smith B->assembled = PETSC_FALSE; 5048ccd8e176SBarry Smith B->mapping = 0; 5049ccd8e176SBarry Smith 5050ccd8e176SBarry Smith B->insertmode = NOT_SET_VALUES; 5051ccd8e176SBarry Smith b->size = size; 50527adad957SLisandro Dalcin ierr = MPI_Comm_rank(((PetscObject)B)->comm,&b->rank);CHKERRQ(ierr); 5053ccd8e176SBarry Smith 
5054ccd8e176SBarry Smith /* build cache for off array entries formed */ 50557adad957SLisandro Dalcin ierr = MatStashCreate_Private(((PetscObject)B)->comm,1,&B->stash);CHKERRQ(ierr); 5056ccd8e176SBarry Smith b->donotstash = PETSC_FALSE; 5057ccd8e176SBarry Smith b->colmap = 0; 5058ccd8e176SBarry Smith b->garray = 0; 5059ccd8e176SBarry Smith b->roworiented = PETSC_TRUE; 5060ccd8e176SBarry Smith 5061ccd8e176SBarry Smith /* stuff used for matrix vector multiply */ 5062ccd8e176SBarry Smith b->lvec = PETSC_NULL; 5063ccd8e176SBarry Smith b->Mvctx = PETSC_NULL; 5064ccd8e176SBarry Smith 5065ccd8e176SBarry Smith /* stuff for MatGetRow() */ 5066ccd8e176SBarry Smith b->rowindices = 0; 5067ccd8e176SBarry Smith b->rowvalues = 0; 5068ccd8e176SBarry Smith b->getrowactive = PETSC_FALSE; 5069ccd8e176SBarry Smith 5070611f576cSBarry Smith #if defined(PETSC_HAVE_SPOOLES) 5071ec1065edSBarry Smith ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatGetFactor_spooles_C", 50725c9eb25fSBarry Smith "MatGetFactor_mpiaij_spooles", 50735c9eb25fSBarry Smith MatGetFactor_mpiaij_spooles);CHKERRQ(ierr); 5074611f576cSBarry Smith #endif 5075611f576cSBarry Smith #if defined(PETSC_HAVE_MUMPS) 5076ec1065edSBarry Smith ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatGetFactor_mumps_C", 50775c9eb25fSBarry Smith "MatGetFactor_mpiaij_mumps", 50785c9eb25fSBarry Smith MatGetFactor_mpiaij_mumps);CHKERRQ(ierr); 5079611f576cSBarry Smith #endif 50803bf14a46SMatthew Knepley #if defined(PETSC_HAVE_PASTIX) 5081ec1065edSBarry Smith ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatGetFactor_pastix_C", 50823bf14a46SMatthew Knepley "MatGetFactor_mpiaij_pastix", 50833bf14a46SMatthew Knepley MatGetFactor_mpiaij_pastix);CHKERRQ(ierr); 50843bf14a46SMatthew Knepley #endif 5085611f576cSBarry Smith #if defined(PETSC_HAVE_SUPERLU_DIST) 5086ec1065edSBarry Smith ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatGetFactor_superlu_dist_C", 50875c9eb25fSBarry Smith 
"MatGetFactor_mpiaij_superlu_dist", 50885c9eb25fSBarry Smith MatGetFactor_mpiaij_superlu_dist);CHKERRQ(ierr); 5089611f576cSBarry Smith #endif 5090ccd8e176SBarry Smith ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatStoreValues_C", 5091ccd8e176SBarry Smith "MatStoreValues_MPIAIJ", 5092ccd8e176SBarry Smith MatStoreValues_MPIAIJ);CHKERRQ(ierr); 5093ccd8e176SBarry Smith ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatRetrieveValues_C", 5094ccd8e176SBarry Smith "MatRetrieveValues_MPIAIJ", 5095ccd8e176SBarry Smith MatRetrieveValues_MPIAIJ);CHKERRQ(ierr); 5096ccd8e176SBarry Smith ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatGetDiagonalBlock_C", 5097ccd8e176SBarry Smith "MatGetDiagonalBlock_MPIAIJ", 5098ccd8e176SBarry Smith MatGetDiagonalBlock_MPIAIJ);CHKERRQ(ierr); 5099ccd8e176SBarry Smith ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatIsTranspose_C", 5100ccd8e176SBarry Smith "MatIsTranspose_MPIAIJ", 5101ccd8e176SBarry Smith MatIsTranspose_MPIAIJ);CHKERRQ(ierr); 5102ccd8e176SBarry Smith ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAIJSetPreallocation_C", 5103ccd8e176SBarry Smith "MatMPIAIJSetPreallocation_MPIAIJ", 5104ccd8e176SBarry Smith MatMPIAIJSetPreallocation_MPIAIJ);CHKERRQ(ierr); 5105ccd8e176SBarry Smith ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAIJSetPreallocationCSR_C", 5106ccd8e176SBarry Smith "MatMPIAIJSetPreallocationCSR_MPIAIJ", 5107ccd8e176SBarry Smith MatMPIAIJSetPreallocationCSR_MPIAIJ);CHKERRQ(ierr); 5108ccd8e176SBarry Smith ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatDiagonalScaleLocal_C", 5109ccd8e176SBarry Smith "MatDiagonalScaleLocal_MPIAIJ", 5110ccd8e176SBarry Smith MatDiagonalScaleLocal_MPIAIJ);CHKERRQ(ierr); 511117667f90SBarry Smith ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatConvert_mpiaij_mpicsrperm_C", 511217667f90SBarry Smith "MatConvert_MPIAIJ_MPICSRPERM", 511317667f90SBarry Smith 
MatConvert_MPIAIJ_MPICSRPERM);CHKERRQ(ierr); 511417667f90SBarry Smith ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatConvert_mpiaij_mpicrl_C", 511517667f90SBarry Smith "MatConvert_MPIAIJ_MPICRL", 511617667f90SBarry Smith MatConvert_MPIAIJ_MPICRL);CHKERRQ(ierr); 5117fc4dec0aSBarry Smith ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMatMult_mpidense_mpiaij_C", 5118fc4dec0aSBarry Smith "MatMatMult_MPIDense_MPIAIJ", 5119fc4dec0aSBarry Smith MatMatMult_MPIDense_MPIAIJ);CHKERRQ(ierr); 5120fc4dec0aSBarry Smith ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMatMultSymbolic_mpidense_mpiaij_C", 5121fc4dec0aSBarry Smith "MatMatMultSymbolic_MPIDense_MPIAIJ", 5122fc4dec0aSBarry Smith MatMatMultSymbolic_MPIDense_MPIAIJ);CHKERRQ(ierr); 5123fc4dec0aSBarry Smith ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMatMultNumeric_mpidense_mpiaij_C", 5124fc4dec0aSBarry Smith "MatMatMultNumeric_MPIDense_MPIAIJ", 5125fc4dec0aSBarry Smith MatMatMultNumeric_MPIDense_MPIAIJ);CHKERRQ(ierr); 512617667f90SBarry Smith ierr = PetscObjectChangeTypeName((PetscObject)B,MATMPIAIJ);CHKERRQ(ierr); 5127ccd8e176SBarry Smith PetscFunctionReturn(0); 5128ccd8e176SBarry Smith } 5129ccd8e176SBarry Smith EXTERN_C_END 513081824310SBarry Smith 513103bfb495SBarry Smith #undef __FUNCT__ 513203bfb495SBarry Smith #define __FUNCT__ "MatCreateMPIAIJWithSplitArrays" 513358d36128SBarry Smith /*@ 513403bfb495SBarry Smith MatCreateMPIAIJWithSplitArrays - creates a MPI AIJ matrix using arrays that contain the "diagonal" 513503bfb495SBarry Smith and "off-diagonal" part of the matrix in CSR format. 513603bfb495SBarry Smith 513703bfb495SBarry Smith Collective on MPI_Comm 513803bfb495SBarry Smith 513903bfb495SBarry Smith Input Parameters: 514003bfb495SBarry Smith + comm - MPI communicator 514103bfb495SBarry Smith . m - number of local rows (Cannot be PETSC_DECIDE) 514203bfb495SBarry Smith . 
n - This value should be the same as the local size used in creating the 514303bfb495SBarry Smith x vector for the matrix-vector product y = Ax. (or PETSC_DECIDE to have 514403bfb495SBarry Smith calculated if N is given) For square matrices n is almost always m. 514503bfb495SBarry Smith . M - number of global rows (or PETSC_DETERMINE to have calculated if m is given) 514603bfb495SBarry Smith . N - number of global columns (or PETSC_DETERMINE to have calculated if n is given) 514703bfb495SBarry Smith . i - row indices for "diagonal" portion of matrix 514803bfb495SBarry Smith . j - column indices 514903bfb495SBarry Smith . a - matrix values 515003bfb495SBarry Smith . oi - row indices for "off-diagonal" portion of matrix 515103bfb495SBarry Smith . oj - column indices 515203bfb495SBarry Smith - oa - matrix values 515303bfb495SBarry Smith 515403bfb495SBarry Smith Output Parameter: 515503bfb495SBarry Smith . mat - the matrix 515603bfb495SBarry Smith 515703bfb495SBarry Smith Level: advanced 515803bfb495SBarry Smith 515903bfb495SBarry Smith Notes: 516003bfb495SBarry Smith The i, j, and a arrays ARE NOT copied by this routine into the internal format used by PETSc. 516103bfb495SBarry Smith 516203bfb495SBarry Smith The i and j indices are 0 based 516303bfb495SBarry Smith 516403bfb495SBarry Smith See MatCreateMPIAIJ() for the definition of "diagonal" and "off-diagonal" portion of the matrix 516503bfb495SBarry Smith 51667b55108eSBarry Smith This sets local rows and cannot be used to set off-processor values. 51677b55108eSBarry Smith 51687b55108eSBarry Smith You cannot later use MatSetValues() to change values in this matrix. 
516903bfb495SBarry Smith 517003bfb495SBarry Smith .keywords: matrix, aij, compressed row, sparse, parallel 517103bfb495SBarry Smith 517203bfb495SBarry Smith .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIAIJSetPreallocation(), MatMPIAIJSetPreallocationCSR(), 51738d7a6e47SBarry Smith MPIAIJ, MatCreateMPIAIJ(), MatCreateMPIAIJWithArrays() 517403bfb495SBarry Smith @*/ 51758d7a6e47SBarry Smith PetscErrorCode PETSCMAT_DLLEXPORT MatCreateMPIAIJWithSplitArrays(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt i[],PetscInt j[],PetscScalar a[], 517603bfb495SBarry Smith PetscInt oi[], PetscInt oj[],PetscScalar oa[],Mat *mat) 517703bfb495SBarry Smith { 517803bfb495SBarry Smith PetscErrorCode ierr; 517903bfb495SBarry Smith Mat_MPIAIJ *maij; 518003bfb495SBarry Smith 518103bfb495SBarry Smith PetscFunctionBegin; 518203bfb495SBarry Smith if (m < 0) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"local number of rows (m) cannot be PETSC_DECIDE, or negative"); 518303bfb495SBarry Smith if (i[0]) { 518403bfb495SBarry Smith SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"i (row indices) must start with 0"); 518503bfb495SBarry Smith } 518603bfb495SBarry Smith if (oi[0]) { 518703bfb495SBarry Smith SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"oi (row indices) must start with 0"); 518803bfb495SBarry Smith } 518903bfb495SBarry Smith ierr = MatCreate(comm,mat);CHKERRQ(ierr); 519003bfb495SBarry Smith ierr = MatSetSizes(*mat,m,n,M,N);CHKERRQ(ierr); 519103bfb495SBarry Smith ierr = MatSetType(*mat,MATMPIAIJ);CHKERRQ(ierr); 519203bfb495SBarry Smith maij = (Mat_MPIAIJ*) (*mat)->data; 51938d7a6e47SBarry Smith maij->donotstash = PETSC_TRUE; 51948d7a6e47SBarry Smith (*mat)->preallocated = PETSC_TRUE; 519503bfb495SBarry Smith 519626283091SBarry Smith ierr = PetscLayoutSetBlockSize((*mat)->rmap,1);CHKERRQ(ierr); 519726283091SBarry Smith ierr = PetscLayoutSetBlockSize((*mat)->cmap,1);CHKERRQ(ierr); 519826283091SBarry Smith ierr = PetscLayoutSetUp((*mat)->rmap);CHKERRQ(ierr); 519926283091SBarry Smith ierr = 
PetscLayoutSetUp((*mat)->cmap);CHKERRQ(ierr); 520003bfb495SBarry Smith 520103bfb495SBarry Smith ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,m,n,i,j,a,&maij->A);CHKERRQ(ierr); 5202d0f46423SBarry Smith ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,m,(*mat)->cmap->N,oi,oj,oa,&maij->B);CHKERRQ(ierr); 520303bfb495SBarry Smith 52048d7a6e47SBarry Smith ierr = MatAssemblyBegin(maij->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 52058d7a6e47SBarry Smith ierr = MatAssemblyEnd(maij->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 52068d7a6e47SBarry Smith ierr = MatAssemblyBegin(maij->B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 52078d7a6e47SBarry Smith ierr = MatAssemblyEnd(maij->B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 52088d7a6e47SBarry Smith 520903bfb495SBarry Smith ierr = MatAssemblyBegin(*mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 521003bfb495SBarry Smith ierr = MatAssemblyEnd(*mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 521103bfb495SBarry Smith PetscFunctionReturn(0); 521203bfb495SBarry Smith } 521303bfb495SBarry Smith 521481824310SBarry Smith /* 521581824310SBarry Smith Special version for direct calls from Fortran 521681824310SBarry Smith */ 521781824310SBarry Smith #if defined(PETSC_HAVE_FORTRAN_CAPS) 521881824310SBarry Smith #define matsetvaluesmpiaij_ MATSETVALUESMPIAIJ 521981824310SBarry Smith #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) 522081824310SBarry Smith #define matsetvaluesmpiaij_ matsetvaluesmpiaij 522181824310SBarry Smith #endif 522281824310SBarry Smith 522381824310SBarry Smith /* Change these macros so can be used in void function */ 522481824310SBarry Smith #undef CHKERRQ 52257adad957SLisandro Dalcin #define CHKERRQ(ierr) CHKERRABORT(((PetscObject)mat)->comm,ierr) 522681824310SBarry Smith #undef SETERRQ2 52277adad957SLisandro Dalcin #define SETERRQ2(ierr,b,c,d) CHKERRABORT(((PetscObject)mat)->comm,ierr) 522881824310SBarry Smith #undef SETERRQ 52297adad957SLisandro Dalcin #define SETERRQ(ierr,b) CHKERRABORT(((PetscObject)mat)->comm,ierr) 523081824310SBarry Smith 523181824310SBarry 
Smith EXTERN_C_BEGIN 523281824310SBarry Smith #undef __FUNCT__ 523381824310SBarry Smith #define __FUNCT__ "matsetvaluesmpiaij_" 52341f6cc5b2SSatish Balay void PETSC_STDCALL matsetvaluesmpiaij_(Mat *mmat,PetscInt *mm,const PetscInt im[],PetscInt *mn,const PetscInt in[],const PetscScalar v[],InsertMode *maddv,PetscErrorCode *_ierr) 523581824310SBarry Smith { 523681824310SBarry Smith Mat mat = *mmat; 523781824310SBarry Smith PetscInt m = *mm, n = *mn; 523881824310SBarry Smith InsertMode addv = *maddv; 523981824310SBarry Smith Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 524081824310SBarry Smith PetscScalar value; 524181824310SBarry Smith PetscErrorCode ierr; 5242899cda47SBarry Smith 5243d9e2c085SLisandro Dalcin ierr = MatPreallocated(mat);CHKERRQ(ierr); 524481824310SBarry Smith if (mat->insertmode == NOT_SET_VALUES) { 524581824310SBarry Smith mat->insertmode = addv; 524681824310SBarry Smith } 524781824310SBarry Smith #if defined(PETSC_USE_DEBUG) 524881824310SBarry Smith else if (mat->insertmode != addv) { 524981824310SBarry Smith SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"Cannot mix add values and insert values"); 525081824310SBarry Smith } 525181824310SBarry Smith #endif 525281824310SBarry Smith { 5253d0f46423SBarry Smith PetscInt i,j,rstart = mat->rmap->rstart,rend = mat->rmap->rend; 5254d0f46423SBarry Smith PetscInt cstart = mat->cmap->rstart,cend = mat->cmap->rend,row,col; 525581824310SBarry Smith PetscTruth roworiented = aij->roworiented; 525681824310SBarry Smith 525781824310SBarry Smith /* Some Variables required in the macro */ 525881824310SBarry Smith Mat A = aij->A; 525981824310SBarry Smith Mat_SeqAIJ *a = (Mat_SeqAIJ*)A->data; 526081824310SBarry Smith PetscInt *aimax = a->imax,*ai = a->i,*ailen = a->ilen,*aj = a->j; 5261dd6ea824SBarry Smith MatScalar *aa = a->a; 526281824310SBarry Smith PetscTruth ignorezeroentries = (((a->ignorezeroentries)&&(addv==ADD_VALUES))?PETSC_TRUE:PETSC_FALSE); 526381824310SBarry Smith Mat B = aij->B; 526481824310SBarry Smith Mat_SeqAIJ *b = 
(Mat_SeqAIJ*)B->data; 5265d0f46423SBarry Smith PetscInt *bimax = b->imax,*bi = b->i,*bilen = b->ilen,*bj = b->j,bm = aij->B->rmap->n,am = aij->A->rmap->n; 5266dd6ea824SBarry Smith MatScalar *ba = b->a; 526781824310SBarry Smith 526881824310SBarry Smith PetscInt *rp1,*rp2,ii,nrow1,nrow2,_i,rmax1,rmax2,N,low1,high1,low2,high2,t,lastcol1,lastcol2; 526981824310SBarry Smith PetscInt nonew = a->nonew; 5270dd6ea824SBarry Smith MatScalar *ap1,*ap2; 527181824310SBarry Smith 527281824310SBarry Smith PetscFunctionBegin; 527381824310SBarry Smith for (i=0; i<m; i++) { 527481824310SBarry Smith if (im[i] < 0) continue; 527581824310SBarry Smith #if defined(PETSC_USE_DEBUG) 5276d0f46423SBarry Smith if (im[i] >= mat->rmap->N) SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",im[i],mat->rmap->N-1); 527781824310SBarry Smith #endif 527881824310SBarry Smith if (im[i] >= rstart && im[i] < rend) { 527981824310SBarry Smith row = im[i] - rstart; 528081824310SBarry Smith lastcol1 = -1; 528181824310SBarry Smith rp1 = aj + ai[row]; 528281824310SBarry Smith ap1 = aa + ai[row]; 528381824310SBarry Smith rmax1 = aimax[row]; 528481824310SBarry Smith nrow1 = ailen[row]; 528581824310SBarry Smith low1 = 0; 528681824310SBarry Smith high1 = nrow1; 528781824310SBarry Smith lastcol2 = -1; 528881824310SBarry Smith rp2 = bj + bi[row]; 528981824310SBarry Smith ap2 = ba + bi[row]; 529081824310SBarry Smith rmax2 = bimax[row]; 529181824310SBarry Smith nrow2 = bilen[row]; 529281824310SBarry Smith low2 = 0; 529381824310SBarry Smith high2 = nrow2; 529481824310SBarry Smith 529581824310SBarry Smith for (j=0; j<n; j++) { 529681824310SBarry Smith if (roworiented) value = v[i*n+j]; else value = v[i+j*m]; 529781824310SBarry Smith if (ignorezeroentries && value == 0.0 && (addv == ADD_VALUES)) continue; 529881824310SBarry Smith if (in[j] >= cstart && in[j] < cend){ 529981824310SBarry Smith col = in[j] - cstart; 530081824310SBarry Smith MatSetValues_SeqAIJ_A_Private(row,col,value,addv); 530181824310SBarry 
Smith } else if (in[j] < 0) continue; 530281824310SBarry Smith #if defined(PETSC_USE_DEBUG) 5303d0f46423SBarry Smith else if (in[j] >= mat->cmap->N) {SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",in[j],mat->cmap->N-1);} 530481824310SBarry Smith #endif 530581824310SBarry Smith else { 530681824310SBarry Smith if (mat->was_assembled) { 530781824310SBarry Smith if (!aij->colmap) { 530881824310SBarry Smith ierr = CreateColmap_MPIAIJ_Private(mat);CHKERRQ(ierr); 530981824310SBarry Smith } 531081824310SBarry Smith #if defined (PETSC_USE_CTABLE) 531181824310SBarry Smith ierr = PetscTableFind(aij->colmap,in[j]+1,&col);CHKERRQ(ierr); 531281824310SBarry Smith col--; 531381824310SBarry Smith #else 531481824310SBarry Smith col = aij->colmap[in[j]] - 1; 531581824310SBarry Smith #endif 531681824310SBarry Smith if (col < 0 && !((Mat_SeqAIJ*)(aij->A->data))->nonew) { 531781824310SBarry Smith ierr = DisAssemble_MPIAIJ(mat);CHKERRQ(ierr); 531881824310SBarry Smith col = in[j]; 531981824310SBarry Smith /* Reinitialize the variables required by MatSetValues_SeqAIJ_B_Private() */ 532081824310SBarry Smith B = aij->B; 532181824310SBarry Smith b = (Mat_SeqAIJ*)B->data; 532281824310SBarry Smith bimax = b->imax; bi = b->i; bilen = b->ilen; bj = b->j; 532381824310SBarry Smith rp2 = bj + bi[row]; 532481824310SBarry Smith ap2 = ba + bi[row]; 532581824310SBarry Smith rmax2 = bimax[row]; 532681824310SBarry Smith nrow2 = bilen[row]; 532781824310SBarry Smith low2 = 0; 532881824310SBarry Smith high2 = nrow2; 5329d0f46423SBarry Smith bm = aij->B->rmap->n; 533081824310SBarry Smith ba = b->a; 533181824310SBarry Smith } 533281824310SBarry Smith } else col = in[j]; 533381824310SBarry Smith MatSetValues_SeqAIJ_B_Private(row,col,value,addv); 533481824310SBarry Smith } 533581824310SBarry Smith } 533681824310SBarry Smith } else { 533781824310SBarry Smith if (!aij->donotstash) { 533881824310SBarry Smith if (roworiented) { 53393b024144SHong Zhang ierr = 
MatStashValuesRow_Private(&mat->stash,im[i],n,in,v+i*n,(PetscTruth)(ignorezeroentries && (addv == ADD_VALUES)));CHKERRQ(ierr); 534081824310SBarry Smith } else { 53413b024144SHong Zhang ierr = MatStashValuesCol_Private(&mat->stash,im[i],n,in,v+i,m,(PetscTruth)(ignorezeroentries && (addv == ADD_VALUES)));CHKERRQ(ierr); 534281824310SBarry Smith } 534381824310SBarry Smith } 534481824310SBarry Smith } 534581824310SBarry Smith }} 534681824310SBarry Smith PetscFunctionReturnVoid(); 534781824310SBarry Smith } 534881824310SBarry Smith EXTERN_C_END 534903bfb495SBarry Smith 5350