1be1d678aSKris Buschelman #define PETSCMAT_DLL 28a729477SBarry Smith 37c4f633dSBarry Smith #include "../src/mat/impls/aij/mpi/mpiaij.h" /*I "petscmat.h" I*/ 4f3da1532SBarry Smith #include "petscblaslapack.h" 58a729477SBarry Smith 6dd6ea824SBarry Smith #undef __FUNCT__ 7dd6ea824SBarry Smith #define __FUNCT__ "MatDistribute_MPIAIJ" 8dd6ea824SBarry Smith /* 9dd6ea824SBarry Smith Distributes a SeqAIJ matrix across a set of processes. Code stolen from 10dd6ea824SBarry Smith MatLoad_MPIAIJ(). Horrible lack of reuse. Should be a routine for each matrix type. 11dd6ea824SBarry Smith 12dd6ea824SBarry Smith Only for square matrices 13dd6ea824SBarry Smith */ 14dd6ea824SBarry Smith PetscErrorCode MatDistribute_MPIAIJ(MPI_Comm comm,Mat gmat,PetscInt m,MatReuse reuse,Mat *inmat) 15dd6ea824SBarry Smith { 16dd6ea824SBarry Smith PetscMPIInt rank,size; 17dd6ea824SBarry Smith PetscInt *rowners,*dlens,*olens,i,rstart,rend,j,jj,nz,*gmataj,cnt,row,*ld; 18dd6ea824SBarry Smith PetscErrorCode ierr; 19dd6ea824SBarry Smith Mat mat; 20dd6ea824SBarry Smith Mat_SeqAIJ *gmata; 21dd6ea824SBarry Smith PetscMPIInt tag; 22dd6ea824SBarry Smith MPI_Status status; 23ace3abfcSBarry Smith PetscBool aij; 24dd6ea824SBarry Smith MatScalar *gmataa,*ao,*ad,*gmataarestore=0; 25dd6ea824SBarry Smith 26dd6ea824SBarry Smith PetscFunctionBegin; 27dd6ea824SBarry Smith CHKMEMQ; 28dd6ea824SBarry Smith ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 29dd6ea824SBarry Smith ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 30dd6ea824SBarry Smith if (!rank) { 31dd6ea824SBarry Smith ierr = PetscTypeCompare((PetscObject)gmat,MATSEQAIJ,&aij);CHKERRQ(ierr); 3265e19b50SBarry Smith if (!aij) SETERRQ1(((PetscObject)gmat)->comm,PETSC_ERR_SUP,"Currently no support for input matrix of type %s\n",((PetscObject)gmat)->type_name); 33dd6ea824SBarry Smith } 34dd6ea824SBarry Smith if (reuse == MAT_INITIAL_MATRIX) { 35dd6ea824SBarry Smith ierr = MatCreate(comm,&mat);CHKERRQ(ierr); 36dd6ea824SBarry Smith ierr = 
MatSetSizes(mat,m,m,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr); 37dd6ea824SBarry Smith ierr = MatSetType(mat,MATAIJ);CHKERRQ(ierr); 38dd6ea824SBarry Smith ierr = PetscMalloc((size+1)*sizeof(PetscInt),&rowners);CHKERRQ(ierr); 39dd6ea824SBarry Smith ierr = PetscMalloc2(m,PetscInt,&dlens,m,PetscInt,&olens);CHKERRQ(ierr); 40dd6ea824SBarry Smith ierr = MPI_Allgather(&m,1,MPIU_INT,rowners+1,1,MPIU_INT,comm);CHKERRQ(ierr); 41dd6ea824SBarry Smith rowners[0] = 0; 42dd6ea824SBarry Smith for (i=2; i<=size; i++) { 43dd6ea824SBarry Smith rowners[i] += rowners[i-1]; 44dd6ea824SBarry Smith } 45dd6ea824SBarry Smith rstart = rowners[rank]; 46dd6ea824SBarry Smith rend = rowners[rank+1]; 47dd6ea824SBarry Smith ierr = PetscObjectGetNewTag((PetscObject)mat,&tag);CHKERRQ(ierr); 48dd6ea824SBarry Smith if (!rank) { 49dd6ea824SBarry Smith gmata = (Mat_SeqAIJ*) gmat->data; 50dd6ea824SBarry Smith /* send row lengths to all processors */ 51dd6ea824SBarry Smith for (i=0; i<m; i++) dlens[i] = gmata->ilen[i]; 52dd6ea824SBarry Smith for (i=1; i<size; i++) { 53dd6ea824SBarry Smith ierr = MPI_Send(gmata->ilen + rowners[i],rowners[i+1]-rowners[i],MPIU_INT,i,tag,comm);CHKERRQ(ierr); 54dd6ea824SBarry Smith } 55dd6ea824SBarry Smith /* determine number diagonal and off-diagonal counts */ 56dd6ea824SBarry Smith ierr = PetscMemzero(olens,m*sizeof(PetscInt));CHKERRQ(ierr); 57dd6ea824SBarry Smith ierr = PetscMalloc(m*sizeof(PetscInt),&ld);CHKERRQ(ierr); 58dd6ea824SBarry Smith ierr = PetscMemzero(ld,m*sizeof(PetscInt));CHKERRQ(ierr); 59dd6ea824SBarry Smith jj = 0; 60dd6ea824SBarry Smith for (i=0; i<m; i++) { 61dd6ea824SBarry Smith for (j=0; j<dlens[i]; j++) { 62dd6ea824SBarry Smith if (gmata->j[jj] < rstart) ld[i]++; 63dd6ea824SBarry Smith if (gmata->j[jj] < rstart || gmata->j[jj] >= rend) olens[i]++; 64dd6ea824SBarry Smith jj++; 65dd6ea824SBarry Smith } 66dd6ea824SBarry Smith } 67dd6ea824SBarry Smith /* send column indices to other processes */ 68dd6ea824SBarry Smith for (i=1; i<size; i++) { 
69dd6ea824SBarry Smith nz = gmata->i[rowners[i+1]]-gmata->i[rowners[i]]; 70dd6ea824SBarry Smith ierr = MPI_Send(&nz,1,MPIU_INT,i,tag,comm);CHKERRQ(ierr); 71dd6ea824SBarry Smith ierr = MPI_Send(gmata->j + gmata->i[rowners[i]],nz,MPIU_INT,i,tag,comm);CHKERRQ(ierr); 72dd6ea824SBarry Smith } 73dd6ea824SBarry Smith 74dd6ea824SBarry Smith /* send numerical values to other processes */ 75dd6ea824SBarry Smith for (i=1; i<size; i++) { 76dd6ea824SBarry Smith nz = gmata->i[rowners[i+1]]-gmata->i[rowners[i]]; 77dd6ea824SBarry Smith ierr = MPI_Send(gmata->a + gmata->i[rowners[i]],nz,MPIU_SCALAR,i,tag,comm);CHKERRQ(ierr); 78dd6ea824SBarry Smith } 79dd6ea824SBarry Smith gmataa = gmata->a; 80dd6ea824SBarry Smith gmataj = gmata->j; 81dd6ea824SBarry Smith 82dd6ea824SBarry Smith } else { 83dd6ea824SBarry Smith /* receive row lengths */ 84dd6ea824SBarry Smith ierr = MPI_Recv(dlens,m,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr); 85dd6ea824SBarry Smith /* receive column indices */ 86dd6ea824SBarry Smith ierr = MPI_Recv(&nz,1,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr); 87dd6ea824SBarry Smith ierr = PetscMalloc2(nz,PetscScalar,&gmataa,nz,PetscInt,&gmataj);CHKERRQ(ierr); 88dd6ea824SBarry Smith ierr = MPI_Recv(gmataj,nz,MPIU_INT,0,tag,comm,&status);CHKERRQ(ierr); 89dd6ea824SBarry Smith /* determine number diagonal and off-diagonal counts */ 90dd6ea824SBarry Smith ierr = PetscMemzero(olens,m*sizeof(PetscInt));CHKERRQ(ierr); 91dd6ea824SBarry Smith ierr = PetscMalloc(m*sizeof(PetscInt),&ld);CHKERRQ(ierr); 92dd6ea824SBarry Smith ierr = PetscMemzero(ld,m*sizeof(PetscInt));CHKERRQ(ierr); 93dd6ea824SBarry Smith jj = 0; 94dd6ea824SBarry Smith for (i=0; i<m; i++) { 95dd6ea824SBarry Smith for (j=0; j<dlens[i]; j++) { 96dd6ea824SBarry Smith if (gmataj[jj] < rstart) ld[i]++; 97dd6ea824SBarry Smith if (gmataj[jj] < rstart || gmataj[jj] >= rend) olens[i]++; 98dd6ea824SBarry Smith jj++; 99dd6ea824SBarry Smith } 100dd6ea824SBarry Smith } 101dd6ea824SBarry Smith /* receive numerical values */ 
102dd6ea824SBarry Smith ierr = PetscMemzero(gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr); 103dd6ea824SBarry Smith ierr = MPI_Recv(gmataa,nz,MPIU_SCALAR,0,tag,comm,&status);CHKERRQ(ierr); 104dd6ea824SBarry Smith } 105dd6ea824SBarry Smith /* set preallocation */ 106dd6ea824SBarry Smith for (i=0; i<m; i++) { 107dd6ea824SBarry Smith dlens[i] -= olens[i]; 108dd6ea824SBarry Smith } 109dd6ea824SBarry Smith ierr = MatSeqAIJSetPreallocation(mat,0,dlens);CHKERRQ(ierr); 110dd6ea824SBarry Smith ierr = MatMPIAIJSetPreallocation(mat,0,dlens,0,olens);CHKERRQ(ierr); 111dd6ea824SBarry Smith 112dd6ea824SBarry Smith for (i=0; i<m; i++) { 113dd6ea824SBarry Smith dlens[i] += olens[i]; 114dd6ea824SBarry Smith } 115dd6ea824SBarry Smith cnt = 0; 116dd6ea824SBarry Smith for (i=0; i<m; i++) { 117dd6ea824SBarry Smith row = rstart + i; 118dd6ea824SBarry Smith ierr = MatSetValues(mat,1,&row,dlens[i],gmataj+cnt,gmataa+cnt,INSERT_VALUES);CHKERRQ(ierr); 119dd6ea824SBarry Smith cnt += dlens[i]; 120dd6ea824SBarry Smith } 121dd6ea824SBarry Smith if (rank) { 122dd6ea824SBarry Smith ierr = PetscFree2(gmataa,gmataj);CHKERRQ(ierr); 123dd6ea824SBarry Smith } 124dd6ea824SBarry Smith ierr = PetscFree2(dlens,olens);CHKERRQ(ierr); 125dd6ea824SBarry Smith ierr = PetscFree(rowners);CHKERRQ(ierr); 126dd6ea824SBarry Smith ((Mat_MPIAIJ*)(mat->data))->ld = ld; 127dd6ea824SBarry Smith *inmat = mat; 128dd6ea824SBarry Smith } else { /* column indices are already set; only need to move over numerical values from process 0 */ 129dd6ea824SBarry Smith Mat_SeqAIJ *Ad = (Mat_SeqAIJ*)((Mat_MPIAIJ*)((*inmat)->data))->A->data; 130dd6ea824SBarry Smith Mat_SeqAIJ *Ao = (Mat_SeqAIJ*)((Mat_MPIAIJ*)((*inmat)->data))->B->data; 131dd6ea824SBarry Smith mat = *inmat; 132dd6ea824SBarry Smith ierr = PetscObjectGetNewTag((PetscObject)mat,&tag);CHKERRQ(ierr); 133dd6ea824SBarry Smith if (!rank) { 134dd6ea824SBarry Smith /* send numerical values to other processes */ 135dd6ea824SBarry Smith gmata = (Mat_SeqAIJ*) gmat->data; 
136dd6ea824SBarry Smith ierr = MatGetOwnershipRanges(mat,(const PetscInt**)&rowners);CHKERRQ(ierr); 137dd6ea824SBarry Smith gmataa = gmata->a; 138dd6ea824SBarry Smith for (i=1; i<size; i++) { 139dd6ea824SBarry Smith nz = gmata->i[rowners[i+1]]-gmata->i[rowners[i]]; 140dd6ea824SBarry Smith ierr = MPI_Send(gmataa + gmata->i[rowners[i]],nz,MPIU_SCALAR,i,tag,comm);CHKERRQ(ierr); 141dd6ea824SBarry Smith } 142dd6ea824SBarry Smith nz = gmata->i[rowners[1]]-gmata->i[rowners[0]]; 143dd6ea824SBarry Smith } else { 144dd6ea824SBarry Smith /* receive numerical values from process 0*/ 145dd6ea824SBarry Smith nz = Ad->nz + Ao->nz; 146dd6ea824SBarry Smith ierr = PetscMalloc(nz*sizeof(PetscScalar),&gmataa);CHKERRQ(ierr); gmataarestore = gmataa; 147dd6ea824SBarry Smith ierr = MPI_Recv(gmataa,nz,MPIU_SCALAR,0,tag,comm,&status);CHKERRQ(ierr); 148dd6ea824SBarry Smith } 149dd6ea824SBarry Smith /* transfer numerical values into the diagonal A and off diagonal B parts of mat */ 150dd6ea824SBarry Smith ld = ((Mat_MPIAIJ*)(mat->data))->ld; 151dd6ea824SBarry Smith ad = Ad->a; 152dd6ea824SBarry Smith ao = Ao->a; 153d0f46423SBarry Smith if (mat->rmap->n) { 154dd6ea824SBarry Smith i = 0; 155dd6ea824SBarry Smith nz = ld[i]; ierr = PetscMemcpy(ao,gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr); ao += nz; gmataa += nz; 156dd6ea824SBarry Smith nz = Ad->i[i+1] - Ad->i[i]; ierr = PetscMemcpy(ad,gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr); ad += nz; gmataa += nz; 157dd6ea824SBarry Smith } 158d0f46423SBarry Smith for (i=1; i<mat->rmap->n; i++) { 159dd6ea824SBarry Smith nz = Ao->i[i] - Ao->i[i-1] - ld[i-1] + ld[i]; ierr = PetscMemcpy(ao,gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr); ao += nz; gmataa += nz; 160dd6ea824SBarry Smith nz = Ad->i[i+1] - Ad->i[i]; ierr = PetscMemcpy(ad,gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr); ad += nz; gmataa += nz; 161dd6ea824SBarry Smith } 162dd6ea824SBarry Smith i--; 163d0f46423SBarry Smith if (mat->rmap->n) { 164dd6ea824SBarry Smith nz = Ao->i[i+1] - Ao->i[i] - ld[i]; 
ierr = PetscMemcpy(ao,gmataa,nz*sizeof(PetscScalar));CHKERRQ(ierr); ao += nz; gmataa += nz; 165dd6ea824SBarry Smith } 166dd6ea824SBarry Smith if (rank) { 167dd6ea824SBarry Smith ierr = PetscFree(gmataarestore);CHKERRQ(ierr); 168dd6ea824SBarry Smith } 169dd6ea824SBarry Smith } 170dd6ea824SBarry Smith ierr = MatAssemblyBegin(mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 171dd6ea824SBarry Smith ierr = MatAssemblyEnd(mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 172dd6ea824SBarry Smith CHKMEMQ; 173dd6ea824SBarry Smith PetscFunctionReturn(0); 174dd6ea824SBarry Smith } 175dd6ea824SBarry Smith 1760f5bd95cSBarry Smith /* 1770f5bd95cSBarry Smith Local utility routine that creates a mapping from the global column 1789e25ed09SBarry Smith number to the local number in the off-diagonal part of the local 1790f5bd95cSBarry Smith storage of the matrix. When PETSC_USE_CTABLE is used this is scalable at 1800f5bd95cSBarry Smith a slightly higher hash table cost; without it it is not scalable (each processor 1810f5bd95cSBarry Smith has an order N integer array but is fast to acess. 
1829e25ed09SBarry Smith */ 1834a2ae208SSatish Balay #undef __FUNCT__ 1844a2ae208SSatish Balay #define __FUNCT__ "CreateColmap_MPIAIJ_Private" 185dfbe8321SBarry Smith PetscErrorCode CreateColmap_MPIAIJ_Private(Mat mat) 1869e25ed09SBarry Smith { 18744a69424SLois Curfman McInnes Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 1886849ba73SBarry Smith PetscErrorCode ierr; 189d0f46423SBarry Smith PetscInt n = aij->B->cmap->n,i; 190dbb450caSBarry Smith 1913a40ed3dSBarry Smith PetscFunctionBegin; 192aa482453SBarry Smith #if defined (PETSC_USE_CTABLE) 193273d9f13SBarry Smith ierr = PetscTableCreate(n,&aij->colmap);CHKERRQ(ierr); 194b1fc9764SSatish Balay for (i=0; i<n; i++){ 1950f5bd95cSBarry Smith ierr = PetscTableAdd(aij->colmap,aij->garray[i]+1,i+1);CHKERRQ(ierr); 196b1fc9764SSatish Balay } 197b1fc9764SSatish Balay #else 198d0f46423SBarry Smith ierr = PetscMalloc((mat->cmap->N+1)*sizeof(PetscInt),&aij->colmap);CHKERRQ(ierr); 199d0f46423SBarry Smith ierr = PetscLogObjectMemory(mat,mat->cmap->N*sizeof(PetscInt));CHKERRQ(ierr); 200d0f46423SBarry Smith ierr = PetscMemzero(aij->colmap,mat->cmap->N*sizeof(PetscInt));CHKERRQ(ierr); 201905e6a2fSBarry Smith for (i=0; i<n; i++) aij->colmap[aij->garray[i]] = i+1; 202b1fc9764SSatish Balay #endif 2033a40ed3dSBarry Smith PetscFunctionReturn(0); 2049e25ed09SBarry Smith } 2059e25ed09SBarry Smith 20630770e4dSSatish Balay #define MatSetValues_SeqAIJ_A_Private(row,col,value,addv) \ 2070520107fSSatish Balay { \ 2087cd84e04SBarry Smith if (col <= lastcol1) low1 = 0; else high1 = nrow1; \ 209fd3458f5SBarry Smith lastcol1 = col;\ 210fd3458f5SBarry Smith while (high1-low1 > 5) { \ 211fd3458f5SBarry Smith t = (low1+high1)/2; \ 212fd3458f5SBarry Smith if (rp1[t] > col) high1 = t; \ 213fd3458f5SBarry Smith else low1 = t; \ 214ba4e3ef2SSatish Balay } \ 215fd3458f5SBarry Smith for (_i=low1; _i<high1; _i++) { \ 216fd3458f5SBarry Smith if (rp1[_i] > col) break; \ 217fd3458f5SBarry Smith if (rp1[_i] == col) { \ 218fd3458f5SBarry Smith if (addv == ADD_VALUES) 
ap1[_i] += value; \ 219fd3458f5SBarry Smith else ap1[_i] = value; \ 22030770e4dSSatish Balay goto a_noinsert; \ 2210520107fSSatish Balay } \ 2220520107fSSatish Balay } \ 223e44c0bd4SBarry Smith if (value == 0.0 && ignorezeroentries) {low1 = 0; high1 = nrow1;goto a_noinsert;} \ 224e44c0bd4SBarry Smith if (nonew == 1) {low1 = 0; high1 = nrow1; goto a_noinsert;} \ 225e32f2f54SBarry Smith if (nonew == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, %D) into matrix", row, col); \ 226fef13f97SBarry Smith MatSeqXAIJReallocateAIJ(A,am,1,nrow1,row,col,rmax1,aa,ai,aj,rp1,ap1,aimax,nonew,MatScalar); \ 227669a8dbcSSatish Balay N = nrow1++ - 1; a->nz++; high1++; \ 2280520107fSSatish Balay /* shift up all the later entries in this row */ \ 2290520107fSSatish Balay for (ii=N; ii>=_i; ii--) { \ 230fd3458f5SBarry Smith rp1[ii+1] = rp1[ii]; \ 231fd3458f5SBarry Smith ap1[ii+1] = ap1[ii]; \ 2320520107fSSatish Balay } \ 233fd3458f5SBarry Smith rp1[_i] = col; \ 234fd3458f5SBarry Smith ap1[_i] = value; \ 23530770e4dSSatish Balay a_noinsert: ; \ 236fd3458f5SBarry Smith ailen[row] = nrow1; \ 2370520107fSSatish Balay } 2380a198c4cSBarry Smith 239085a36d4SBarry Smith 24030770e4dSSatish Balay #define MatSetValues_SeqAIJ_B_Private(row,col,value,addv) \ 24130770e4dSSatish Balay { \ 2427cd84e04SBarry Smith if (col <= lastcol2) low2 = 0; else high2 = nrow2; \ 243fd3458f5SBarry Smith lastcol2 = col;\ 244fd3458f5SBarry Smith while (high2-low2 > 5) { \ 245fd3458f5SBarry Smith t = (low2+high2)/2; \ 246fd3458f5SBarry Smith if (rp2[t] > col) high2 = t; \ 247fd3458f5SBarry Smith else low2 = t; \ 248ba4e3ef2SSatish Balay } \ 249fd3458f5SBarry Smith for (_i=low2; _i<high2; _i++) { \ 250fd3458f5SBarry Smith if (rp2[_i] > col) break; \ 251fd3458f5SBarry Smith if (rp2[_i] == col) { \ 252fd3458f5SBarry Smith if (addv == ADD_VALUES) ap2[_i] += value; \ 253fd3458f5SBarry Smith else ap2[_i] = value; \ 25430770e4dSSatish Balay goto b_noinsert; \ 25530770e4dSSatish Balay } \ 
25630770e4dSSatish Balay } \ 257e44c0bd4SBarry Smith if (value == 0.0 && ignorezeroentries) {low2 = 0; high2 = nrow2; goto b_noinsert;} \ 258e44c0bd4SBarry Smith if (nonew == 1) {low2 = 0; high2 = nrow2; goto b_noinsert;} \ 259e32f2f54SBarry Smith if (nonew == -1) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Inserting a new nonzero (%D, %D) into matrix", row, col); \ 260fef13f97SBarry Smith MatSeqXAIJReallocateAIJ(B,bm,1,nrow2,row,col,rmax2,ba,bi,bj,rp2,ap2,bimax,nonew,MatScalar); \ 261669a8dbcSSatish Balay N = nrow2++ - 1; b->nz++; high2++; \ 26230770e4dSSatish Balay /* shift up all the later entries in this row */ \ 26330770e4dSSatish Balay for (ii=N; ii>=_i; ii--) { \ 264fd3458f5SBarry Smith rp2[ii+1] = rp2[ii]; \ 265fd3458f5SBarry Smith ap2[ii+1] = ap2[ii]; \ 26630770e4dSSatish Balay } \ 267fd3458f5SBarry Smith rp2[_i] = col; \ 268fd3458f5SBarry Smith ap2[_i] = value; \ 26930770e4dSSatish Balay b_noinsert: ; \ 270fd3458f5SBarry Smith bilen[row] = nrow2; \ 27130770e4dSSatish Balay } 27230770e4dSSatish Balay 2734a2ae208SSatish Balay #undef __FUNCT__ 2742fd7e33dSBarry Smith #define __FUNCT__ "MatSetValuesRow_MPIAIJ" 2752fd7e33dSBarry Smith PetscErrorCode MatSetValuesRow_MPIAIJ(Mat A,PetscInt row,const PetscScalar v[]) 2762fd7e33dSBarry Smith { 2772fd7e33dSBarry Smith Mat_MPIAIJ *mat = (Mat_MPIAIJ*)A->data; 2782fd7e33dSBarry Smith Mat_SeqAIJ *a = (Mat_SeqAIJ*)mat->A->data,*b = (Mat_SeqAIJ*)mat->B->data; 2792fd7e33dSBarry Smith PetscErrorCode ierr; 2802fd7e33dSBarry Smith PetscInt l,*garray = mat->garray,diag; 2812fd7e33dSBarry Smith 2822fd7e33dSBarry Smith PetscFunctionBegin; 2832fd7e33dSBarry Smith /* code only works for square matrices A */ 2842fd7e33dSBarry Smith 2852fd7e33dSBarry Smith /* find size of row to the left of the diagonal part */ 2862fd7e33dSBarry Smith ierr = MatGetOwnershipRange(A,&diag,0);CHKERRQ(ierr); 2872fd7e33dSBarry Smith row = row - diag; 2882fd7e33dSBarry Smith for (l=0; l<b->i[row+1]-b->i[row]; l++) { 2892fd7e33dSBarry Smith if 
(garray[b->j[b->i[row]+l]] > diag) break; 2902fd7e33dSBarry Smith } 2912fd7e33dSBarry Smith ierr = PetscMemcpy(b->a+b->i[row],v,l*sizeof(PetscScalar));CHKERRQ(ierr); 2922fd7e33dSBarry Smith 2932fd7e33dSBarry Smith /* diagonal part */ 2942fd7e33dSBarry Smith ierr = PetscMemcpy(a->a+a->i[row],v+l,(a->i[row+1]-a->i[row])*sizeof(PetscScalar));CHKERRQ(ierr); 2952fd7e33dSBarry Smith 2962fd7e33dSBarry Smith /* right of diagonal part */ 2972fd7e33dSBarry Smith ierr = PetscMemcpy(b->a+b->i[row]+l,v+l+a->i[row+1]-a->i[row],(b->i[row+1]-b->i[row]-l)*sizeof(PetscScalar));CHKERRQ(ierr); 2982fd7e33dSBarry Smith PetscFunctionReturn(0); 2992fd7e33dSBarry Smith } 3002fd7e33dSBarry Smith 3012fd7e33dSBarry Smith #undef __FUNCT__ 3024a2ae208SSatish Balay #define __FUNCT__ "MatSetValues_MPIAIJ" 303b1d57f15SBarry Smith PetscErrorCode MatSetValues_MPIAIJ(Mat mat,PetscInt m,const PetscInt im[],PetscInt n,const PetscInt in[],const PetscScalar v[],InsertMode addv) 3048a729477SBarry Smith { 30544a69424SLois Curfman McInnes Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 30687828ca2SBarry Smith PetscScalar value; 307dfbe8321SBarry Smith PetscErrorCode ierr; 308d0f46423SBarry Smith PetscInt i,j,rstart = mat->rmap->rstart,rend = mat->rmap->rend; 309d0f46423SBarry Smith PetscInt cstart = mat->cmap->rstart,cend = mat->cmap->rend,row,col; 310ace3abfcSBarry Smith PetscBool roworiented = aij->roworiented; 3118a729477SBarry Smith 3120520107fSSatish Balay /* Some Variables required in the macro */ 3134ee7247eSSatish Balay Mat A = aij->A; 3144ee7247eSSatish Balay Mat_SeqAIJ *a = (Mat_SeqAIJ*)A->data; 31557809a77SBarry Smith PetscInt *aimax = a->imax,*ai = a->i,*ailen = a->ilen,*aj = a->j; 316a77337e4SBarry Smith MatScalar *aa = a->a; 317ace3abfcSBarry Smith PetscBool ignorezeroentries = a->ignorezeroentries; 31830770e4dSSatish Balay Mat B = aij->B; 31930770e4dSSatish Balay Mat_SeqAIJ *b = (Mat_SeqAIJ*)B->data; 320d0f46423SBarry Smith PetscInt *bimax = b->imax,*bi = b->i,*bilen = b->ilen,*bj = b->j,bm = 
aij->B->rmap->n,am = aij->A->rmap->n; 321a77337e4SBarry Smith MatScalar *ba = b->a; 32230770e4dSSatish Balay 323fd3458f5SBarry Smith PetscInt *rp1,*rp2,ii,nrow1,nrow2,_i,rmax1,rmax2,N,low1,high1,low2,high2,t,lastcol1,lastcol2; 324fd3458f5SBarry Smith PetscInt nonew = a->nonew; 325a77337e4SBarry Smith MatScalar *ap1,*ap2; 3264ee7247eSSatish Balay 3273a40ed3dSBarry Smith PetscFunctionBegin; 32871fd2e92SBarry Smith if (v) PetscValidScalarPointer(v,6); 3298a729477SBarry Smith for (i=0; i<m; i++) { 3305ef9f2a5SBarry Smith if (im[i] < 0) continue; 3312515c552SBarry Smith #if defined(PETSC_USE_DEBUG) 332e32f2f54SBarry Smith if (im[i] >= mat->rmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",im[i],mat->rmap->N-1); 3330a198c4cSBarry Smith #endif 3344b0e389bSBarry Smith if (im[i] >= rstart && im[i] < rend) { 3354b0e389bSBarry Smith row = im[i] - rstart; 336fd3458f5SBarry Smith lastcol1 = -1; 337fd3458f5SBarry Smith rp1 = aj + ai[row]; 338fd3458f5SBarry Smith ap1 = aa + ai[row]; 339fd3458f5SBarry Smith rmax1 = aimax[row]; 340fd3458f5SBarry Smith nrow1 = ailen[row]; 341fd3458f5SBarry Smith low1 = 0; 342fd3458f5SBarry Smith high1 = nrow1; 343fd3458f5SBarry Smith lastcol2 = -1; 344fd3458f5SBarry Smith rp2 = bj + bi[row]; 345d498b1e9SBarry Smith ap2 = ba + bi[row]; 346fd3458f5SBarry Smith rmax2 = bimax[row]; 347d498b1e9SBarry Smith nrow2 = bilen[row]; 348fd3458f5SBarry Smith low2 = 0; 349fd3458f5SBarry Smith high2 = nrow2; 350fd3458f5SBarry Smith 3511eb62cbbSBarry Smith for (j=0; j<n; j++) { 35216371a99SBarry Smith if (v) {if (roworiented) value = v[i*n+j]; else value = v[i+j*m];} else value = 0.0; 353abc0a331SBarry Smith if (ignorezeroentries && value == 0.0 && (addv == ADD_VALUES)) continue; 354fd3458f5SBarry Smith if (in[j] >= cstart && in[j] < cend){ 355fd3458f5SBarry Smith col = in[j] - cstart; 35630770e4dSSatish Balay MatSetValues_SeqAIJ_A_Private(row,col,value,addv); 357273d9f13SBarry Smith } else if (in[j] < 0) continue; 
3582515c552SBarry Smith #if defined(PETSC_USE_DEBUG) 359cb9801acSJed Brown else if (in[j] >= mat->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",in[j],mat->cmap->N-1); 3600a198c4cSBarry Smith #endif 3611eb62cbbSBarry Smith else { 362227d817aSBarry Smith if (mat->was_assembled) { 363905e6a2fSBarry Smith if (!aij->colmap) { 364905e6a2fSBarry Smith ierr = CreateColmap_MPIAIJ_Private(mat);CHKERRQ(ierr); 365905e6a2fSBarry Smith } 366aa482453SBarry Smith #if defined (PETSC_USE_CTABLE) 3670f5bd95cSBarry Smith ierr = PetscTableFind(aij->colmap,in[j]+1,&col);CHKERRQ(ierr); 368fa46199cSSatish Balay col--; 369b1fc9764SSatish Balay #else 370905e6a2fSBarry Smith col = aij->colmap[in[j]] - 1; 371b1fc9764SSatish Balay #endif 372ec8511deSBarry Smith if (col < 0 && !((Mat_SeqAIJ*)(aij->A->data))->nonew) { 3732493cbb0SBarry Smith ierr = DisAssemble_MPIAIJ(mat);CHKERRQ(ierr); 3744b0e389bSBarry Smith col = in[j]; 3759bf004c3SSatish Balay /* Reinitialize the variables required by MatSetValues_SeqAIJ_B_Private() */ 376f9508a3cSSatish Balay B = aij->B; 377f9508a3cSSatish Balay b = (Mat_SeqAIJ*)B->data; 378e44c0bd4SBarry Smith bimax = b->imax; bi = b->i; bilen = b->ilen; bj = b->j; ba = b->a; 379d498b1e9SBarry Smith rp2 = bj + bi[row]; 380d498b1e9SBarry Smith ap2 = ba + bi[row]; 381d498b1e9SBarry Smith rmax2 = bimax[row]; 382d498b1e9SBarry Smith nrow2 = bilen[row]; 383d498b1e9SBarry Smith low2 = 0; 384d498b1e9SBarry Smith high2 = nrow2; 385d0f46423SBarry Smith bm = aij->B->rmap->n; 386f9508a3cSSatish Balay ba = b->a; 387d6dfbf8fSBarry Smith } 388c48de900SBarry Smith } else col = in[j]; 38930770e4dSSatish Balay MatSetValues_SeqAIJ_B_Private(row,col,value,addv); 3901eb62cbbSBarry Smith } 3911eb62cbbSBarry Smith } 3925ef9f2a5SBarry Smith } else { 3934cb17eb5SBarry Smith if (mat->nooffprocentries) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Setting off process row %D even though MatSetOption(,MAT_NO_OFF_PROC_ENTRIES,PETSC_TRUE) was set",im[i]); 
39490f02eecSBarry Smith if (!aij->donotstash) { 395d36fbae8SSatish Balay if (roworiented) { 396ace3abfcSBarry Smith ierr = MatStashValuesRow_Private(&mat->stash,im[i],n,in,v+i*n,(PetscBool)(ignorezeroentries && (addv == ADD_VALUES)));CHKERRQ(ierr); 397d36fbae8SSatish Balay } else { 398ace3abfcSBarry Smith ierr = MatStashValuesCol_Private(&mat->stash,im[i],n,in,v+i,m,(PetscBool)(ignorezeroentries && (addv == ADD_VALUES)));CHKERRQ(ierr); 3994b0e389bSBarry Smith } 4001eb62cbbSBarry Smith } 4018a729477SBarry Smith } 40290f02eecSBarry Smith } 4033a40ed3dSBarry Smith PetscFunctionReturn(0); 4048a729477SBarry Smith } 4058a729477SBarry Smith 4064a2ae208SSatish Balay #undef __FUNCT__ 4074a2ae208SSatish Balay #define __FUNCT__ "MatGetValues_MPIAIJ" 408b1d57f15SBarry Smith PetscErrorCode MatGetValues_MPIAIJ(Mat mat,PetscInt m,const PetscInt idxm[],PetscInt n,const PetscInt idxn[],PetscScalar v[]) 409b49de8d1SLois Curfman McInnes { 410b49de8d1SLois Curfman McInnes Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 411dfbe8321SBarry Smith PetscErrorCode ierr; 412d0f46423SBarry Smith PetscInt i,j,rstart = mat->rmap->rstart,rend = mat->rmap->rend; 413d0f46423SBarry Smith PetscInt cstart = mat->cmap->rstart,cend = mat->cmap->rend,row,col; 414b49de8d1SLois Curfman McInnes 4153a40ed3dSBarry Smith PetscFunctionBegin; 416b49de8d1SLois Curfman McInnes for (i=0; i<m; i++) { 417e32f2f54SBarry Smith if (idxm[i] < 0) continue; /* SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative row: %D",idxm[i]);*/ 418e32f2f54SBarry Smith if (idxm[i] >= mat->rmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",idxm[i],mat->rmap->N-1); 419b49de8d1SLois Curfman McInnes if (idxm[i] >= rstart && idxm[i] < rend) { 420b49de8d1SLois Curfman McInnes row = idxm[i] - rstart; 421b49de8d1SLois Curfman McInnes for (j=0; j<n; j++) { 422e32f2f54SBarry Smith if (idxn[j] < 0) continue; /* SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Negative column: %D",idxn[j]); */ 
423e32f2f54SBarry Smith if (idxn[j] >= mat->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",idxn[j],mat->cmap->N-1); 424b49de8d1SLois Curfman McInnes if (idxn[j] >= cstart && idxn[j] < cend){ 425b49de8d1SLois Curfman McInnes col = idxn[j] - cstart; 426b49de8d1SLois Curfman McInnes ierr = MatGetValues(aij->A,1,&row,1,&col,v+i*n+j);CHKERRQ(ierr); 427fa852ad4SSatish Balay } else { 428905e6a2fSBarry Smith if (!aij->colmap) { 429905e6a2fSBarry Smith ierr = CreateColmap_MPIAIJ_Private(mat);CHKERRQ(ierr); 430905e6a2fSBarry Smith } 431aa482453SBarry Smith #if defined (PETSC_USE_CTABLE) 4320f5bd95cSBarry Smith ierr = PetscTableFind(aij->colmap,idxn[j]+1,&col);CHKERRQ(ierr); 433fa46199cSSatish Balay col --; 434b1fc9764SSatish Balay #else 435905e6a2fSBarry Smith col = aij->colmap[idxn[j]] - 1; 436b1fc9764SSatish Balay #endif 437e60e1c95SSatish Balay if ((col < 0) || (aij->garray[col] != idxn[j])) *(v+i*n+j) = 0.0; 438d9d09a02SSatish Balay else { 439b49de8d1SLois Curfman McInnes ierr = MatGetValues(aij->B,1,&row,1,&col,v+i*n+j);CHKERRQ(ierr); 440b49de8d1SLois Curfman McInnes } 441b49de8d1SLois Curfman McInnes } 442b49de8d1SLois Curfman McInnes } 443a8c6a408SBarry Smith } else { 444e32f2f54SBarry Smith SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only local values currently supported"); 445b49de8d1SLois Curfman McInnes } 446b49de8d1SLois Curfman McInnes } 4473a40ed3dSBarry Smith PetscFunctionReturn(0); 448b49de8d1SLois Curfman McInnes } 449bc5ccf88SSatish Balay 450bd0c2dcbSBarry Smith extern PetscErrorCode MatMultDiagonalBlock_MPIAIJ(Mat,Vec,Vec); 451bd0c2dcbSBarry Smith 4524a2ae208SSatish Balay #undef __FUNCT__ 4534a2ae208SSatish Balay #define __FUNCT__ "MatAssemblyBegin_MPIAIJ" 454dfbe8321SBarry Smith PetscErrorCode MatAssemblyBegin_MPIAIJ(Mat mat,MatAssemblyType mode) 455bc5ccf88SSatish Balay { 456bc5ccf88SSatish Balay Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 457dfbe8321SBarry Smith PetscErrorCode ierr; 458b1d57f15SBarry Smith PetscInt 
nstash,reallocs; 459bc5ccf88SSatish Balay InsertMode addv; 460bc5ccf88SSatish Balay 461bc5ccf88SSatish Balay PetscFunctionBegin; 4624cb17eb5SBarry Smith if (aij->donotstash || mat->nooffprocentries) { 463bc5ccf88SSatish Balay PetscFunctionReturn(0); 464bc5ccf88SSatish Balay } 465bc5ccf88SSatish Balay 466bc5ccf88SSatish Balay /* make sure all processors are either in INSERTMODE or ADDMODE */ 4677adad957SLisandro Dalcin ierr = MPI_Allreduce(&mat->insertmode,&addv,1,MPI_INT,MPI_BOR,((PetscObject)mat)->comm);CHKERRQ(ierr); 468e7e72b3dSBarry Smith if (addv == (ADD_VALUES|INSERT_VALUES)) SETERRQ(((PetscObject)mat)->comm,PETSC_ERR_ARG_WRONGSTATE,"Some processors inserted others added"); 469bc5ccf88SSatish Balay mat->insertmode = addv; /* in case this processor had no cache */ 470bc5ccf88SSatish Balay 471d0f46423SBarry Smith ierr = MatStashScatterBegin_Private(mat,&mat->stash,mat->rmap->range);CHKERRQ(ierr); 4728798bf22SSatish Balay ierr = MatStashGetInfo_Private(&mat->stash,&nstash,&reallocs);CHKERRQ(ierr); 473ae15b995SBarry Smith ierr = PetscInfo2(aij->A,"Stash has %D entries, uses %D mallocs.\n",nstash,reallocs);CHKERRQ(ierr); 474bc5ccf88SSatish Balay PetscFunctionReturn(0); 475bc5ccf88SSatish Balay } 476bc5ccf88SSatish Balay 4774a2ae208SSatish Balay #undef __FUNCT__ 4784a2ae208SSatish Balay #define __FUNCT__ "MatAssemblyEnd_MPIAIJ" 479dfbe8321SBarry Smith PetscErrorCode MatAssemblyEnd_MPIAIJ(Mat mat,MatAssemblyType mode) 480bc5ccf88SSatish Balay { 481bc5ccf88SSatish Balay Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 48291c97fd4SSatish Balay Mat_SeqAIJ *a=(Mat_SeqAIJ *)aij->A->data; 4836849ba73SBarry Smith PetscErrorCode ierr; 484b1d57f15SBarry Smith PetscMPIInt n; 485b1d57f15SBarry Smith PetscInt i,j,rstart,ncols,flg; 486e44c0bd4SBarry Smith PetscInt *row,*col; 487ace3abfcSBarry Smith PetscBool other_disassembled; 48887828ca2SBarry Smith PetscScalar *val; 489bc5ccf88SSatish Balay InsertMode addv = mat->insertmode; 490bc5ccf88SSatish Balay 49191c97fd4SSatish Balay /* do 
not use 'b = (Mat_SeqAIJ *)aij->B->data' as B can be reset in disassembly */ 492bc5ccf88SSatish Balay PetscFunctionBegin; 4934cb17eb5SBarry Smith if (!aij->donotstash && !mat->nooffprocentries) { 494a2d1c673SSatish Balay while (1) { 4958798bf22SSatish Balay ierr = MatStashScatterGetMesg_Private(&mat->stash,&n,&row,&col,&val,&flg);CHKERRQ(ierr); 496a2d1c673SSatish Balay if (!flg) break; 497a2d1c673SSatish Balay 498bc5ccf88SSatish Balay for (i=0; i<n;) { 499bc5ccf88SSatish Balay /* Now identify the consecutive vals belonging to the same row */ 500bc5ccf88SSatish Balay for (j=i,rstart=row[j]; j<n; j++) { if (row[j] != rstart) break; } 501bc5ccf88SSatish Balay if (j < n) ncols = j-i; 502bc5ccf88SSatish Balay else ncols = n-i; 503bc5ccf88SSatish Balay /* Now assemble all these values with a single function call */ 504bc5ccf88SSatish Balay ierr = MatSetValues_MPIAIJ(mat,1,row+i,ncols,col+i,val+i,addv);CHKERRQ(ierr); 505bc5ccf88SSatish Balay i = j; 506bc5ccf88SSatish Balay } 507bc5ccf88SSatish Balay } 5088798bf22SSatish Balay ierr = MatStashScatterEnd_Private(&mat->stash);CHKERRQ(ierr); 509bc5ccf88SSatish Balay } 5102f53aa61SHong Zhang a->compressedrow.use = PETSC_FALSE; 511bc5ccf88SSatish Balay ierr = MatAssemblyBegin(aij->A,mode);CHKERRQ(ierr); 512bc5ccf88SSatish Balay ierr = MatAssemblyEnd(aij->A,mode);CHKERRQ(ierr); 513bc5ccf88SSatish Balay 514bc5ccf88SSatish Balay /* determine if any processor has disassembled, if so we must 515bc5ccf88SSatish Balay also disassemble ourselfs, in order that we may reassemble. 
*/ 516bc5ccf88SSatish Balay /* 517bc5ccf88SSatish Balay if nonzero structure of submatrix B cannot change then we know that 518bc5ccf88SSatish Balay no processor disassembled thus we can skip this stuff 519bc5ccf88SSatish Balay */ 520bc5ccf88SSatish Balay if (!((Mat_SeqAIJ*)aij->B->data)->nonew) { 5217adad957SLisandro Dalcin ierr = MPI_Allreduce(&mat->was_assembled,&other_disassembled,1,MPI_INT,MPI_PROD,((PetscObject)mat)->comm);CHKERRQ(ierr); 522bc5ccf88SSatish Balay if (mat->was_assembled && !other_disassembled) { 523bc5ccf88SSatish Balay ierr = DisAssemble_MPIAIJ(mat);CHKERRQ(ierr); 524ad59fb31SSatish Balay } 525ad59fb31SSatish Balay } 526bc5ccf88SSatish Balay if (!mat->was_assembled && mode == MAT_FINAL_ASSEMBLY) { 527bc5ccf88SSatish Balay ierr = MatSetUpMultiply_MPIAIJ(mat);CHKERRQ(ierr); 528bc5ccf88SSatish Balay } 5294e0d8c25SBarry Smith ierr = MatSetOption(aij->B,MAT_USE_INODES,PETSC_FALSE);CHKERRQ(ierr); 53091c97fd4SSatish Balay ((Mat_SeqAIJ *)aij->B->data)->compressedrow.use = PETSC_TRUE; /* b->compressedrow.use */ 531bc5ccf88SSatish Balay ierr = MatAssemblyBegin(aij->B,mode);CHKERRQ(ierr); 532bc5ccf88SSatish Balay ierr = MatAssemblyEnd(aij->B,mode);CHKERRQ(ierr); 533bc5ccf88SSatish Balay 5341d79065fSBarry Smith ierr = PetscFree2(aij->rowvalues,aij->rowindices);CHKERRQ(ierr); 535606d414cSSatish Balay aij->rowvalues = 0; 536a30b2313SHong Zhang 537a30b2313SHong Zhang /* used by MatAXPY() */ 53891c97fd4SSatish Balay a->xtoy = 0; ((Mat_SeqAIJ *)aij->B->data)->xtoy = 0; /* b->xtoy = 0 */ 53991c97fd4SSatish Balay a->XtoY = 0; ((Mat_SeqAIJ *)aij->B->data)->XtoY = 0; /* b->XtoY = 0 */ 540a30b2313SHong Zhang 541a7420bb7SBarry Smith if (aij->diag) {ierr = VecDestroy(aij->diag);CHKERRQ(ierr);aij->diag = 0;} 542bd0c2dcbSBarry Smith if (a->inode.size) mat->ops->multdiagonalblock = MatMultDiagonalBlock_MPIAIJ; 543bc5ccf88SSatish Balay PetscFunctionReturn(0); 544bc5ccf88SSatish Balay } 545bc5ccf88SSatish Balay 5464a2ae208SSatish Balay #undef __FUNCT__ 5474a2ae208SSatish 
Balay #define __FUNCT__ "MatZeroEntries_MPIAIJ" 548dfbe8321SBarry Smith PetscErrorCode MatZeroEntries_MPIAIJ(Mat A) 5491eb62cbbSBarry Smith { 55044a69424SLois Curfman McInnes Mat_MPIAIJ *l = (Mat_MPIAIJ*)A->data; 551dfbe8321SBarry Smith PetscErrorCode ierr; 5523a40ed3dSBarry Smith 5533a40ed3dSBarry Smith PetscFunctionBegin; 55478b31e54SBarry Smith ierr = MatZeroEntries(l->A);CHKERRQ(ierr); 55578b31e54SBarry Smith ierr = MatZeroEntries(l->B);CHKERRQ(ierr); 5563a40ed3dSBarry Smith PetscFunctionReturn(0); 5571eb62cbbSBarry Smith } 5581eb62cbbSBarry Smith 5594a2ae208SSatish Balay #undef __FUNCT__ 5604a2ae208SSatish Balay #define __FUNCT__ "MatZeroRows_MPIAIJ" 561f4df32b1SMatthew Knepley PetscErrorCode MatZeroRows_MPIAIJ(Mat A,PetscInt N,const PetscInt rows[],PetscScalar diag) 5621eb62cbbSBarry Smith { 56344a69424SLois Curfman McInnes Mat_MPIAIJ *l = (Mat_MPIAIJ*)A->data; 5646849ba73SBarry Smith PetscErrorCode ierr; 5657adad957SLisandro Dalcin PetscMPIInt size = l->size,imdex,n,rank = l->rank,tag = ((PetscObject)A)->tag,lastidx = -1; 566d0f46423SBarry Smith PetscInt i,*owners = A->rmap->range; 567b1d57f15SBarry Smith PetscInt *nprocs,j,idx,nsends,row; 568b1d57f15SBarry Smith PetscInt nmax,*svalues,*starts,*owner,nrecvs; 569b1d57f15SBarry Smith PetscInt *rvalues,count,base,slen,*source; 570d0f46423SBarry Smith PetscInt *lens,*lrows,*values,rstart=A->rmap->rstart; 5717adad957SLisandro Dalcin MPI_Comm comm = ((PetscObject)A)->comm; 5721eb62cbbSBarry Smith MPI_Request *send_waits,*recv_waits; 5731eb62cbbSBarry Smith MPI_Status recv_status,*send_status; 5746543fbbaSBarry Smith #if defined(PETSC_DEBUG) 575ace3abfcSBarry Smith PetscBool found = PETSC_FALSE; 5766543fbbaSBarry Smith #endif 5771eb62cbbSBarry Smith 5783a40ed3dSBarry Smith PetscFunctionBegin; 5791eb62cbbSBarry Smith /* first count number of contributors to each processor */ 580b1d57f15SBarry Smith ierr = PetscMalloc(2*size*sizeof(PetscInt),&nprocs);CHKERRQ(ierr); 581b1d57f15SBarry Smith ierr = 
PetscMemzero(nprocs,2*size*sizeof(PetscInt));CHKERRQ(ierr); 582b1d57f15SBarry Smith ierr = PetscMalloc((N+1)*sizeof(PetscInt),&owner);CHKERRQ(ierr); /* see note*/ 5836543fbbaSBarry Smith j = 0; 5841eb62cbbSBarry Smith for (i=0; i<N; i++) { 5856543fbbaSBarry Smith if (lastidx > (idx = rows[i])) j = 0; 5866543fbbaSBarry Smith lastidx = idx; 5876543fbbaSBarry Smith for (; j<size; j++) { 5881eb62cbbSBarry Smith if (idx >= owners[j] && idx < owners[j+1]) { 5896543fbbaSBarry Smith nprocs[2*j]++; 5906543fbbaSBarry Smith nprocs[2*j+1] = 1; 5916543fbbaSBarry Smith owner[i] = j; 5926543fbbaSBarry Smith #if defined(PETSC_DEBUG) 5936543fbbaSBarry Smith found = PETSC_TRUE; 5946543fbbaSBarry Smith #endif 5956543fbbaSBarry Smith break; 5961eb62cbbSBarry Smith } 5971eb62cbbSBarry Smith } 5986543fbbaSBarry Smith #if defined(PETSC_DEBUG) 599e32f2f54SBarry Smith if (!found) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Index out of range"); 6006543fbbaSBarry Smith found = PETSC_FALSE; 6016543fbbaSBarry Smith #endif 6021eb62cbbSBarry Smith } 603c1dc657dSBarry Smith nsends = 0; for (i=0; i<size; i++) { nsends += nprocs[2*i+1];} 6041eb62cbbSBarry Smith 6057367270fSBarry Smith if (A->nooffproczerorows) { 6067367270fSBarry Smith if (nsends > 1) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"You called MatSetOption(,MAT_NO_OFF_PROC_ZERO_ROWS,PETSC_TRUE) but set an off process zero row"); 6077367270fSBarry Smith nrecvs = nsends; 6087367270fSBarry Smith nmax = N; 6097367270fSBarry Smith } else { 6101eb62cbbSBarry Smith /* inform other processors of number of messages and max length*/ 611c1dc657dSBarry Smith ierr = PetscMaxSum(comm,nprocs,&nmax,&nrecvs);CHKERRQ(ierr); 6127367270fSBarry Smith } 6131eb62cbbSBarry Smith 6141eb62cbbSBarry Smith /* post receives: */ 615b1d57f15SBarry Smith ierr = PetscMalloc((nrecvs+1)*(nmax+1)*sizeof(PetscInt),&rvalues);CHKERRQ(ierr); 616b0a32e0cSBarry Smith ierr = PetscMalloc((nrecvs+1)*sizeof(MPI_Request),&recv_waits);CHKERRQ(ierr); 6171eb62cbbSBarry Smith 
for (i=0; i<nrecvs; i++) { 618b1d57f15SBarry Smith ierr = MPI_Irecv(rvalues+nmax*i,nmax,MPIU_INT,MPI_ANY_SOURCE,tag,comm,recv_waits+i);CHKERRQ(ierr); 6191eb62cbbSBarry Smith } 6201eb62cbbSBarry Smith 6211eb62cbbSBarry Smith /* do sends: 6221eb62cbbSBarry Smith 1) starts[i] gives the starting index in svalues for stuff going to 6231eb62cbbSBarry Smith the ith processor 6241eb62cbbSBarry Smith */ 625b1d57f15SBarry Smith ierr = PetscMalloc((N+1)*sizeof(PetscInt),&svalues);CHKERRQ(ierr); 626b0a32e0cSBarry Smith ierr = PetscMalloc((nsends+1)*sizeof(MPI_Request),&send_waits);CHKERRQ(ierr); 627b1d57f15SBarry Smith ierr = PetscMalloc((size+1)*sizeof(PetscInt),&starts);CHKERRQ(ierr); 6281eb62cbbSBarry Smith starts[0] = 0; 629c1dc657dSBarry Smith for (i=1; i<size; i++) { starts[i] = starts[i-1] + nprocs[2*i-2];} 6301eb62cbbSBarry Smith for (i=0; i<N; i++) { 6311eb62cbbSBarry Smith svalues[starts[owner[i]]++] = rows[i]; 6321eb62cbbSBarry Smith } 6331eb62cbbSBarry Smith 6341eb62cbbSBarry Smith starts[0] = 0; 635c1dc657dSBarry Smith for (i=1; i<size+1; i++) { starts[i] = starts[i-1] + nprocs[2*i-2];} 6361eb62cbbSBarry Smith count = 0; 63717699dbbSLois Curfman McInnes for (i=0; i<size; i++) { 638c1dc657dSBarry Smith if (nprocs[2*i+1]) { 639b1d57f15SBarry Smith ierr = MPI_Isend(svalues+starts[i],nprocs[2*i],MPIU_INT,i,tag,comm,send_waits+count++);CHKERRQ(ierr); 6401eb62cbbSBarry Smith } 6411eb62cbbSBarry Smith } 642606d414cSSatish Balay ierr = PetscFree(starts);CHKERRQ(ierr); 6431eb62cbbSBarry Smith 64417699dbbSLois Curfman McInnes base = owners[rank]; 6451eb62cbbSBarry Smith 6461eb62cbbSBarry Smith /* wait on receives */ 6471d79065fSBarry Smith ierr = PetscMalloc2(nrecvs,PetscInt,&lens,nrecvs,PetscInt,&source);CHKERRQ(ierr); 6481eb62cbbSBarry Smith count = nrecvs; slen = 0; 6491eb62cbbSBarry Smith while (count) { 650ca161407SBarry Smith ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr); 6511eb62cbbSBarry Smith /* unpack receives into our local space */ 
652b1d57f15SBarry Smith ierr = MPI_Get_count(&recv_status,MPIU_INT,&n);CHKERRQ(ierr); 653d6dfbf8fSBarry Smith source[imdex] = recv_status.MPI_SOURCE; 654d6dfbf8fSBarry Smith lens[imdex] = n; 6551eb62cbbSBarry Smith slen += n; 6561eb62cbbSBarry Smith count--; 6571eb62cbbSBarry Smith } 658606d414cSSatish Balay ierr = PetscFree(recv_waits);CHKERRQ(ierr); 6591eb62cbbSBarry Smith 6601eb62cbbSBarry Smith /* move the data into the send scatter */ 661b1d57f15SBarry Smith ierr = PetscMalloc((slen+1)*sizeof(PetscInt),&lrows);CHKERRQ(ierr); 6621eb62cbbSBarry Smith count = 0; 6631eb62cbbSBarry Smith for (i=0; i<nrecvs; i++) { 6641eb62cbbSBarry Smith values = rvalues + i*nmax; 6651eb62cbbSBarry Smith for (j=0; j<lens[i]; j++) { 6661eb62cbbSBarry Smith lrows[count++] = values[j] - base; 6671eb62cbbSBarry Smith } 6681eb62cbbSBarry Smith } 669606d414cSSatish Balay ierr = PetscFree(rvalues);CHKERRQ(ierr); 6701d79065fSBarry Smith ierr = PetscFree2(lens,source);CHKERRQ(ierr); 671606d414cSSatish Balay ierr = PetscFree(owner);CHKERRQ(ierr); 672606d414cSSatish Balay ierr = PetscFree(nprocs);CHKERRQ(ierr); 6731eb62cbbSBarry Smith 6741eb62cbbSBarry Smith /* actually zap the local rows */ 6756eb55b6aSBarry Smith /* 6766eb55b6aSBarry Smith Zero the required rows. If the "diagonal block" of the matrix 677a8c7a070SBarry Smith is square and the user wishes to set the diagonal we use separate 6786eb55b6aSBarry Smith code so that MatSetValues() is not called for each diagonal allocating 6796eb55b6aSBarry Smith new memory, thus calling lots of mallocs and slowing things down. 
6806eb55b6aSBarry Smith 6816eb55b6aSBarry Smith */ 682e2d53e46SBarry Smith /* must zero l->B before l->A because the (diag) case below may put values into l->B*/ 683f4df32b1SMatthew Knepley ierr = MatZeroRows(l->B,slen,lrows,0.0);CHKERRQ(ierr); 684d0f46423SBarry Smith if ((diag != 0.0) && (l->A->rmap->N == l->A->cmap->N)) { 685f4df32b1SMatthew Knepley ierr = MatZeroRows(l->A,slen,lrows,diag);CHKERRQ(ierr); 686f4df32b1SMatthew Knepley } else if (diag != 0.0) { 687f4df32b1SMatthew Knepley ierr = MatZeroRows(l->A,slen,lrows,0.0);CHKERRQ(ierr); 688fa46199cSSatish Balay if (((Mat_SeqAIJ*)l->A->data)->nonew) { 689e32f2f54SBarry Smith SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"MatZeroRows() on rectangular matrices cannot be used with the Mat options\n\ 690512a5fc5SBarry Smith MAT_NEW_NONZERO_LOCATIONS,MAT_NEW_NONZERO_LOCATION_ERR,MAT_NEW_NONZERO_ALLOCATION_ERR"); 6916525c446SSatish Balay } 692e2d53e46SBarry Smith for (i = 0; i < slen; i++) { 693e2d53e46SBarry Smith row = lrows[i] + rstart; 694f4df32b1SMatthew Knepley ierr = MatSetValues(A,1,&row,1,&row,&diag,INSERT_VALUES);CHKERRQ(ierr); 695e2d53e46SBarry Smith } 696e2d53e46SBarry Smith ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 697e2d53e46SBarry Smith ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 6986eb55b6aSBarry Smith } else { 699f4df32b1SMatthew Knepley ierr = MatZeroRows(l->A,slen,lrows,0.0);CHKERRQ(ierr); 7006eb55b6aSBarry Smith } 701606d414cSSatish Balay ierr = PetscFree(lrows);CHKERRQ(ierr); 70272dacd9aSBarry Smith 7031eb62cbbSBarry Smith /* wait on sends */ 7041eb62cbbSBarry Smith if (nsends) { 705b0a32e0cSBarry Smith ierr = PetscMalloc(nsends*sizeof(MPI_Status),&send_status);CHKERRQ(ierr); 706ca161407SBarry Smith ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr); 707606d414cSSatish Balay ierr = PetscFree(send_status);CHKERRQ(ierr); 7081eb62cbbSBarry Smith } 709606d414cSSatish Balay ierr = PetscFree(send_waits);CHKERRQ(ierr); 710606d414cSSatish Balay ierr = 
PetscFree(svalues);CHKERRQ(ierr); 7111eb62cbbSBarry Smith 7123a40ed3dSBarry Smith PetscFunctionReturn(0); 7131eb62cbbSBarry Smith } 7141eb62cbbSBarry Smith 7154a2ae208SSatish Balay #undef __FUNCT__ 7164a2ae208SSatish Balay #define __FUNCT__ "MatMult_MPIAIJ" 717dfbe8321SBarry Smith PetscErrorCode MatMult_MPIAIJ(Mat A,Vec xx,Vec yy) 7181eb62cbbSBarry Smith { 719416022c9SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 720dfbe8321SBarry Smith PetscErrorCode ierr; 721b1d57f15SBarry Smith PetscInt nt; 722416022c9SBarry Smith 7233a40ed3dSBarry Smith PetscFunctionBegin; 724a2ce50c7SBarry Smith ierr = VecGetLocalSize(xx,&nt);CHKERRQ(ierr); 72565e19b50SBarry Smith if (nt != A->cmap->n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Incompatible partition of A (%D) and xx (%D)",A->cmap->n,nt); 726ca9f406cSSatish Balay ierr = VecScatterBegin(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 727f830108cSBarry Smith ierr = (*a->A->ops->mult)(a->A,xx,yy);CHKERRQ(ierr); 728ca9f406cSSatish Balay ierr = VecScatterEnd(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 729f830108cSBarry Smith ierr = (*a->B->ops->multadd)(a->B,a->lvec,yy,yy);CHKERRQ(ierr); 7303a40ed3dSBarry Smith PetscFunctionReturn(0); 7311eb62cbbSBarry Smith } 7321eb62cbbSBarry Smith 7334a2ae208SSatish Balay #undef __FUNCT__ 734bd0c2dcbSBarry Smith #define __FUNCT__ "MatMultDiagonalBlock_MPIAIJ" 735bd0c2dcbSBarry Smith PetscErrorCode MatMultDiagonalBlock_MPIAIJ(Mat A,Vec bb,Vec xx) 736bd0c2dcbSBarry Smith { 737bd0c2dcbSBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 738bd0c2dcbSBarry Smith PetscErrorCode ierr; 739bd0c2dcbSBarry Smith 740bd0c2dcbSBarry Smith PetscFunctionBegin; 741bd0c2dcbSBarry Smith ierr = MatMultDiagonalBlock(a->A,bb,xx);CHKERRQ(ierr); 742bd0c2dcbSBarry Smith PetscFunctionReturn(0); 743bd0c2dcbSBarry Smith } 744bd0c2dcbSBarry Smith 745bd0c2dcbSBarry Smith #undef __FUNCT__ 7464a2ae208SSatish Balay #define __FUNCT__ "MatMultAdd_MPIAIJ" 747dfbe8321SBarry Smith 
PetscErrorCode MatMultAdd_MPIAIJ(Mat A,Vec xx,Vec yy,Vec zz) 748da3a660dSBarry Smith { 749416022c9SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 750dfbe8321SBarry Smith PetscErrorCode ierr; 7513a40ed3dSBarry Smith 7523a40ed3dSBarry Smith PetscFunctionBegin; 753ca9f406cSSatish Balay ierr = VecScatterBegin(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 754f830108cSBarry Smith ierr = (*a->A->ops->multadd)(a->A,xx,yy,zz);CHKERRQ(ierr); 755ca9f406cSSatish Balay ierr = VecScatterEnd(a->Mvctx,xx,a->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 756f830108cSBarry Smith ierr = (*a->B->ops->multadd)(a->B,a->lvec,zz,zz);CHKERRQ(ierr); 7573a40ed3dSBarry Smith PetscFunctionReturn(0); 758da3a660dSBarry Smith } 759da3a660dSBarry Smith 7604a2ae208SSatish Balay #undef __FUNCT__ 7614a2ae208SSatish Balay #define __FUNCT__ "MatMultTranspose_MPIAIJ" 762dfbe8321SBarry Smith PetscErrorCode MatMultTranspose_MPIAIJ(Mat A,Vec xx,Vec yy) 763da3a660dSBarry Smith { 764416022c9SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 765dfbe8321SBarry Smith PetscErrorCode ierr; 766ace3abfcSBarry Smith PetscBool merged; 767da3a660dSBarry Smith 7683a40ed3dSBarry Smith PetscFunctionBegin; 769a5ff213dSBarry Smith ierr = VecScatterGetMerged(a->Mvctx,&merged);CHKERRQ(ierr); 770da3a660dSBarry Smith /* do nondiagonal part */ 7717c922b88SBarry Smith ierr = (*a->B->ops->multtranspose)(a->B,xx,a->lvec);CHKERRQ(ierr); 772a5ff213dSBarry Smith if (!merged) { 773da3a660dSBarry Smith /* send it on its way */ 774ca9f406cSSatish Balay ierr = VecScatterBegin(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 775da3a660dSBarry Smith /* do local part */ 7767c922b88SBarry Smith ierr = (*a->A->ops->multtranspose)(a->A,xx,yy);CHKERRQ(ierr); 777da3a660dSBarry Smith /* receive remote parts: note this assumes the values are not actually */ 778a5ff213dSBarry Smith /* added in yy until the next line, */ 779ca9f406cSSatish Balay ierr = 
VecScatterEnd(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 780a5ff213dSBarry Smith } else { 781a5ff213dSBarry Smith /* do local part */ 782a5ff213dSBarry Smith ierr = (*a->A->ops->multtranspose)(a->A,xx,yy);CHKERRQ(ierr); 783a5ff213dSBarry Smith /* send it on its way */ 784ca9f406cSSatish Balay ierr = VecScatterBegin(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 785a5ff213dSBarry Smith /* values actually were received in the Begin() but we need to call this nop */ 786ca9f406cSSatish Balay ierr = VecScatterEnd(a->Mvctx,a->lvec,yy,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 787a5ff213dSBarry Smith } 7883a40ed3dSBarry Smith PetscFunctionReturn(0); 789da3a660dSBarry Smith } 790da3a660dSBarry Smith 791cd0d46ebSvictorle EXTERN_C_BEGIN 792cd0d46ebSvictorle #undef __FUNCT__ 7935fbd3699SBarry Smith #define __FUNCT__ "MatIsTranspose_MPIAIJ" 794ace3abfcSBarry Smith PetscErrorCode PETSCMAT_DLLEXPORT MatIsTranspose_MPIAIJ(Mat Amat,Mat Bmat,PetscReal tol,PetscBool *f) 795cd0d46ebSvictorle { 7964f423910Svictorle MPI_Comm comm; 797cd0d46ebSvictorle Mat_MPIAIJ *Aij = (Mat_MPIAIJ *) Amat->data, *Bij; 79866501d38Svictorle Mat Adia = Aij->A, Bdia, Aoff,Boff,*Aoffs,*Boffs; 799cd0d46ebSvictorle IS Me,Notme; 8006849ba73SBarry Smith PetscErrorCode ierr; 801b1d57f15SBarry Smith PetscInt M,N,first,last,*notme,i; 802b1d57f15SBarry Smith PetscMPIInt size; 803cd0d46ebSvictorle 804cd0d46ebSvictorle PetscFunctionBegin; 80542e5f5b4Svictorle 80642e5f5b4Svictorle /* Easy test: symmetric diagonal block */ 80766501d38Svictorle Bij = (Mat_MPIAIJ *) Bmat->data; Bdia = Bij->A; 8085485867bSBarry Smith ierr = MatIsTranspose(Adia,Bdia,tol,f);CHKERRQ(ierr); 809cd0d46ebSvictorle if (!*f) PetscFunctionReturn(0); 8104f423910Svictorle ierr = PetscObjectGetComm((PetscObject)Amat,&comm);CHKERRQ(ierr); 811b1d57f15SBarry Smith ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 812b1d57f15SBarry Smith if (size == 1) PetscFunctionReturn(0); 81342e5f5b4Svictorle 81442e5f5b4Svictorle /* 
Hard test: off-diagonal block. This takes a MatGetSubMatrix. */ 815cd0d46ebSvictorle ierr = MatGetSize(Amat,&M,&N);CHKERRQ(ierr); 816cd0d46ebSvictorle ierr = MatGetOwnershipRange(Amat,&first,&last);CHKERRQ(ierr); 817b1d57f15SBarry Smith ierr = PetscMalloc((N-last+first)*sizeof(PetscInt),¬me);CHKERRQ(ierr); 818cd0d46ebSvictorle for (i=0; i<first; i++) notme[i] = i; 819cd0d46ebSvictorle for (i=last; i<M; i++) notme[i-last+first] = i; 82070b3c8c7SBarry Smith ierr = ISCreateGeneral(MPI_COMM_SELF,N-last+first,notme,PETSC_COPY_VALUES,&Notme);CHKERRQ(ierr); 821268466fbSBarry Smith ierr = ISCreateStride(MPI_COMM_SELF,last-first,first,1,&Me);CHKERRQ(ierr); 822268466fbSBarry Smith ierr = MatGetSubMatrices(Amat,1,&Me,&Notme,MAT_INITIAL_MATRIX,&Aoffs);CHKERRQ(ierr); 82366501d38Svictorle Aoff = Aoffs[0]; 824268466fbSBarry Smith ierr = MatGetSubMatrices(Bmat,1,&Notme,&Me,MAT_INITIAL_MATRIX,&Boffs);CHKERRQ(ierr); 82566501d38Svictorle Boff = Boffs[0]; 8265485867bSBarry Smith ierr = MatIsTranspose(Aoff,Boff,tol,f);CHKERRQ(ierr); 82766501d38Svictorle ierr = MatDestroyMatrices(1,&Aoffs);CHKERRQ(ierr); 82866501d38Svictorle ierr = MatDestroyMatrices(1,&Boffs);CHKERRQ(ierr); 82942e5f5b4Svictorle ierr = ISDestroy(Me);CHKERRQ(ierr); 83042e5f5b4Svictorle ierr = ISDestroy(Notme);CHKERRQ(ierr); 83142e5f5b4Svictorle 832cd0d46ebSvictorle PetscFunctionReturn(0); 833cd0d46ebSvictorle } 834cd0d46ebSvictorle EXTERN_C_END 835cd0d46ebSvictorle 8364a2ae208SSatish Balay #undef __FUNCT__ 8374a2ae208SSatish Balay #define __FUNCT__ "MatMultTransposeAdd_MPIAIJ" 838dfbe8321SBarry Smith PetscErrorCode MatMultTransposeAdd_MPIAIJ(Mat A,Vec xx,Vec yy,Vec zz) 839da3a660dSBarry Smith { 840416022c9SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 841dfbe8321SBarry Smith PetscErrorCode ierr; 842da3a660dSBarry Smith 8433a40ed3dSBarry Smith PetscFunctionBegin; 844da3a660dSBarry Smith /* do nondiagonal part */ 8457c922b88SBarry Smith ierr = (*a->B->ops->multtranspose)(a->B,xx,a->lvec);CHKERRQ(ierr); 
846da3a660dSBarry Smith /* send it on its way */ 847ca9f406cSSatish Balay ierr = VecScatterBegin(a->Mvctx,a->lvec,zz,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 848da3a660dSBarry Smith /* do local part */ 8497c922b88SBarry Smith ierr = (*a->A->ops->multtransposeadd)(a->A,xx,yy,zz);CHKERRQ(ierr); 850a5ff213dSBarry Smith /* receive remote parts */ 851ca9f406cSSatish Balay ierr = VecScatterEnd(a->Mvctx,a->lvec,zz,ADD_VALUES,SCATTER_REVERSE);CHKERRQ(ierr); 8523a40ed3dSBarry Smith PetscFunctionReturn(0); 853da3a660dSBarry Smith } 854da3a660dSBarry Smith 8551eb62cbbSBarry Smith /* 8561eb62cbbSBarry Smith This only works correctly for square matrices where the subblock A->A is the 8571eb62cbbSBarry Smith diagonal block 8581eb62cbbSBarry Smith */ 8594a2ae208SSatish Balay #undef __FUNCT__ 8604a2ae208SSatish Balay #define __FUNCT__ "MatGetDiagonal_MPIAIJ" 861dfbe8321SBarry Smith PetscErrorCode MatGetDiagonal_MPIAIJ(Mat A,Vec v) 8621eb62cbbSBarry Smith { 863dfbe8321SBarry Smith PetscErrorCode ierr; 864416022c9SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 8653a40ed3dSBarry Smith 8663a40ed3dSBarry Smith PetscFunctionBegin; 867e7e72b3dSBarry Smith if (A->rmap->N != A->cmap->N) SETERRQ(((PetscObject)A)->comm,PETSC_ERR_SUP,"Supports only square matrix where A->A is diag block"); 868e7e72b3dSBarry Smith if (A->rmap->rstart != A->cmap->rstart || A->rmap->rend != A->cmap->rend) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"row partition must equal col partition"); 8693a40ed3dSBarry Smith ierr = MatGetDiagonal(a->A,v);CHKERRQ(ierr); 8703a40ed3dSBarry Smith PetscFunctionReturn(0); 8711eb62cbbSBarry Smith } 8721eb62cbbSBarry Smith 8734a2ae208SSatish Balay #undef __FUNCT__ 8744a2ae208SSatish Balay #define __FUNCT__ "MatScale_MPIAIJ" 875f4df32b1SMatthew Knepley PetscErrorCode MatScale_MPIAIJ(Mat A,PetscScalar aa) 876052efed2SBarry Smith { 877052efed2SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 878dfbe8321SBarry Smith PetscErrorCode ierr; 8793a40ed3dSBarry Smith 8803a40ed3dSBarry 
Smith PetscFunctionBegin; 881f4df32b1SMatthew Knepley ierr = MatScale(a->A,aa);CHKERRQ(ierr); 882f4df32b1SMatthew Knepley ierr = MatScale(a->B,aa);CHKERRQ(ierr); 8833a40ed3dSBarry Smith PetscFunctionReturn(0); 884052efed2SBarry Smith } 885052efed2SBarry Smith 8864a2ae208SSatish Balay #undef __FUNCT__ 8874a2ae208SSatish Balay #define __FUNCT__ "MatDestroy_MPIAIJ" 888dfbe8321SBarry Smith PetscErrorCode MatDestroy_MPIAIJ(Mat mat) 8891eb62cbbSBarry Smith { 89044a69424SLois Curfman McInnes Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 891dfbe8321SBarry Smith PetscErrorCode ierr; 89283e2fdc7SBarry Smith 8933a40ed3dSBarry Smith PetscFunctionBegin; 894aa482453SBarry Smith #if defined(PETSC_USE_LOG) 895d0f46423SBarry Smith PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D",mat->rmap->N,mat->cmap->N); 896a5a9c739SBarry Smith #endif 8978798bf22SSatish Balay ierr = MatStashDestroy_Private(&mat->stash);CHKERRQ(ierr); 898a7420bb7SBarry Smith if (aij->diag) {ierr = VecDestroy(aij->diag);CHKERRQ(ierr);} 899d88c0aacSHong Zhang if (aij->A){ierr = MatDestroy(aij->A);CHKERRQ(ierr);} 900d88c0aacSHong Zhang if (aij->B){ierr = MatDestroy(aij->B);CHKERRQ(ierr);} 901aa482453SBarry Smith #if defined (PETSC_USE_CTABLE) 9029c666560SBarry Smith if (aij->colmap) {ierr = PetscTableDestroy(aij->colmap);CHKERRQ(ierr);} 903b1fc9764SSatish Balay #else 90405b42c5fSBarry Smith ierr = PetscFree(aij->colmap);CHKERRQ(ierr); 905b1fc9764SSatish Balay #endif 90605b42c5fSBarry Smith ierr = PetscFree(aij->garray);CHKERRQ(ierr); 9077c922b88SBarry Smith if (aij->lvec) {ierr = VecDestroy(aij->lvec);CHKERRQ(ierr);} 9087c922b88SBarry Smith if (aij->Mvctx) {ierr = VecScatterDestroy(aij->Mvctx);CHKERRQ(ierr);} 90903095fedSBarry Smith ierr = PetscFree2(aij->rowvalues,aij->rowindices);CHKERRQ(ierr); 9108aa348c1SBarry Smith ierr = PetscFree(aij->ld);CHKERRQ(ierr); 911606d414cSSatish Balay ierr = PetscFree(aij);CHKERRQ(ierr); 912901853e0SKris Buschelman 913dbd8c25aSHong Zhang ierr = 
PetscObjectChangeTypeName((PetscObject)mat,0);CHKERRQ(ierr); 914901853e0SKris Buschelman ierr = PetscObjectComposeFunction((PetscObject)mat,"MatStoreValues_C","",PETSC_NULL);CHKERRQ(ierr); 915901853e0SKris Buschelman ierr = PetscObjectComposeFunction((PetscObject)mat,"MatRetrieveValues_C","",PETSC_NULL);CHKERRQ(ierr); 916901853e0SKris Buschelman ierr = PetscObjectComposeFunction((PetscObject)mat,"MatGetDiagonalBlock_C","",PETSC_NULL);CHKERRQ(ierr); 917901853e0SKris Buschelman ierr = PetscObjectComposeFunction((PetscObject)mat,"MatIsTranspose_C","",PETSC_NULL);CHKERRQ(ierr); 918901853e0SKris Buschelman ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAIJSetPreallocation_C","",PETSC_NULL);CHKERRQ(ierr); 919ff69c46cSKris Buschelman ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAIJSetPreallocationCSR_C","",PETSC_NULL);CHKERRQ(ierr); 920901853e0SKris Buschelman ierr = PetscObjectComposeFunction((PetscObject)mat,"MatDiagonalScaleLocal_C","",PETSC_NULL);CHKERRQ(ierr); 921471cc821SHong Zhang ierr = PetscObjectComposeFunction((PetscObject)mat,"MatConvert_mpiaij_mpisbaij_C","",PETSC_NULL);CHKERRQ(ierr); 9223a40ed3dSBarry Smith PetscFunctionReturn(0); 9231eb62cbbSBarry Smith } 924ee50ffe9SBarry Smith 9254a2ae208SSatish Balay #undef __FUNCT__ 9268e2fed03SBarry Smith #define __FUNCT__ "MatView_MPIAIJ_Binary" 927dfbe8321SBarry Smith PetscErrorCode MatView_MPIAIJ_Binary(Mat mat,PetscViewer viewer) 9288e2fed03SBarry Smith { 9298e2fed03SBarry Smith Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 9308e2fed03SBarry Smith Mat_SeqAIJ* A = (Mat_SeqAIJ*)aij->A->data; 9318e2fed03SBarry Smith Mat_SeqAIJ* B = (Mat_SeqAIJ*)aij->B->data; 9326849ba73SBarry Smith PetscErrorCode ierr; 93332dcc486SBarry Smith PetscMPIInt rank,size,tag = ((PetscObject)viewer)->tag; 9346f69ff64SBarry Smith int fd; 935a788621eSSatish Balay PetscInt nz,header[4],*row_lengths,*range=0,rlen,i; 936d0f46423SBarry Smith PetscInt nzmax,*column_indices,j,k,col,*garray = aij->garray,cnt,cstart = 
mat->cmap->rstart,rnz; 9378e2fed03SBarry Smith PetscScalar *column_values; 938*85ebf7a4SBarry Smith PetscInt message_count,flowcontrolcount; 9398e2fed03SBarry Smith 9408e2fed03SBarry Smith PetscFunctionBegin; 9417adad957SLisandro Dalcin ierr = MPI_Comm_rank(((PetscObject)mat)->comm,&rank);CHKERRQ(ierr); 9427adad957SLisandro Dalcin ierr = MPI_Comm_size(((PetscObject)mat)->comm,&size);CHKERRQ(ierr); 9438e2fed03SBarry Smith nz = A->nz + B->nz; 944958c9bccSBarry Smith if (!rank) { 9450700a824SBarry Smith header[0] = MAT_FILE_CLASSID; 946d0f46423SBarry Smith header[1] = mat->rmap->N; 947d0f46423SBarry Smith header[2] = mat->cmap->N; 9487adad957SLisandro Dalcin ierr = MPI_Reduce(&nz,&header[3],1,MPIU_INT,MPI_SUM,0,((PetscObject)mat)->comm);CHKERRQ(ierr); 9498e2fed03SBarry Smith ierr = PetscViewerBinaryGetDescriptor(viewer,&fd);CHKERRQ(ierr); 9506f69ff64SBarry Smith ierr = PetscBinaryWrite(fd,header,4,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr); 9518e2fed03SBarry Smith /* get largest number of rows any processor has */ 952d0f46423SBarry Smith rlen = mat->rmap->n; 953d0f46423SBarry Smith range = mat->rmap->range; 9548e2fed03SBarry Smith for (i=1; i<size; i++) { 9558e2fed03SBarry Smith rlen = PetscMax(rlen,range[i+1] - range[i]); 9568e2fed03SBarry Smith } 9578e2fed03SBarry Smith } else { 9587adad957SLisandro Dalcin ierr = MPI_Reduce(&nz,0,1,MPIU_INT,MPI_SUM,0,((PetscObject)mat)->comm);CHKERRQ(ierr); 959d0f46423SBarry Smith rlen = mat->rmap->n; 9608e2fed03SBarry Smith } 9618e2fed03SBarry Smith 9628e2fed03SBarry Smith /* load up the local row counts */ 963b1d57f15SBarry Smith ierr = PetscMalloc((rlen+1)*sizeof(PetscInt),&row_lengths);CHKERRQ(ierr); 964d0f46423SBarry Smith for (i=0; i<mat->rmap->n; i++) { 9658e2fed03SBarry Smith row_lengths[i] = A->i[i+1] - A->i[i] + B->i[i+1] - B->i[i]; 9668e2fed03SBarry Smith } 9678e2fed03SBarry Smith 9688e2fed03SBarry Smith /* store the row lengths to the file */ 969*85ebf7a4SBarry Smith ierr = 
PetscViewerFlowControlStart(viewer,&message_count,&flowcontrolcount);CHKERRQ(ierr); 970958c9bccSBarry Smith if (!rank) { 9718e2fed03SBarry Smith MPI_Status status; 972d0f46423SBarry Smith ierr = PetscBinaryWrite(fd,row_lengths,mat->rmap->n,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr); 9738e2fed03SBarry Smith for (i=1; i<size; i++) { 974*85ebf7a4SBarry Smith ierr = PetscViewerFlowControlStepMaster(viewer,i,message_count,flowcontrolcount);CHKERRQ(ierr); 9758e2fed03SBarry Smith rlen = range[i+1] - range[i]; 976a1319256SJed Brown ierr = MPI_Recv(row_lengths,rlen,MPIU_INT,i,tag,((PetscObject)mat)->comm,&status);CHKERRQ(ierr); 9776f69ff64SBarry Smith ierr = PetscBinaryWrite(fd,row_lengths,rlen,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr); 9788e2fed03SBarry Smith } 979*85ebf7a4SBarry Smith ierr = PetscViewerFlowControlEndMaster(viewer,message_count);CHKERRQ(ierr); 9808e2fed03SBarry Smith } else { 981*85ebf7a4SBarry Smith ierr = PetscViewerFlowControlStepWorker(viewer,rank,message_count);CHKERRQ(ierr); 982d0f46423SBarry Smith ierr = MPI_Send(row_lengths,mat->rmap->n,MPIU_INT,0,tag,((PetscObject)mat)->comm);CHKERRQ(ierr); 983*85ebf7a4SBarry Smith ierr = PetscViewerFlowControlEndWorker(viewer,message_count);CHKERRQ(ierr); 9848e2fed03SBarry Smith } 9858e2fed03SBarry Smith ierr = PetscFree(row_lengths);CHKERRQ(ierr); 9868e2fed03SBarry Smith 9878e2fed03SBarry Smith /* load up the local column indices */ 9888e2fed03SBarry Smith nzmax = nz; /* )th processor needs space a largest processor needs */ 9897adad957SLisandro Dalcin ierr = MPI_Reduce(&nz,&nzmax,1,MPIU_INT,MPI_MAX,0,((PetscObject)mat)->comm);CHKERRQ(ierr); 990b1d57f15SBarry Smith ierr = PetscMalloc((nzmax+1)*sizeof(PetscInt),&column_indices);CHKERRQ(ierr); 9918e2fed03SBarry Smith cnt = 0; 992d0f46423SBarry Smith for (i=0; i<mat->rmap->n; i++) { 9938e2fed03SBarry Smith for (j=B->i[i]; j<B->i[i+1]; j++) { 9948e2fed03SBarry Smith if ( (col = garray[B->j[j]]) > cstart) break; 9958e2fed03SBarry Smith column_indices[cnt++] = col; 
9968e2fed03SBarry Smith } 9978e2fed03SBarry Smith for (k=A->i[i]; k<A->i[i+1]; k++) { 9988e2fed03SBarry Smith column_indices[cnt++] = A->j[k] + cstart; 9998e2fed03SBarry Smith } 10008e2fed03SBarry Smith for (; j<B->i[i+1]; j++) { 10018e2fed03SBarry Smith column_indices[cnt++] = garray[B->j[j]]; 10028e2fed03SBarry Smith } 10038e2fed03SBarry Smith } 1004e32f2f54SBarry Smith if (cnt != A->nz + B->nz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_LIB,"Internal PETSc error: cnt = %D nz = %D",cnt,A->nz+B->nz); 10058e2fed03SBarry Smith 10068e2fed03SBarry Smith /* store the column indices to the file */ 1007*85ebf7a4SBarry Smith ierr = PetscViewerFlowControlStart(viewer,&message_count,&flowcontrolcount);CHKERRQ(ierr); 1008958c9bccSBarry Smith if (!rank) { 10098e2fed03SBarry Smith MPI_Status status; 10106f69ff64SBarry Smith ierr = PetscBinaryWrite(fd,column_indices,nz,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr); 10118e2fed03SBarry Smith for (i=1; i<size; i++) { 1012*85ebf7a4SBarry Smith ierr = PetscViewerFlowControlStepMaster(viewer,i,message_count,flowcontrolcount);CHKERRQ(ierr); 10137adad957SLisandro Dalcin ierr = MPI_Recv(&rnz,1,MPIU_INT,i,tag,((PetscObject)mat)->comm,&status);CHKERRQ(ierr); 1014e32f2f54SBarry Smith if (rnz > nzmax) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_LIB,"Internal PETSc error: nz = %D nzmax = %D",nz,nzmax); 10157adad957SLisandro Dalcin ierr = MPI_Recv(column_indices,rnz,MPIU_INT,i,tag,((PetscObject)mat)->comm,&status);CHKERRQ(ierr); 10166f69ff64SBarry Smith ierr = PetscBinaryWrite(fd,column_indices,rnz,PETSC_INT,PETSC_TRUE);CHKERRQ(ierr); 10178e2fed03SBarry Smith } 1018*85ebf7a4SBarry Smith ierr = PetscViewerFlowControlEndMaster(viewer,message_count);CHKERRQ(ierr); 10198e2fed03SBarry Smith } else { 1020*85ebf7a4SBarry Smith ierr = PetscViewerFlowControlStepWorker(viewer,rank,message_count);CHKERRQ(ierr); 10217adad957SLisandro Dalcin ierr = MPI_Send(&nz,1,MPIU_INT,0,tag,((PetscObject)mat)->comm);CHKERRQ(ierr); 10227adad957SLisandro Dalcin ierr = 
MPI_Send(column_indices,nz,MPIU_INT,0,tag,((PetscObject)mat)->comm);CHKERRQ(ierr); 1023*85ebf7a4SBarry Smith ierr = PetscViewerFlowControlEndWorker(viewer,message_count);CHKERRQ(ierr); 10248e2fed03SBarry Smith } 10258e2fed03SBarry Smith ierr = PetscFree(column_indices);CHKERRQ(ierr); 10268e2fed03SBarry Smith 10278e2fed03SBarry Smith /* load up the local column values */ 10288e2fed03SBarry Smith ierr = PetscMalloc((nzmax+1)*sizeof(PetscScalar),&column_values);CHKERRQ(ierr); 10298e2fed03SBarry Smith cnt = 0; 1030d0f46423SBarry Smith for (i=0; i<mat->rmap->n; i++) { 10318e2fed03SBarry Smith for (j=B->i[i]; j<B->i[i+1]; j++) { 10328e2fed03SBarry Smith if ( garray[B->j[j]] > cstart) break; 10338e2fed03SBarry Smith column_values[cnt++] = B->a[j]; 10348e2fed03SBarry Smith } 10358e2fed03SBarry Smith for (k=A->i[i]; k<A->i[i+1]; k++) { 10368e2fed03SBarry Smith column_values[cnt++] = A->a[k]; 10378e2fed03SBarry Smith } 10388e2fed03SBarry Smith for (; j<B->i[i+1]; j++) { 10398e2fed03SBarry Smith column_values[cnt++] = B->a[j]; 10408e2fed03SBarry Smith } 10418e2fed03SBarry Smith } 1042e32f2f54SBarry Smith if (cnt != A->nz + B->nz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Internal PETSc error: cnt = %D nz = %D",cnt,A->nz+B->nz); 10438e2fed03SBarry Smith 10448e2fed03SBarry Smith /* store the column values to the file */ 1045*85ebf7a4SBarry Smith ierr = PetscViewerFlowControlStart(viewer,&message_count,&flowcontrolcount);CHKERRQ(ierr); 1046958c9bccSBarry Smith if (!rank) { 10478e2fed03SBarry Smith MPI_Status status; 10486f69ff64SBarry Smith ierr = PetscBinaryWrite(fd,column_values,nz,PETSC_SCALAR,PETSC_TRUE);CHKERRQ(ierr); 10498e2fed03SBarry Smith for (i=1; i<size; i++) { 1050*85ebf7a4SBarry Smith ierr = PetscViewerFlowControlStepMaster(viewer,i,message_count,flowcontrolcount);CHKERRQ(ierr); 10517adad957SLisandro Dalcin ierr = MPI_Recv(&rnz,1,MPIU_INT,i,tag,((PetscObject)mat)->comm,&status);CHKERRQ(ierr); 1052e32f2f54SBarry Smith if (rnz > nzmax) 
SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_LIB,"Internal PETSc error: nz = %D nzmax = %D",nz,nzmax); 10537adad957SLisandro Dalcin ierr = MPI_Recv(column_values,rnz,MPIU_SCALAR,i,tag,((PetscObject)mat)->comm,&status);CHKERRQ(ierr); 10546f69ff64SBarry Smith ierr = PetscBinaryWrite(fd,column_values,rnz,PETSC_SCALAR,PETSC_TRUE);CHKERRQ(ierr); 10558e2fed03SBarry Smith } 1056*85ebf7a4SBarry Smith ierr = PetscViewerFlowControlEndMaster(viewer,message_count);CHKERRQ(ierr); 10578e2fed03SBarry Smith } else { 1058*85ebf7a4SBarry Smith ierr = PetscViewerFlowControlStepWorker(viewer,rank,message_count);CHKERRQ(ierr); 10597adad957SLisandro Dalcin ierr = MPI_Send(&nz,1,MPIU_INT,0,tag,((PetscObject)mat)->comm);CHKERRQ(ierr); 10607adad957SLisandro Dalcin ierr = MPI_Send(column_values,nz,MPIU_SCALAR,0,tag,((PetscObject)mat)->comm);CHKERRQ(ierr); 1061*85ebf7a4SBarry Smith ierr = PetscViewerFlowControlEndWorker(viewer,message_count);CHKERRQ(ierr); 10628e2fed03SBarry Smith } 10638e2fed03SBarry Smith ierr = PetscFree(column_values);CHKERRQ(ierr); 10648e2fed03SBarry Smith PetscFunctionReturn(0); 10658e2fed03SBarry Smith } 10668e2fed03SBarry Smith 10678e2fed03SBarry Smith #undef __FUNCT__ 10684a2ae208SSatish Balay #define __FUNCT__ "MatView_MPIAIJ_ASCIIorDraworSocket" 1069dfbe8321SBarry Smith PetscErrorCode MatView_MPIAIJ_ASCIIorDraworSocket(Mat mat,PetscViewer viewer) 1070416022c9SBarry Smith { 107144a69424SLois Curfman McInnes Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 1072dfbe8321SBarry Smith PetscErrorCode ierr; 107332dcc486SBarry Smith PetscMPIInt rank = aij->rank,size = aij->size; 1074ace3abfcSBarry Smith PetscBool isdraw,iascii,isbinary; 1075b0a32e0cSBarry Smith PetscViewer sviewer; 1076f3ef73ceSBarry Smith PetscViewerFormat format; 1077416022c9SBarry Smith 10783a40ed3dSBarry Smith PetscFunctionBegin; 10792692d6eeSBarry Smith ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr); 10802692d6eeSBarry Smith ierr = 
PetscTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr); 10812692d6eeSBarry Smith ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr); 108232077d6dSBarry Smith if (iascii) { 1083b0a32e0cSBarry Smith ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr); 1084456192e2SBarry Smith if (format == PETSC_VIEWER_ASCII_INFO_DETAIL) { 10854e220ebcSLois Curfman McInnes MatInfo info; 1086ace3abfcSBarry Smith PetscBool inodes; 1087923f20ffSKris Buschelman 10887adad957SLisandro Dalcin ierr = MPI_Comm_rank(((PetscObject)mat)->comm,&rank);CHKERRQ(ierr); 1089888f2ed8SSatish Balay ierr = MatGetInfo(mat,MAT_LOCAL,&info);CHKERRQ(ierr); 1090923f20ffSKris Buschelman ierr = MatInodeGetInodeSizes(aij->A,PETSC_NULL,(PetscInt **)&inodes,PETSC_NULL);CHKERRQ(ierr); 1091923f20ffSKris Buschelman if (!inodes) { 109277431f27SBarry Smith ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] Local rows %D nz %D nz alloced %D mem %D, not using I-node routines\n", 1093d0f46423SBarry Smith rank,mat->rmap->n,(PetscInt)info.nz_used,(PetscInt)info.nz_allocated,(PetscInt)info.memory);CHKERRQ(ierr); 10946831982aSBarry Smith } else { 109577431f27SBarry Smith ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] Local rows %D nz %D nz alloced %D mem %D, using I-node routines\n", 1096d0f46423SBarry Smith rank,mat->rmap->n,(PetscInt)info.nz_used,(PetscInt)info.nz_allocated,(PetscInt)info.memory);CHKERRQ(ierr); 10976831982aSBarry Smith } 1098888f2ed8SSatish Balay ierr = MatGetInfo(aij->A,MAT_LOCAL,&info);CHKERRQ(ierr); 109977431f27SBarry Smith ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] on-diagonal part: nz %D \n",rank,(PetscInt)info.nz_used);CHKERRQ(ierr); 1100888f2ed8SSatish Balay ierr = MatGetInfo(aij->B,MAT_LOCAL,&info);CHKERRQ(ierr); 110177431f27SBarry Smith ierr = PetscViewerASCIISynchronizedPrintf(viewer,"[%d] off-diagonal part: nz %D \n",rank,(PetscInt)info.nz_used);CHKERRQ(ierr); 1102b0a32e0cSBarry Smith ierr = 
PetscViewerFlush(viewer);CHKERRQ(ierr); 110307d81ca4SBarry Smith ierr = PetscViewerASCIIPrintf(viewer,"Information on VecScatter used in matrix-vector product: \n");CHKERRQ(ierr); 1104a40aa06bSLois Curfman McInnes ierr = VecScatterView(aij->Mvctx,viewer);CHKERRQ(ierr); 11053a40ed3dSBarry Smith PetscFunctionReturn(0); 1106fb9695e5SSatish Balay } else if (format == PETSC_VIEWER_ASCII_INFO) { 1107923f20ffSKris Buschelman PetscInt inodecount,inodelimit,*inodes; 1108923f20ffSKris Buschelman ierr = MatInodeGetInodeSizes(aij->A,&inodecount,&inodes,&inodelimit);CHKERRQ(ierr); 1109923f20ffSKris Buschelman if (inodes) { 1110923f20ffSKris Buschelman ierr = PetscViewerASCIIPrintf(viewer,"using I-node (on process 0) routines: found %D nodes, limit used is %D\n",inodecount,inodelimit);CHKERRQ(ierr); 1111d38fa0fbSBarry Smith } else { 1112d38fa0fbSBarry Smith ierr = PetscViewerASCIIPrintf(viewer,"not using I-node (on process 0) routines\n");CHKERRQ(ierr); 1113d38fa0fbSBarry Smith } 11143a40ed3dSBarry Smith PetscFunctionReturn(0); 11154aedb280SBarry Smith } else if (format == PETSC_VIEWER_ASCII_FACTOR_INFO) { 11164aedb280SBarry Smith PetscFunctionReturn(0); 111708480c60SBarry Smith } 11188e2fed03SBarry Smith } else if (isbinary) { 11198e2fed03SBarry Smith if (size == 1) { 11207adad957SLisandro Dalcin ierr = PetscObjectSetName((PetscObject)aij->A,((PetscObject)mat)->name);CHKERRQ(ierr); 11218e2fed03SBarry Smith ierr = MatView(aij->A,viewer);CHKERRQ(ierr); 11228e2fed03SBarry Smith } else { 11238e2fed03SBarry Smith ierr = MatView_MPIAIJ_Binary(mat,viewer);CHKERRQ(ierr); 11248e2fed03SBarry Smith } 11258e2fed03SBarry Smith PetscFunctionReturn(0); 11260f5bd95cSBarry Smith } else if (isdraw) { 1127b0a32e0cSBarry Smith PetscDraw draw; 1128ace3abfcSBarry Smith PetscBool isnull; 1129b0a32e0cSBarry Smith ierr = PetscViewerDrawGetDraw(viewer,0,&draw);CHKERRQ(ierr); 1130b0a32e0cSBarry Smith ierr = PetscDrawIsNull(draw,&isnull);CHKERRQ(ierr); if (isnull) PetscFunctionReturn(0); 
113119bcc07fSBarry Smith } 113219bcc07fSBarry Smith 113317699dbbSLois Curfman McInnes if (size == 1) { 11347adad957SLisandro Dalcin ierr = PetscObjectSetName((PetscObject)aij->A,((PetscObject)mat)->name);CHKERRQ(ierr); 113578b31e54SBarry Smith ierr = MatView(aij->A,viewer);CHKERRQ(ierr); 11363a40ed3dSBarry Smith } else { 113795373324SBarry Smith /* assemble the entire matrix onto first processor. */ 113895373324SBarry Smith Mat A; 1139ec8511deSBarry Smith Mat_SeqAIJ *Aloc; 1140d0f46423SBarry Smith PetscInt M = mat->rmap->N,N = mat->cmap->N,m,*ai,*aj,row,*cols,i,*ct; 1141dd6ea824SBarry Smith MatScalar *a; 11422ee70a88SLois Curfman McInnes 114332a366e4SMatthew Knepley if (mat->rmap->N > 1024) { 1144ace3abfcSBarry Smith PetscBool flg = PETSC_FALSE; 114532a366e4SMatthew Knepley 1146acfcf0e5SJed Brown ierr = PetscOptionsGetBool(((PetscObject) mat)->prefix, "-mat_ascii_output_large", &flg,PETSC_NULL);CHKERRQ(ierr); 114732a366e4SMatthew Knepley if (!flg) { 1148e7e72b3dSBarry Smith SETERRQ(((PetscObject)mat)->comm,PETSC_ERR_ARG_OUTOFRANGE,"ASCII matrix output not allowed for matrices with more than 1024 rows, use binary format instead.\nYou can override this restriction using -mat_ascii_output_large."); 114932a366e4SMatthew Knepley } 115032a366e4SMatthew Knepley } 11510805154bSBarry Smith 11527adad957SLisandro Dalcin ierr = MatCreate(((PetscObject)mat)->comm,&A);CHKERRQ(ierr); 115317699dbbSLois Curfman McInnes if (!rank) { 1154f69a0ea3SMatthew Knepley ierr = MatSetSizes(A,M,N,M,N);CHKERRQ(ierr); 11553a40ed3dSBarry Smith } else { 1156f69a0ea3SMatthew Knepley ierr = MatSetSizes(A,0,0,M,N);CHKERRQ(ierr); 115795373324SBarry Smith } 1158f204ca49SKris Buschelman /* This is just a temporary matrix, so explicitly using MATMPIAIJ is probably best */ 1159f204ca49SKris Buschelman ierr = MatSetType(A,MATMPIAIJ);CHKERRQ(ierr); 1160f204ca49SKris Buschelman ierr = MatMPIAIJSetPreallocation(A,0,PETSC_NULL,0,PETSC_NULL);CHKERRQ(ierr); 116152e6d16bSBarry Smith ierr = 
PetscLogObjectParent(mat,A);CHKERRQ(ierr); 1162416022c9SBarry Smith 116395373324SBarry Smith /* copy over the A part */ 1164ec8511deSBarry Smith Aloc = (Mat_SeqAIJ*)aij->A->data; 1165d0f46423SBarry Smith m = aij->A->rmap->n; ai = Aloc->i; aj = Aloc->j; a = Aloc->a; 1166d0f46423SBarry Smith row = mat->rmap->rstart; 1167d0f46423SBarry Smith for (i=0; i<ai[m]; i++) {aj[i] += mat->cmap->rstart ;} 116895373324SBarry Smith for (i=0; i<m; i++) { 1169416022c9SBarry Smith ierr = MatSetValues(A,1,&row,ai[i+1]-ai[i],aj,a,INSERT_VALUES);CHKERRQ(ierr); 117095373324SBarry Smith row++; a += ai[i+1]-ai[i]; aj += ai[i+1]-ai[i]; 117195373324SBarry Smith } 11722ee70a88SLois Curfman McInnes aj = Aloc->j; 1173d0f46423SBarry Smith for (i=0; i<ai[m]; i++) {aj[i] -= mat->cmap->rstart;} 117495373324SBarry Smith 117595373324SBarry Smith /* copy over the B part */ 1176ec8511deSBarry Smith Aloc = (Mat_SeqAIJ*)aij->B->data; 1177d0f46423SBarry Smith m = aij->B->rmap->n; ai = Aloc->i; aj = Aloc->j; a = Aloc->a; 1178d0f46423SBarry Smith row = mat->rmap->rstart; 1179b1d57f15SBarry Smith ierr = PetscMalloc((ai[m]+1)*sizeof(PetscInt),&cols);CHKERRQ(ierr); 1180b0a32e0cSBarry Smith ct = cols; 1181bfec09a0SHong Zhang for (i=0; i<ai[m]; i++) {cols[i] = aij->garray[aj[i]];} 118295373324SBarry Smith for (i=0; i<m; i++) { 1183416022c9SBarry Smith ierr = MatSetValues(A,1,&row,ai[i+1]-ai[i],cols,a,INSERT_VALUES);CHKERRQ(ierr); 118495373324SBarry Smith row++; a += ai[i+1]-ai[i]; cols += ai[i+1]-ai[i]; 118595373324SBarry Smith } 1186606d414cSSatish Balay ierr = PetscFree(ct);CHKERRQ(ierr); 11876d4a8577SBarry Smith ierr = MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 11886d4a8577SBarry Smith ierr = MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 118955843e3eSBarry Smith /* 119055843e3eSBarry Smith Everyone has to call to draw the matrix since the graphics waits are 1191b0a32e0cSBarry Smith synchronized across all processors that share the PetscDraw object 119255843e3eSBarry Smith */ 
1193b0a32e0cSBarry Smith ierr = PetscViewerGetSingleton(viewer,&sviewer);CHKERRQ(ierr); 1194e03a110bSBarry Smith if (!rank) { 11957adad957SLisandro Dalcin ierr = PetscObjectSetName((PetscObject)((Mat_MPIAIJ*)(A->data))->A,((PetscObject)mat)->name);CHKERRQ(ierr); 11967566de4bSShri Abhyankar /* Set the type name to MATMPIAIJ so that the correct type can be printed out by PetscObjectPrintClassNamePrefixType() in MatView_SeqAIJ_ASCII()*/ 11977566de4bSShri Abhyankar PetscStrcpy(((PetscObject)((Mat_MPIAIJ*)(A->data))->A)->type_name,MATMPIAIJ); 11986831982aSBarry Smith ierr = MatView(((Mat_MPIAIJ*)(A->data))->A,sviewer);CHKERRQ(ierr); 119995373324SBarry Smith } 1200b0a32e0cSBarry Smith ierr = PetscViewerRestoreSingleton(viewer,&sviewer);CHKERRQ(ierr); 120178b31e54SBarry Smith ierr = MatDestroy(A);CHKERRQ(ierr); 120295373324SBarry Smith } 12033a40ed3dSBarry Smith PetscFunctionReturn(0); 12041eb62cbbSBarry Smith } 12051eb62cbbSBarry Smith 12064a2ae208SSatish Balay #undef __FUNCT__ 12074a2ae208SSatish Balay #define __FUNCT__ "MatView_MPIAIJ" 1208dfbe8321SBarry Smith PetscErrorCode MatView_MPIAIJ(Mat mat,PetscViewer viewer) 1209416022c9SBarry Smith { 1210dfbe8321SBarry Smith PetscErrorCode ierr; 1211ace3abfcSBarry Smith PetscBool iascii,isdraw,issocket,isbinary; 1212416022c9SBarry Smith 12133a40ed3dSBarry Smith PetscFunctionBegin; 12142692d6eeSBarry Smith ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr); 12152692d6eeSBarry Smith ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERDRAW,&isdraw);CHKERRQ(ierr); 12162692d6eeSBarry Smith ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERBINARY,&isbinary);CHKERRQ(ierr); 12172692d6eeSBarry Smith ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERSOCKET,&issocket);CHKERRQ(ierr); 121832077d6dSBarry Smith if (iascii || isdraw || isbinary || issocket) { 12197b2a1423SBarry Smith ierr = MatView_MPIAIJ_ASCIIorDraworSocket(mat,viewer);CHKERRQ(ierr); 12205cd90555SBarry Smith } else { 
1221e32f2f54SBarry Smith SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"Viewer type %s not supported by MPIAIJ matrices",((PetscObject)viewer)->type_name); 1222416022c9SBarry Smith } 12233a40ed3dSBarry Smith PetscFunctionReturn(0); 1224416022c9SBarry Smith } 1225416022c9SBarry Smith 12264a2ae208SSatish Balay #undef __FUNCT__ 122741f059aeSBarry Smith #define __FUNCT__ "MatSOR_MPIAIJ" 122841f059aeSBarry Smith PetscErrorCode MatSOR_MPIAIJ(Mat matin,Vec bb,PetscReal omega,MatSORType flag,PetscReal fshift,PetscInt its,PetscInt lits,Vec xx) 12298a729477SBarry Smith { 123044a69424SLois Curfman McInnes Mat_MPIAIJ *mat = (Mat_MPIAIJ*)matin->data; 1231dfbe8321SBarry Smith PetscErrorCode ierr; 12326987fefcSBarry Smith Vec bb1 = 0; 1233ace3abfcSBarry Smith PetscBool hasop; 12348a729477SBarry Smith 12353a40ed3dSBarry Smith PetscFunctionBegin; 123685911e72SJed Brown if (its > 1 || ~flag & SOR_ZERO_INITIAL_GUESS || flag & SOR_EISENSTAT) { 123785911e72SJed Brown ierr = VecDuplicate(bb,&bb1);CHKERRQ(ierr); 123885911e72SJed Brown } 12392798e883SHong Zhang 1240a2b30743SBarry Smith if (flag == SOR_APPLY_UPPER) { 124141f059aeSBarry Smith ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr); 1242a2b30743SBarry Smith PetscFunctionReturn(0); 1243a2b30743SBarry Smith } 1244a2b30743SBarry Smith 1245c16cb8f2SBarry Smith if ((flag & SOR_LOCAL_SYMMETRIC_SWEEP) == SOR_LOCAL_SYMMETRIC_SWEEP){ 1246da3a660dSBarry Smith if (flag & SOR_ZERO_INITIAL_GUESS) { 124741f059aeSBarry Smith ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr); 12482798e883SHong Zhang its--; 1249da3a660dSBarry Smith } 12502798e883SHong Zhang 12512798e883SHong Zhang while (its--) { 1252ca9f406cSSatish Balay ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1253ca9f406cSSatish Balay ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 12542798e883SHong Zhang 1255c14dc6b6SHong Zhang /* update rhs: bb1 = 
bb - B*x */ 1256efb30889SBarry Smith ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr); 1257c14dc6b6SHong Zhang ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr); 12582798e883SHong Zhang 1259c14dc6b6SHong Zhang /* local sweep */ 126041f059aeSBarry Smith ierr = (*mat->A->ops->sor)(mat->A,bb1,omega,SOR_SYMMETRIC_SWEEP,fshift,lits,1,xx);CHKERRQ(ierr); 12612798e883SHong Zhang } 12623a40ed3dSBarry Smith } else if (flag & SOR_LOCAL_FORWARD_SWEEP){ 1263da3a660dSBarry Smith if (flag & SOR_ZERO_INITIAL_GUESS) { 126441f059aeSBarry Smith ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr); 12652798e883SHong Zhang its--; 1266da3a660dSBarry Smith } 12672798e883SHong Zhang while (its--) { 1268ca9f406cSSatish Balay ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1269ca9f406cSSatish Balay ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 12702798e883SHong Zhang 1271c14dc6b6SHong Zhang /* update rhs: bb1 = bb - B*x */ 1272efb30889SBarry Smith ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr); 1273c14dc6b6SHong Zhang ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr); 1274c14dc6b6SHong Zhang 1275c14dc6b6SHong Zhang /* local sweep */ 127641f059aeSBarry Smith ierr = (*mat->A->ops->sor)(mat->A,bb1,omega,SOR_FORWARD_SWEEP,fshift,lits,1,xx);CHKERRQ(ierr); 12772798e883SHong Zhang } 12783a40ed3dSBarry Smith } else if (flag & SOR_LOCAL_BACKWARD_SWEEP){ 1279da3a660dSBarry Smith if (flag & SOR_ZERO_INITIAL_GUESS) { 128041f059aeSBarry Smith ierr = (*mat->A->ops->sor)(mat->A,bb,omega,flag,fshift,lits,1,xx);CHKERRQ(ierr); 12812798e883SHong Zhang its--; 1282da3a660dSBarry Smith } 12832798e883SHong Zhang while (its--) { 1284ca9f406cSSatish Balay ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1285ca9f406cSSatish Balay ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 
12862798e883SHong Zhang 1287c14dc6b6SHong Zhang /* update rhs: bb1 = bb - B*x */ 1288efb30889SBarry Smith ierr = VecScale(mat->lvec,-1.0);CHKERRQ(ierr); 1289c14dc6b6SHong Zhang ierr = (*mat->B->ops->multadd)(mat->B,mat->lvec,bb,bb1);CHKERRQ(ierr); 12902798e883SHong Zhang 1291c14dc6b6SHong Zhang /* local sweep */ 129241f059aeSBarry Smith ierr = (*mat->A->ops->sor)(mat->A,bb1,omega,SOR_BACKWARD_SWEEP,fshift,lits,1,xx);CHKERRQ(ierr); 12932798e883SHong Zhang } 1294a7420bb7SBarry Smith } else if (flag & SOR_EISENSTAT) { 1295a7420bb7SBarry Smith Vec xx1; 1296a7420bb7SBarry Smith 1297a7420bb7SBarry Smith ierr = VecDuplicate(bb,&xx1);CHKERRQ(ierr); 129841f059aeSBarry Smith ierr = (*mat->A->ops->sor)(mat->A,bb,omega,(MatSORType)(SOR_ZERO_INITIAL_GUESS | SOR_LOCAL_BACKWARD_SWEEP),fshift,lits,1,xx);CHKERRQ(ierr); 1299a7420bb7SBarry Smith 1300a7420bb7SBarry Smith ierr = VecScatterBegin(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1301a7420bb7SBarry Smith ierr = VecScatterEnd(mat->Mvctx,xx,mat->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1302a7420bb7SBarry Smith if (!mat->diag) { 1303a7420bb7SBarry Smith ierr = MatGetVecs(matin,&mat->diag,PETSC_NULL);CHKERRQ(ierr); 1304a7420bb7SBarry Smith ierr = MatGetDiagonal(matin,mat->diag);CHKERRQ(ierr); 1305a7420bb7SBarry Smith } 1306bd0c2dcbSBarry Smith ierr = MatHasOperation(matin,MATOP_MULT_DIAGONAL_BLOCK,&hasop);CHKERRQ(ierr); 1307bd0c2dcbSBarry Smith if (hasop) { 1308bd0c2dcbSBarry Smith ierr = MatMultDiagonalBlock(matin,xx,bb1);CHKERRQ(ierr); 1309bd0c2dcbSBarry Smith } else { 1310a7420bb7SBarry Smith ierr = VecPointwiseMult(bb1,mat->diag,xx);CHKERRQ(ierr); 1311bd0c2dcbSBarry Smith } 1312887ee2caSBarry Smith ierr = VecAYPX(bb1,(omega-2.0)/omega,bb);CHKERRQ(ierr); 1313887ee2caSBarry Smith 1314a7420bb7SBarry Smith ierr = MatMultAdd(mat->B,mat->lvec,bb1,bb1);CHKERRQ(ierr); 1315a7420bb7SBarry Smith 1316a7420bb7SBarry Smith /* local sweep */ 131741f059aeSBarry Smith ierr = 
(*mat->A->ops->sor)(mat->A,bb1,omega,(MatSORType)(SOR_ZERO_INITIAL_GUESS | SOR_LOCAL_FORWARD_SWEEP),fshift,lits,1,xx1);CHKERRQ(ierr); 1318a7420bb7SBarry Smith ierr = VecAXPY(xx,1.0,xx1);CHKERRQ(ierr); 1319a7420bb7SBarry Smith ierr = VecDestroy(xx1);CHKERRQ(ierr); 13203a40ed3dSBarry Smith } else { 1321e7e72b3dSBarry Smith SETERRQ(((PetscObject)mat)->comm,PETSC_ERR_SUP,"Parallel SOR not supported"); 1322c16cb8f2SBarry Smith } 1323c14dc6b6SHong Zhang 13246987fefcSBarry Smith if (bb1) {ierr = VecDestroy(bb1);CHKERRQ(ierr);} 13253a40ed3dSBarry Smith PetscFunctionReturn(0); 13268a729477SBarry Smith } 1327a66be287SLois Curfman McInnes 13284a2ae208SSatish Balay #undef __FUNCT__ 132942e855d1Svictor #define __FUNCT__ "MatPermute_MPIAIJ" 133042e855d1Svictor PetscErrorCode MatPermute_MPIAIJ(Mat A,IS rowp,IS colp,Mat *B) 133142e855d1Svictor { 133242e855d1Svictor MPI_Comm comm,pcomm; 13335d0c19d7SBarry Smith PetscInt first,local_size,nrows; 13345d0c19d7SBarry Smith const PetscInt *rows; 1335dbf0e21dSBarry Smith PetscMPIInt size; 133642e855d1Svictor IS crowp,growp,irowp,lrowp,lcolp,icolp; 133742e855d1Svictor PetscErrorCode ierr; 133842e855d1Svictor 133942e855d1Svictor PetscFunctionBegin; 134042e855d1Svictor ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr); 134142e855d1Svictor /* make a collective version of 'rowp' */ 134242e855d1Svictor ierr = PetscObjectGetComm((PetscObject)rowp,&pcomm);CHKERRQ(ierr); 134342e855d1Svictor if (pcomm==comm) { 134442e855d1Svictor crowp = rowp; 134542e855d1Svictor } else { 134642e855d1Svictor ierr = ISGetSize(rowp,&nrows);CHKERRQ(ierr); 134742e855d1Svictor ierr = ISGetIndices(rowp,&rows);CHKERRQ(ierr); 134870b3c8c7SBarry Smith ierr = ISCreateGeneral(comm,nrows,rows,PETSC_COPY_VALUES,&crowp);CHKERRQ(ierr); 134942e855d1Svictor ierr = ISRestoreIndices(rowp,&rows);CHKERRQ(ierr); 135042e855d1Svictor } 135142e855d1Svictor /* collect the global row permutation and invert it */ 135242e855d1Svictor ierr = ISAllGather(crowp,&growp);CHKERRQ(ierr); 
135342e855d1Svictor ierr = ISSetPermutation(growp);CHKERRQ(ierr); 135442e855d1Svictor if (pcomm!=comm) { 135542e855d1Svictor ierr = ISDestroy(crowp);CHKERRQ(ierr); 135642e855d1Svictor } 135742e855d1Svictor ierr = ISInvertPermutation(growp,PETSC_DECIDE,&irowp);CHKERRQ(ierr); 135842e855d1Svictor /* get the local target indices */ 135942e855d1Svictor ierr = MatGetOwnershipRange(A,&first,PETSC_NULL);CHKERRQ(ierr); 136042e855d1Svictor ierr = MatGetLocalSize(A,&local_size,PETSC_NULL);CHKERRQ(ierr); 136142e855d1Svictor ierr = ISGetIndices(irowp,&rows);CHKERRQ(ierr); 136270b3c8c7SBarry Smith ierr = ISCreateGeneral(MPI_COMM_SELF,local_size,rows+first,PETSC_COPY_VALUES,&lrowp);CHKERRQ(ierr); 136342e855d1Svictor ierr = ISRestoreIndices(irowp,&rows);CHKERRQ(ierr); 136442e855d1Svictor ierr = ISDestroy(irowp);CHKERRQ(ierr); 136542e855d1Svictor /* the column permutation is so much easier; 136642e855d1Svictor make a local version of 'colp' and invert it */ 136742e855d1Svictor ierr = PetscObjectGetComm((PetscObject)colp,&pcomm);CHKERRQ(ierr); 1368dbf0e21dSBarry Smith ierr = MPI_Comm_size(pcomm,&size);CHKERRQ(ierr); 1369dbf0e21dSBarry Smith if (size==1) { 137042e855d1Svictor lcolp = colp; 137142e855d1Svictor } else { 137242e855d1Svictor ierr = ISGetSize(colp,&nrows);CHKERRQ(ierr); 137342e855d1Svictor ierr = ISGetIndices(colp,&rows);CHKERRQ(ierr); 137470b3c8c7SBarry Smith ierr = ISCreateGeneral(MPI_COMM_SELF,nrows,rows,PETSC_COPY_VALUES,&lcolp);CHKERRQ(ierr); 137542e855d1Svictor } 1376dbf0e21dSBarry Smith ierr = ISSetPermutation(lcolp);CHKERRQ(ierr); 137742e855d1Svictor ierr = ISInvertPermutation(lcolp,PETSC_DECIDE,&icolp);CHKERRQ(ierr); 13784aa3045dSJed Brown ierr = ISSetPermutation(icolp);CHKERRQ(ierr); 1379dbf0e21dSBarry Smith if (size>1) { 138042e855d1Svictor ierr = ISRestoreIndices(colp,&rows);CHKERRQ(ierr); 138142e855d1Svictor ierr = ISDestroy(lcolp);CHKERRQ(ierr); 138242e855d1Svictor } 138342e855d1Svictor /* now we just get the submatrix */ 13844aa3045dSJed Brown ierr = 
MatGetSubMatrix_MPIAIJ_Private(A,lrowp,icolp,local_size,MAT_INITIAL_MATRIX,B);CHKERRQ(ierr); 138542e855d1Svictor /* clean up */ 138642e855d1Svictor ierr = ISDestroy(lrowp);CHKERRQ(ierr); 138742e855d1Svictor ierr = ISDestroy(icolp);CHKERRQ(ierr); 138842e855d1Svictor PetscFunctionReturn(0); 138942e855d1Svictor } 139042e855d1Svictor 139142e855d1Svictor #undef __FUNCT__ 13924a2ae208SSatish Balay #define __FUNCT__ "MatGetInfo_MPIAIJ" 1393dfbe8321SBarry Smith PetscErrorCode MatGetInfo_MPIAIJ(Mat matin,MatInfoType flag,MatInfo *info) 1394a66be287SLois Curfman McInnes { 1395a66be287SLois Curfman McInnes Mat_MPIAIJ *mat = (Mat_MPIAIJ*)matin->data; 1396a66be287SLois Curfman McInnes Mat A = mat->A,B = mat->B; 1397dfbe8321SBarry Smith PetscErrorCode ierr; 1398329f5518SBarry Smith PetscReal isend[5],irecv[5]; 1399a66be287SLois Curfman McInnes 14003a40ed3dSBarry Smith PetscFunctionBegin; 14014e220ebcSLois Curfman McInnes info->block_size = 1.0; 14024e220ebcSLois Curfman McInnes ierr = MatGetInfo(A,MAT_LOCAL,info);CHKERRQ(ierr); 14034e220ebcSLois Curfman McInnes isend[0] = info->nz_used; isend[1] = info->nz_allocated; isend[2] = info->nz_unneeded; 14044e220ebcSLois Curfman McInnes isend[3] = info->memory; isend[4] = info->mallocs; 14054e220ebcSLois Curfman McInnes ierr = MatGetInfo(B,MAT_LOCAL,info);CHKERRQ(ierr); 14064e220ebcSLois Curfman McInnes isend[0] += info->nz_used; isend[1] += info->nz_allocated; isend[2] += info->nz_unneeded; 14074e220ebcSLois Curfman McInnes isend[3] += info->memory; isend[4] += info->mallocs; 1408a66be287SLois Curfman McInnes if (flag == MAT_LOCAL) { 14094e220ebcSLois Curfman McInnes info->nz_used = isend[0]; 14104e220ebcSLois Curfman McInnes info->nz_allocated = isend[1]; 14114e220ebcSLois Curfman McInnes info->nz_unneeded = isend[2]; 14124e220ebcSLois Curfman McInnes info->memory = isend[3]; 14134e220ebcSLois Curfman McInnes info->mallocs = isend[4]; 1414a66be287SLois Curfman McInnes } else if (flag == MAT_GLOBAL_MAX) { 14157adad957SLisandro Dalcin 
ierr = MPI_Allreduce(isend,irecv,5,MPIU_REAL,MPI_MAX,((PetscObject)matin)->comm);CHKERRQ(ierr); 14164e220ebcSLois Curfman McInnes info->nz_used = irecv[0]; 14174e220ebcSLois Curfman McInnes info->nz_allocated = irecv[1]; 14184e220ebcSLois Curfman McInnes info->nz_unneeded = irecv[2]; 14194e220ebcSLois Curfman McInnes info->memory = irecv[3]; 14204e220ebcSLois Curfman McInnes info->mallocs = irecv[4]; 1421a66be287SLois Curfman McInnes } else if (flag == MAT_GLOBAL_SUM) { 14227adad957SLisandro Dalcin ierr = MPI_Allreduce(isend,irecv,5,MPIU_REAL,MPI_SUM,((PetscObject)matin)->comm);CHKERRQ(ierr); 14234e220ebcSLois Curfman McInnes info->nz_used = irecv[0]; 14244e220ebcSLois Curfman McInnes info->nz_allocated = irecv[1]; 14254e220ebcSLois Curfman McInnes info->nz_unneeded = irecv[2]; 14264e220ebcSLois Curfman McInnes info->memory = irecv[3]; 14274e220ebcSLois Curfman McInnes info->mallocs = irecv[4]; 1428a66be287SLois Curfman McInnes } 14294e220ebcSLois Curfman McInnes info->fill_ratio_given = 0; /* no parallel LU/ILU/Cholesky */ 14304e220ebcSLois Curfman McInnes info->fill_ratio_needed = 0; 14314e220ebcSLois Curfman McInnes info->factor_mallocs = 0; 14324e220ebcSLois Curfman McInnes 14333a40ed3dSBarry Smith PetscFunctionReturn(0); 1434a66be287SLois Curfman McInnes } 1435a66be287SLois Curfman McInnes 14364a2ae208SSatish Balay #undef __FUNCT__ 14374a2ae208SSatish Balay #define __FUNCT__ "MatSetOption_MPIAIJ" 1438ace3abfcSBarry Smith PetscErrorCode MatSetOption_MPIAIJ(Mat A,MatOption op,PetscBool flg) 1439c74985f6SBarry Smith { 1440c0bbcb79SLois Curfman McInnes Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 1441dfbe8321SBarry Smith PetscErrorCode ierr; 1442c74985f6SBarry Smith 14433a40ed3dSBarry Smith PetscFunctionBegin; 144412c028f9SKris Buschelman switch (op) { 1445512a5fc5SBarry Smith case MAT_NEW_NONZERO_LOCATIONS: 144612c028f9SKris Buschelman case MAT_NEW_NONZERO_ALLOCATION_ERR: 144728b2fa4aSMatthew Knepley case MAT_UNUSED_NONZERO_LOCATION_ERR: 1448a9817697SBarry Smith case 
MAT_KEEP_NONZERO_PATTERN: 144912c028f9SKris Buschelman case MAT_NEW_NONZERO_LOCATION_ERR: 145012c028f9SKris Buschelman case MAT_USE_INODES: 145112c028f9SKris Buschelman case MAT_IGNORE_ZERO_ENTRIES: 14524e0d8c25SBarry Smith ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr); 14534e0d8c25SBarry Smith ierr = MatSetOption(a->B,op,flg);CHKERRQ(ierr); 145412c028f9SKris Buschelman break; 145512c028f9SKris Buschelman case MAT_ROW_ORIENTED: 14564e0d8c25SBarry Smith a->roworiented = flg; 14574e0d8c25SBarry Smith ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr); 14584e0d8c25SBarry Smith ierr = MatSetOption(a->B,op,flg);CHKERRQ(ierr); 145912c028f9SKris Buschelman break; 14604e0d8c25SBarry Smith case MAT_NEW_DIAGONALS: 1461290bbb0aSBarry Smith ierr = PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr); 146212c028f9SKris Buschelman break; 146312c028f9SKris Buschelman case MAT_IGNORE_OFF_PROC_ENTRIES: 14647c922b88SBarry Smith a->donotstash = PETSC_TRUE; 146512c028f9SKris Buschelman break; 1466ffa07934SHong Zhang case MAT_SPD: 1467ffa07934SHong Zhang A->spd_set = PETSC_TRUE; 1468ffa07934SHong Zhang A->spd = flg; 1469ffa07934SHong Zhang if (flg) { 1470ffa07934SHong Zhang A->symmetric = PETSC_TRUE; 1471ffa07934SHong Zhang A->structurally_symmetric = PETSC_TRUE; 1472ffa07934SHong Zhang A->symmetric_set = PETSC_TRUE; 1473ffa07934SHong Zhang A->structurally_symmetric_set = PETSC_TRUE; 1474ffa07934SHong Zhang } 1475ffa07934SHong Zhang break; 147677e54ba9SKris Buschelman case MAT_SYMMETRIC: 14774e0d8c25SBarry Smith ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr); 147825f421beSHong Zhang break; 147977e54ba9SKris Buschelman case MAT_STRUCTURALLY_SYMMETRIC: 1480eeffb40dSHong Zhang ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr); 1481eeffb40dSHong Zhang break; 1482bf108f30SBarry Smith case MAT_HERMITIAN: 1483eeffb40dSHong Zhang ierr = MatSetOption(a->A,op,flg);CHKERRQ(ierr); 1484eeffb40dSHong Zhang break; 1485bf108f30SBarry Smith case MAT_SYMMETRY_ETERNAL: 14864e0d8c25SBarry Smith ierr 
= MatSetOption(a->A,op,flg);CHKERRQ(ierr); 148777e54ba9SKris Buschelman break; 148812c028f9SKris Buschelman default: 1489e32f2f54SBarry Smith SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"unknown option %d",op); 14903a40ed3dSBarry Smith } 14913a40ed3dSBarry Smith PetscFunctionReturn(0); 1492c74985f6SBarry Smith } 1493c74985f6SBarry Smith 14944a2ae208SSatish Balay #undef __FUNCT__ 14954a2ae208SSatish Balay #define __FUNCT__ "MatGetRow_MPIAIJ" 1496b1d57f15SBarry Smith PetscErrorCode MatGetRow_MPIAIJ(Mat matin,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v) 149739e00950SLois Curfman McInnes { 1498154123eaSLois Curfman McInnes Mat_MPIAIJ *mat = (Mat_MPIAIJ*)matin->data; 149987828ca2SBarry Smith PetscScalar *vworkA,*vworkB,**pvA,**pvB,*v_p; 15006849ba73SBarry Smith PetscErrorCode ierr; 1501d0f46423SBarry Smith PetscInt i,*cworkA,*cworkB,**pcA,**pcB,cstart = matin->cmap->rstart; 1502d0f46423SBarry Smith PetscInt nztot,nzA,nzB,lrow,rstart = matin->rmap->rstart,rend = matin->rmap->rend; 1503b1d57f15SBarry Smith PetscInt *cmap,*idx_p; 150439e00950SLois Curfman McInnes 15053a40ed3dSBarry Smith PetscFunctionBegin; 1506e32f2f54SBarry Smith if (mat->getrowactive) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Already active"); 15077a0afa10SBarry Smith mat->getrowactive = PETSC_TRUE; 15087a0afa10SBarry Smith 150970f0671dSBarry Smith if (!mat->rowvalues && (idx || v)) { 15107a0afa10SBarry Smith /* 15117a0afa10SBarry Smith allocate enough space to hold information from the longest row. 
15127a0afa10SBarry Smith */ 15137a0afa10SBarry Smith Mat_SeqAIJ *Aa = (Mat_SeqAIJ*)mat->A->data,*Ba = (Mat_SeqAIJ*)mat->B->data; 1514b1d57f15SBarry Smith PetscInt max = 1,tmp; 1515d0f46423SBarry Smith for (i=0; i<matin->rmap->n; i++) { 15167a0afa10SBarry Smith tmp = Aa->i[i+1] - Aa->i[i] + Ba->i[i+1] - Ba->i[i]; 15177a0afa10SBarry Smith if (max < tmp) { max = tmp; } 15187a0afa10SBarry Smith } 15191d79065fSBarry Smith ierr = PetscMalloc2(max,PetscScalar,&mat->rowvalues,max,PetscInt,&mat->rowindices);CHKERRQ(ierr); 15207a0afa10SBarry Smith } 15217a0afa10SBarry Smith 1522e7e72b3dSBarry Smith if (row < rstart || row >= rend) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Only local rows"); 1523abc0e9e4SLois Curfman McInnes lrow = row - rstart; 152439e00950SLois Curfman McInnes 1525154123eaSLois Curfman McInnes pvA = &vworkA; pcA = &cworkA; pvB = &vworkB; pcB = &cworkB; 1526154123eaSLois Curfman McInnes if (!v) {pvA = 0; pvB = 0;} 1527154123eaSLois Curfman McInnes if (!idx) {pcA = 0; if (!v) pcB = 0;} 1528f830108cSBarry Smith ierr = (*mat->A->ops->getrow)(mat->A,lrow,&nzA,pcA,pvA);CHKERRQ(ierr); 1529f830108cSBarry Smith ierr = (*mat->B->ops->getrow)(mat->B,lrow,&nzB,pcB,pvB);CHKERRQ(ierr); 1530154123eaSLois Curfman McInnes nztot = nzA + nzB; 1531154123eaSLois Curfman McInnes 153270f0671dSBarry Smith cmap = mat->garray; 1533154123eaSLois Curfman McInnes if (v || idx) { 1534154123eaSLois Curfman McInnes if (nztot) { 1535154123eaSLois Curfman McInnes /* Sort by increasing column numbers, assuming A and B already sorted */ 1536b1d57f15SBarry Smith PetscInt imark = -1; 1537154123eaSLois Curfman McInnes if (v) { 153870f0671dSBarry Smith *v = v_p = mat->rowvalues; 153939e00950SLois Curfman McInnes for (i=0; i<nzB; i++) { 154070f0671dSBarry Smith if (cmap[cworkB[i]] < cstart) v_p[i] = vworkB[i]; 1541154123eaSLois Curfman McInnes else break; 1542154123eaSLois Curfman McInnes } 1543154123eaSLois Curfman McInnes imark = i; 154470f0671dSBarry Smith for (i=0; i<nzA; i++) 
v_p[imark+i] = vworkA[i]; 154570f0671dSBarry Smith for (i=imark; i<nzB; i++) v_p[nzA+i] = vworkB[i]; 1546154123eaSLois Curfman McInnes } 1547154123eaSLois Curfman McInnes if (idx) { 154870f0671dSBarry Smith *idx = idx_p = mat->rowindices; 154970f0671dSBarry Smith if (imark > -1) { 155070f0671dSBarry Smith for (i=0; i<imark; i++) { 155170f0671dSBarry Smith idx_p[i] = cmap[cworkB[i]]; 155270f0671dSBarry Smith } 155370f0671dSBarry Smith } else { 1554154123eaSLois Curfman McInnes for (i=0; i<nzB; i++) { 155570f0671dSBarry Smith if (cmap[cworkB[i]] < cstart) idx_p[i] = cmap[cworkB[i]]; 1556154123eaSLois Curfman McInnes else break; 1557154123eaSLois Curfman McInnes } 1558154123eaSLois Curfman McInnes imark = i; 155970f0671dSBarry Smith } 156070f0671dSBarry Smith for (i=0; i<nzA; i++) idx_p[imark+i] = cstart + cworkA[i]; 156170f0671dSBarry Smith for (i=imark; i<nzB; i++) idx_p[nzA+i] = cmap[cworkB[i]]; 156239e00950SLois Curfman McInnes } 15633f97c4b0SBarry Smith } else { 15641ca473b0SSatish Balay if (idx) *idx = 0; 15651ca473b0SSatish Balay if (v) *v = 0; 15661ca473b0SSatish Balay } 1567154123eaSLois Curfman McInnes } 156839e00950SLois Curfman McInnes *nz = nztot; 1569f830108cSBarry Smith ierr = (*mat->A->ops->restorerow)(mat->A,lrow,&nzA,pcA,pvA);CHKERRQ(ierr); 1570f830108cSBarry Smith ierr = (*mat->B->ops->restorerow)(mat->B,lrow,&nzB,pcB,pvB);CHKERRQ(ierr); 15713a40ed3dSBarry Smith PetscFunctionReturn(0); 157239e00950SLois Curfman McInnes } 157339e00950SLois Curfman McInnes 15744a2ae208SSatish Balay #undef __FUNCT__ 15754a2ae208SSatish Balay #define __FUNCT__ "MatRestoreRow_MPIAIJ" 1576b1d57f15SBarry Smith PetscErrorCode MatRestoreRow_MPIAIJ(Mat mat,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v) 157739e00950SLois Curfman McInnes { 15787a0afa10SBarry Smith Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 15793a40ed3dSBarry Smith 15803a40ed3dSBarry Smith PetscFunctionBegin; 1581e7e72b3dSBarry Smith if (!aij->getrowactive) 
SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"MatGetRow() must be called first"); 15827a0afa10SBarry Smith aij->getrowactive = PETSC_FALSE; 15833a40ed3dSBarry Smith PetscFunctionReturn(0); 158439e00950SLois Curfman McInnes } 158539e00950SLois Curfman McInnes 15864a2ae208SSatish Balay #undef __FUNCT__ 15874a2ae208SSatish Balay #define __FUNCT__ "MatNorm_MPIAIJ" 1588dfbe8321SBarry Smith PetscErrorCode MatNorm_MPIAIJ(Mat mat,NormType type,PetscReal *norm) 1589855ac2c5SLois Curfman McInnes { 1590855ac2c5SLois Curfman McInnes Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 1591ec8511deSBarry Smith Mat_SeqAIJ *amat = (Mat_SeqAIJ*)aij->A->data,*bmat = (Mat_SeqAIJ*)aij->B->data; 1592dfbe8321SBarry Smith PetscErrorCode ierr; 1593d0f46423SBarry Smith PetscInt i,j,cstart = mat->cmap->rstart; 1594329f5518SBarry Smith PetscReal sum = 0.0; 1595a77337e4SBarry Smith MatScalar *v; 159604ca555eSLois Curfman McInnes 15973a40ed3dSBarry Smith PetscFunctionBegin; 159817699dbbSLois Curfman McInnes if (aij->size == 1) { 159914183eadSLois Curfman McInnes ierr = MatNorm(aij->A,type,norm);CHKERRQ(ierr); 160037fa93a5SLois Curfman McInnes } else { 160104ca555eSLois Curfman McInnes if (type == NORM_FROBENIUS) { 160204ca555eSLois Curfman McInnes v = amat->a; 160304ca555eSLois Curfman McInnes for (i=0; i<amat->nz; i++) { 1604aa482453SBarry Smith #if defined(PETSC_USE_COMPLEX) 1605329f5518SBarry Smith sum += PetscRealPart(PetscConj(*v)*(*v)); v++; 160604ca555eSLois Curfman McInnes #else 160704ca555eSLois Curfman McInnes sum += (*v)*(*v); v++; 160804ca555eSLois Curfman McInnes #endif 160904ca555eSLois Curfman McInnes } 161004ca555eSLois Curfman McInnes v = bmat->a; 161104ca555eSLois Curfman McInnes for (i=0; i<bmat->nz; i++) { 1612aa482453SBarry Smith #if defined(PETSC_USE_COMPLEX) 1613329f5518SBarry Smith sum += PetscRealPart(PetscConj(*v)*(*v)); v++; 161404ca555eSLois Curfman McInnes #else 161504ca555eSLois Curfman McInnes sum += (*v)*(*v); v++; 161604ca555eSLois Curfman McInnes #endif 
161704ca555eSLois Curfman McInnes } 16187adad957SLisandro Dalcin ierr = MPI_Allreduce(&sum,norm,1,MPIU_REAL,MPI_SUM,((PetscObject)mat)->comm);CHKERRQ(ierr); 161904ca555eSLois Curfman McInnes *norm = sqrt(*norm); 16203a40ed3dSBarry Smith } else if (type == NORM_1) { /* max column norm */ 1621329f5518SBarry Smith PetscReal *tmp,*tmp2; 1622b1d57f15SBarry Smith PetscInt *jj,*garray = aij->garray; 1623d0f46423SBarry Smith ierr = PetscMalloc((mat->cmap->N+1)*sizeof(PetscReal),&tmp);CHKERRQ(ierr); 1624d0f46423SBarry Smith ierr = PetscMalloc((mat->cmap->N+1)*sizeof(PetscReal),&tmp2);CHKERRQ(ierr); 1625d0f46423SBarry Smith ierr = PetscMemzero(tmp,mat->cmap->N*sizeof(PetscReal));CHKERRQ(ierr); 162604ca555eSLois Curfman McInnes *norm = 0.0; 162704ca555eSLois Curfman McInnes v = amat->a; jj = amat->j; 162804ca555eSLois Curfman McInnes for (j=0; j<amat->nz; j++) { 1629bfec09a0SHong Zhang tmp[cstart + *jj++ ] += PetscAbsScalar(*v); v++; 163004ca555eSLois Curfman McInnes } 163104ca555eSLois Curfman McInnes v = bmat->a; jj = bmat->j; 163204ca555eSLois Curfman McInnes for (j=0; j<bmat->nz; j++) { 1633bfec09a0SHong Zhang tmp[garray[*jj++]] += PetscAbsScalar(*v); v++; 163404ca555eSLois Curfman McInnes } 1635d0f46423SBarry Smith ierr = MPI_Allreduce(tmp,tmp2,mat->cmap->N,MPIU_REAL,MPI_SUM,((PetscObject)mat)->comm);CHKERRQ(ierr); 1636d0f46423SBarry Smith for (j=0; j<mat->cmap->N; j++) { 163704ca555eSLois Curfman McInnes if (tmp2[j] > *norm) *norm = tmp2[j]; 163804ca555eSLois Curfman McInnes } 1639606d414cSSatish Balay ierr = PetscFree(tmp);CHKERRQ(ierr); 1640606d414cSSatish Balay ierr = PetscFree(tmp2);CHKERRQ(ierr); 16413a40ed3dSBarry Smith } else if (type == NORM_INFINITY) { /* max row norm */ 1642329f5518SBarry Smith PetscReal ntemp = 0.0; 1643d0f46423SBarry Smith for (j=0; j<aij->A->rmap->n; j++) { 1644bfec09a0SHong Zhang v = amat->a + amat->i[j]; 164504ca555eSLois Curfman McInnes sum = 0.0; 164604ca555eSLois Curfman McInnes for (i=0; i<amat->i[j+1]-amat->i[j]; i++) { 
1647cddf8d76SBarry Smith sum += PetscAbsScalar(*v); v++; 164804ca555eSLois Curfman McInnes } 1649bfec09a0SHong Zhang v = bmat->a + bmat->i[j]; 165004ca555eSLois Curfman McInnes for (i=0; i<bmat->i[j+1]-bmat->i[j]; i++) { 1651cddf8d76SBarry Smith sum += PetscAbsScalar(*v); v++; 165204ca555eSLois Curfman McInnes } 1653515d9167SLois Curfman McInnes if (sum > ntemp) ntemp = sum; 165404ca555eSLois Curfman McInnes } 16557adad957SLisandro Dalcin ierr = MPI_Allreduce(&ntemp,norm,1,MPIU_REAL,MPI_MAX,((PetscObject)mat)->comm);CHKERRQ(ierr); 1656ca161407SBarry Smith } else { 1657e7e72b3dSBarry Smith SETERRQ(((PetscObject)mat)->comm,PETSC_ERR_SUP,"No support for two norm"); 165804ca555eSLois Curfman McInnes } 165937fa93a5SLois Curfman McInnes } 16603a40ed3dSBarry Smith PetscFunctionReturn(0); 1661855ac2c5SLois Curfman McInnes } 1662855ac2c5SLois Curfman McInnes 16634a2ae208SSatish Balay #undef __FUNCT__ 16644a2ae208SSatish Balay #define __FUNCT__ "MatTranspose_MPIAIJ" 1665fc4dec0aSBarry Smith PetscErrorCode MatTranspose_MPIAIJ(Mat A,MatReuse reuse,Mat *matout) 1666b7c46309SBarry Smith { 1667b7c46309SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 1668da668accSHong Zhang Mat_SeqAIJ *Aloc=(Mat_SeqAIJ*)a->A->data,*Bloc=(Mat_SeqAIJ*)a->B->data; 1669dfbe8321SBarry Smith PetscErrorCode ierr; 1670d0f46423SBarry Smith PetscInt M = A->rmap->N,N = A->cmap->N,ma,na,mb,*ai,*aj,*bi,*bj,row,*cols,*cols_tmp,i,*d_nnz; 1671d0f46423SBarry Smith PetscInt cstart=A->cmap->rstart,ncol; 16723a40ed3dSBarry Smith Mat B; 1673a77337e4SBarry Smith MatScalar *array; 1674b7c46309SBarry Smith 16753a40ed3dSBarry Smith PetscFunctionBegin; 1676e7e72b3dSBarry Smith if (reuse == MAT_REUSE_MATRIX && A == *matout && M != N) SETERRQ(((PetscObject)A)->comm,PETSC_ERR_ARG_SIZ,"Square matrix only for in-place"); 1677da668accSHong Zhang 1678d0f46423SBarry Smith ma = A->rmap->n; na = A->cmap->n; mb = a->B->rmap->n; 1679da668accSHong Zhang ai = Aloc->i; aj = Aloc->j; 1680da668accSHong Zhang bi = Bloc->i; bj = Bloc->j; 
1681fc73b1b3SBarry Smith if (reuse == MAT_INITIAL_MATRIX || *matout == A) { 1682fc73b1b3SBarry Smith /* compute d_nnz for preallocation; o_nnz is approximated by d_nnz to avoid communication */ 1683fc73b1b3SBarry Smith ierr = PetscMalloc((1+na)*sizeof(PetscInt),&d_nnz);CHKERRQ(ierr); 1684da668accSHong Zhang ierr = PetscMemzero(d_nnz,(1+na)*sizeof(PetscInt));CHKERRQ(ierr); 1685da668accSHong Zhang for (i=0; i<ai[ma]; i++){ 1686da668accSHong Zhang d_nnz[aj[i]] ++; 1687da668accSHong Zhang aj[i] += cstart; /* global col index to be used by MatSetValues() */ 1688d4bb536fSBarry Smith } 1689d4bb536fSBarry Smith 16907adad957SLisandro Dalcin ierr = MatCreate(((PetscObject)A)->comm,&B);CHKERRQ(ierr); 1691d0f46423SBarry Smith ierr = MatSetSizes(B,A->cmap->n,A->rmap->n,N,M);CHKERRQ(ierr); 16927adad957SLisandro Dalcin ierr = MatSetType(B,((PetscObject)A)->type_name);CHKERRQ(ierr); 1693da668accSHong Zhang ierr = MatMPIAIJSetPreallocation(B,0,d_nnz,0,d_nnz);CHKERRQ(ierr); 1694fc73b1b3SBarry Smith ierr = PetscFree(d_nnz);CHKERRQ(ierr); 1695fc4dec0aSBarry Smith } else { 1696fc4dec0aSBarry Smith B = *matout; 1697fc4dec0aSBarry Smith } 1698b7c46309SBarry Smith 1699b7c46309SBarry Smith /* copy over the A part */ 1700da668accSHong Zhang array = Aloc->a; 1701d0f46423SBarry Smith row = A->rmap->rstart; 1702da668accSHong Zhang for (i=0; i<ma; i++) { 1703da668accSHong Zhang ncol = ai[i+1]-ai[i]; 1704da668accSHong Zhang ierr = MatSetValues(B,ncol,aj,1,&row,array,INSERT_VALUES);CHKERRQ(ierr); 1705da668accSHong Zhang row++; array += ncol; aj += ncol; 1706b7c46309SBarry Smith } 1707b7c46309SBarry Smith aj = Aloc->j; 1708da668accSHong Zhang for (i=0; i<ai[ma]; i++) aj[i] -= cstart; /* resume local col index */ 1709b7c46309SBarry Smith 1710b7c46309SBarry Smith /* copy over the B part */ 1711fc73b1b3SBarry Smith ierr = PetscMalloc(bi[mb]*sizeof(PetscInt),&cols);CHKERRQ(ierr); 1712fc73b1b3SBarry Smith ierr = PetscMemzero(cols,bi[mb]*sizeof(PetscInt));CHKERRQ(ierr); 1713da668accSHong Zhang array = 
Bloc->a; 1714d0f46423SBarry Smith row = A->rmap->rstart; 1715da668accSHong Zhang for (i=0; i<bi[mb]; i++) {cols[i] = a->garray[bj[i]];} 171661a2fbbaSHong Zhang cols_tmp = cols; 1717da668accSHong Zhang for (i=0; i<mb; i++) { 1718da668accSHong Zhang ncol = bi[i+1]-bi[i]; 171961a2fbbaSHong Zhang ierr = MatSetValues(B,ncol,cols_tmp,1,&row,array,INSERT_VALUES);CHKERRQ(ierr); 172061a2fbbaSHong Zhang row++; array += ncol; cols_tmp += ncol; 1721b7c46309SBarry Smith } 1722fc73b1b3SBarry Smith ierr = PetscFree(cols);CHKERRQ(ierr); 1723fc73b1b3SBarry Smith 17246d4a8577SBarry Smith ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 17256d4a8577SBarry Smith ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 1726815cbec1SBarry Smith if (reuse == MAT_INITIAL_MATRIX || *matout != A) { 17270de55854SLois Curfman McInnes *matout = B; 17280de55854SLois Curfman McInnes } else { 1729eb6b5d47SBarry Smith ierr = MatHeaderMerge(A,B);CHKERRQ(ierr); 17300de55854SLois Curfman McInnes } 17313a40ed3dSBarry Smith PetscFunctionReturn(0); 1732b7c46309SBarry Smith } 1733b7c46309SBarry Smith 17344a2ae208SSatish Balay #undef __FUNCT__ 17354a2ae208SSatish Balay #define __FUNCT__ "MatDiagonalScale_MPIAIJ" 1736dfbe8321SBarry Smith PetscErrorCode MatDiagonalScale_MPIAIJ(Mat mat,Vec ll,Vec rr) 1737a008b906SSatish Balay { 17384b967eb1SSatish Balay Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 17394b967eb1SSatish Balay Mat a = aij->A,b = aij->B; 1740dfbe8321SBarry Smith PetscErrorCode ierr; 1741b1d57f15SBarry Smith PetscInt s1,s2,s3; 1742a008b906SSatish Balay 17433a40ed3dSBarry Smith PetscFunctionBegin; 17444b967eb1SSatish Balay ierr = MatGetLocalSize(mat,&s2,&s3);CHKERRQ(ierr); 17454b967eb1SSatish Balay if (rr) { 1746e1311b90SBarry Smith ierr = VecGetLocalSize(rr,&s1);CHKERRQ(ierr); 1747e32f2f54SBarry Smith if (s1!=s3) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"right vector non-conforming local size"); 17484b967eb1SSatish Balay /* Overlap communication with computation. 
*/ 1749ca9f406cSSatish Balay ierr = VecScatterBegin(aij->Mvctx,rr,aij->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1750a008b906SSatish Balay } 17514b967eb1SSatish Balay if (ll) { 1752e1311b90SBarry Smith ierr = VecGetLocalSize(ll,&s1);CHKERRQ(ierr); 1753e32f2f54SBarry Smith if (s1!=s2) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"left vector non-conforming local size"); 1754f830108cSBarry Smith ierr = (*b->ops->diagonalscale)(b,ll,0);CHKERRQ(ierr); 17554b967eb1SSatish Balay } 17564b967eb1SSatish Balay /* scale the diagonal block */ 1757f830108cSBarry Smith ierr = (*a->ops->diagonalscale)(a,ll,rr);CHKERRQ(ierr); 17584b967eb1SSatish Balay 17594b967eb1SSatish Balay if (rr) { 17604b967eb1SSatish Balay /* Do a scatter end and then right scale the off-diagonal block */ 1761ca9f406cSSatish Balay ierr = VecScatterEnd(aij->Mvctx,rr,aij->lvec,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr); 1762f830108cSBarry Smith ierr = (*b->ops->diagonalscale)(b,0,aij->lvec);CHKERRQ(ierr); 17634b967eb1SSatish Balay } 17644b967eb1SSatish Balay 17653a40ed3dSBarry Smith PetscFunctionReturn(0); 1766a008b906SSatish Balay } 1767a008b906SSatish Balay 17684a2ae208SSatish Balay #undef __FUNCT__ 1769521d7252SBarry Smith #define __FUNCT__ "MatSetBlockSize_MPIAIJ" 1770521d7252SBarry Smith PetscErrorCode MatSetBlockSize_MPIAIJ(Mat A,PetscInt bs) 17715a838052SSatish Balay { 1772521d7252SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 1773521d7252SBarry Smith PetscErrorCode ierr; 1774521d7252SBarry Smith 17753a40ed3dSBarry Smith PetscFunctionBegin; 1776521d7252SBarry Smith ierr = MatSetBlockSize(a->A,bs);CHKERRQ(ierr); 1777521d7252SBarry Smith ierr = MatSetBlockSize(a->B,bs);CHKERRQ(ierr); 1778829b6ff0SJed Brown ierr = PetscLayoutSetBlockSize(A->rmap,bs);CHKERRQ(ierr); 1779829b6ff0SJed Brown ierr = PetscLayoutSetBlockSize(A->cmap,bs);CHKERRQ(ierr); 17803a40ed3dSBarry Smith PetscFunctionReturn(0); 17815a838052SSatish Balay } 17824a2ae208SSatish Balay #undef __FUNCT__ 17834a2ae208SSatish Balay #define 
__FUNCT__ "MatSetUnfactored_MPIAIJ" 1784dfbe8321SBarry Smith PetscErrorCode MatSetUnfactored_MPIAIJ(Mat A) 1785bb5a7306SBarry Smith { 1786bb5a7306SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 1787dfbe8321SBarry Smith PetscErrorCode ierr; 17883a40ed3dSBarry Smith 17893a40ed3dSBarry Smith PetscFunctionBegin; 1790bb5a7306SBarry Smith ierr = MatSetUnfactored(a->A);CHKERRQ(ierr); 17913a40ed3dSBarry Smith PetscFunctionReturn(0); 1792bb5a7306SBarry Smith } 1793bb5a7306SBarry Smith 17944a2ae208SSatish Balay #undef __FUNCT__ 17954a2ae208SSatish Balay #define __FUNCT__ "MatEqual_MPIAIJ" 1796ace3abfcSBarry Smith PetscErrorCode MatEqual_MPIAIJ(Mat A,Mat B,PetscBool *flag) 1797d4bb536fSBarry Smith { 1798d4bb536fSBarry Smith Mat_MPIAIJ *matB = (Mat_MPIAIJ*)B->data,*matA = (Mat_MPIAIJ*)A->data; 1799d4bb536fSBarry Smith Mat a,b,c,d; 1800ace3abfcSBarry Smith PetscBool flg; 1801dfbe8321SBarry Smith PetscErrorCode ierr; 1802d4bb536fSBarry Smith 18033a40ed3dSBarry Smith PetscFunctionBegin; 1804d4bb536fSBarry Smith a = matA->A; b = matA->B; 1805d4bb536fSBarry Smith c = matB->A; d = matB->B; 1806d4bb536fSBarry Smith 1807d4bb536fSBarry Smith ierr = MatEqual(a,c,&flg);CHKERRQ(ierr); 1808abc0a331SBarry Smith if (flg) { 1809d4bb536fSBarry Smith ierr = MatEqual(b,d,&flg);CHKERRQ(ierr); 1810d4bb536fSBarry Smith } 18117adad957SLisandro Dalcin ierr = MPI_Allreduce(&flg,flag,1,MPI_INT,MPI_LAND,((PetscObject)A)->comm);CHKERRQ(ierr); 18123a40ed3dSBarry Smith PetscFunctionReturn(0); 1813d4bb536fSBarry Smith } 1814d4bb536fSBarry Smith 18154a2ae208SSatish Balay #undef __FUNCT__ 18164a2ae208SSatish Balay #define __FUNCT__ "MatCopy_MPIAIJ" 1817dfbe8321SBarry Smith PetscErrorCode MatCopy_MPIAIJ(Mat A,Mat B,MatStructure str) 1818cb5b572fSBarry Smith { 1819dfbe8321SBarry Smith PetscErrorCode ierr; 1820cb5b572fSBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ *)A->data; 1821cb5b572fSBarry Smith Mat_MPIAIJ *b = (Mat_MPIAIJ *)B->data; 1822cb5b572fSBarry Smith 1823cb5b572fSBarry Smith PetscFunctionBegin; 
182433f4a19fSKris Buschelman /* If the two matrices don't have the same copy implementation, they aren't compatible for fast copy. */ 182533f4a19fSKris Buschelman if ((str != SAME_NONZERO_PATTERN) || (A->ops->copy != B->ops->copy)) { 1826cb5b572fSBarry Smith /* because of the column compression in the off-processor part of the matrix a->B, 1827cb5b572fSBarry Smith the number of columns in a->B and b->B may be different, hence we cannot call 1828cb5b572fSBarry Smith the MatCopy() directly on the two parts. If need be, we can provide a more 1829cb5b572fSBarry Smith efficient copy than the MatCopy_Basic() by first uncompressing the a->B matrices 1830cb5b572fSBarry Smith then copying the submatrices */ 1831cb5b572fSBarry Smith ierr = MatCopy_Basic(A,B,str);CHKERRQ(ierr); 1832cb5b572fSBarry Smith } else { 1833cb5b572fSBarry Smith ierr = MatCopy(a->A,b->A,str);CHKERRQ(ierr); 1834cb5b572fSBarry Smith ierr = MatCopy(a->B,b->B,str);CHKERRQ(ierr); 1835cb5b572fSBarry Smith } 1836cb5b572fSBarry Smith PetscFunctionReturn(0); 1837cb5b572fSBarry Smith } 1838cb5b572fSBarry Smith 18394a2ae208SSatish Balay #undef __FUNCT__ 18404a2ae208SSatish Balay #define __FUNCT__ "MatSetUpPreallocation_MPIAIJ" 1841dfbe8321SBarry Smith PetscErrorCode MatSetUpPreallocation_MPIAIJ(Mat A) 1842273d9f13SBarry Smith { 1843dfbe8321SBarry Smith PetscErrorCode ierr; 1844273d9f13SBarry Smith 1845273d9f13SBarry Smith PetscFunctionBegin; 1846273d9f13SBarry Smith ierr = MatMPIAIJSetPreallocation(A,PETSC_DEFAULT,0,PETSC_DEFAULT,0);CHKERRQ(ierr); 1847273d9f13SBarry Smith PetscFunctionReturn(0); 1848273d9f13SBarry Smith } 1849273d9f13SBarry Smith 1850ac90fabeSBarry Smith #undef __FUNCT__ 1851ac90fabeSBarry Smith #define __FUNCT__ "MatAXPY_MPIAIJ" 1852f4df32b1SMatthew Knepley PetscErrorCode MatAXPY_MPIAIJ(Mat Y,PetscScalar a,Mat X,MatStructure str) 1853ac90fabeSBarry Smith { 1854dfbe8321SBarry Smith PetscErrorCode ierr; 1855b1d57f15SBarry Smith PetscInt i; 1856ac90fabeSBarry Smith Mat_MPIAIJ *xx = (Mat_MPIAIJ 
*)X->data,*yy = (Mat_MPIAIJ *)Y->data; 18574ce68768SBarry Smith PetscBLASInt bnz,one=1; 1858ac90fabeSBarry Smith Mat_SeqAIJ *x,*y; 1859ac90fabeSBarry Smith 1860ac90fabeSBarry Smith PetscFunctionBegin; 1861ac90fabeSBarry Smith if (str == SAME_NONZERO_PATTERN) { 1862f4df32b1SMatthew Knepley PetscScalar alpha = a; 1863ac90fabeSBarry Smith x = (Mat_SeqAIJ *)xx->A->data; 1864ac90fabeSBarry Smith y = (Mat_SeqAIJ *)yy->A->data; 18650805154bSBarry Smith bnz = PetscBLASIntCast(x->nz); 1866f4df32b1SMatthew Knepley BLASaxpy_(&bnz,&alpha,x->a,&one,y->a,&one); 1867ac90fabeSBarry Smith x = (Mat_SeqAIJ *)xx->B->data; 1868ac90fabeSBarry Smith y = (Mat_SeqAIJ *)yy->B->data; 18690805154bSBarry Smith bnz = PetscBLASIntCast(x->nz); 1870f4df32b1SMatthew Knepley BLASaxpy_(&bnz,&alpha,x->a,&one,y->a,&one); 1871a30b2313SHong Zhang } else if (str == SUBSET_NONZERO_PATTERN) { 1872f4df32b1SMatthew Knepley ierr = MatAXPY_SeqAIJ(yy->A,a,xx->A,str);CHKERRQ(ierr); 1873c537a176SHong Zhang 1874c537a176SHong Zhang x = (Mat_SeqAIJ *)xx->B->data; 1875a30b2313SHong Zhang y = (Mat_SeqAIJ *)yy->B->data; 1876a30b2313SHong Zhang if (y->xtoy && y->XtoY != xx->B) { 1877a30b2313SHong Zhang ierr = PetscFree(y->xtoy);CHKERRQ(ierr); 1878a30b2313SHong Zhang ierr = MatDestroy(y->XtoY);CHKERRQ(ierr); 1879c537a176SHong Zhang } 1880a30b2313SHong Zhang if (!y->xtoy) { /* get xtoy */ 1881d0f46423SBarry Smith ierr = MatAXPYGetxtoy_Private(xx->B->rmap->n,x->i,x->j,xx->garray,y->i,y->j,yy->garray,&y->xtoy);CHKERRQ(ierr); 1882a30b2313SHong Zhang y->XtoY = xx->B; 1883407f6b05SHong Zhang ierr = PetscObjectReference((PetscObject)xx->B);CHKERRQ(ierr); 1884c537a176SHong Zhang } 1885f4df32b1SMatthew Knepley for (i=0; i<x->nz; i++) y->a[y->xtoy[i]] += a*(x->a[i]); 1886ac90fabeSBarry Smith } else { 18879f5f6813SShri Abhyankar Mat B; 18889f5f6813SShri Abhyankar PetscInt *nnz_d,*nnz_o; 18899f5f6813SShri Abhyankar ierr = PetscMalloc(yy->A->rmap->N*sizeof(PetscInt),&nnz_d);CHKERRQ(ierr); 18909f5f6813SShri Abhyankar ierr = 
PetscMalloc(yy->B->rmap->N*sizeof(PetscInt),&nnz_o);CHKERRQ(ierr); 18919f5f6813SShri Abhyankar ierr = MatCreate(((PetscObject)Y)->comm,&B);CHKERRQ(ierr); 1892bc5a2726SShri Abhyankar ierr = PetscObjectSetName((PetscObject)B,((PetscObject)Y)->name);CHKERRQ(ierr); 18939f5f6813SShri Abhyankar ierr = MatSetSizes(B,Y->rmap->n,Y->cmap->n,Y->rmap->N,Y->cmap->N);CHKERRQ(ierr); 18949f5f6813SShri Abhyankar ierr = MatSetType(B,MATMPIAIJ);CHKERRQ(ierr); 18959f5f6813SShri Abhyankar ierr = MatAXPYGetPreallocation_SeqAIJ(yy->A,xx->A,nnz_d);CHKERRQ(ierr); 18969f5f6813SShri Abhyankar ierr = MatAXPYGetPreallocation_SeqAIJ(yy->B,xx->B,nnz_o);CHKERRQ(ierr); 18979f5f6813SShri Abhyankar ierr = MatMPIAIJSetPreallocation(B,PETSC_NULL,nnz_d,PETSC_NULL,nnz_o);CHKERRQ(ierr); 18989f5f6813SShri Abhyankar ierr = MatAXPY_BasicWithPreallocation(B,Y,a,X,str);CHKERRQ(ierr); 18999f5f6813SShri Abhyankar ierr = MatHeaderReplace(Y,B); 19009f5f6813SShri Abhyankar ierr = PetscFree(nnz_d);CHKERRQ(ierr); 19019f5f6813SShri Abhyankar ierr = PetscFree(nnz_o);CHKERRQ(ierr); 1902ac90fabeSBarry Smith } 1903ac90fabeSBarry Smith PetscFunctionReturn(0); 1904ac90fabeSBarry Smith } 1905ac90fabeSBarry Smith 1906354c94deSBarry Smith EXTERN PetscErrorCode PETSCMAT_DLLEXPORT MatConjugate_SeqAIJ(Mat); 1907354c94deSBarry Smith 1908354c94deSBarry Smith #undef __FUNCT__ 1909354c94deSBarry Smith #define __FUNCT__ "MatConjugate_MPIAIJ" 1910354c94deSBarry Smith PetscErrorCode PETSCMAT_DLLEXPORT MatConjugate_MPIAIJ(Mat mat) 1911354c94deSBarry Smith { 1912354c94deSBarry Smith #if defined(PETSC_USE_COMPLEX) 1913354c94deSBarry Smith PetscErrorCode ierr; 1914354c94deSBarry Smith Mat_MPIAIJ *aij = (Mat_MPIAIJ *)mat->data; 1915354c94deSBarry Smith 1916354c94deSBarry Smith PetscFunctionBegin; 1917354c94deSBarry Smith ierr = MatConjugate_SeqAIJ(aij->A);CHKERRQ(ierr); 1918354c94deSBarry Smith ierr = MatConjugate_SeqAIJ(aij->B);CHKERRQ(ierr); 1919354c94deSBarry Smith #else 1920354c94deSBarry Smith PetscFunctionBegin; 1921354c94deSBarry 
Smith #endif 1922354c94deSBarry Smith PetscFunctionReturn(0); 1923354c94deSBarry Smith } 1924354c94deSBarry Smith 192599cafbc1SBarry Smith #undef __FUNCT__ 192699cafbc1SBarry Smith #define __FUNCT__ "MatRealPart_MPIAIJ" 192799cafbc1SBarry Smith PetscErrorCode MatRealPart_MPIAIJ(Mat A) 192899cafbc1SBarry Smith { 192999cafbc1SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 193099cafbc1SBarry Smith PetscErrorCode ierr; 193199cafbc1SBarry Smith 193299cafbc1SBarry Smith PetscFunctionBegin; 193399cafbc1SBarry Smith ierr = MatRealPart(a->A);CHKERRQ(ierr); 193499cafbc1SBarry Smith ierr = MatRealPart(a->B);CHKERRQ(ierr); 193599cafbc1SBarry Smith PetscFunctionReturn(0); 193699cafbc1SBarry Smith } 193799cafbc1SBarry Smith 193899cafbc1SBarry Smith #undef __FUNCT__ 193999cafbc1SBarry Smith #define __FUNCT__ "MatImaginaryPart_MPIAIJ" 194099cafbc1SBarry Smith PetscErrorCode MatImaginaryPart_MPIAIJ(Mat A) 194199cafbc1SBarry Smith { 194299cafbc1SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 194399cafbc1SBarry Smith PetscErrorCode ierr; 194499cafbc1SBarry Smith 194599cafbc1SBarry Smith PetscFunctionBegin; 194699cafbc1SBarry Smith ierr = MatImaginaryPart(a->A);CHKERRQ(ierr); 194799cafbc1SBarry Smith ierr = MatImaginaryPart(a->B);CHKERRQ(ierr); 194899cafbc1SBarry Smith PetscFunctionReturn(0); 194999cafbc1SBarry Smith } 195099cafbc1SBarry Smith 1951103bf8bdSMatthew Knepley #ifdef PETSC_HAVE_PBGL 1952103bf8bdSMatthew Knepley 1953103bf8bdSMatthew Knepley #include <boost/parallel/mpi/bsp_process_group.hpp> 1954a2c909beSMatthew Knepley #include <boost/graph/distributed/ilu_default_graph.hpp> 1955a2c909beSMatthew Knepley #include <boost/graph/distributed/ilu_0_block.hpp> 1956a2c909beSMatthew Knepley #include <boost/graph/distributed/ilu_preconditioner.hpp> 1957103bf8bdSMatthew Knepley #include <boost/graph/distributed/petsc/interface.hpp> 1958a2c909beSMatthew Knepley #include <boost/multi_array.hpp> 1959d0f46423SBarry Smith #include <boost/parallel/distributed_property_map->hpp> 
1960103bf8bdSMatthew Knepley 1961103bf8bdSMatthew Knepley #undef __FUNCT__ 1962103bf8bdSMatthew Knepley #define __FUNCT__ "MatILUFactorSymbolic_MPIAIJ" 1963103bf8bdSMatthew Knepley /* 1964103bf8bdSMatthew Knepley This uses the parallel ILU factorization of Peter Gottschling <pgottsch@osl.iu.edu> 1965103bf8bdSMatthew Knepley */ 19660481f469SBarry Smith PetscErrorCode MatILUFactorSymbolic_MPIAIJ(Mat fact,Mat A, IS isrow, IS iscol, const MatFactorInfo *info) 1967103bf8bdSMatthew Knepley { 1968a2c909beSMatthew Knepley namespace petsc = boost::distributed::petsc; 1969a2c909beSMatthew Knepley 1970a2c909beSMatthew Knepley namespace graph_dist = boost::graph::distributed; 1971a2c909beSMatthew Knepley using boost::graph::distributed::ilu_default::process_group_type; 1972a2c909beSMatthew Knepley using boost::graph::ilu_permuted; 1973a2c909beSMatthew Knepley 1974ace3abfcSBarry Smith PetscBool row_identity, col_identity; 1975776b82aeSLisandro Dalcin PetscContainer c; 1976103bf8bdSMatthew Knepley PetscInt m, n, M, N; 1977103bf8bdSMatthew Knepley PetscErrorCode ierr; 1978103bf8bdSMatthew Knepley 1979103bf8bdSMatthew Knepley PetscFunctionBegin; 1980e32f2f54SBarry Smith if (info->levels != 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"Only levels = 0 supported for parallel ilu"); 1981103bf8bdSMatthew Knepley ierr = ISIdentity(isrow, &row_identity);CHKERRQ(ierr); 1982103bf8bdSMatthew Knepley ierr = ISIdentity(iscol, &col_identity);CHKERRQ(ierr); 1983103bf8bdSMatthew Knepley if (!row_identity || !col_identity) { 1984e32f2f54SBarry Smith SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Row and column permutations must be identity for parallel ILU"); 1985103bf8bdSMatthew Knepley } 1986103bf8bdSMatthew Knepley 1987103bf8bdSMatthew Knepley process_group_type pg; 1988a2c909beSMatthew Knepley typedef graph_dist::ilu_default::ilu_level_graph_type lgraph_type; 1989a2c909beSMatthew Knepley lgraph_type* lgraph_p = new lgraph_type(petsc::num_global_vertices(A), pg, petsc::matrix_distribution(A, pg)); 
1990a2c909beSMatthew Knepley lgraph_type& level_graph = *lgraph_p; 1991a2c909beSMatthew Knepley graph_dist::ilu_default::graph_type& graph(level_graph.graph); 1992a2c909beSMatthew Knepley 1993103bf8bdSMatthew Knepley petsc::read_matrix(A, graph, get(boost::edge_weight, graph)); 1994a2c909beSMatthew Knepley ilu_permuted(level_graph); 1995103bf8bdSMatthew Knepley 1996103bf8bdSMatthew Knepley /* put together the new matrix */ 19977adad957SLisandro Dalcin ierr = MatCreate(((PetscObject)A)->comm, fact);CHKERRQ(ierr); 1998103bf8bdSMatthew Knepley ierr = MatGetLocalSize(A, &m, &n);CHKERRQ(ierr); 1999103bf8bdSMatthew Knepley ierr = MatGetSize(A, &M, &N);CHKERRQ(ierr); 2000719d5645SBarry Smith ierr = MatSetSizes(fact, m, n, M, N);CHKERRQ(ierr); 2001719d5645SBarry Smith ierr = MatSetType(fact, ((PetscObject)A)->type_name);CHKERRQ(ierr); 2002719d5645SBarry Smith ierr = MatAssemblyBegin(fact, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 2003719d5645SBarry Smith ierr = MatAssemblyEnd(fact, MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 2004103bf8bdSMatthew Knepley 20057adad957SLisandro Dalcin ierr = PetscContainerCreate(((PetscObject)A)->comm, &c); 2006776b82aeSLisandro Dalcin ierr = PetscContainerSetPointer(c, lgraph_p); 2007719d5645SBarry Smith ierr = PetscObjectCompose((PetscObject) (fact), "graph", (PetscObject) c); 2008103bf8bdSMatthew Knepley PetscFunctionReturn(0); 2009103bf8bdSMatthew Knepley } 2010103bf8bdSMatthew Knepley 2011103bf8bdSMatthew Knepley #undef __FUNCT__ 2012103bf8bdSMatthew Knepley #define __FUNCT__ "MatLUFactorNumeric_MPIAIJ" 20130481f469SBarry Smith PetscErrorCode MatLUFactorNumeric_MPIAIJ(Mat B,Mat A, const MatFactorInfo *info) 2014103bf8bdSMatthew Knepley { 2015103bf8bdSMatthew Knepley PetscFunctionBegin; 2016103bf8bdSMatthew Knepley PetscFunctionReturn(0); 2017103bf8bdSMatthew Knepley } 2018103bf8bdSMatthew Knepley 2019103bf8bdSMatthew Knepley #undef __FUNCT__ 2020103bf8bdSMatthew Knepley #define __FUNCT__ "MatSolve_MPIAIJ" 2021103bf8bdSMatthew Knepley /* 
2022103bf8bdSMatthew Knepley This uses the parallel ILU factorization of Peter Gottschling <pgottsch@osl.iu.edu> 2023103bf8bdSMatthew Knepley */ 2024103bf8bdSMatthew Knepley PetscErrorCode MatSolve_MPIAIJ(Mat A, Vec b, Vec x) 2025103bf8bdSMatthew Knepley { 2026a2c909beSMatthew Knepley namespace graph_dist = boost::graph::distributed; 2027a2c909beSMatthew Knepley 2028a2c909beSMatthew Knepley typedef graph_dist::ilu_default::ilu_level_graph_type lgraph_type; 2029a2c909beSMatthew Knepley lgraph_type* lgraph_p; 2030776b82aeSLisandro Dalcin PetscContainer c; 2031103bf8bdSMatthew Knepley PetscErrorCode ierr; 2032103bf8bdSMatthew Knepley 2033103bf8bdSMatthew Knepley PetscFunctionBegin; 2034103bf8bdSMatthew Knepley ierr = PetscObjectQuery((PetscObject) A, "graph", (PetscObject *) &c);CHKERRQ(ierr); 2035776b82aeSLisandro Dalcin ierr = PetscContainerGetPointer(c, (void **) &lgraph_p);CHKERRQ(ierr); 2036103bf8bdSMatthew Knepley ierr = VecCopy(b, x);CHKERRQ(ierr); 2037a2c909beSMatthew Knepley 2038a2c909beSMatthew Knepley PetscScalar* array_x; 2039a2c909beSMatthew Knepley ierr = VecGetArray(x, &array_x);CHKERRQ(ierr); 2040a2c909beSMatthew Knepley PetscInt sx; 2041a2c909beSMatthew Knepley ierr = VecGetSize(x, &sx);CHKERRQ(ierr); 2042a2c909beSMatthew Knepley 2043a2c909beSMatthew Knepley PetscScalar* array_b; 2044a2c909beSMatthew Knepley ierr = VecGetArray(b, &array_b);CHKERRQ(ierr); 2045a2c909beSMatthew Knepley PetscInt sb; 2046a2c909beSMatthew Knepley ierr = VecGetSize(b, &sb);CHKERRQ(ierr); 2047a2c909beSMatthew Knepley 2048a2c909beSMatthew Knepley lgraph_type& level_graph = *lgraph_p; 2049a2c909beSMatthew Knepley graph_dist::ilu_default::graph_type& graph(level_graph.graph); 2050a2c909beSMatthew Knepley 2051a2c909beSMatthew Knepley typedef boost::multi_array_ref<PetscScalar, 1> array_ref_type; 2052a2c909beSMatthew Knepley array_ref_type ref_b(array_b, boost::extents[num_vertices(graph)]), 2053a2c909beSMatthew Knepley ref_x(array_x, boost::extents[num_vertices(graph)]); 
2054a2c909beSMatthew Knepley 2055a2c909beSMatthew Knepley typedef boost::iterator_property_map<array_ref_type::iterator, 2056a2c909beSMatthew Knepley boost::property_map<graph_dist::ilu_default::graph_type, boost::vertex_index_t>::type> gvector_type; 2057a2c909beSMatthew Knepley gvector_type vector_b(ref_b.begin(), get(boost::vertex_index, graph)), 2058a2c909beSMatthew Knepley vector_x(ref_x.begin(), get(boost::vertex_index, graph)); 2059a2c909beSMatthew Knepley 2060a2c909beSMatthew Knepley ilu_set_solve(*lgraph_p, vector_b, vector_x); 2061a2c909beSMatthew Knepley 2062103bf8bdSMatthew Knepley PetscFunctionReturn(0); 2063103bf8bdSMatthew Knepley } 2064103bf8bdSMatthew Knepley #endif 2065103bf8bdSMatthew Knepley 206669db28dcSHong Zhang typedef struct { /* used by MatGetRedundantMatrix() for reusing matredundant */ 206769db28dcSHong Zhang PetscInt nzlocal,nsends,nrecvs; 20681d79065fSBarry Smith PetscMPIInt *send_rank,*recv_rank; 20691d79065fSBarry Smith PetscInt *sbuf_nz,*rbuf_nz,*sbuf_j,**rbuf_j; 207069db28dcSHong Zhang PetscScalar *sbuf_a,**rbuf_a; 207169db28dcSHong Zhang PetscErrorCode (*MatDestroy)(Mat); 207269db28dcSHong Zhang } Mat_Redundant; 207369db28dcSHong Zhang 207469db28dcSHong Zhang #undef __FUNCT__ 207569db28dcSHong Zhang #define __FUNCT__ "PetscContainerDestroy_MatRedundant" 207669db28dcSHong Zhang PetscErrorCode PetscContainerDestroy_MatRedundant(void *ptr) 207769db28dcSHong Zhang { 207869db28dcSHong Zhang PetscErrorCode ierr; 207969db28dcSHong Zhang Mat_Redundant *redund=(Mat_Redundant*)ptr; 208069db28dcSHong Zhang PetscInt i; 208169db28dcSHong Zhang 208269db28dcSHong Zhang PetscFunctionBegin; 20831d79065fSBarry Smith ierr = PetscFree2(redund->send_rank,redund->recv_rank);CHKERRQ(ierr); 208469db28dcSHong Zhang ierr = PetscFree(redund->sbuf_j);CHKERRQ(ierr); 208569db28dcSHong Zhang ierr = PetscFree(redund->sbuf_a);CHKERRQ(ierr); 208669db28dcSHong Zhang for (i=0; i<redund->nrecvs; i++){ 208769db28dcSHong Zhang ierr = 
PetscFree(redund->rbuf_j[i]);CHKERRQ(ierr); 208869db28dcSHong Zhang ierr = PetscFree(redund->rbuf_a[i]);CHKERRQ(ierr); 208969db28dcSHong Zhang } 20901d79065fSBarry Smith ierr = PetscFree4(redund->sbuf_nz,redund->rbuf_nz,redund->rbuf_j,redund->rbuf_a);CHKERRQ(ierr); 209169db28dcSHong Zhang ierr = PetscFree(redund);CHKERRQ(ierr); 209269db28dcSHong Zhang PetscFunctionReturn(0); 209369db28dcSHong Zhang } 209469db28dcSHong Zhang 209569db28dcSHong Zhang #undef __FUNCT__ 209669db28dcSHong Zhang #define __FUNCT__ "MatDestroy_MatRedundant" 209769db28dcSHong Zhang PetscErrorCode MatDestroy_MatRedundant(Mat A) 209869db28dcSHong Zhang { 209969db28dcSHong Zhang PetscErrorCode ierr; 210069db28dcSHong Zhang PetscContainer container; 210169db28dcSHong Zhang Mat_Redundant *redund=PETSC_NULL; 210269db28dcSHong Zhang 210369db28dcSHong Zhang PetscFunctionBegin; 210469db28dcSHong Zhang ierr = PetscObjectQuery((PetscObject)A,"Mat_Redundant",(PetscObject *)&container);CHKERRQ(ierr); 210569db28dcSHong Zhang if (container) { 210669db28dcSHong Zhang ierr = PetscContainerGetPointer(container,(void **)&redund);CHKERRQ(ierr); 210769db28dcSHong Zhang } else { 2108e32f2f54SBarry Smith SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Container does not exit"); 210969db28dcSHong Zhang } 211069db28dcSHong Zhang A->ops->destroy = redund->MatDestroy; 211169db28dcSHong Zhang ierr = PetscObjectCompose((PetscObject)A,"Mat_Redundant",0);CHKERRQ(ierr); 211269db28dcSHong Zhang ierr = (*A->ops->destroy)(A);CHKERRQ(ierr); 211369db28dcSHong Zhang ierr = PetscContainerDestroy(container);CHKERRQ(ierr); 211469db28dcSHong Zhang PetscFunctionReturn(0); 211569db28dcSHong Zhang } 211669db28dcSHong Zhang 211769db28dcSHong Zhang #undef __FUNCT__ 211869db28dcSHong Zhang #define __FUNCT__ "MatGetRedundantMatrix_MPIAIJ" 211969db28dcSHong Zhang PetscErrorCode MatGetRedundantMatrix_MPIAIJ(Mat mat,PetscInt nsubcomm,MPI_Comm subcomm,PetscInt mlocal_sub,MatReuse reuse,Mat *matredundant) 212069db28dcSHong Zhang { 212169db28dcSHong 
Zhang PetscMPIInt rank,size; 21227adad957SLisandro Dalcin MPI_Comm comm=((PetscObject)mat)->comm; 212369db28dcSHong Zhang PetscErrorCode ierr; 212469db28dcSHong Zhang PetscInt nsends=0,nrecvs=0,i,rownz_max=0; 212569db28dcSHong Zhang PetscMPIInt *send_rank=PETSC_NULL,*recv_rank=PETSC_NULL; 2126d0f46423SBarry Smith PetscInt *rowrange=mat->rmap->range; 212769db28dcSHong Zhang Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 212869db28dcSHong Zhang Mat A=aij->A,B=aij->B,C=*matredundant; 212969db28dcSHong Zhang Mat_SeqAIJ *a=(Mat_SeqAIJ*)A->data,*b=(Mat_SeqAIJ*)B->data; 213069db28dcSHong Zhang PetscScalar *sbuf_a; 213169db28dcSHong Zhang PetscInt nzlocal=a->nz+b->nz; 2132d0f46423SBarry Smith PetscInt j,cstart=mat->cmap->rstart,cend=mat->cmap->rend,row,nzA,nzB,ncols,*cworkA,*cworkB; 2133d0f46423SBarry Smith PetscInt rstart=mat->rmap->rstart,rend=mat->rmap->rend,*bmap=aij->garray,M,N; 213469db28dcSHong Zhang PetscInt *cols,ctmp,lwrite,*rptr,l,*sbuf_j; 2135a77337e4SBarry Smith MatScalar *aworkA,*aworkB; 2136a77337e4SBarry Smith PetscScalar *vals; 213769db28dcSHong Zhang PetscMPIInt tag1,tag2,tag3,imdex; 213869db28dcSHong Zhang MPI_Request *s_waits1=PETSC_NULL,*s_waits2=PETSC_NULL,*s_waits3=PETSC_NULL, 213969db28dcSHong Zhang *r_waits1=PETSC_NULL,*r_waits2=PETSC_NULL,*r_waits3=PETSC_NULL; 214069db28dcSHong Zhang MPI_Status recv_status,*send_status; 214169db28dcSHong Zhang PetscInt *sbuf_nz=PETSC_NULL,*rbuf_nz=PETSC_NULL,count; 214269db28dcSHong Zhang PetscInt **rbuf_j=PETSC_NULL; 214369db28dcSHong Zhang PetscScalar **rbuf_a=PETSC_NULL; 214469db28dcSHong Zhang Mat_Redundant *redund=PETSC_NULL; 214569db28dcSHong Zhang PetscContainer container; 214669db28dcSHong Zhang 214769db28dcSHong Zhang PetscFunctionBegin; 214869db28dcSHong Zhang ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 214969db28dcSHong Zhang ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 215069db28dcSHong Zhang 215169db28dcSHong Zhang if (reuse == MAT_REUSE_MATRIX) { 215269db28dcSHong Zhang ierr = 
MatGetSize(C,&M,&N);CHKERRQ(ierr); 2153e32f2f54SBarry Smith if (M != N || M != mat->rmap->N) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Cannot reuse matrix. Wrong global size"); 215469db28dcSHong Zhang ierr = MatGetLocalSize(C,&M,&N);CHKERRQ(ierr); 2155e32f2f54SBarry Smith if (M != N || M != mlocal_sub) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Cannot reuse matrix. Wrong local size"); 215669db28dcSHong Zhang ierr = PetscObjectQuery((PetscObject)C,"Mat_Redundant",(PetscObject *)&container);CHKERRQ(ierr); 215769db28dcSHong Zhang if (container) { 215869db28dcSHong Zhang ierr = PetscContainerGetPointer(container,(void **)&redund);CHKERRQ(ierr); 215969db28dcSHong Zhang } else { 2160e32f2f54SBarry Smith SETERRQ(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Container does not exit"); 216169db28dcSHong Zhang } 2162e32f2f54SBarry Smith if (nzlocal != redund->nzlocal) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Cannot reuse matrix. Wrong nzlocal"); 216369db28dcSHong Zhang 216469db28dcSHong Zhang nsends = redund->nsends; 216569db28dcSHong Zhang nrecvs = redund->nrecvs; 21661d79065fSBarry Smith send_rank = redund->send_rank; 21671d79065fSBarry Smith recv_rank = redund->recv_rank; 21681d79065fSBarry Smith sbuf_nz = redund->sbuf_nz; 21691d79065fSBarry Smith rbuf_nz = redund->rbuf_nz; 217069db28dcSHong Zhang sbuf_j = redund->sbuf_j; 217169db28dcSHong Zhang sbuf_a = redund->sbuf_a; 217269db28dcSHong Zhang rbuf_j = redund->rbuf_j; 217369db28dcSHong Zhang rbuf_a = redund->rbuf_a; 217469db28dcSHong Zhang } 217569db28dcSHong Zhang 217669db28dcSHong Zhang if (reuse == MAT_INITIAL_MATRIX){ 217769db28dcSHong Zhang PetscMPIInt subrank,subsize; 217869db28dcSHong Zhang PetscInt nleftover,np_subcomm; 217969db28dcSHong Zhang /* get the destination processors' id send_rank, nsends and nrecvs */ 218069db28dcSHong Zhang ierr = MPI_Comm_rank(subcomm,&subrank);CHKERRQ(ierr); 218169db28dcSHong Zhang ierr = MPI_Comm_size(subcomm,&subsize);CHKERRQ(ierr); 21821d79065fSBarry Smith ierr = 
PetscMalloc2(size,PetscMPIInt,&send_rank,size,PetscMPIInt,&recv_rank); 218369db28dcSHong Zhang np_subcomm = size/nsubcomm; 218469db28dcSHong Zhang nleftover = size - nsubcomm*np_subcomm; 218569db28dcSHong Zhang nsends = 0; nrecvs = 0; 218669db28dcSHong Zhang for (i=0; i<size; i++){ /* i=rank*/ 218769db28dcSHong Zhang if (subrank == i/nsubcomm && rank != i){ /* my_subrank == other's subrank */ 218869db28dcSHong Zhang send_rank[nsends] = i; nsends++; 218969db28dcSHong Zhang recv_rank[nrecvs++] = i; 219069db28dcSHong Zhang } 219169db28dcSHong Zhang } 219269db28dcSHong Zhang if (rank >= size - nleftover){/* this proc is a leftover processor */ 219369db28dcSHong Zhang i = size-nleftover-1; 219469db28dcSHong Zhang j = 0; 219569db28dcSHong Zhang while (j < nsubcomm - nleftover){ 219669db28dcSHong Zhang send_rank[nsends++] = i; 219769db28dcSHong Zhang i--; j++; 219869db28dcSHong Zhang } 219969db28dcSHong Zhang } 220069db28dcSHong Zhang 220169db28dcSHong Zhang if (nleftover && subsize == size/nsubcomm && subrank==subsize-1){ /* this proc recvs from leftover processors */ 220269db28dcSHong Zhang for (i=0; i<nleftover; i++){ 220369db28dcSHong Zhang recv_rank[nrecvs++] = size-nleftover+i; 220469db28dcSHong Zhang } 220569db28dcSHong Zhang } 220669db28dcSHong Zhang 220769db28dcSHong Zhang /* allocate sbuf_j, sbuf_a */ 220869db28dcSHong Zhang i = nzlocal + rowrange[rank+1] - rowrange[rank] + 2; 220969db28dcSHong Zhang ierr = PetscMalloc(i*sizeof(PetscInt),&sbuf_j);CHKERRQ(ierr); 221069db28dcSHong Zhang ierr = PetscMalloc((nzlocal+1)*sizeof(PetscScalar),&sbuf_a);CHKERRQ(ierr); 221169db28dcSHong Zhang } /* endof if (reuse == MAT_INITIAL_MATRIX) */ 221269db28dcSHong Zhang 221369db28dcSHong Zhang /* copy mat's local entries into the buffers */ 221469db28dcSHong Zhang if (reuse == MAT_INITIAL_MATRIX){ 221569db28dcSHong Zhang rownz_max = 0; 221669db28dcSHong Zhang rptr = sbuf_j; 221769db28dcSHong Zhang cols = sbuf_j + rend-rstart + 1; 221869db28dcSHong Zhang vals = sbuf_a; 
221969db28dcSHong Zhang rptr[0] = 0; 222069db28dcSHong Zhang for (i=0; i<rend-rstart; i++){ 222169db28dcSHong Zhang row = i + rstart; 222269db28dcSHong Zhang nzA = a->i[i+1] - a->i[i]; nzB = b->i[i+1] - b->i[i]; 222369db28dcSHong Zhang ncols = nzA + nzB; 222469db28dcSHong Zhang cworkA = a->j + a->i[i]; cworkB = b->j + b->i[i]; 222569db28dcSHong Zhang aworkA = a->a + a->i[i]; aworkB = b->a + b->i[i]; 222669db28dcSHong Zhang /* load the column indices for this row into cols */ 222769db28dcSHong Zhang lwrite = 0; 222869db28dcSHong Zhang for (l=0; l<nzB; l++) { 222969db28dcSHong Zhang if ((ctmp = bmap[cworkB[l]]) < cstart){ 223069db28dcSHong Zhang vals[lwrite] = aworkB[l]; 223169db28dcSHong Zhang cols[lwrite++] = ctmp; 223269db28dcSHong Zhang } 223369db28dcSHong Zhang } 223469db28dcSHong Zhang for (l=0; l<nzA; l++){ 223569db28dcSHong Zhang vals[lwrite] = aworkA[l]; 223669db28dcSHong Zhang cols[lwrite++] = cstart + cworkA[l]; 223769db28dcSHong Zhang } 223869db28dcSHong Zhang for (l=0; l<nzB; l++) { 223969db28dcSHong Zhang if ((ctmp = bmap[cworkB[l]]) >= cend){ 224069db28dcSHong Zhang vals[lwrite] = aworkB[l]; 224169db28dcSHong Zhang cols[lwrite++] = ctmp; 224269db28dcSHong Zhang } 224369db28dcSHong Zhang } 224469db28dcSHong Zhang vals += ncols; 224569db28dcSHong Zhang cols += ncols; 224669db28dcSHong Zhang rptr[i+1] = rptr[i] + ncols; 224769db28dcSHong Zhang if (rownz_max < ncols) rownz_max = ncols; 224869db28dcSHong Zhang } 2249e32f2f54SBarry Smith if (rptr[rend-rstart] != a->nz + b->nz) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_PLIB, "rptr[%d] %d != %d + %d",rend-rstart,rptr[rend-rstart+1],a->nz,b->nz); 225069db28dcSHong Zhang } else { /* only copy matrix values into sbuf_a */ 225169db28dcSHong Zhang rptr = sbuf_j; 225269db28dcSHong Zhang vals = sbuf_a; 225369db28dcSHong Zhang rptr[0] = 0; 225469db28dcSHong Zhang for (i=0; i<rend-rstart; i++){ 225569db28dcSHong Zhang row = i + rstart; 225669db28dcSHong Zhang nzA = a->i[i+1] - a->i[i]; nzB = b->i[i+1] - b->i[i]; 
225769db28dcSHong Zhang ncols = nzA + nzB; 225869db28dcSHong Zhang cworkA = a->j + a->i[i]; cworkB = b->j + b->i[i]; 225969db28dcSHong Zhang aworkA = a->a + a->i[i]; aworkB = b->a + b->i[i]; 226069db28dcSHong Zhang lwrite = 0; 226169db28dcSHong Zhang for (l=0; l<nzB; l++) { 226269db28dcSHong Zhang if ((ctmp = bmap[cworkB[l]]) < cstart) vals[lwrite++] = aworkB[l]; 226369db28dcSHong Zhang } 226469db28dcSHong Zhang for (l=0; l<nzA; l++) vals[lwrite++] = aworkA[l]; 226569db28dcSHong Zhang for (l=0; l<nzB; l++) { 226669db28dcSHong Zhang if ((ctmp = bmap[cworkB[l]]) >= cend) vals[lwrite++] = aworkB[l]; 226769db28dcSHong Zhang } 226869db28dcSHong Zhang vals += ncols; 226969db28dcSHong Zhang rptr[i+1] = rptr[i] + ncols; 227069db28dcSHong Zhang } 227169db28dcSHong Zhang } /* endof if (reuse == MAT_INITIAL_MATRIX) */ 227269db28dcSHong Zhang 227369db28dcSHong Zhang /* send nzlocal to others, and recv other's nzlocal */ 227469db28dcSHong Zhang /*--------------------------------------------------*/ 227569db28dcSHong Zhang if (reuse == MAT_INITIAL_MATRIX){ 227669db28dcSHong Zhang ierr = PetscMalloc2(3*(nsends + nrecvs)+1,MPI_Request,&s_waits3,nsends+1,MPI_Status,&send_status);CHKERRQ(ierr); 227769db28dcSHong Zhang s_waits2 = s_waits3 + nsends; 227869db28dcSHong Zhang s_waits1 = s_waits2 + nsends; 227969db28dcSHong Zhang r_waits1 = s_waits1 + nsends; 228069db28dcSHong Zhang r_waits2 = r_waits1 + nrecvs; 228169db28dcSHong Zhang r_waits3 = r_waits2 + nrecvs; 228269db28dcSHong Zhang } else { 228369db28dcSHong Zhang ierr = PetscMalloc2(nsends + nrecvs +1,MPI_Request,&s_waits3,nsends+1,MPI_Status,&send_status);CHKERRQ(ierr); 228469db28dcSHong Zhang r_waits3 = s_waits3 + nsends; 228569db28dcSHong Zhang } 228669db28dcSHong Zhang 228769db28dcSHong Zhang ierr = PetscObjectGetNewTag((PetscObject)mat,&tag3);CHKERRQ(ierr); 228869db28dcSHong Zhang if (reuse == MAT_INITIAL_MATRIX){ 228969db28dcSHong Zhang /* get new tags to keep the communication clean */ 229069db28dcSHong Zhang ierr = 
PetscObjectGetNewTag((PetscObject)mat,&tag1);CHKERRQ(ierr); 229169db28dcSHong Zhang ierr = PetscObjectGetNewTag((PetscObject)mat,&tag2);CHKERRQ(ierr); 22921d79065fSBarry Smith ierr = PetscMalloc4(nsends,PetscInt,&sbuf_nz,nrecvs,PetscInt,&rbuf_nz,nrecvs,PetscInt*,&rbuf_j,nrecvs,PetscScalar*,&rbuf_a);CHKERRQ(ierr); 229369db28dcSHong Zhang 229469db28dcSHong Zhang /* post receives of other's nzlocal */ 229569db28dcSHong Zhang for (i=0; i<nrecvs; i++){ 229669db28dcSHong Zhang ierr = MPI_Irecv(rbuf_nz+i,1,MPIU_INT,MPI_ANY_SOURCE,tag1,comm,r_waits1+i);CHKERRQ(ierr); 229769db28dcSHong Zhang } 229869db28dcSHong Zhang /* send nzlocal to others */ 229969db28dcSHong Zhang for (i=0; i<nsends; i++){ 230069db28dcSHong Zhang sbuf_nz[i] = nzlocal; 230169db28dcSHong Zhang ierr = MPI_Isend(sbuf_nz+i,1,MPIU_INT,send_rank[i],tag1,comm,s_waits1+i);CHKERRQ(ierr); 230269db28dcSHong Zhang } 230369db28dcSHong Zhang /* wait on receives of nzlocal; allocate space for rbuf_j, rbuf_a */ 230469db28dcSHong Zhang count = nrecvs; 230569db28dcSHong Zhang while (count) { 230669db28dcSHong Zhang ierr = MPI_Waitany(nrecvs,r_waits1,&imdex,&recv_status);CHKERRQ(ierr); 230769db28dcSHong Zhang recv_rank[imdex] = recv_status.MPI_SOURCE; 230869db28dcSHong Zhang /* allocate rbuf_a and rbuf_j; then post receives of rbuf_j */ 230969db28dcSHong Zhang ierr = PetscMalloc((rbuf_nz[imdex]+1)*sizeof(PetscScalar),&rbuf_a[imdex]);CHKERRQ(ierr); 231069db28dcSHong Zhang 231169db28dcSHong Zhang i = rowrange[recv_status.MPI_SOURCE+1] - rowrange[recv_status.MPI_SOURCE]; /* number of expected mat->i */ 231269db28dcSHong Zhang rbuf_nz[imdex] += i + 2; 231369db28dcSHong Zhang ierr = PetscMalloc(rbuf_nz[imdex]*sizeof(PetscInt),&rbuf_j[imdex]);CHKERRQ(ierr); 231469db28dcSHong Zhang ierr = MPI_Irecv(rbuf_j[imdex],rbuf_nz[imdex],MPIU_INT,recv_status.MPI_SOURCE,tag2,comm,r_waits2+imdex);CHKERRQ(ierr); 231569db28dcSHong Zhang count--; 231669db28dcSHong Zhang } 231769db28dcSHong Zhang /* wait on sends of nzlocal */ 231869db28dcSHong 
Zhang if (nsends) {ierr = MPI_Waitall(nsends,s_waits1,send_status);CHKERRQ(ierr);} 231969db28dcSHong Zhang /* send mat->i,j to others, and recv from other's */ 232069db28dcSHong Zhang /*------------------------------------------------*/ 232169db28dcSHong Zhang for (i=0; i<nsends; i++){ 232269db28dcSHong Zhang j = nzlocal + rowrange[rank+1] - rowrange[rank] + 1; 232369db28dcSHong Zhang ierr = MPI_Isend(sbuf_j,j,MPIU_INT,send_rank[i],tag2,comm,s_waits2+i);CHKERRQ(ierr); 232469db28dcSHong Zhang } 232569db28dcSHong Zhang /* wait on receives of mat->i,j */ 232669db28dcSHong Zhang /*------------------------------*/ 232769db28dcSHong Zhang count = nrecvs; 232869db28dcSHong Zhang while (count) { 232969db28dcSHong Zhang ierr = MPI_Waitany(nrecvs,r_waits2,&imdex,&recv_status);CHKERRQ(ierr); 2330e32f2f54SBarry Smith if (recv_rank[imdex] != recv_status.MPI_SOURCE) SETERRQ2(PETSC_COMM_SELF,1, "recv_rank %d != MPI_SOURCE %d",recv_rank[imdex],recv_status.MPI_SOURCE); 233169db28dcSHong Zhang count--; 233269db28dcSHong Zhang } 233369db28dcSHong Zhang /* wait on sends of mat->i,j */ 233469db28dcSHong Zhang /*---------------------------*/ 233569db28dcSHong Zhang if (nsends) { 233669db28dcSHong Zhang ierr = MPI_Waitall(nsends,s_waits2,send_status);CHKERRQ(ierr); 233769db28dcSHong Zhang } 233869db28dcSHong Zhang } /* endof if (reuse == MAT_INITIAL_MATRIX) */ 233969db28dcSHong Zhang 234069db28dcSHong Zhang /* post receives, send and receive mat->a */ 234169db28dcSHong Zhang /*----------------------------------------*/ 234269db28dcSHong Zhang for (imdex=0; imdex<nrecvs; imdex++) { 234369db28dcSHong Zhang ierr = MPI_Irecv(rbuf_a[imdex],rbuf_nz[imdex],MPIU_SCALAR,recv_rank[imdex],tag3,comm,r_waits3+imdex);CHKERRQ(ierr); 234469db28dcSHong Zhang } 234569db28dcSHong Zhang for (i=0; i<nsends; i++){ 234669db28dcSHong Zhang ierr = MPI_Isend(sbuf_a,nzlocal,MPIU_SCALAR,send_rank[i],tag3,comm,s_waits3+i);CHKERRQ(ierr); 234769db28dcSHong Zhang } 234869db28dcSHong Zhang count = nrecvs; 
234969db28dcSHong Zhang while (count) { 235069db28dcSHong Zhang ierr = MPI_Waitany(nrecvs,r_waits3,&imdex,&recv_status);CHKERRQ(ierr); 2351e32f2f54SBarry Smith if (recv_rank[imdex] != recv_status.MPI_SOURCE) SETERRQ2(PETSC_COMM_SELF,1, "recv_rank %d != MPI_SOURCE %d",recv_rank[imdex],recv_status.MPI_SOURCE); 235269db28dcSHong Zhang count--; 235369db28dcSHong Zhang } 235469db28dcSHong Zhang if (nsends) { 235569db28dcSHong Zhang ierr = MPI_Waitall(nsends,s_waits3,send_status);CHKERRQ(ierr); 235669db28dcSHong Zhang } 235769db28dcSHong Zhang 235869db28dcSHong Zhang ierr = PetscFree2(s_waits3,send_status);CHKERRQ(ierr); 235969db28dcSHong Zhang 236069db28dcSHong Zhang /* create redundant matrix */ 236169db28dcSHong Zhang /*-------------------------*/ 236269db28dcSHong Zhang if (reuse == MAT_INITIAL_MATRIX){ 236369db28dcSHong Zhang /* compute rownz_max for preallocation */ 236469db28dcSHong Zhang for (imdex=0; imdex<nrecvs; imdex++){ 236569db28dcSHong Zhang j = rowrange[recv_rank[imdex]+1] - rowrange[recv_rank[imdex]]; 236669db28dcSHong Zhang rptr = rbuf_j[imdex]; 236769db28dcSHong Zhang for (i=0; i<j; i++){ 236869db28dcSHong Zhang ncols = rptr[i+1] - rptr[i]; 236969db28dcSHong Zhang if (rownz_max < ncols) rownz_max = ncols; 237069db28dcSHong Zhang } 237169db28dcSHong Zhang } 237269db28dcSHong Zhang 237369db28dcSHong Zhang ierr = MatCreate(subcomm,&C);CHKERRQ(ierr); 237469db28dcSHong Zhang ierr = MatSetSizes(C,mlocal_sub,mlocal_sub,PETSC_DECIDE,PETSC_DECIDE);CHKERRQ(ierr); 237569db28dcSHong Zhang ierr = MatSetFromOptions(C);CHKERRQ(ierr); 237669db28dcSHong Zhang ierr = MatSeqAIJSetPreallocation(C,rownz_max,PETSC_NULL);CHKERRQ(ierr); 237769db28dcSHong Zhang ierr = MatMPIAIJSetPreallocation(C,rownz_max,PETSC_NULL,rownz_max,PETSC_NULL);CHKERRQ(ierr); 237869db28dcSHong Zhang } else { 237969db28dcSHong Zhang C = *matredundant; 238069db28dcSHong Zhang } 238169db28dcSHong Zhang 238269db28dcSHong Zhang /* insert local matrix entries */ 238369db28dcSHong Zhang rptr = sbuf_j; 
238469db28dcSHong Zhang cols = sbuf_j + rend-rstart + 1; 238569db28dcSHong Zhang vals = sbuf_a; 238669db28dcSHong Zhang for (i=0; i<rend-rstart; i++){ 238769db28dcSHong Zhang row = i + rstart; 238869db28dcSHong Zhang ncols = rptr[i+1] - rptr[i]; 238969db28dcSHong Zhang ierr = MatSetValues(C,1,&row,ncols,cols,vals,INSERT_VALUES);CHKERRQ(ierr); 239069db28dcSHong Zhang vals += ncols; 239169db28dcSHong Zhang cols += ncols; 239269db28dcSHong Zhang } 239369db28dcSHong Zhang /* insert received matrix entries */ 239469db28dcSHong Zhang for (imdex=0; imdex<nrecvs; imdex++){ 239569db28dcSHong Zhang rstart = rowrange[recv_rank[imdex]]; 239669db28dcSHong Zhang rend = rowrange[recv_rank[imdex]+1]; 239769db28dcSHong Zhang rptr = rbuf_j[imdex]; 239869db28dcSHong Zhang cols = rbuf_j[imdex] + rend-rstart + 1; 239969db28dcSHong Zhang vals = rbuf_a[imdex]; 240069db28dcSHong Zhang for (i=0; i<rend-rstart; i++){ 240169db28dcSHong Zhang row = i + rstart; 240269db28dcSHong Zhang ncols = rptr[i+1] - rptr[i]; 240369db28dcSHong Zhang ierr = MatSetValues(C,1,&row,ncols,cols,vals,INSERT_VALUES);CHKERRQ(ierr); 240469db28dcSHong Zhang vals += ncols; 240569db28dcSHong Zhang cols += ncols; 240669db28dcSHong Zhang } 240769db28dcSHong Zhang } 240869db28dcSHong Zhang ierr = MatAssemblyBegin(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 240969db28dcSHong Zhang ierr = MatAssemblyEnd(C,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 241069db28dcSHong Zhang ierr = MatGetSize(C,&M,&N);CHKERRQ(ierr); 2411e32f2f54SBarry Smith if (M != mat->rmap->N || N != mat->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"redundant mat size %d != input mat size %d",M,mat->rmap->N); 241269db28dcSHong Zhang if (reuse == MAT_INITIAL_MATRIX){ 241369db28dcSHong Zhang PetscContainer container; 241469db28dcSHong Zhang *matredundant = C; 241569db28dcSHong Zhang /* create a supporting struct and attach it to C for reuse */ 241638f2d2fdSLisandro Dalcin ierr = PetscNewLog(C,Mat_Redundant,&redund);CHKERRQ(ierr); 241769db28dcSHong Zhang ierr = 
PetscContainerCreate(PETSC_COMM_SELF,&container);CHKERRQ(ierr); 241869db28dcSHong Zhang ierr = PetscContainerSetPointer(container,redund);CHKERRQ(ierr); 241969db28dcSHong Zhang ierr = PetscObjectCompose((PetscObject)C,"Mat_Redundant",(PetscObject)container);CHKERRQ(ierr); 242069db28dcSHong Zhang ierr = PetscContainerSetUserDestroy(container,PetscContainerDestroy_MatRedundant);CHKERRQ(ierr); 242169db28dcSHong Zhang 242269db28dcSHong Zhang redund->nzlocal = nzlocal; 242369db28dcSHong Zhang redund->nsends = nsends; 242469db28dcSHong Zhang redund->nrecvs = nrecvs; 242569db28dcSHong Zhang redund->send_rank = send_rank; 24261d79065fSBarry Smith redund->recv_rank = recv_rank; 242769db28dcSHong Zhang redund->sbuf_nz = sbuf_nz; 24281d79065fSBarry Smith redund->rbuf_nz = rbuf_nz; 242969db28dcSHong Zhang redund->sbuf_j = sbuf_j; 243069db28dcSHong Zhang redund->sbuf_a = sbuf_a; 243169db28dcSHong Zhang redund->rbuf_j = rbuf_j; 243269db28dcSHong Zhang redund->rbuf_a = rbuf_a; 243369db28dcSHong Zhang 243469db28dcSHong Zhang redund->MatDestroy = C->ops->destroy; 243569db28dcSHong Zhang C->ops->destroy = MatDestroy_MatRedundant; 243669db28dcSHong Zhang } 243769db28dcSHong Zhang PetscFunctionReturn(0); 243869db28dcSHong Zhang } 243969db28dcSHong Zhang 244003bc72f1SMatthew Knepley #undef __FUNCT__ 2441c91732d9SHong Zhang #define __FUNCT__ "MatGetRowMaxAbs_MPIAIJ" 2442c91732d9SHong Zhang PetscErrorCode MatGetRowMaxAbs_MPIAIJ(Mat A, Vec v, PetscInt idx[]) 2443c91732d9SHong Zhang { 2444c91732d9SHong Zhang Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 2445c91732d9SHong Zhang PetscErrorCode ierr; 2446c91732d9SHong Zhang PetscInt i,*idxb = 0; 2447c91732d9SHong Zhang PetscScalar *va,*vb; 2448c91732d9SHong Zhang Vec vtmp; 2449c91732d9SHong Zhang 2450c91732d9SHong Zhang PetscFunctionBegin; 2451c91732d9SHong Zhang ierr = MatGetRowMaxAbs(a->A,v,idx);CHKERRQ(ierr); 2452c91732d9SHong Zhang ierr = VecGetArray(v,&va);CHKERRQ(ierr); 2453c91732d9SHong Zhang if (idx) { 2454192daf7cSBarry Smith for (i=0; 
i<A->rmap->n; i++) { 2455d0f46423SBarry Smith if (PetscAbsScalar(va[i])) idx[i] += A->cmap->rstart; 2456c91732d9SHong Zhang } 2457c91732d9SHong Zhang } 2458c91732d9SHong Zhang 2459d0f46423SBarry Smith ierr = VecCreateSeq(PETSC_COMM_SELF,A->rmap->n,&vtmp);CHKERRQ(ierr); 2460c91732d9SHong Zhang if (idx) { 2461d0f46423SBarry Smith ierr = PetscMalloc(A->rmap->n*sizeof(PetscInt),&idxb);CHKERRQ(ierr); 2462c91732d9SHong Zhang } 2463c91732d9SHong Zhang ierr = MatGetRowMaxAbs(a->B,vtmp,idxb);CHKERRQ(ierr); 2464c91732d9SHong Zhang ierr = VecGetArray(vtmp,&vb);CHKERRQ(ierr); 2465c91732d9SHong Zhang 2466d0f46423SBarry Smith for (i=0; i<A->rmap->n; i++){ 2467c91732d9SHong Zhang if (PetscAbsScalar(va[i]) < PetscAbsScalar(vb[i])) { 2468c91732d9SHong Zhang va[i] = vb[i]; 2469c91732d9SHong Zhang if (idx) idx[i] = a->garray[idxb[i]]; 2470c91732d9SHong Zhang } 2471c91732d9SHong Zhang } 2472c91732d9SHong Zhang 2473c91732d9SHong Zhang ierr = VecRestoreArray(v,&va);CHKERRQ(ierr); 2474c91732d9SHong Zhang ierr = VecRestoreArray(vtmp,&vb);CHKERRQ(ierr); 2475c91732d9SHong Zhang if (idxb) { 2476c91732d9SHong Zhang ierr = PetscFree(idxb);CHKERRQ(ierr); 2477c91732d9SHong Zhang } 2478c91732d9SHong Zhang ierr = VecDestroy(vtmp);CHKERRQ(ierr); 2479c91732d9SHong Zhang PetscFunctionReturn(0); 2480c91732d9SHong Zhang } 2481c91732d9SHong Zhang 2482c91732d9SHong Zhang #undef __FUNCT__ 2483c87e5d42SMatthew Knepley #define __FUNCT__ "MatGetRowMinAbs_MPIAIJ" 2484c87e5d42SMatthew Knepley PetscErrorCode MatGetRowMinAbs_MPIAIJ(Mat A, Vec v, PetscInt idx[]) 2485c87e5d42SMatthew Knepley { 2486c87e5d42SMatthew Knepley Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 2487c87e5d42SMatthew Knepley PetscErrorCode ierr; 2488c87e5d42SMatthew Knepley PetscInt i,*idxb = 0; 2489c87e5d42SMatthew Knepley PetscScalar *va,*vb; 2490c87e5d42SMatthew Knepley Vec vtmp; 2491c87e5d42SMatthew Knepley 2492c87e5d42SMatthew Knepley PetscFunctionBegin; 2493c87e5d42SMatthew Knepley ierr = MatGetRowMinAbs(a->A,v,idx);CHKERRQ(ierr); 
2494c87e5d42SMatthew Knepley ierr = VecGetArray(v,&va);CHKERRQ(ierr); 2495c87e5d42SMatthew Knepley if (idx) { 2496c87e5d42SMatthew Knepley for (i=0; i<A->cmap->n; i++) { 2497c87e5d42SMatthew Knepley if (PetscAbsScalar(va[i])) idx[i] += A->cmap->rstart; 2498c87e5d42SMatthew Knepley } 2499c87e5d42SMatthew Knepley } 2500c87e5d42SMatthew Knepley 2501c87e5d42SMatthew Knepley ierr = VecCreateSeq(PETSC_COMM_SELF,A->rmap->n,&vtmp);CHKERRQ(ierr); 2502c87e5d42SMatthew Knepley if (idx) { 2503c87e5d42SMatthew Knepley ierr = PetscMalloc(A->rmap->n*sizeof(PetscInt),&idxb);CHKERRQ(ierr); 2504c87e5d42SMatthew Knepley } 2505c87e5d42SMatthew Knepley ierr = MatGetRowMinAbs(a->B,vtmp,idxb);CHKERRQ(ierr); 2506c87e5d42SMatthew Knepley ierr = VecGetArray(vtmp,&vb);CHKERRQ(ierr); 2507c87e5d42SMatthew Knepley 2508c87e5d42SMatthew Knepley for (i=0; i<A->rmap->n; i++){ 2509c87e5d42SMatthew Knepley if (PetscAbsScalar(va[i]) > PetscAbsScalar(vb[i])) { 2510c87e5d42SMatthew Knepley va[i] = vb[i]; 2511c87e5d42SMatthew Knepley if (idx) idx[i] = a->garray[idxb[i]]; 2512c87e5d42SMatthew Knepley } 2513c87e5d42SMatthew Knepley } 2514c87e5d42SMatthew Knepley 2515c87e5d42SMatthew Knepley ierr = VecRestoreArray(v,&va);CHKERRQ(ierr); 2516c87e5d42SMatthew Knepley ierr = VecRestoreArray(vtmp,&vb);CHKERRQ(ierr); 2517c87e5d42SMatthew Knepley if (idxb) { 2518c87e5d42SMatthew Knepley ierr = PetscFree(idxb);CHKERRQ(ierr); 2519c87e5d42SMatthew Knepley } 2520c87e5d42SMatthew Knepley ierr = VecDestroy(vtmp);CHKERRQ(ierr); 2521c87e5d42SMatthew Knepley PetscFunctionReturn(0); 2522c87e5d42SMatthew Knepley } 2523c87e5d42SMatthew Knepley 2524c87e5d42SMatthew Knepley #undef __FUNCT__ 252503bc72f1SMatthew Knepley #define __FUNCT__ "MatGetRowMin_MPIAIJ" 252603bc72f1SMatthew Knepley PetscErrorCode MatGetRowMin_MPIAIJ(Mat A, Vec v, PetscInt idx[]) 252703bc72f1SMatthew Knepley { 252803bc72f1SMatthew Knepley Mat_MPIAIJ *mat = (Mat_MPIAIJ *) A->data; 2529d0f46423SBarry Smith PetscInt n = A->rmap->n; 2530d0f46423SBarry Smith 
PetscInt cstart = A->cmap->rstart; 253103bc72f1SMatthew Knepley PetscInt *cmap = mat->garray; 253203bc72f1SMatthew Knepley PetscInt *diagIdx, *offdiagIdx; 253303bc72f1SMatthew Knepley Vec diagV, offdiagV; 253403bc72f1SMatthew Knepley PetscScalar *a, *diagA, *offdiagA; 253503bc72f1SMatthew Knepley PetscInt r; 253603bc72f1SMatthew Knepley PetscErrorCode ierr; 253703bc72f1SMatthew Knepley 253803bc72f1SMatthew Knepley PetscFunctionBegin; 253903bc72f1SMatthew Knepley ierr = PetscMalloc2(n,PetscInt,&diagIdx,n,PetscInt,&offdiagIdx);CHKERRQ(ierr); 2540e64afeacSLisandro Dalcin ierr = VecCreateSeq(((PetscObject)A)->comm, n, &diagV);CHKERRQ(ierr); 2541e64afeacSLisandro Dalcin ierr = VecCreateSeq(((PetscObject)A)->comm, n, &offdiagV);CHKERRQ(ierr); 254203bc72f1SMatthew Knepley ierr = MatGetRowMin(mat->A, diagV, diagIdx);CHKERRQ(ierr); 254303bc72f1SMatthew Knepley ierr = MatGetRowMin(mat->B, offdiagV, offdiagIdx);CHKERRQ(ierr); 254403bc72f1SMatthew Knepley ierr = VecGetArray(v, &a);CHKERRQ(ierr); 254503bc72f1SMatthew Knepley ierr = VecGetArray(diagV, &diagA);CHKERRQ(ierr); 254603bc72f1SMatthew Knepley ierr = VecGetArray(offdiagV, &offdiagA);CHKERRQ(ierr); 254703bc72f1SMatthew Knepley for(r = 0; r < n; ++r) { 2548028cd4eaSSatish Balay if (PetscAbsScalar(diagA[r]) <= PetscAbsScalar(offdiagA[r])) { 254903bc72f1SMatthew Knepley a[r] = diagA[r]; 255003bc72f1SMatthew Knepley idx[r] = cstart + diagIdx[r]; 255103bc72f1SMatthew Knepley } else { 255203bc72f1SMatthew Knepley a[r] = offdiagA[r]; 255303bc72f1SMatthew Knepley idx[r] = cmap[offdiagIdx[r]]; 255403bc72f1SMatthew Knepley } 255503bc72f1SMatthew Knepley } 255603bc72f1SMatthew Knepley ierr = VecRestoreArray(v, &a);CHKERRQ(ierr); 255703bc72f1SMatthew Knepley ierr = VecRestoreArray(diagV, &diagA);CHKERRQ(ierr); 255803bc72f1SMatthew Knepley ierr = VecRestoreArray(offdiagV, &offdiagA);CHKERRQ(ierr); 255903bc72f1SMatthew Knepley ierr = VecDestroy(diagV);CHKERRQ(ierr); 256003bc72f1SMatthew Knepley ierr = 
VecDestroy(offdiagV);CHKERRQ(ierr); 256103bc72f1SMatthew Knepley ierr = PetscFree2(diagIdx, offdiagIdx);CHKERRQ(ierr); 256203bc72f1SMatthew Knepley PetscFunctionReturn(0); 256303bc72f1SMatthew Knepley } 256403bc72f1SMatthew Knepley 25655494a064SHong Zhang #undef __FUNCT__ 2566c87e5d42SMatthew Knepley #define __FUNCT__ "MatGetRowMax_MPIAIJ" 2567c87e5d42SMatthew Knepley PetscErrorCode MatGetRowMax_MPIAIJ(Mat A, Vec v, PetscInt idx[]) 2568c87e5d42SMatthew Knepley { 2569c87e5d42SMatthew Knepley Mat_MPIAIJ *mat = (Mat_MPIAIJ *) A->data; 2570c87e5d42SMatthew Knepley PetscInt n = A->rmap->n; 2571c87e5d42SMatthew Knepley PetscInt cstart = A->cmap->rstart; 2572c87e5d42SMatthew Knepley PetscInt *cmap = mat->garray; 2573c87e5d42SMatthew Knepley PetscInt *diagIdx, *offdiagIdx; 2574c87e5d42SMatthew Knepley Vec diagV, offdiagV; 2575c87e5d42SMatthew Knepley PetscScalar *a, *diagA, *offdiagA; 2576c87e5d42SMatthew Knepley PetscInt r; 2577c87e5d42SMatthew Knepley PetscErrorCode ierr; 2578c87e5d42SMatthew Knepley 2579c87e5d42SMatthew Knepley PetscFunctionBegin; 2580c87e5d42SMatthew Knepley ierr = PetscMalloc2(n,PetscInt,&diagIdx,n,PetscInt,&offdiagIdx);CHKERRQ(ierr); 2581c87e5d42SMatthew Knepley ierr = VecCreateSeq(((PetscObject)A)->comm, n, &diagV);CHKERRQ(ierr); 2582c87e5d42SMatthew Knepley ierr = VecCreateSeq(((PetscObject)A)->comm, n, &offdiagV);CHKERRQ(ierr); 2583c87e5d42SMatthew Knepley ierr = MatGetRowMax(mat->A, diagV, diagIdx);CHKERRQ(ierr); 2584c87e5d42SMatthew Knepley ierr = MatGetRowMax(mat->B, offdiagV, offdiagIdx);CHKERRQ(ierr); 2585c87e5d42SMatthew Knepley ierr = VecGetArray(v, &a);CHKERRQ(ierr); 2586c87e5d42SMatthew Knepley ierr = VecGetArray(diagV, &diagA);CHKERRQ(ierr); 2587c87e5d42SMatthew Knepley ierr = VecGetArray(offdiagV, &offdiagA);CHKERRQ(ierr); 2588c87e5d42SMatthew Knepley for(r = 0; r < n; ++r) { 2589c87e5d42SMatthew Knepley if (PetscAbsScalar(diagA[r]) >= PetscAbsScalar(offdiagA[r])) { 2590c87e5d42SMatthew Knepley a[r] = diagA[r]; 2591c87e5d42SMatthew 
Knepley idx[r] = cstart + diagIdx[r]; 2592c87e5d42SMatthew Knepley } else { 2593c87e5d42SMatthew Knepley a[r] = offdiagA[r]; 2594c87e5d42SMatthew Knepley idx[r] = cmap[offdiagIdx[r]]; 2595c87e5d42SMatthew Knepley } 2596c87e5d42SMatthew Knepley } 2597c87e5d42SMatthew Knepley ierr = VecRestoreArray(v, &a);CHKERRQ(ierr); 2598c87e5d42SMatthew Knepley ierr = VecRestoreArray(diagV, &diagA);CHKERRQ(ierr); 2599c87e5d42SMatthew Knepley ierr = VecRestoreArray(offdiagV, &offdiagA);CHKERRQ(ierr); 2600c87e5d42SMatthew Knepley ierr = VecDestroy(diagV);CHKERRQ(ierr); 2601c87e5d42SMatthew Knepley ierr = VecDestroy(offdiagV);CHKERRQ(ierr); 2602c87e5d42SMatthew Knepley ierr = PetscFree2(diagIdx, offdiagIdx);CHKERRQ(ierr); 2603c87e5d42SMatthew Knepley PetscFunctionReturn(0); 2604c87e5d42SMatthew Knepley } 2605c87e5d42SMatthew Knepley 2606c87e5d42SMatthew Knepley #undef __FUNCT__ 2607829201f2SHong Zhang #define __FUNCT__ "MatGetSeqNonzerostructure_MPIAIJ" 2608f6d58c54SBarry Smith PetscErrorCode MatGetSeqNonzerostructure_MPIAIJ(Mat mat,Mat *newmat) 26095494a064SHong Zhang { 26105494a064SHong Zhang PetscErrorCode ierr; 2611f6d58c54SBarry Smith Mat *dummy; 26125494a064SHong Zhang 26135494a064SHong Zhang PetscFunctionBegin; 2614f6d58c54SBarry Smith ierr = MatGetSubMatrix_MPIAIJ_All(mat,MAT_DO_NOT_GET_VALUES,MAT_INITIAL_MATRIX,&dummy);CHKERRQ(ierr); 2615f6d58c54SBarry Smith *newmat = *dummy; 2616f6d58c54SBarry Smith ierr = PetscFree(dummy);CHKERRQ(ierr); 26175494a064SHong Zhang PetscFunctionReturn(0); 26185494a064SHong Zhang } 26195494a064SHong Zhang 26203acb8795SBarry Smith extern PetscErrorCode PETSCMAT_DLLEXPORT MatFDColoringApply_AIJ(Mat,MatFDColoring,Vec,MatStructure*,void*); 26218a729477SBarry Smith /* -------------------------------------------------------------------*/ 2622cda55fadSBarry Smith static struct _MatOps MatOps_Values = {MatSetValues_MPIAIJ, 2623cda55fadSBarry Smith MatGetRow_MPIAIJ, 2624cda55fadSBarry Smith MatRestoreRow_MPIAIJ, 2625cda55fadSBarry Smith MatMult_MPIAIJ, 
262697304618SKris Buschelman /* 4*/ MatMultAdd_MPIAIJ, 26277c922b88SBarry Smith MatMultTranspose_MPIAIJ, 26287c922b88SBarry Smith MatMultTransposeAdd_MPIAIJ, 2629103bf8bdSMatthew Knepley #ifdef PETSC_HAVE_PBGL 2630103bf8bdSMatthew Knepley MatSolve_MPIAIJ, 2631103bf8bdSMatthew Knepley #else 2632cda55fadSBarry Smith 0, 2633103bf8bdSMatthew Knepley #endif 2634cda55fadSBarry Smith 0, 2635cda55fadSBarry Smith 0, 263697304618SKris Buschelman /*10*/ 0, 2637cda55fadSBarry Smith 0, 2638cda55fadSBarry Smith 0, 263941f059aeSBarry Smith MatSOR_MPIAIJ, 2640b7c46309SBarry Smith MatTranspose_MPIAIJ, 264197304618SKris Buschelman /*15*/ MatGetInfo_MPIAIJ, 2642cda55fadSBarry Smith MatEqual_MPIAIJ, 2643cda55fadSBarry Smith MatGetDiagonal_MPIAIJ, 2644cda55fadSBarry Smith MatDiagonalScale_MPIAIJ, 2645cda55fadSBarry Smith MatNorm_MPIAIJ, 264697304618SKris Buschelman /*20*/ MatAssemblyBegin_MPIAIJ, 2647cda55fadSBarry Smith MatAssemblyEnd_MPIAIJ, 2648cda55fadSBarry Smith MatSetOption_MPIAIJ, 2649cda55fadSBarry Smith MatZeroEntries_MPIAIJ, 2650d519adbfSMatthew Knepley /*24*/ MatZeroRows_MPIAIJ, 2651cda55fadSBarry Smith 0, 2652103bf8bdSMatthew Knepley #ifdef PETSC_HAVE_PBGL 2653719d5645SBarry Smith 0, 2654103bf8bdSMatthew Knepley #else 2655cda55fadSBarry Smith 0, 2656103bf8bdSMatthew Knepley #endif 2657cda55fadSBarry Smith 0, 2658cda55fadSBarry Smith 0, 2659d519adbfSMatthew Knepley /*29*/ MatSetUpPreallocation_MPIAIJ, 2660103bf8bdSMatthew Knepley #ifdef PETSC_HAVE_PBGL 2661719d5645SBarry Smith 0, 2662103bf8bdSMatthew Knepley #else 2663cda55fadSBarry Smith 0, 2664103bf8bdSMatthew Knepley #endif 2665cda55fadSBarry Smith 0, 2666cda55fadSBarry Smith 0, 2667cda55fadSBarry Smith 0, 2668d519adbfSMatthew Knepley /*34*/ MatDuplicate_MPIAIJ, 2669cda55fadSBarry Smith 0, 2670cda55fadSBarry Smith 0, 2671cda55fadSBarry Smith 0, 2672cda55fadSBarry Smith 0, 2673d519adbfSMatthew Knepley /*39*/ MatAXPY_MPIAIJ, 2674cda55fadSBarry Smith MatGetSubMatrices_MPIAIJ, 2675cda55fadSBarry Smith 
MatIncreaseOverlap_MPIAIJ, 2676cda55fadSBarry Smith MatGetValues_MPIAIJ, 2677cb5b572fSBarry Smith MatCopy_MPIAIJ, 2678d519adbfSMatthew Knepley /*44*/ MatGetRowMax_MPIAIJ, 2679cda55fadSBarry Smith MatScale_MPIAIJ, 2680cda55fadSBarry Smith 0, 2681cda55fadSBarry Smith 0, 2682cda55fadSBarry Smith 0, 2683d519adbfSMatthew Knepley /*49*/ MatSetBlockSize_MPIAIJ, 2684cda55fadSBarry Smith 0, 2685cda55fadSBarry Smith 0, 2686cda55fadSBarry Smith 0, 2687cda55fadSBarry Smith 0, 2688d519adbfSMatthew Knepley /*54*/ MatFDColoringCreate_MPIAIJ, 2689cda55fadSBarry Smith 0, 2690cda55fadSBarry Smith MatSetUnfactored_MPIAIJ, 269142e855d1Svictor MatPermute_MPIAIJ, 2692cda55fadSBarry Smith 0, 2693d519adbfSMatthew Knepley /*59*/ MatGetSubMatrix_MPIAIJ, 2694e03a110bSBarry Smith MatDestroy_MPIAIJ, 2695e03a110bSBarry Smith MatView_MPIAIJ, 2696357abbc8SBarry Smith 0, 2697a2243be0SBarry Smith 0, 2698d519adbfSMatthew Knepley /*64*/ 0, 2699a2243be0SBarry Smith 0, 2700a2243be0SBarry Smith 0, 2701a2243be0SBarry Smith 0, 2702a2243be0SBarry Smith 0, 2703d519adbfSMatthew Knepley /*69*/ MatGetRowMaxAbs_MPIAIJ, 2704c87e5d42SMatthew Knepley MatGetRowMinAbs_MPIAIJ, 2705a2243be0SBarry Smith 0, 2706a2243be0SBarry Smith MatSetColoring_MPIAIJ, 2707dcf5cc72SBarry Smith #if defined(PETSC_HAVE_ADIC) 2708779c1a83SBarry Smith MatSetValuesAdic_MPIAIJ, 2709dcf5cc72SBarry Smith #else 2710dcf5cc72SBarry Smith 0, 2711dcf5cc72SBarry Smith #endif 271297304618SKris Buschelman MatSetValuesAdifor_MPIAIJ, 27133acb8795SBarry Smith /*75*/ MatFDColoringApply_AIJ, 271497304618SKris Buschelman 0, 271597304618SKris Buschelman 0, 271697304618SKris Buschelman 0, 271797304618SKris Buschelman 0, 271897304618SKris Buschelman /*80*/ 0, 271997304618SKris Buschelman 0, 272097304618SKris Buschelman 0, 27215bba2384SShri Abhyankar /*83*/ MatLoad_MPIAIJ, 27226284ec50SHong Zhang 0, 27236284ec50SHong Zhang 0, 27246284ec50SHong Zhang 0, 27256284ec50SHong Zhang 0, 2726865e5f61SKris Buschelman 0, 2727d519adbfSMatthew Knepley /*89*/ 
MatMatMult_MPIAIJ_MPIAIJ, 272826be0446SHong Zhang MatMatMultSymbolic_MPIAIJ_MPIAIJ, 272926be0446SHong Zhang MatMatMultNumeric_MPIAIJ_MPIAIJ, 27307a7894deSKris Buschelman MatPtAP_Basic, 27317a7894deSKris Buschelman MatPtAPSymbolic_MPIAIJ, 2732d519adbfSMatthew Knepley /*94*/ MatPtAPNumeric_MPIAIJ, 27337a7894deSKris Buschelman 0, 27347a7894deSKris Buschelman 0, 27357a7894deSKris Buschelman 0, 27367a7894deSKris Buschelman 0, 2737d519adbfSMatthew Knepley /*99*/ 0, 2738865e5f61SKris Buschelman MatPtAPSymbolic_MPIAIJ_MPIAIJ, 27397a7894deSKris Buschelman MatPtAPNumeric_MPIAIJ_MPIAIJ, 27402fd7e33dSBarry Smith MatConjugate_MPIAIJ, 27412fd7e33dSBarry Smith 0, 2742d519adbfSMatthew Knepley /*104*/MatSetValuesRow_MPIAIJ, 274399cafbc1SBarry Smith MatRealPart_MPIAIJ, 274469db28dcSHong Zhang MatImaginaryPart_MPIAIJ, 274569db28dcSHong Zhang 0, 274669db28dcSHong Zhang 0, 2747d519adbfSMatthew Knepley /*109*/0, 274803bc72f1SMatthew Knepley MatGetRedundantMatrix_MPIAIJ, 27495494a064SHong Zhang MatGetRowMin_MPIAIJ, 27505494a064SHong Zhang 0, 27515494a064SHong Zhang 0, 2752bd0c2dcbSBarry Smith /*114*/MatGetSeqNonzerostructure_MPIAIJ, 2753bd0c2dcbSBarry Smith 0, 2754bd0c2dcbSBarry Smith 0, 2755bd0c2dcbSBarry Smith 0, 2756bd0c2dcbSBarry Smith 0, 27578fb81238SShri Abhyankar /*119*/0, 27588fb81238SShri Abhyankar 0, 27598fb81238SShri Abhyankar 0, 2760d6037b41SHong Zhang 0, 2761d6037b41SHong Zhang MatGetMultiProcBlock_MPIAIJ 2762bd0c2dcbSBarry Smith }; 276336ce4990SBarry Smith 27642e8a6d31SBarry Smith /* ----------------------------------------------------------------------------------------*/ 27652e8a6d31SBarry Smith 2766fb2e594dSBarry Smith EXTERN_C_BEGIN 27674a2ae208SSatish Balay #undef __FUNCT__ 27684a2ae208SSatish Balay #define __FUNCT__ "MatStoreValues_MPIAIJ" 2769be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatStoreValues_MPIAIJ(Mat mat) 27702e8a6d31SBarry Smith { 27712e8a6d31SBarry Smith Mat_MPIAIJ *aij = (Mat_MPIAIJ *)mat->data; 2772dfbe8321SBarry Smith PetscErrorCode 
ierr; 27732e8a6d31SBarry Smith 27742e8a6d31SBarry Smith PetscFunctionBegin; 27752e8a6d31SBarry Smith ierr = MatStoreValues(aij->A);CHKERRQ(ierr); 27762e8a6d31SBarry Smith ierr = MatStoreValues(aij->B);CHKERRQ(ierr); 27772e8a6d31SBarry Smith PetscFunctionReturn(0); 27782e8a6d31SBarry Smith } 2779fb2e594dSBarry Smith EXTERN_C_END 27802e8a6d31SBarry Smith 2781fb2e594dSBarry Smith EXTERN_C_BEGIN 27824a2ae208SSatish Balay #undef __FUNCT__ 27834a2ae208SSatish Balay #define __FUNCT__ "MatRetrieveValues_MPIAIJ" 2784be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatRetrieveValues_MPIAIJ(Mat mat) 27852e8a6d31SBarry Smith { 27862e8a6d31SBarry Smith Mat_MPIAIJ *aij = (Mat_MPIAIJ *)mat->data; 2787dfbe8321SBarry Smith PetscErrorCode ierr; 27882e8a6d31SBarry Smith 27892e8a6d31SBarry Smith PetscFunctionBegin; 27902e8a6d31SBarry Smith ierr = MatRetrieveValues(aij->A);CHKERRQ(ierr); 27912e8a6d31SBarry Smith ierr = MatRetrieveValues(aij->B);CHKERRQ(ierr); 27922e8a6d31SBarry Smith PetscFunctionReturn(0); 27932e8a6d31SBarry Smith } 2794fb2e594dSBarry Smith EXTERN_C_END 27958a729477SBarry Smith 279627508adbSBarry Smith EXTERN_C_BEGIN 27974a2ae208SSatish Balay #undef __FUNCT__ 2798a23d5eceSKris Buschelman #define __FUNCT__ "MatMPIAIJSetPreallocation_MPIAIJ" 2799be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatMPIAIJSetPreallocation_MPIAIJ(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[]) 2800a23d5eceSKris Buschelman { 2801a23d5eceSKris Buschelman Mat_MPIAIJ *b; 2802dfbe8321SBarry Smith PetscErrorCode ierr; 2803b1d57f15SBarry Smith PetscInt i; 2804a23d5eceSKris Buschelman 2805a23d5eceSKris Buschelman PetscFunctionBegin; 2806a23d5eceSKris Buschelman if (d_nz == PETSC_DEFAULT || d_nz == PETSC_DECIDE) d_nz = 5; 2807a23d5eceSKris Buschelman if (o_nz == PETSC_DEFAULT || o_nz == PETSC_DECIDE) o_nz = 2; 2808e32f2f54SBarry Smith if (d_nz < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"d_nz cannot be less than 0: value %D",d_nz); 
2809e32f2f54SBarry Smith if (o_nz < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"o_nz cannot be less than 0: value %D",o_nz); 2810899cda47SBarry Smith 281126283091SBarry Smith ierr = PetscLayoutSetBlockSize(B->rmap,1);CHKERRQ(ierr); 281226283091SBarry Smith ierr = PetscLayoutSetBlockSize(B->cmap,1);CHKERRQ(ierr); 281326283091SBarry Smith ierr = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr); 281426283091SBarry Smith ierr = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr); 2815a23d5eceSKris Buschelman if (d_nnz) { 2816d0f46423SBarry Smith for (i=0; i<B->rmap->n; i++) { 2817e32f2f54SBarry Smith if (d_nnz[i] < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"d_nnz cannot be less than 0: local row %D value %D",i,d_nnz[i]); 2818a23d5eceSKris Buschelman } 2819a23d5eceSKris Buschelman } 2820a23d5eceSKris Buschelman if (o_nnz) { 2821d0f46423SBarry Smith for (i=0; i<B->rmap->n; i++) { 2822e32f2f54SBarry Smith if (o_nnz[i] < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"o_nnz cannot be less than 0: local row %D value %D",i,o_nnz[i]); 2823a23d5eceSKris Buschelman } 2824a23d5eceSKris Buschelman } 2825a23d5eceSKris Buschelman b = (Mat_MPIAIJ*)B->data; 2826899cda47SBarry Smith 2827526dfc15SBarry Smith if (!B->preallocated) { 2828899cda47SBarry Smith /* Explicitly create 2 MATSEQAIJ matrices. 
*/ 2829899cda47SBarry Smith ierr = MatCreate(PETSC_COMM_SELF,&b->A);CHKERRQ(ierr); 2830d0f46423SBarry Smith ierr = MatSetSizes(b->A,B->rmap->n,B->cmap->n,B->rmap->n,B->cmap->n);CHKERRQ(ierr); 2831899cda47SBarry Smith ierr = MatSetType(b->A,MATSEQAIJ);CHKERRQ(ierr); 2832899cda47SBarry Smith ierr = PetscLogObjectParent(B,b->A);CHKERRQ(ierr); 2833899cda47SBarry Smith ierr = MatCreate(PETSC_COMM_SELF,&b->B);CHKERRQ(ierr); 2834d0f46423SBarry Smith ierr = MatSetSizes(b->B,B->rmap->n,B->cmap->N,B->rmap->n,B->cmap->N);CHKERRQ(ierr); 2835899cda47SBarry Smith ierr = MatSetType(b->B,MATSEQAIJ);CHKERRQ(ierr); 2836899cda47SBarry Smith ierr = PetscLogObjectParent(B,b->B);CHKERRQ(ierr); 2837526dfc15SBarry Smith } 2838899cda47SBarry Smith 2839c60e587dSKris Buschelman ierr = MatSeqAIJSetPreallocation(b->A,d_nz,d_nnz);CHKERRQ(ierr); 2840c60e587dSKris Buschelman ierr = MatSeqAIJSetPreallocation(b->B,o_nz,o_nnz);CHKERRQ(ierr); 2841526dfc15SBarry Smith B->preallocated = PETSC_TRUE; 2842a23d5eceSKris Buschelman PetscFunctionReturn(0); 2843a23d5eceSKris Buschelman } 2844a23d5eceSKris Buschelman EXTERN_C_END 2845a23d5eceSKris Buschelman 28464a2ae208SSatish Balay #undef __FUNCT__ 28474a2ae208SSatish Balay #define __FUNCT__ "MatDuplicate_MPIAIJ" 2848dfbe8321SBarry Smith PetscErrorCode MatDuplicate_MPIAIJ(Mat matin,MatDuplicateOption cpvalues,Mat *newmat) 2849d6dfbf8fSBarry Smith { 2850d6dfbf8fSBarry Smith Mat mat; 2851416022c9SBarry Smith Mat_MPIAIJ *a,*oldmat = (Mat_MPIAIJ*)matin->data; 2852dfbe8321SBarry Smith PetscErrorCode ierr; 2853d6dfbf8fSBarry Smith 28543a40ed3dSBarry Smith PetscFunctionBegin; 2855416022c9SBarry Smith *newmat = 0; 28567adad957SLisandro Dalcin ierr = MatCreate(((PetscObject)matin)->comm,&mat);CHKERRQ(ierr); 2857d0f46423SBarry Smith ierr = MatSetSizes(mat,matin->rmap->n,matin->cmap->n,matin->rmap->N,matin->cmap->N);CHKERRQ(ierr); 28587adad957SLisandro Dalcin ierr = MatSetType(mat,((PetscObject)matin)->type_name);CHKERRQ(ierr); 28591d5dac46SHong Zhang ierr = 
PetscMemcpy(mat->ops,matin->ops,sizeof(struct _MatOps));CHKERRQ(ierr); 2860273d9f13SBarry Smith a = (Mat_MPIAIJ*)mat->data; 2861e1b6402fSHong Zhang 2862d5f3da31SBarry Smith mat->factortype = matin->factortype; 2863d0f46423SBarry Smith mat->rmap->bs = matin->rmap->bs; 2864c456f294SBarry Smith mat->assembled = PETSC_TRUE; 2865e7641de0SSatish Balay mat->insertmode = NOT_SET_VALUES; 2866273d9f13SBarry Smith mat->preallocated = PETSC_TRUE; 2867d6dfbf8fSBarry Smith 286817699dbbSLois Curfman McInnes a->size = oldmat->size; 286917699dbbSLois Curfman McInnes a->rank = oldmat->rank; 2870e7641de0SSatish Balay a->donotstash = oldmat->donotstash; 2871e7641de0SSatish Balay a->roworiented = oldmat->roworiented; 2872e7641de0SSatish Balay a->rowindices = 0; 2873bcd2baecSBarry Smith a->rowvalues = 0; 2874bcd2baecSBarry Smith a->getrowactive = PETSC_FALSE; 2875d6dfbf8fSBarry Smith 287626283091SBarry Smith ierr = PetscLayoutCopy(matin->rmap,&mat->rmap);CHKERRQ(ierr); 287726283091SBarry Smith ierr = PetscLayoutCopy(matin->cmap,&mat->cmap);CHKERRQ(ierr); 2878899cda47SBarry Smith 28792ee70a88SLois Curfman McInnes if (oldmat->colmap) { 2880aa482453SBarry Smith #if defined (PETSC_USE_CTABLE) 28810f5bd95cSBarry Smith ierr = PetscTableCreateCopy(oldmat->colmap,&a->colmap);CHKERRQ(ierr); 2882b1fc9764SSatish Balay #else 2883d0f46423SBarry Smith ierr = PetscMalloc((mat->cmap->N)*sizeof(PetscInt),&a->colmap);CHKERRQ(ierr); 2884d0f46423SBarry Smith ierr = PetscLogObjectMemory(mat,(mat->cmap->N)*sizeof(PetscInt));CHKERRQ(ierr); 2885d0f46423SBarry Smith ierr = PetscMemcpy(a->colmap,oldmat->colmap,(mat->cmap->N)*sizeof(PetscInt));CHKERRQ(ierr); 2886b1fc9764SSatish Balay #endif 2887416022c9SBarry Smith } else a->colmap = 0; 28883f41c07dSBarry Smith if (oldmat->garray) { 2889b1d57f15SBarry Smith PetscInt len; 2890d0f46423SBarry Smith len = oldmat->B->cmap->n; 2891b1d57f15SBarry Smith ierr = PetscMalloc((len+1)*sizeof(PetscInt),&a->garray);CHKERRQ(ierr); 289252e6d16bSBarry Smith ierr = 
PetscLogObjectMemory(mat,len*sizeof(PetscInt));CHKERRQ(ierr); 2893b1d57f15SBarry Smith if (len) { ierr = PetscMemcpy(a->garray,oldmat->garray,len*sizeof(PetscInt));CHKERRQ(ierr); } 2894416022c9SBarry Smith } else a->garray = 0; 2895d6dfbf8fSBarry Smith 2896416022c9SBarry Smith ierr = VecDuplicate(oldmat->lvec,&a->lvec);CHKERRQ(ierr); 289752e6d16bSBarry Smith ierr = PetscLogObjectParent(mat,a->lvec);CHKERRQ(ierr); 2898a56f8943SBarry Smith ierr = VecScatterCopy(oldmat->Mvctx,&a->Mvctx);CHKERRQ(ierr); 289952e6d16bSBarry Smith ierr = PetscLogObjectParent(mat,a->Mvctx);CHKERRQ(ierr); 29002e8a6d31SBarry Smith ierr = MatDuplicate(oldmat->A,cpvalues,&a->A);CHKERRQ(ierr); 290152e6d16bSBarry Smith ierr = PetscLogObjectParent(mat,a->A);CHKERRQ(ierr); 29022e8a6d31SBarry Smith ierr = MatDuplicate(oldmat->B,cpvalues,&a->B);CHKERRQ(ierr); 290352e6d16bSBarry Smith ierr = PetscLogObjectParent(mat,a->B);CHKERRQ(ierr); 29047adad957SLisandro Dalcin ierr = PetscFListDuplicate(((PetscObject)matin)->qlist,&((PetscObject)mat)->qlist);CHKERRQ(ierr); 29058a729477SBarry Smith *newmat = mat; 29063a40ed3dSBarry Smith PetscFunctionReturn(0); 29078a729477SBarry Smith } 2908416022c9SBarry Smith 29091a4ee126SBarry Smith /* 29101a4ee126SBarry Smith Allows sending/receiving larger messages then 2 gigabytes in a single call 29111a4ee126SBarry Smith */ 29121a4ee126SBarry Smith static int MPILong_Send(void *mess,PetscInt cnt, MPI_Datatype type,int to, int tag, MPI_Comm comm) 29131a4ee126SBarry Smith { 29141a4ee126SBarry Smith int ierr; 29151a4ee126SBarry Smith static PetscInt CHUNKSIZE = 250000000; /* 250,000,000 */ 29161a4ee126SBarry Smith PetscInt i,numchunks; 29171a4ee126SBarry Smith PetscMPIInt icnt; 29181a4ee126SBarry Smith 29191a4ee126SBarry Smith numchunks = cnt/CHUNKSIZE + 1; 29201a4ee126SBarry Smith for (i=0; i<numchunks; i++) { 29211a4ee126SBarry Smith icnt = PetscMPIIntCast((i < numchunks-1) ? 
CHUNKSIZE : cnt - (numchunks-1)*CHUNKSIZE); 29221a4ee126SBarry Smith ierr = MPI_Send(mess,icnt,type,to,tag,comm); 29231a4ee126SBarry Smith if (type == MPIU_INT) { 29241a4ee126SBarry Smith mess = (void*) (((PetscInt*)mess) + CHUNKSIZE); 29251a4ee126SBarry Smith } else if (type == MPIU_SCALAR) { 29261a4ee126SBarry Smith mess = (void*) (((PetscScalar*)mess) + CHUNKSIZE); 29271a4ee126SBarry Smith } else SETERRQ(comm,PETSC_ERR_SUP,"No support for this datatype"); 29281a4ee126SBarry Smith } 29291a4ee126SBarry Smith return 0; 29301a4ee126SBarry Smith } 29311a4ee126SBarry Smith static int MPILong_Recv(void *mess,PetscInt cnt, MPI_Datatype type,int from, int tag, MPI_Comm comm) 29321a4ee126SBarry Smith { 29331a4ee126SBarry Smith int ierr; 29341a4ee126SBarry Smith static PetscInt CHUNKSIZE = 250000000; /* 250,000,000 */ 29351a4ee126SBarry Smith MPI_Status status; 29361a4ee126SBarry Smith PetscInt i,numchunks; 29371a4ee126SBarry Smith PetscMPIInt icnt; 29381a4ee126SBarry Smith 29391a4ee126SBarry Smith numchunks = cnt/CHUNKSIZE + 1; 29401a4ee126SBarry Smith for (i=0; i<numchunks; i++) { 29411a4ee126SBarry Smith icnt = PetscMPIIntCast((i < numchunks-1) ? 
CHUNKSIZE : cnt - (numchunks-1)*CHUNKSIZE); 29421a4ee126SBarry Smith ierr = MPI_Recv(mess,icnt,type,from,tag,comm,&status); 29431a4ee126SBarry Smith if (type == MPIU_INT) { 29441a4ee126SBarry Smith mess = (void*) (((PetscInt*)mess) + CHUNKSIZE); 29451a4ee126SBarry Smith } else if (type == MPIU_SCALAR) { 29461a4ee126SBarry Smith mess = (void*) (((PetscScalar*)mess) + CHUNKSIZE); 29471a4ee126SBarry Smith } else SETERRQ(comm,PETSC_ERR_SUP,"No support for this datatype"); 29481a4ee126SBarry Smith } 29491a4ee126SBarry Smith return 0; 29501a4ee126SBarry Smith } 29511a4ee126SBarry Smith 29524a2ae208SSatish Balay #undef __FUNCT__ 29535bba2384SShri Abhyankar #define __FUNCT__ "MatLoad_MPIAIJ" 2954112444f4SShri Abhyankar PetscErrorCode MatLoad_MPIAIJ(Mat newMat, PetscViewer viewer) 29558fb81238SShri Abhyankar { 29568fb81238SShri Abhyankar PetscScalar *vals,*svals; 29578fb81238SShri Abhyankar MPI_Comm comm = ((PetscObject)viewer)->comm; 29588fb81238SShri Abhyankar PetscErrorCode ierr; 29591a4ee126SBarry Smith PetscMPIInt rank,size,tag = ((PetscObject)viewer)->tag; 29608fb81238SShri Abhyankar PetscInt i,nz,j,rstart,rend,mmax,maxnz = 0,grows,gcols; 29618fb81238SShri Abhyankar PetscInt header[4],*rowlengths = 0,M,N,m,*cols; 29628fb81238SShri Abhyankar PetscInt *ourlens = PETSC_NULL,*procsnz = PETSC_NULL,*offlens = PETSC_NULL,jj,*mycols,*smycols; 29638fb81238SShri Abhyankar PetscInt cend,cstart,n,*rowners,sizesset=1; 29648fb81238SShri Abhyankar int fd; 29658fb81238SShri Abhyankar 29668fb81238SShri Abhyankar PetscFunctionBegin; 29678fb81238SShri Abhyankar ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 29688fb81238SShri Abhyankar ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 29698fb81238SShri Abhyankar if (!rank) { 29708fb81238SShri Abhyankar ierr = PetscViewerBinaryGetDescriptor(viewer,&fd);CHKERRQ(ierr); 29718fb81238SShri Abhyankar ierr = PetscBinaryRead(fd,(char *)header,4,PETSC_INT);CHKERRQ(ierr); 29728fb81238SShri Abhyankar if (header[0] != MAT_FILE_CLASSID) 
SETERRQ(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED,"not matrix object"); 29738fb81238SShri Abhyankar } 29748fb81238SShri Abhyankar 29758fb81238SShri Abhyankar if (newMat->rmap->n < 0 && newMat->rmap->N < 0 && newMat->cmap->n < 0 && newMat->cmap->N < 0) sizesset = 0; 29768fb81238SShri Abhyankar 29778fb81238SShri Abhyankar ierr = MPI_Bcast(header+1,3,MPIU_INT,0,comm);CHKERRQ(ierr); 29788fb81238SShri Abhyankar M = header[1]; N = header[2]; 29798fb81238SShri Abhyankar /* If global rows/cols are set to PETSC_DECIDE, set it to the sizes given in the file */ 29808fb81238SShri Abhyankar if (sizesset && newMat->rmap->N < 0) newMat->rmap->N = M; 29818fb81238SShri Abhyankar if (sizesset && newMat->cmap->N < 0) newMat->cmap->N = N; 29828fb81238SShri Abhyankar 29838fb81238SShri Abhyankar /* If global sizes are set, check if they are consistent with that given in the file */ 29848fb81238SShri Abhyankar if (sizesset) { 29858fb81238SShri Abhyankar ierr = MatGetSize(newMat,&grows,&gcols);CHKERRQ(ierr); 29868fb81238SShri Abhyankar } 2987abd38a8fSBarry Smith if (sizesset && newMat->rmap->N != grows) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED, "Inconsistent # of rows:Matrix in file has (%d) and input matrix has (%d)",M,grows); 2988abd38a8fSBarry Smith if (sizesset && newMat->cmap->N != gcols) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_FILE_UNEXPECTED, "Inconsistent # of cols:Matrix in file has (%d) and input matrix has (%d)",N,gcols); 29898fb81238SShri Abhyankar 29908fb81238SShri Abhyankar /* determine ownership of all rows */ 29918fb81238SShri Abhyankar if (newMat->rmap->n < 0 ) m = M/size + ((M % size) > rank); /* PETSC_DECIDE */ 29924683f7a4SShri Abhyankar else m = newMat->rmap->n; /* Set by user */ 29938fb81238SShri Abhyankar 29948fb81238SShri Abhyankar ierr = PetscMalloc((size+1)*sizeof(PetscInt),&rowners);CHKERRQ(ierr); 29958fb81238SShri Abhyankar ierr = MPI_Allgather(&m,1,MPIU_INT,rowners+1,1,MPIU_INT,comm);CHKERRQ(ierr); 29968fb81238SShri Abhyankar 29978fb81238SShri Abhyankar 
/* First process needs enough room for process with most rows */ 29988fb81238SShri Abhyankar if (!rank) { 29998fb81238SShri Abhyankar mmax = rowners[1]; 30008fb81238SShri Abhyankar for (i=2; i<size; i++) { 30018fb81238SShri Abhyankar mmax = PetscMax(mmax,rowners[i]); 30028fb81238SShri Abhyankar } 30038fb81238SShri Abhyankar } else mmax = m; 30048fb81238SShri Abhyankar 30058fb81238SShri Abhyankar rowners[0] = 0; 30068fb81238SShri Abhyankar for (i=2; i<=size; i++) { 30078fb81238SShri Abhyankar rowners[i] += rowners[i-1]; 30088fb81238SShri Abhyankar } 30098fb81238SShri Abhyankar rstart = rowners[rank]; 30108fb81238SShri Abhyankar rend = rowners[rank+1]; 30118fb81238SShri Abhyankar 30128fb81238SShri Abhyankar /* distribute row lengths to all processors */ 30138fb81238SShri Abhyankar ierr = PetscMalloc2(mmax,PetscInt,&ourlens,mmax,PetscInt,&offlens);CHKERRQ(ierr); 30148fb81238SShri Abhyankar if (!rank) { 30158fb81238SShri Abhyankar ierr = PetscBinaryRead(fd,ourlens,m,PETSC_INT);CHKERRQ(ierr); 30168fb81238SShri Abhyankar ierr = PetscMalloc(m*sizeof(PetscInt),&rowlengths);CHKERRQ(ierr); 30178fb81238SShri Abhyankar ierr = PetscMalloc(size*sizeof(PetscInt),&procsnz);CHKERRQ(ierr); 30188fb81238SShri Abhyankar ierr = PetscMemzero(procsnz,size*sizeof(PetscInt));CHKERRQ(ierr); 30198fb81238SShri Abhyankar for (j=0; j<m; j++) { 30208fb81238SShri Abhyankar procsnz[0] += ourlens[j]; 30218fb81238SShri Abhyankar } 30228fb81238SShri Abhyankar for (i=1; i<size; i++) { 30238fb81238SShri Abhyankar ierr = PetscBinaryRead(fd,rowlengths,rowners[i+1]-rowners[i],PETSC_INT);CHKERRQ(ierr); 30248fb81238SShri Abhyankar /* calculate the number of nonzeros on each processor */ 30258fb81238SShri Abhyankar for (j=0; j<rowners[i+1]-rowners[i]; j++) { 30268fb81238SShri Abhyankar procsnz[i] += rowlengths[j]; 30278fb81238SShri Abhyankar } 30281a4ee126SBarry Smith ierr = MPILong_Send(rowlengths,rowners[i+1]-rowners[i],MPIU_INT,i,tag,comm);CHKERRQ(ierr); 30298fb81238SShri Abhyankar } 30308fb81238SShri 
Abhyankar ierr = PetscFree(rowlengths);CHKERRQ(ierr); 30318fb81238SShri Abhyankar } else { 30321a4ee126SBarry Smith ierr = MPILong_Recv(ourlens,m,MPIU_INT,0,tag,comm);CHKERRQ(ierr); 30338fb81238SShri Abhyankar } 30348fb81238SShri Abhyankar 30358fb81238SShri Abhyankar if (!rank) { 30368fb81238SShri Abhyankar /* determine max buffer needed and allocate it */ 30378fb81238SShri Abhyankar maxnz = 0; 30388fb81238SShri Abhyankar for (i=0; i<size; i++) { 30398fb81238SShri Abhyankar maxnz = PetscMax(maxnz,procsnz[i]); 30408fb81238SShri Abhyankar } 30418fb81238SShri Abhyankar ierr = PetscMalloc(maxnz*sizeof(PetscInt),&cols);CHKERRQ(ierr); 30428fb81238SShri Abhyankar 30438fb81238SShri Abhyankar /* read in my part of the matrix column indices */ 30448fb81238SShri Abhyankar nz = procsnz[0]; 30458fb81238SShri Abhyankar ierr = PetscMalloc(nz*sizeof(PetscInt),&mycols);CHKERRQ(ierr); 30468fb81238SShri Abhyankar ierr = PetscBinaryRead(fd,mycols,nz,PETSC_INT);CHKERRQ(ierr); 30478fb81238SShri Abhyankar 30488fb81238SShri Abhyankar /* read in every one elses and ship off */ 30498fb81238SShri Abhyankar for (i=1; i<size; i++) { 30508fb81238SShri Abhyankar nz = procsnz[i]; 30518fb81238SShri Abhyankar ierr = PetscBinaryRead(fd,cols,nz,PETSC_INT);CHKERRQ(ierr); 30521a4ee126SBarry Smith ierr = MPILong_Send(cols,nz,MPIU_INT,i,tag,comm);CHKERRQ(ierr); 30538fb81238SShri Abhyankar } 30548fb81238SShri Abhyankar ierr = PetscFree(cols);CHKERRQ(ierr); 30558fb81238SShri Abhyankar } else { 30568fb81238SShri Abhyankar /* determine buffer space needed for message */ 30578fb81238SShri Abhyankar nz = 0; 30588fb81238SShri Abhyankar for (i=0; i<m; i++) { 30598fb81238SShri Abhyankar nz += ourlens[i]; 30608fb81238SShri Abhyankar } 30618fb81238SShri Abhyankar ierr = PetscMalloc(nz*sizeof(PetscInt),&mycols);CHKERRQ(ierr); 30628fb81238SShri Abhyankar 30638fb81238SShri Abhyankar /* receive message of column indices*/ 30641a4ee126SBarry Smith ierr = MPILong_Recv(mycols,nz,MPIU_INT,0,tag,comm);CHKERRQ(ierr); 
30658fb81238SShri Abhyankar } 30668fb81238SShri Abhyankar 30678fb81238SShri Abhyankar /* determine column ownership if matrix is not square */ 30688fb81238SShri Abhyankar if (N != M) { 30698fb81238SShri Abhyankar if (newMat->cmap->n < 0) n = N/size + ((N % size) > rank); 30708fb81238SShri Abhyankar else n = newMat->cmap->n; 30718fb81238SShri Abhyankar ierr = MPI_Scan(&n,&cend,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr); 30728fb81238SShri Abhyankar cstart = cend - n; 30738fb81238SShri Abhyankar } else { 30748fb81238SShri Abhyankar cstart = rstart; 30758fb81238SShri Abhyankar cend = rend; 30768fb81238SShri Abhyankar n = cend - cstart; 30778fb81238SShri Abhyankar } 30788fb81238SShri Abhyankar 30798fb81238SShri Abhyankar /* loop over local rows, determining number of off diagonal entries */ 30808fb81238SShri Abhyankar ierr = PetscMemzero(offlens,m*sizeof(PetscInt));CHKERRQ(ierr); 30818fb81238SShri Abhyankar jj = 0; 30828fb81238SShri Abhyankar for (i=0; i<m; i++) { 30838fb81238SShri Abhyankar for (j=0; j<ourlens[i]; j++) { 30848fb81238SShri Abhyankar if (mycols[jj] < cstart || mycols[jj] >= cend) offlens[i]++; 30858fb81238SShri Abhyankar jj++; 30868fb81238SShri Abhyankar } 30878fb81238SShri Abhyankar } 30888fb81238SShri Abhyankar 30898fb81238SShri Abhyankar for (i=0; i<m; i++) { 30908fb81238SShri Abhyankar ourlens[i] -= offlens[i]; 30918fb81238SShri Abhyankar } 30928fb81238SShri Abhyankar if (!sizesset) { 30938fb81238SShri Abhyankar ierr = MatSetSizes(newMat,m,n,M,N);CHKERRQ(ierr); 30948fb81238SShri Abhyankar } 30958fb81238SShri Abhyankar ierr = MatMPIAIJSetPreallocation(newMat,0,ourlens,0,offlens);CHKERRQ(ierr); 30968fb81238SShri Abhyankar 30978fb81238SShri Abhyankar for (i=0; i<m; i++) { 30988fb81238SShri Abhyankar ourlens[i] += offlens[i]; 30998fb81238SShri Abhyankar } 31008fb81238SShri Abhyankar 31018fb81238SShri Abhyankar if (!rank) { 31028fb81238SShri Abhyankar ierr = PetscMalloc((maxnz+1)*sizeof(PetscScalar),&vals);CHKERRQ(ierr); 31038fb81238SShri Abhyankar 
31048fb81238SShri Abhyankar /* read in my part of the matrix numerical values */ 31058fb81238SShri Abhyankar nz = procsnz[0]; 31068fb81238SShri Abhyankar ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr); 31078fb81238SShri Abhyankar 31088fb81238SShri Abhyankar /* insert into matrix */ 31098fb81238SShri Abhyankar jj = rstart; 31108fb81238SShri Abhyankar smycols = mycols; 31118fb81238SShri Abhyankar svals = vals; 31128fb81238SShri Abhyankar for (i=0; i<m; i++) { 31138fb81238SShri Abhyankar ierr = MatSetValues_MPIAIJ(newMat,1,&jj,ourlens[i],smycols,svals,INSERT_VALUES);CHKERRQ(ierr); 31148fb81238SShri Abhyankar smycols += ourlens[i]; 31158fb81238SShri Abhyankar svals += ourlens[i]; 31168fb81238SShri Abhyankar jj++; 31178fb81238SShri Abhyankar } 31188fb81238SShri Abhyankar 31198fb81238SShri Abhyankar /* read in other processors and ship out */ 31208fb81238SShri Abhyankar for (i=1; i<size; i++) { 31218fb81238SShri Abhyankar nz = procsnz[i]; 31228fb81238SShri Abhyankar ierr = PetscBinaryRead(fd,vals,nz,PETSC_SCALAR);CHKERRQ(ierr); 31231a4ee126SBarry Smith ierr = MPILong_Send(vals,nz,MPIU_SCALAR,i,((PetscObject)newMat)->tag,comm);CHKERRQ(ierr); 31248fb81238SShri Abhyankar } 31258fb81238SShri Abhyankar ierr = PetscFree(procsnz);CHKERRQ(ierr); 31268fb81238SShri Abhyankar } else { 31278fb81238SShri Abhyankar /* receive numeric values */ 31288fb81238SShri Abhyankar ierr = PetscMalloc((nz+1)*sizeof(PetscScalar),&vals);CHKERRQ(ierr); 31298fb81238SShri Abhyankar 31308fb81238SShri Abhyankar /* receive message of values*/ 31311a4ee126SBarry Smith ierr = MPILong_Recv(vals,nz,MPIU_SCALAR,0,((PetscObject)newMat)->tag,comm);CHKERRQ(ierr); 31328fb81238SShri Abhyankar 31338fb81238SShri Abhyankar /* insert into matrix */ 31348fb81238SShri Abhyankar jj = rstart; 31358fb81238SShri Abhyankar smycols = mycols; 31368fb81238SShri Abhyankar svals = vals; 31378fb81238SShri Abhyankar for (i=0; i<m; i++) { 31388fb81238SShri Abhyankar ierr = 
MatSetValues_MPIAIJ(newMat,1,&jj,ourlens[i],smycols,svals,INSERT_VALUES);CHKERRQ(ierr); 31398fb81238SShri Abhyankar smycols += ourlens[i]; 31408fb81238SShri Abhyankar svals += ourlens[i]; 31418fb81238SShri Abhyankar jj++; 31428fb81238SShri Abhyankar } 31438fb81238SShri Abhyankar } 31448fb81238SShri Abhyankar ierr = PetscFree2(ourlens,offlens);CHKERRQ(ierr); 31458fb81238SShri Abhyankar ierr = PetscFree(vals);CHKERRQ(ierr); 31468fb81238SShri Abhyankar ierr = PetscFree(mycols);CHKERRQ(ierr); 31478fb81238SShri Abhyankar ierr = PetscFree(rowners);CHKERRQ(ierr); 31488fb81238SShri Abhyankar 31498fb81238SShri Abhyankar ierr = MatAssemblyBegin(newMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 31508fb81238SShri Abhyankar ierr = MatAssemblyEnd(newMat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 31518fb81238SShri Abhyankar PetscFunctionReturn(0); 31528fb81238SShri Abhyankar } 31538fb81238SShri Abhyankar 31548fb81238SShri Abhyankar #undef __FUNCT__ 31554a2ae208SSatish Balay #define __FUNCT__ "MatGetSubMatrix_MPIAIJ" 31564aa3045dSJed Brown PetscErrorCode MatGetSubMatrix_MPIAIJ(Mat mat,IS isrow,IS iscol,MatReuse call,Mat *newmat) 31574aa3045dSJed Brown { 31584aa3045dSJed Brown PetscErrorCode ierr; 31594aa3045dSJed Brown IS iscol_local; 31604aa3045dSJed Brown PetscInt csize; 31614aa3045dSJed Brown 31624aa3045dSJed Brown PetscFunctionBegin; 31634aa3045dSJed Brown ierr = ISGetLocalSize(iscol,&csize);CHKERRQ(ierr); 3164b79d0421SJed Brown if (call == MAT_REUSE_MATRIX) { 3165b79d0421SJed Brown ierr = PetscObjectQuery((PetscObject)*newmat,"ISAllGather",(PetscObject*)&iscol_local);CHKERRQ(ierr); 3166e32f2f54SBarry Smith if (!iscol_local) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Submatrix passed in was not used before, cannot reuse"); 3167b79d0421SJed Brown } else { 31684aa3045dSJed Brown ierr = ISAllGather(iscol,&iscol_local);CHKERRQ(ierr); 3169b79d0421SJed Brown } 31704aa3045dSJed Brown ierr = MatGetSubMatrix_MPIAIJ_Private(mat,isrow,iscol_local,csize,call,newmat);CHKERRQ(ierr); 3171b79d0421SJed 
Brown if (call == MAT_INITIAL_MATRIX) { 3172b79d0421SJed Brown ierr = PetscObjectCompose((PetscObject)*newmat,"ISAllGather",(PetscObject)iscol_local);CHKERRQ(ierr); 31734aa3045dSJed Brown ierr = ISDestroy(iscol_local);CHKERRQ(ierr); 3174b79d0421SJed Brown } 31754aa3045dSJed Brown PetscFunctionReturn(0); 31764aa3045dSJed Brown } 31774aa3045dSJed Brown 31784aa3045dSJed Brown #undef __FUNCT__ 31794aa3045dSJed Brown #define __FUNCT__ "MatGetSubMatrix_MPIAIJ_Private" 3180a0ff6018SBarry Smith /* 318129da9460SBarry Smith Not great since it makes two copies of the submatrix, first an SeqAIJ 318229da9460SBarry Smith in local and then by concatenating the local matrices the end result. 318329da9460SBarry Smith Writing it directly would be much like MatGetSubMatrices_MPIAIJ() 31844aa3045dSJed Brown 31854aa3045dSJed Brown Note: This requires a sequential iscol with all indices. 3186a0ff6018SBarry Smith */ 31874aa3045dSJed Brown PetscErrorCode MatGetSubMatrix_MPIAIJ_Private(Mat mat,IS isrow,IS iscol,PetscInt csize,MatReuse call,Mat *newmat) 3188a0ff6018SBarry Smith { 3189dfbe8321SBarry Smith PetscErrorCode ierr; 319032dcc486SBarry Smith PetscMPIInt rank,size; 3191b1d57f15SBarry Smith PetscInt i,m,n,rstart,row,rend,nz,*cwork,j; 3192b1d57f15SBarry Smith PetscInt *ii,*jj,nlocal,*dlens,*olens,dlen,olen,jend,mglobal; 3193fee21e36SBarry Smith Mat *local,M,Mreuse; 3194a77337e4SBarry Smith MatScalar *vwork,*aa; 31957adad957SLisandro Dalcin MPI_Comm comm = ((PetscObject)mat)->comm; 319600e6dbe6SBarry Smith Mat_SeqAIJ *aij; 31977e2c5f70SBarry Smith 3198a0ff6018SBarry Smith 3199a0ff6018SBarry Smith PetscFunctionBegin; 32001dab6e02SBarry Smith ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 32011dab6e02SBarry Smith ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 320200e6dbe6SBarry Smith 3203fee21e36SBarry Smith if (call == MAT_REUSE_MATRIX) { 3204fee21e36SBarry Smith ierr = PetscObjectQuery((PetscObject)*newmat,"SubMatrix",(PetscObject *)&Mreuse);CHKERRQ(ierr); 3205e32f2f54SBarry Smith if 
(!Mreuse) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Submatrix passed in was not used before, cannot reuse"); 3206fee21e36SBarry Smith local = &Mreuse; 3207fee21e36SBarry Smith ierr = MatGetSubMatrices(mat,1,&isrow,&iscol,MAT_REUSE_MATRIX,&local);CHKERRQ(ierr); 3208fee21e36SBarry Smith } else { 3209a0ff6018SBarry Smith ierr = MatGetSubMatrices(mat,1,&isrow,&iscol,MAT_INITIAL_MATRIX,&local);CHKERRQ(ierr); 3210fee21e36SBarry Smith Mreuse = *local; 3211606d414cSSatish Balay ierr = PetscFree(local);CHKERRQ(ierr); 3212fee21e36SBarry Smith } 3213a0ff6018SBarry Smith 3214a0ff6018SBarry Smith /* 3215a0ff6018SBarry Smith m - number of local rows 3216a0ff6018SBarry Smith n - number of columns (same on all processors) 3217a0ff6018SBarry Smith rstart - first row in new global matrix generated 3218a0ff6018SBarry Smith */ 3219fee21e36SBarry Smith ierr = MatGetSize(Mreuse,&m,&n);CHKERRQ(ierr); 3220a0ff6018SBarry Smith if (call == MAT_INITIAL_MATRIX) { 3221fee21e36SBarry Smith aij = (Mat_SeqAIJ*)(Mreuse)->data; 322200e6dbe6SBarry Smith ii = aij->i; 322300e6dbe6SBarry Smith jj = aij->j; 322400e6dbe6SBarry Smith 3225a0ff6018SBarry Smith /* 322600e6dbe6SBarry Smith Determine the number of non-zeros in the diagonal and off-diagonal 322700e6dbe6SBarry Smith portions of the matrix in order to do correct preallocation 3228a0ff6018SBarry Smith */ 322900e6dbe6SBarry Smith 323000e6dbe6SBarry Smith /* first get start and end of "diagonal" columns */ 32316a6a5d1dSBarry Smith if (csize == PETSC_DECIDE) { 3232ab50ec6bSBarry Smith ierr = ISGetSize(isrow,&mglobal);CHKERRQ(ierr); 3233ab50ec6bSBarry Smith if (mglobal == n) { /* square matrix */ 3234e2c4fddaSBarry Smith nlocal = m; 32356a6a5d1dSBarry Smith } else { 3236ab50ec6bSBarry Smith nlocal = n/size + ((n % size) > rank); 3237ab50ec6bSBarry Smith } 3238ab50ec6bSBarry Smith } else { 32396a6a5d1dSBarry Smith nlocal = csize; 32406a6a5d1dSBarry Smith } 3241b1d57f15SBarry Smith ierr = 
MPI_Scan(&nlocal,&rend,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr); 324200e6dbe6SBarry Smith rstart = rend - nlocal; 324365e19b50SBarry Smith if (rank == size - 1 && rend != n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Local column sizes %D do not add up to total number of columns %D",rend,n); 324400e6dbe6SBarry Smith 324500e6dbe6SBarry Smith /* next, compute all the lengths */ 3246b1d57f15SBarry Smith ierr = PetscMalloc((2*m+1)*sizeof(PetscInt),&dlens);CHKERRQ(ierr); 324700e6dbe6SBarry Smith olens = dlens + m; 324800e6dbe6SBarry Smith for (i=0; i<m; i++) { 324900e6dbe6SBarry Smith jend = ii[i+1] - ii[i]; 325000e6dbe6SBarry Smith olen = 0; 325100e6dbe6SBarry Smith dlen = 0; 325200e6dbe6SBarry Smith for (j=0; j<jend; j++) { 325300e6dbe6SBarry Smith if (*jj < rstart || *jj >= rend) olen++; 325400e6dbe6SBarry Smith else dlen++; 325500e6dbe6SBarry Smith jj++; 325600e6dbe6SBarry Smith } 325700e6dbe6SBarry Smith olens[i] = olen; 325800e6dbe6SBarry Smith dlens[i] = dlen; 325900e6dbe6SBarry Smith } 3260f69a0ea3SMatthew Knepley ierr = MatCreate(comm,&M);CHKERRQ(ierr); 3261f69a0ea3SMatthew Knepley ierr = MatSetSizes(M,m,nlocal,PETSC_DECIDE,n);CHKERRQ(ierr); 32627adad957SLisandro Dalcin ierr = MatSetType(M,((PetscObject)mat)->type_name);CHKERRQ(ierr); 3263e2d9671bSKris Buschelman ierr = MatMPIAIJSetPreallocation(M,0,dlens,0,olens);CHKERRQ(ierr); 3264606d414cSSatish Balay ierr = PetscFree(dlens);CHKERRQ(ierr); 3265a0ff6018SBarry Smith } else { 3266b1d57f15SBarry Smith PetscInt ml,nl; 3267a0ff6018SBarry Smith 3268a0ff6018SBarry Smith M = *newmat; 3269a0ff6018SBarry Smith ierr = MatGetLocalSize(M,&ml,&nl);CHKERRQ(ierr); 3270e32f2f54SBarry Smith if (ml != m) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Previous matrix must be same size/layout as request"); 3271a0ff6018SBarry Smith ierr = MatZeroEntries(M);CHKERRQ(ierr); 3272c48de900SBarry Smith /* 3273c48de900SBarry Smith The next two lines are needed so we may call MatSetValues_MPIAIJ() below directly, 3274c48de900SBarry Smith rather 
than the slower MatSetValues(). 3275c48de900SBarry Smith */ 3276c48de900SBarry Smith M->was_assembled = PETSC_TRUE; 3277c48de900SBarry Smith M->assembled = PETSC_FALSE; 3278a0ff6018SBarry Smith } 3279a0ff6018SBarry Smith ierr = MatGetOwnershipRange(M,&rstart,&rend);CHKERRQ(ierr); 3280fee21e36SBarry Smith aij = (Mat_SeqAIJ*)(Mreuse)->data; 328100e6dbe6SBarry Smith ii = aij->i; 328200e6dbe6SBarry Smith jj = aij->j; 328300e6dbe6SBarry Smith aa = aij->a; 3284a0ff6018SBarry Smith for (i=0; i<m; i++) { 3285a0ff6018SBarry Smith row = rstart + i; 328600e6dbe6SBarry Smith nz = ii[i+1] - ii[i]; 328700e6dbe6SBarry Smith cwork = jj; jj += nz; 328800e6dbe6SBarry Smith vwork = aa; aa += nz; 32898c638d02SBarry Smith ierr = MatSetValues_MPIAIJ(M,1,&row,nz,cwork,vwork,INSERT_VALUES);CHKERRQ(ierr); 3290a0ff6018SBarry Smith } 3291a0ff6018SBarry Smith 3292a0ff6018SBarry Smith ierr = MatAssemblyBegin(M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 3293a0ff6018SBarry Smith ierr = MatAssemblyEnd(M,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 3294a0ff6018SBarry Smith *newmat = M; 3295fee21e36SBarry Smith 3296fee21e36SBarry Smith /* save submatrix used in processor for next request */ 3297fee21e36SBarry Smith if (call == MAT_INITIAL_MATRIX) { 3298fee21e36SBarry Smith ierr = PetscObjectCompose((PetscObject)M,"SubMatrix",(PetscObject)Mreuse);CHKERRQ(ierr); 3299fee21e36SBarry Smith ierr = PetscObjectDereference((PetscObject)Mreuse);CHKERRQ(ierr); 3300fee21e36SBarry Smith } 3301fee21e36SBarry Smith 3302a0ff6018SBarry Smith PetscFunctionReturn(0); 3303a0ff6018SBarry Smith } 3304273d9f13SBarry Smith 3305e2e86b8fSSatish Balay EXTERN_C_BEGIN 33064a2ae208SSatish Balay #undef __FUNCT__ 3307ccd8e176SBarry Smith #define __FUNCT__ "MatMPIAIJSetPreallocationCSR_MPIAIJ" 3308b7940d39SSatish Balay PetscErrorCode PETSCMAT_DLLEXPORT MatMPIAIJSetPreallocationCSR_MPIAIJ(Mat B,const PetscInt Ii[],const PetscInt J[],const PetscScalar v[]) 3309ccd8e176SBarry Smith { 3310899cda47SBarry Smith PetscInt m,cstart, cend,j,nnz,i,d; 
3311899cda47SBarry Smith PetscInt *d_nnz,*o_nnz,nnz_max = 0,rstart,ii; 3312ccd8e176SBarry Smith const PetscInt *JJ; 3313ccd8e176SBarry Smith PetscScalar *values; 3314ccd8e176SBarry Smith PetscErrorCode ierr; 3315ccd8e176SBarry Smith 3316ccd8e176SBarry Smith PetscFunctionBegin; 3317e32f2f54SBarry Smith if (Ii[0]) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Ii[0] must be 0 it is %D",Ii[0]); 3318899cda47SBarry Smith 331926283091SBarry Smith ierr = PetscLayoutSetBlockSize(B->rmap,1);CHKERRQ(ierr); 332026283091SBarry Smith ierr = PetscLayoutSetBlockSize(B->cmap,1);CHKERRQ(ierr); 332126283091SBarry Smith ierr = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr); 332226283091SBarry Smith ierr = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr); 3323d0f46423SBarry Smith m = B->rmap->n; 3324d0f46423SBarry Smith cstart = B->cmap->rstart; 3325d0f46423SBarry Smith cend = B->cmap->rend; 3326d0f46423SBarry Smith rstart = B->rmap->rstart; 3327899cda47SBarry Smith 33281d79065fSBarry Smith ierr = PetscMalloc2(m,PetscInt,&d_nnz,m,PetscInt,&o_nnz);CHKERRQ(ierr); 3329ccd8e176SBarry Smith 3330ecc77c7aSBarry Smith #if defined(PETSC_USE_DEBUGGING) 3331ecc77c7aSBarry Smith for (i=0; i<m; i++) { 3332ecc77c7aSBarry Smith nnz = Ii[i+1]- Ii[i]; 3333ecc77c7aSBarry Smith JJ = J + Ii[i]; 3334e32f2f54SBarry Smith if (nnz < 0) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Local row %D has a negative %D number of columns",i,nnz); 3335ecc77c7aSBarry Smith if (nnz && (JJ[0] < 0)) SETERRRQ1(PETSC_ERR_ARG_WRONGSTATE,"Row %D starts with negative column index",i,j); 3336d0f46423SBarry Smith if (nnz && (JJ[nnz-1] >= B->cmap->N) SETERRRQ3(PETSC_ERR_ARG_WRONGSTATE,"Row %D ends with too large a column index %D (max allowed %D)",i,JJ[nnz-1],B->cmap->N); 3337ecc77c7aSBarry Smith } 3338ecc77c7aSBarry Smith #endif 3339ecc77c7aSBarry Smith 3340ccd8e176SBarry Smith for (i=0; i<m; i++) { 3341b7940d39SSatish Balay nnz = Ii[i+1]- Ii[i]; 3342b7940d39SSatish Balay JJ = J + Ii[i]; 3343ccd8e176SBarry Smith nnz_max = 
PetscMax(nnz_max,nnz); 3344ccd8e176SBarry Smith d = 0; 33450daa03b5SJed Brown for (j=0; j<nnz; j++) { 33460daa03b5SJed Brown if (cstart <= JJ[j] && JJ[j] < cend) d++; 3347ccd8e176SBarry Smith } 3348ccd8e176SBarry Smith d_nnz[i] = d; 3349ccd8e176SBarry Smith o_nnz[i] = nnz - d; 3350ccd8e176SBarry Smith } 3351ccd8e176SBarry Smith ierr = MatMPIAIJSetPreallocation(B,0,d_nnz,0,o_nnz);CHKERRQ(ierr); 33521d79065fSBarry Smith ierr = PetscFree2(d_nnz,o_nnz);CHKERRQ(ierr); 3353ccd8e176SBarry Smith 3354ccd8e176SBarry Smith if (v) values = (PetscScalar*)v; 3355ccd8e176SBarry Smith else { 3356ccd8e176SBarry Smith ierr = PetscMalloc((nnz_max+1)*sizeof(PetscScalar),&values);CHKERRQ(ierr); 3357ccd8e176SBarry Smith ierr = PetscMemzero(values,nnz_max*sizeof(PetscScalar));CHKERRQ(ierr); 3358ccd8e176SBarry Smith } 3359ccd8e176SBarry Smith 3360ccd8e176SBarry Smith for (i=0; i<m; i++) { 3361ccd8e176SBarry Smith ii = i + rstart; 3362b7940d39SSatish Balay nnz = Ii[i+1]- Ii[i]; 3363b7940d39SSatish Balay ierr = MatSetValues_MPIAIJ(B,1,&ii,nnz,J+Ii[i],values+(v ? Ii[i] : 0),INSERT_VALUES);CHKERRQ(ierr); 3364ccd8e176SBarry Smith } 3365ccd8e176SBarry Smith ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 3366ccd8e176SBarry Smith ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 3367ccd8e176SBarry Smith 3368ccd8e176SBarry Smith if (!v) { 3369ccd8e176SBarry Smith ierr = PetscFree(values);CHKERRQ(ierr); 3370ccd8e176SBarry Smith } 3371ccd8e176SBarry Smith PetscFunctionReturn(0); 3372ccd8e176SBarry Smith } 3373e2e86b8fSSatish Balay EXTERN_C_END 3374ccd8e176SBarry Smith 3375ccd8e176SBarry Smith #undef __FUNCT__ 3376ccd8e176SBarry Smith #define __FUNCT__ "MatMPIAIJSetPreallocationCSR" 33771eea217eSSatish Balay /*@ 3378ccd8e176SBarry Smith MatMPIAIJSetPreallocationCSR - Allocates memory for a sparse parallel matrix in AIJ format 3379ccd8e176SBarry Smith (the default parallel PETSc format). 

   Collective on MPI_Comm

   Input Parameters:
+  B - the matrix
.  i - the indices into j for the start of each local row (starts with zero)
.  j - the column indices for each local row (starts with zero)
-  v - optional values in the matrix

   Level: developer

   Notes:
       The i, j, and v arrays ARE copied by this routine into the internal format used by PETSc;
     thus you CANNOT change the matrix entries by changing the values of v[] after you have
     called this routine. Use MatCreateMPIAIJWithSplitArrays() to avoid needing to copy the arrays.

     The i and j indices are 0 based, and i indices are indices corresponding to the local j array.

       The format which is used for the sparse matrix input, is equivalent to a
    row-major ordering,
i.e for the following matrix, the input data expected is 340012251496SSatish Balay as shown: 340112251496SSatish Balay 340212251496SSatish Balay 1 0 0 340312251496SSatish Balay 2 0 3 P0 340412251496SSatish Balay ------- 340512251496SSatish Balay 4 5 6 P1 340612251496SSatish Balay 340712251496SSatish Balay Process0 [P0]: rows_owned=[0,1] 340812251496SSatish Balay i = {0,1,3} [size = nrow+1 = 2+1] 340912251496SSatish Balay j = {0,0,2} [size = nz = 6] 341012251496SSatish Balay v = {1,2,3} [size = nz = 6] 341112251496SSatish Balay 341212251496SSatish Balay Process1 [P1]: rows_owned=[2] 341312251496SSatish Balay i = {0,3} [size = nrow+1 = 1+1] 341412251496SSatish Balay j = {0,1,2} [size = nz = 6] 341512251496SSatish Balay v = {4,5,6} [size = nz = 6] 341612251496SSatish Balay 3417ccd8e176SBarry Smith .keywords: matrix, aij, compressed row, sparse, parallel 3418ccd8e176SBarry Smith 34192fb0ec9aSBarry Smith .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIAIJSetPreallocation(), MatCreateMPIAIJ(), MPIAIJ, 34208d7a6e47SBarry Smith MatCreateSeqAIJWithArrays(), MatCreateMPIAIJWithSplitArrays() 3421ccd8e176SBarry Smith @*/ 3422be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatMPIAIJSetPreallocationCSR(Mat B,const PetscInt i[],const PetscInt j[], const PetscScalar v[]) 3423ccd8e176SBarry Smith { 34244ac538c5SBarry Smith PetscErrorCode ierr; 3425ccd8e176SBarry Smith 3426ccd8e176SBarry Smith PetscFunctionBegin; 34274ac538c5SBarry Smith ierr = PetscTryMethod(B,"MatMPIAIJSetPreallocationCSR_C",(Mat,const PetscInt[],const PetscInt[],const PetscScalar[]),(B,i,j,v));CHKERRQ(ierr); 3428ccd8e176SBarry Smith PetscFunctionReturn(0); 3429ccd8e176SBarry Smith } 3430ccd8e176SBarry Smith 3431ccd8e176SBarry Smith #undef __FUNCT__ 34324a2ae208SSatish Balay #define __FUNCT__ "MatMPIAIJSetPreallocation" 3433273d9f13SBarry Smith /*@C 3434ccd8e176SBarry Smith MatMPIAIJSetPreallocation - Preallocates memory for a sparse parallel matrix in AIJ format 3435273d9f13SBarry Smith 
   (the default parallel PETSc format).  For good matrix assembly performance
   the user should preallocate the matrix storage by setting the parameters
   d_nz (or d_nnz) and o_nz (or o_nnz).  By setting these parameters accurately,
   performance can be increased by more than a factor of 50.

   Collective on MPI_Comm

   Input Parameters:
+  B - the matrix
.  d_nz  - number of nonzeros per row in DIAGONAL portion of local submatrix
           (same value is used for all local rows)
.  d_nnz - array containing the number of nonzeros in the various rows of the
           DIAGONAL portion of the local submatrix (possibly different for each row)
           or PETSC_NULL, if d_nz is used to specify the nonzero structure.
           The size of this array is equal to the number of local rows, i.e 'm'.
           You must leave room for the diagonal entry even if it is zero.
.  o_nz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
           submatrix (same value is used for all local rows).
-  o_nnz - array containing the number of nonzeros in the various rows of the
           OFF-DIAGONAL portion of the local submatrix (possibly different for
           each row) or PETSC_NULL, if o_nz is used to specify the nonzero
           structure. The size of this array is equal to the number
           of local rows, i.e 'm'.

   If the *_nnz parameter is given then the *_nz parameter is ignored

   The AIJ format (also called the Yale sparse matrix format or
   compressed row storage (CSR)), is fully compatible with standard Fortran 77
   storage.  The stored row and column indices begin with zero.
   See the <A href="../../docs/manual.pdf#nameddest=ch_mat">Mat chapter of the users manual</A> for details.

   The parallel matrix is partitioned such that the first m0 rows belong to
   process 0, the next m1 rows belong to process 1, the next m2 rows belong
   to process 2 etc., where m0,m1,m2... are the input parameter 'm'.

   The DIAGONAL portion of the local submatrix of a processor can be defined
   as the submatrix which is obtained by extracting the part corresponding to
   the rows r1-r2 and columns c1-c2 of the global matrix, where r1 is the
   first row that belongs to the processor, r2 is the last row belonging to
   this processor, and c1-c2 is the range of indices of the local part of a
   vector suitable for applying the matrix to. This is an mxn matrix. In the
   common case of a square matrix, the row and column ranges are the same and
   the DIAGONAL part is also square. The remaining portion of the local
   submatrix (mxN) constitutes the OFF-DIAGONAL portion.

   If o_nnz, d_nnz are specified, then o_nz, and d_nz are ignored
3481273d9f13SBarry Smith 3482aa95bbe8SBarry Smith You can call MatGetInfo() to get information on how effective the preallocation was; 3483aa95bbe8SBarry Smith for example the fields mallocs,nz_allocated,nz_used,nz_unneeded; 3484aa95bbe8SBarry Smith You can also run with the option -info and look for messages with the string 3485aa95bbe8SBarry Smith malloc in them to see if additional memory allocation was needed. 3486aa95bbe8SBarry Smith 3487273d9f13SBarry Smith Example usage: 3488273d9f13SBarry Smith 3489273d9f13SBarry Smith Consider the following 8x8 matrix with 34 non-zero values, that is 3490273d9f13SBarry Smith assembled across 3 processors. Lets assume that proc0 owns 3 rows, 3491273d9f13SBarry Smith proc1 owns 3 rows, proc2 owns 2 rows. This division can be shown 3492273d9f13SBarry Smith as follows: 3493273d9f13SBarry Smith 3494273d9f13SBarry Smith .vb 3495273d9f13SBarry Smith 1 2 0 | 0 3 0 | 0 4 3496273d9f13SBarry Smith Proc0 0 5 6 | 7 0 0 | 8 0 3497273d9f13SBarry Smith 9 0 10 | 11 0 0 | 12 0 3498273d9f13SBarry Smith ------------------------------------- 3499273d9f13SBarry Smith 13 0 14 | 15 16 17 | 0 0 3500273d9f13SBarry Smith Proc1 0 18 0 | 19 20 21 | 0 0 3501273d9f13SBarry Smith 0 0 0 | 22 23 0 | 24 0 3502273d9f13SBarry Smith ------------------------------------- 3503273d9f13SBarry Smith Proc2 25 26 27 | 0 0 28 | 29 0 3504273d9f13SBarry Smith 30 0 0 | 31 32 33 | 0 34 3505273d9f13SBarry Smith .ve 3506273d9f13SBarry Smith 3507273d9f13SBarry Smith This can be represented as a collection of submatrices as: 3508273d9f13SBarry Smith 3509273d9f13SBarry Smith .vb 3510273d9f13SBarry Smith A B C 3511273d9f13SBarry Smith D E F 3512273d9f13SBarry Smith G H I 3513273d9f13SBarry Smith .ve 3514273d9f13SBarry Smith 3515273d9f13SBarry Smith Where the submatrices A,B,C are owned by proc0, D,E,F are 3516273d9f13SBarry Smith owned by proc1, G,H,I are owned by proc2. 

   The 'm' parameters for proc0,proc1,proc2 are 3,3,2 respectively.
   The 'n' parameters for proc0,proc1,proc2 are 3,3,2 respectively.
   The 'M','N' parameters are 8,8, and have the same values on all procs.

   The DIAGONAL submatrices corresponding to proc0,proc1,proc2 are
   submatrices [A], [E], [I] respectively. The OFF-DIAGONAL submatrices
   corresponding to proc0,proc1,proc2 are [BC], [DF], [GH] respectively.
   Internally, each processor stores the DIAGONAL part, and the OFF-DIAGONAL
   part as SeqAIJ matrices. e.g. proc1 will store [E] as a SeqAIJ
   matrix, and [DF] as another SeqAIJ matrix.

   When d_nz, o_nz parameters are specified, d_nz storage elements are
   allocated for every row of the local diagonal submatrix, and o_nz
   storage locations are allocated for every row of the OFF-DIAGONAL submat.
   One way to choose d_nz and o_nz is to use the max nonzeros per local
   rows for each of the local DIAGONAL, and the OFF-DIAGONAL submatrices.
   In this case, the values of d_nz,o_nz are:
.vb
     proc0 : d_nz = 2, o_nz = 2
     proc1 : d_nz = 3, o_nz = 2
     proc2 : d_nz = 1, o_nz = 4
.ve
   We are allocating m*(d_nz+o_nz) storage locations for every proc. This
   translates to 3*(2+2)=12 for proc0, 3*(3+2)=15 for proc1, 2*(1+4)=10
   for proc2, i.e. we are using 12+15+10=37 storage locations to store
   34 values.
3544273d9f13SBarry Smith 3545273d9f13SBarry Smith When d_nnz, o_nnz parameters are specified, the storage is specified 3546273d9f13SBarry Smith for every row, coresponding to both DIAGONAL and OFF-DIAGONAL submatrices. 3547273d9f13SBarry Smith In the above case the values for d_nnz,o_nnz are: 3548273d9f13SBarry Smith .vb 3549273d9f13SBarry Smith proc0: d_nnz = [2,2,2] and o_nnz = [2,2,2] 3550273d9f13SBarry Smith proc1: d_nnz = [3,3,2] and o_nnz = [2,1,1] 3551273d9f13SBarry Smith proc2: d_nnz = [1,1] and o_nnz = [4,4] 3552273d9f13SBarry Smith .ve 3553273d9f13SBarry Smith Here the space allocated is sum of all the above values i.e 34, and 3554273d9f13SBarry Smith hence pre-allocation is perfect. 3555273d9f13SBarry Smith 3556273d9f13SBarry Smith Level: intermediate 3557273d9f13SBarry Smith 3558273d9f13SBarry Smith .keywords: matrix, aij, compressed row, sparse, parallel 3559273d9f13SBarry Smith 3560ccd8e176SBarry Smith .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatCreateMPIAIJ(), MatMPIAIJSetPreallocationCSR(), 3561aa95bbe8SBarry Smith MPIAIJ, MatGetInfo() 3562273d9f13SBarry Smith @*/ 3563be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatMPIAIJSetPreallocation(Mat B,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[]) 3564273d9f13SBarry Smith { 35654ac538c5SBarry Smith PetscErrorCode ierr; 3566273d9f13SBarry Smith 3567273d9f13SBarry Smith PetscFunctionBegin; 35684ac538c5SBarry Smith ierr = PetscTryMethod(B,"MatMPIAIJSetPreallocation_C",(Mat,PetscInt,const PetscInt[],PetscInt,const PetscInt[]),(B,d_nz,d_nnz,o_nz,o_nnz));CHKERRQ(ierr); 3569273d9f13SBarry Smith PetscFunctionReturn(0); 3570273d9f13SBarry Smith } 3571273d9f13SBarry Smith 35724a2ae208SSatish Balay #undef __FUNCT__ 35732fb0ec9aSBarry Smith #define __FUNCT__ "MatCreateMPIAIJWithArrays" 357458d36128SBarry Smith /*@ 35752fb0ec9aSBarry Smith MatCreateMPIAIJWithArrays - creates a MPI AIJ matrix using arrays that contain in standard 35762fb0ec9aSBarry Smith CSR 
format the local rows. 35772fb0ec9aSBarry Smith 35782fb0ec9aSBarry Smith Collective on MPI_Comm 35792fb0ec9aSBarry Smith 35802fb0ec9aSBarry Smith Input Parameters: 35812fb0ec9aSBarry Smith + comm - MPI communicator 35822fb0ec9aSBarry Smith . m - number of local rows (Cannot be PETSC_DECIDE) 35832fb0ec9aSBarry Smith . n - This value should be the same as the local size used in creating the 35842fb0ec9aSBarry Smith x vector for the matrix-vector product y = Ax. (or PETSC_DECIDE to have 35852fb0ec9aSBarry Smith calculated if N is given) For square matrices n is almost always m. 35862fb0ec9aSBarry Smith . M - number of global rows (or PETSC_DETERMINE to have calculated if m is given) 35872fb0ec9aSBarry Smith . N - number of global columns (or PETSC_DETERMINE to have calculated if n is given) 35882fb0ec9aSBarry Smith . i - row indices 35892fb0ec9aSBarry Smith . j - column indices 35902fb0ec9aSBarry Smith - a - matrix values 35912fb0ec9aSBarry Smith 35922fb0ec9aSBarry Smith Output Parameter: 35932fb0ec9aSBarry Smith . mat - the matrix 359403bfb495SBarry Smith 35952fb0ec9aSBarry Smith Level: intermediate 35962fb0ec9aSBarry Smith 35972fb0ec9aSBarry Smith Notes: 35982fb0ec9aSBarry Smith The i, j, and a arrays ARE copied by this routine into the internal format used by PETSc; 35992fb0ec9aSBarry Smith thus you CANNOT change the matrix entries by changing the values of a[] after you have 36008d7a6e47SBarry Smith called this routine. Use MatCreateMPIAIJWithSplitArrays() to avoid needing to copy the arrays. 36012fb0ec9aSBarry Smith 360212251496SSatish Balay The i and j indices are 0 based, and i indices are indices corresponding to the local j array. 360312251496SSatish Balay 360412251496SSatish Balay The format which is used for the sparse matrix input, is equivalent to a 360512251496SSatish Balay row-major ordering.. 
i.e. for the following matrix, the input data expected is
as shown:

        1 0 0
        2 0 3     P0
       -------
        4 5 6     P1

     Process0 [P0]: rows_owned=[0,1]
        i =  {0,1,3}  [size = nrow+1  = 2+1]
        j =  {0,0,2}  [size = nz = 3]
        v =  {1,2,3}  [size = nz = 3]

     Process1 [P1]: rows_owned=[2]
        i =  {0,3}    [size = nrow+1  = 1+1]
        j =  {0,1,2}  [size = nz = 3]
        v =  {4,5,6}  [size = nz = 3]

.keywords: matrix, aij, compressed row, sparse, parallel

.seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIAIJSetPreallocation(), MatMPIAIJSetPreallocationCSR(),
          MPIAIJ, MatCreateMPIAIJ(), MatCreateMPIAIJWithSplitArrays()
@*/
PetscErrorCode PETSCMAT_DLLEXPORT MatCreateMPIAIJWithArrays(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,const PetscInt i[],const PetscInt j[],const PetscScalar a[],Mat *mat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  if (i[0]) {
    SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i (row indices) must start with 0");
  }
  if (m < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"local number of rows (m) cannot be PETSC_DECIDE, or negative");
  /* create the MPIAIJ matrix and defer to MatMPIAIJSetPreallocationCSR(),
     which copies i,j,a into the internal storage */
  ierr = MatCreate(comm,mat);CHKERRQ(ierr);
  ierr = MatSetSizes(*mat,m,n,M,N);CHKERRQ(ierr);
  ierr = MatSetType(*mat,MATMPIAIJ);CHKERRQ(ierr);
  ierr = MatMPIAIJSetPreallocationCSR(*mat,i,j,a);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatCreateMPIAIJ"
/*@C
   MatCreateMPIAIJ - Creates a sparse parallel matrix in AIJ format
   (the default parallel PETSc format).  For good matrix assembly performance
   the user should preallocate the matrix storage by setting the parameters
   d_nz (or d_nnz) and o_nz (or o_nnz).  By setting these parameters accurately,
   performance can be increased by more than a factor of 50.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of local rows (or PETSC_DECIDE to have calculated if M is given)
           This value should be the same as the local size used in creating the
           y vector for the matrix-vector product y = Ax.
.  n - This value should be the same as the local size used in creating the
       x vector for the matrix-vector product y = Ax. (or PETSC_DECIDE to have
       calculated if N is given) For square matrices n is almost always m.
.  M - number of global rows (or PETSC_DETERMINE to have calculated if m is given)
.  N - number of global columns (or PETSC_DETERMINE to have calculated if n is given)
.  d_nz  - number of nonzeros per row in DIAGONAL portion of local submatrix
           (same value is used for all local rows)
.
d_nnz - array containing the number of nonzeros in the various rows of the
           DIAGONAL portion of the local submatrix (possibly different for each row)
           or PETSC_NULL, if d_nz is used to specify the nonzero structure.
           The size of this array is equal to the number of local rows, i.e 'm'.
           You must leave room for the diagonal entry even if it is zero.
.  o_nz  - number of nonzeros per row in the OFF-DIAGONAL portion of local
           submatrix (same value is used for all local rows).
-  o_nnz - array containing the number of nonzeros in the various rows of the
           OFF-DIAGONAL portion of the local submatrix (possibly different for
           each row) or PETSC_NULL, if o_nz is used to specify the nonzero
           structure. The size of this array is equal to the number
           of local rows, i.e 'm'.

   Output Parameter:
.  A - the matrix

   It is recommended that one use the MatCreate(), MatSetType() and/or MatSetFromOptions(),
   MatXXXXSetPreallocation() paradigm instead of this routine directly.
   [MatXXXXSetPreallocation() is, for example, MatSeqAIJSetPreallocation]

   Notes:
   If the *_nnz parameter is given then the *_nz parameter is ignored

   m,n,M,N parameters specify the size of the matrix, and its partitioning across
   processors, while d_nz,d_nnz,o_nz,o_nnz parameters specify the approximate
   storage requirements for this matrix.

   If PETSC_DECIDE or PETSC_DETERMINE is used for a particular argument on one
   processor then it must be used on all processors that share the object for
   that argument.

   The user MUST specify either the local or global matrix dimensions
   (possibly both).

   The parallel matrix is partitioned across processors such that the
   first m0 rows belong to process 0, the next m1 rows belong to
   process 1, the next m2 rows belong to process 2 etc., where
   m0,m1,m2,.. are the input parameter 'm'. i.e. each processor stores
   values corresponding to [m x N] submatrix.

   The columns are logically partitioned with the n0 columns belonging
   to 0th partition, the next n1 columns belonging to the next
   partition etc., where n0,n1,n2... are the input parameter 'n'.

   The DIAGONAL portion of the local submatrix on any given processor
   is the submatrix corresponding to the rows and columns m,n
   corresponding to the given processor. i.e. diagonal matrix on
   process 0 is [m0 x n0], diagonal matrix on process 1 is [m1 x n1]
   etc. The remaining portion of the local submatrix [m x (N-n)]
   constitutes the OFF-DIAGONAL portion. The example below better
   illustrates this concept.
371833a7c187SSatish Balay 371933a7c187SSatish Balay For a square global matrix we define each processor's diagonal portion 372033a7c187SSatish Balay to be its local rows and the corresponding columns (a square submatrix); 372133a7c187SSatish Balay each processor's off-diagonal portion encompasses the remainder of the 372233a7c187SSatish Balay local matrix (a rectangular submatrix). 3723273d9f13SBarry Smith 3724273d9f13SBarry Smith If o_nnz, d_nnz are specified, then o_nz, and d_nz are ignored. 3725273d9f13SBarry Smith 372697d05335SKris Buschelman When calling this routine with a single process communicator, a matrix of 372797d05335SKris Buschelman type SEQAIJ is returned. If a matrix of type MPIAIJ is desired for this 372897d05335SKris Buschelman type of communicator, use the construction mechanism: 372978102f6cSMatthew Knepley MatCreate(...,&A); MatSetType(A,MATMPIAIJ); MatSetSizes(A, m,n,M,N); MatMPIAIJSetPreallocation(A,...); 373097d05335SKris Buschelman 3731273d9f13SBarry Smith By default, this format uses inodes (identical nodes) when possible. 3732273d9f13SBarry Smith We search for consecutive rows with the same nonzero structure, thereby 3733273d9f13SBarry Smith reusing matrix information to achieve increased efficiency. 3734273d9f13SBarry Smith 3735273d9f13SBarry Smith Options Database Keys: 3736923f20ffSKris Buschelman + -mat_no_inode - Do not use inodes 3737923f20ffSKris Buschelman . -mat_inode_limit <limit> - Sets inode limit (max limit=5) 3738273d9f13SBarry Smith - -mat_aij_oneindex - Internally use indexing starting at 1 3739273d9f13SBarry Smith rather than 0. Note that when calling MatSetValues(), 3740273d9f13SBarry Smith the user still MUST index entries starting at 0! 3741273d9f13SBarry Smith 3742273d9f13SBarry Smith 3743273d9f13SBarry Smith Example usage: 3744273d9f13SBarry Smith 3745273d9f13SBarry Smith Consider the following 8x8 matrix with 34 non-zero values, that is 3746273d9f13SBarry Smith assembled across 3 processors. 
Lets assume that proc0 owns 3 rows, 3747273d9f13SBarry Smith proc1 owns 3 rows, proc2 owns 2 rows. This division can be shown 3748273d9f13SBarry Smith as follows: 3749273d9f13SBarry Smith 3750273d9f13SBarry Smith .vb 3751273d9f13SBarry Smith 1 2 0 | 0 3 0 | 0 4 3752273d9f13SBarry Smith Proc0 0 5 6 | 7 0 0 | 8 0 3753273d9f13SBarry Smith 9 0 10 | 11 0 0 | 12 0 3754273d9f13SBarry Smith ------------------------------------- 3755273d9f13SBarry Smith 13 0 14 | 15 16 17 | 0 0 3756273d9f13SBarry Smith Proc1 0 18 0 | 19 20 21 | 0 0 3757273d9f13SBarry Smith 0 0 0 | 22 23 0 | 24 0 3758273d9f13SBarry Smith ------------------------------------- 3759273d9f13SBarry Smith Proc2 25 26 27 | 0 0 28 | 29 0 3760273d9f13SBarry Smith 30 0 0 | 31 32 33 | 0 34 3761273d9f13SBarry Smith .ve 3762273d9f13SBarry Smith 3763273d9f13SBarry Smith This can be represented as a collection of submatrices as: 3764273d9f13SBarry Smith 3765273d9f13SBarry Smith .vb 3766273d9f13SBarry Smith A B C 3767273d9f13SBarry Smith D E F 3768273d9f13SBarry Smith G H I 3769273d9f13SBarry Smith .ve 3770273d9f13SBarry Smith 3771273d9f13SBarry Smith Where the submatrices A,B,C are owned by proc0, D,E,F are 3772273d9f13SBarry Smith owned by proc1, G,H,I are owned by proc2. 3773273d9f13SBarry Smith 3774273d9f13SBarry Smith The 'm' parameters for proc0,proc1,proc2 are 3,3,2 respectively. 3775273d9f13SBarry Smith The 'n' parameters for proc0,proc1,proc2 are 3,3,2 respectively. 3776273d9f13SBarry Smith The 'M','N' parameters are 8,8, and have the same values on all procs. 3777273d9f13SBarry Smith 3778273d9f13SBarry Smith The DIAGONAL submatrices corresponding to proc0,proc1,proc2 are 3779273d9f13SBarry Smith submatrices [A], [E], [I] respectively. The OFF-DIAGONAL submatrices 3780273d9f13SBarry Smith corresponding to proc0,proc1,proc2 are [BC], [DF], [GH] respectively. 3781273d9f13SBarry Smith Internally, each processor stores the DIAGONAL part, and the OFF-DIAGONAL 3782273d9f13SBarry Smith part as SeqAIJ matrices. 
e.g. proc1 will store [E] as a SeqAIJ
   matrix, and [DF] as another SeqAIJ matrix.

   When d_nz, o_nz parameters are specified, d_nz storage elements are
   allocated for every row of the local diagonal submatrix, and o_nz
   storage locations are allocated for every row of the OFF-DIAGONAL submat.
   One way to choose d_nz and o_nz is to use the max nonzeros per local
   rows for each of the local DIAGONAL, and the OFF-DIAGONAL submatrices.
   In this case, the values of d_nz,o_nz are:
.vb
     proc0 : d_nz = 2, o_nz = 2
     proc1 : d_nz = 3, o_nz = 2
     proc2 : d_nz = 1, o_nz = 4
.ve
   We are allocating m*(d_nz+o_nz) storage locations for every proc. This
   translates to 3*(2+2)=12 for proc0, 3*(3+2)=15 for proc1, 2*(1+4)=10
   for proc2, i.e. we are using 12+15+10=37 storage locations to store
   34 values.

   When d_nnz, o_nnz parameters are specified, the storage is specified
   for every row, corresponding to both DIAGONAL and OFF-DIAGONAL submatrices.
   In the above case the values for d_nnz,o_nnz are:
.vb
     proc0: d_nnz = [2,2,2] and o_nnz = [2,2,2]
     proc1: d_nnz = [3,3,2] and o_nnz = [2,1,1]
     proc2: d_nnz = [1,1]   and o_nnz = [4,4]
.ve
   Here the space allocated is sum of all the above values i.e. 34, and
   hence pre-allocation is perfect.
3811273d9f13SBarry Smith 3812273d9f13SBarry Smith Level: intermediate 3813273d9f13SBarry Smith 3814273d9f13SBarry Smith .keywords: matrix, aij, compressed row, sparse, parallel 3815273d9f13SBarry Smith 3816ccd8e176SBarry Smith .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIAIJSetPreallocation(), MatMPIAIJSetPreallocationCSR(), 38172fb0ec9aSBarry Smith MPIAIJ, MatCreateMPIAIJWithArrays() 3818273d9f13SBarry Smith @*/ 3819be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatCreateMPIAIJ(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt d_nz,const PetscInt d_nnz[],PetscInt o_nz,const PetscInt o_nnz[],Mat *A) 3820273d9f13SBarry Smith { 38216849ba73SBarry Smith PetscErrorCode ierr; 3822b1d57f15SBarry Smith PetscMPIInt size; 3823273d9f13SBarry Smith 3824273d9f13SBarry Smith PetscFunctionBegin; 3825f69a0ea3SMatthew Knepley ierr = MatCreate(comm,A);CHKERRQ(ierr); 3826f69a0ea3SMatthew Knepley ierr = MatSetSizes(*A,m,n,M,N);CHKERRQ(ierr); 3827273d9f13SBarry Smith ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 3828273d9f13SBarry Smith if (size > 1) { 3829273d9f13SBarry Smith ierr = MatSetType(*A,MATMPIAIJ);CHKERRQ(ierr); 3830273d9f13SBarry Smith ierr = MatMPIAIJSetPreallocation(*A,d_nz,d_nnz,o_nz,o_nnz);CHKERRQ(ierr); 3831273d9f13SBarry Smith } else { 3832273d9f13SBarry Smith ierr = MatSetType(*A,MATSEQAIJ);CHKERRQ(ierr); 3833273d9f13SBarry Smith ierr = MatSeqAIJSetPreallocation(*A,d_nz,d_nnz);CHKERRQ(ierr); 3834273d9f13SBarry Smith } 3835273d9f13SBarry Smith PetscFunctionReturn(0); 3836273d9f13SBarry Smith } 3837195d93cdSBarry Smith 38384a2ae208SSatish Balay #undef __FUNCT__ 38394a2ae208SSatish Balay #define __FUNCT__ "MatMPIAIJGetSeqAIJ" 3840be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatMPIAIJGetSeqAIJ(Mat A,Mat *Ad,Mat *Ao,PetscInt *colmap[]) 3841195d93cdSBarry Smith { 3842195d93cdSBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ *)A->data; 3843b1d57f15SBarry Smith 3844195d93cdSBarry Smith PetscFunctionBegin; 
3845195d93cdSBarry Smith *Ad = a->A; 3846195d93cdSBarry Smith *Ao = a->B; 3847195d93cdSBarry Smith *colmap = a->garray; 3848195d93cdSBarry Smith PetscFunctionReturn(0); 3849195d93cdSBarry Smith } 3850a2243be0SBarry Smith 3851a2243be0SBarry Smith #undef __FUNCT__ 3852a2243be0SBarry Smith #define __FUNCT__ "MatSetColoring_MPIAIJ" 3853dfbe8321SBarry Smith PetscErrorCode MatSetColoring_MPIAIJ(Mat A,ISColoring coloring) 3854a2243be0SBarry Smith { 3855dfbe8321SBarry Smith PetscErrorCode ierr; 3856b1d57f15SBarry Smith PetscInt i; 3857a2243be0SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 3858a2243be0SBarry Smith 3859a2243be0SBarry Smith PetscFunctionBegin; 38608ee2e534SBarry Smith if (coloring->ctype == IS_COLORING_GLOBAL) { 386108b6dcc0SBarry Smith ISColoringValue *allcolors,*colors; 3862a2243be0SBarry Smith ISColoring ocoloring; 3863a2243be0SBarry Smith 3864a2243be0SBarry Smith /* set coloring for diagonal portion */ 3865a2243be0SBarry Smith ierr = MatSetColoring_SeqAIJ(a->A,coloring);CHKERRQ(ierr); 3866a2243be0SBarry Smith 3867a2243be0SBarry Smith /* set coloring for off-diagonal portion */ 38687adad957SLisandro Dalcin ierr = ISAllGatherColors(((PetscObject)A)->comm,coloring->n,coloring->colors,PETSC_NULL,&allcolors);CHKERRQ(ierr); 3869d0f46423SBarry Smith ierr = PetscMalloc((a->B->cmap->n+1)*sizeof(ISColoringValue),&colors);CHKERRQ(ierr); 3870d0f46423SBarry Smith for (i=0; i<a->B->cmap->n; i++) { 3871a2243be0SBarry Smith colors[i] = allcolors[a->garray[i]]; 3872a2243be0SBarry Smith } 3873a2243be0SBarry Smith ierr = PetscFree(allcolors);CHKERRQ(ierr); 3874d0f46423SBarry Smith ierr = ISColoringCreate(MPI_COMM_SELF,coloring->n,a->B->cmap->n,colors,&ocoloring);CHKERRQ(ierr); 3875a2243be0SBarry Smith ierr = MatSetColoring_SeqAIJ(a->B,ocoloring);CHKERRQ(ierr); 3876a2243be0SBarry Smith ierr = ISColoringDestroy(ocoloring);CHKERRQ(ierr); 3877a2243be0SBarry Smith } else if (coloring->ctype == IS_COLORING_GHOSTED) { 387808b6dcc0SBarry Smith ISColoringValue *colors; 
3879b1d57f15SBarry Smith PetscInt *larray; 3880a2243be0SBarry Smith ISColoring ocoloring; 3881a2243be0SBarry Smith 3882a2243be0SBarry Smith /* set coloring for diagonal portion */ 3883d0f46423SBarry Smith ierr = PetscMalloc((a->A->cmap->n+1)*sizeof(PetscInt),&larray);CHKERRQ(ierr); 3884d0f46423SBarry Smith for (i=0; i<a->A->cmap->n; i++) { 3885d0f46423SBarry Smith larray[i] = i + A->cmap->rstart; 3886a2243be0SBarry Smith } 3887d0f46423SBarry Smith ierr = ISGlobalToLocalMappingApply(A->mapping,IS_GTOLM_MASK,a->A->cmap->n,larray,PETSC_NULL,larray);CHKERRQ(ierr); 3888d0f46423SBarry Smith ierr = PetscMalloc((a->A->cmap->n+1)*sizeof(ISColoringValue),&colors);CHKERRQ(ierr); 3889d0f46423SBarry Smith for (i=0; i<a->A->cmap->n; i++) { 3890a2243be0SBarry Smith colors[i] = coloring->colors[larray[i]]; 3891a2243be0SBarry Smith } 3892a2243be0SBarry Smith ierr = PetscFree(larray);CHKERRQ(ierr); 3893d0f46423SBarry Smith ierr = ISColoringCreate(PETSC_COMM_SELF,coloring->n,a->A->cmap->n,colors,&ocoloring);CHKERRQ(ierr); 3894a2243be0SBarry Smith ierr = MatSetColoring_SeqAIJ(a->A,ocoloring);CHKERRQ(ierr); 3895a2243be0SBarry Smith ierr = ISColoringDestroy(ocoloring);CHKERRQ(ierr); 3896a2243be0SBarry Smith 3897a2243be0SBarry Smith /* set coloring for off-diagonal portion */ 3898d0f46423SBarry Smith ierr = PetscMalloc((a->B->cmap->n+1)*sizeof(PetscInt),&larray);CHKERRQ(ierr); 3899d0f46423SBarry Smith ierr = ISGlobalToLocalMappingApply(A->mapping,IS_GTOLM_MASK,a->B->cmap->n,a->garray,PETSC_NULL,larray);CHKERRQ(ierr); 3900d0f46423SBarry Smith ierr = PetscMalloc((a->B->cmap->n+1)*sizeof(ISColoringValue),&colors);CHKERRQ(ierr); 3901d0f46423SBarry Smith for (i=0; i<a->B->cmap->n; i++) { 3902a2243be0SBarry Smith colors[i] = coloring->colors[larray[i]]; 3903a2243be0SBarry Smith } 3904a2243be0SBarry Smith ierr = PetscFree(larray);CHKERRQ(ierr); 3905d0f46423SBarry Smith ierr = ISColoringCreate(MPI_COMM_SELF,coloring->n,a->B->cmap->n,colors,&ocoloring);CHKERRQ(ierr); 3906a2243be0SBarry Smith ierr 
= MatSetColoring_SeqAIJ(a->B,ocoloring);CHKERRQ(ierr); 3907a2243be0SBarry Smith ierr = ISColoringDestroy(ocoloring);CHKERRQ(ierr); 3908a2243be0SBarry Smith } else { 3909e32f2f54SBarry Smith SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_SUP,"No support ISColoringType %d",(int)coloring->ctype); 3910a2243be0SBarry Smith } 3911a2243be0SBarry Smith 3912a2243be0SBarry Smith PetscFunctionReturn(0); 3913a2243be0SBarry Smith } 3914a2243be0SBarry Smith 3915dcf5cc72SBarry Smith #if defined(PETSC_HAVE_ADIC) 3916a2243be0SBarry Smith #undef __FUNCT__ 3917779c1a83SBarry Smith #define __FUNCT__ "MatSetValuesAdic_MPIAIJ" 3918dfbe8321SBarry Smith PetscErrorCode MatSetValuesAdic_MPIAIJ(Mat A,void *advalues) 3919a2243be0SBarry Smith { 3920a2243be0SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 3921dfbe8321SBarry Smith PetscErrorCode ierr; 3922a2243be0SBarry Smith 3923a2243be0SBarry Smith PetscFunctionBegin; 3924779c1a83SBarry Smith ierr = MatSetValuesAdic_SeqAIJ(a->A,advalues);CHKERRQ(ierr); 3925779c1a83SBarry Smith ierr = MatSetValuesAdic_SeqAIJ(a->B,advalues);CHKERRQ(ierr); 3926779c1a83SBarry Smith PetscFunctionReturn(0); 3927779c1a83SBarry Smith } 3928dcf5cc72SBarry Smith #endif 3929779c1a83SBarry Smith 3930779c1a83SBarry Smith #undef __FUNCT__ 3931779c1a83SBarry Smith #define __FUNCT__ "MatSetValuesAdifor_MPIAIJ" 3932b1d57f15SBarry Smith PetscErrorCode MatSetValuesAdifor_MPIAIJ(Mat A,PetscInt nl,void *advalues) 3933779c1a83SBarry Smith { 3934779c1a83SBarry Smith Mat_MPIAIJ *a = (Mat_MPIAIJ*)A->data; 3935dfbe8321SBarry Smith PetscErrorCode ierr; 3936779c1a83SBarry Smith 3937779c1a83SBarry Smith PetscFunctionBegin; 3938779c1a83SBarry Smith ierr = MatSetValuesAdifor_SeqAIJ(a->A,nl,advalues);CHKERRQ(ierr); 3939779c1a83SBarry Smith ierr = MatSetValuesAdifor_SeqAIJ(a->B,nl,advalues);CHKERRQ(ierr); 3940a2243be0SBarry Smith PetscFunctionReturn(0); 3941a2243be0SBarry Smith } 3942c5d6d63eSBarry Smith 3943c5d6d63eSBarry Smith #undef __FUNCT__ 394451dd7536SBarry Smith #define __FUNCT__ "MatMerge" 
3945bc08b0f1SBarry Smith /*@ 394651dd7536SBarry Smith MatMerge - Creates a single large PETSc matrix by concatinating sequential 394751dd7536SBarry Smith matrices from each processor 3948c5d6d63eSBarry Smith 3949c5d6d63eSBarry Smith Collective on MPI_Comm 3950c5d6d63eSBarry Smith 3951c5d6d63eSBarry Smith Input Parameters: 395251dd7536SBarry Smith + comm - the communicators the parallel matrix will live on 3953d6bb3c2dSHong Zhang . inmat - the input sequential matrices 39540e36024fSHong Zhang . n - number of local columns (or PETSC_DECIDE) 3955d6bb3c2dSHong Zhang - scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX 395651dd7536SBarry Smith 395751dd7536SBarry Smith Output Parameter: 395851dd7536SBarry Smith . outmat - the parallel matrix generated 3959c5d6d63eSBarry Smith 39607e25d530SSatish Balay Level: advanced 39617e25d530SSatish Balay 3962f08fae4eSHong Zhang Notes: The number of columns of the matrix in EACH processor MUST be the same. 3963c5d6d63eSBarry Smith 3964c5d6d63eSBarry Smith @*/ 3965be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatMerge(MPI_Comm comm,Mat inmat,PetscInt n,MatReuse scall,Mat *outmat) 3966c5d6d63eSBarry Smith { 3967dfbe8321SBarry Smith PetscErrorCode ierr; 3968b7940d39SSatish Balay PetscInt m,N,i,rstart,nnz,Ii,*dnz,*onz; 3969ba8c8a56SBarry Smith PetscInt *indx; 3970ba8c8a56SBarry Smith PetscScalar *values; 3971c5d6d63eSBarry Smith 3972c5d6d63eSBarry Smith PetscFunctionBegin; 39730e36024fSHong Zhang ierr = MatGetSize(inmat,&m,&N);CHKERRQ(ierr); 3974d6bb3c2dSHong Zhang if (scall == MAT_INITIAL_MATRIX){ 3975d6bb3c2dSHong Zhang /* count nonzeros in each row, for diagonal and off diagonal portion of matrix */ 39760e36024fSHong Zhang if (n == PETSC_DECIDE){ 3977357abbc8SBarry Smith ierr = PetscSplitOwnership(comm,&n,&N);CHKERRQ(ierr); 39780e36024fSHong Zhang } 3979357abbc8SBarry Smith ierr = MPI_Scan(&m, &rstart,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr); 3980357abbc8SBarry Smith rstart -= m; 3981d6bb3c2dSHong Zhang 3982d6bb3c2dSHong 
Zhang ierr = MatPreallocateInitialize(comm,m,n,dnz,onz);CHKERRQ(ierr); 3983d6bb3c2dSHong Zhang for (i=0;i<m;i++) { 3984ba8c8a56SBarry Smith ierr = MatGetRow_SeqAIJ(inmat,i,&nnz,&indx,PETSC_NULL);CHKERRQ(ierr); 3985d6bb3c2dSHong Zhang ierr = MatPreallocateSet(i+rstart,nnz,indx,dnz,onz);CHKERRQ(ierr); 3986ba8c8a56SBarry Smith ierr = MatRestoreRow_SeqAIJ(inmat,i,&nnz,&indx,PETSC_NULL);CHKERRQ(ierr); 3987d6bb3c2dSHong Zhang } 3988d6bb3c2dSHong Zhang /* This routine will ONLY return MPIAIJ type matrix */ 3989f69a0ea3SMatthew Knepley ierr = MatCreate(comm,outmat);CHKERRQ(ierr); 3990f69a0ea3SMatthew Knepley ierr = MatSetSizes(*outmat,m,n,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr); 3991d6bb3c2dSHong Zhang ierr = MatSetType(*outmat,MATMPIAIJ);CHKERRQ(ierr); 3992d6bb3c2dSHong Zhang ierr = MatMPIAIJSetPreallocation(*outmat,0,dnz,0,onz);CHKERRQ(ierr); 3993d6bb3c2dSHong Zhang ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr); 3994d6bb3c2dSHong Zhang 3995d6bb3c2dSHong Zhang } else if (scall == MAT_REUSE_MATRIX){ 3996d6bb3c2dSHong Zhang ierr = MatGetOwnershipRange(*outmat,&rstart,PETSC_NULL);CHKERRQ(ierr); 3997d6bb3c2dSHong Zhang } else { 3998e32f2f54SBarry Smith SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid MatReuse %d",(int)scall); 3999d6bb3c2dSHong Zhang } 4000d6bb3c2dSHong Zhang 4001d6bb3c2dSHong Zhang for (i=0;i<m;i++) { 4002ba8c8a56SBarry Smith ierr = MatGetRow_SeqAIJ(inmat,i,&nnz,&indx,&values);CHKERRQ(ierr); 4003b7940d39SSatish Balay Ii = i + rstart; 4004b7940d39SSatish Balay ierr = MatSetValues(*outmat,1,&Ii,nnz,indx,values,INSERT_VALUES);CHKERRQ(ierr); 4005ba8c8a56SBarry Smith ierr = MatRestoreRow_SeqAIJ(inmat,i,&nnz,&indx,&values);CHKERRQ(ierr); 4006d6bb3c2dSHong Zhang } 4007d6bb3c2dSHong Zhang ierr = MatDestroy(inmat);CHKERRQ(ierr); 4008d6bb3c2dSHong Zhang ierr = MatAssemblyBegin(*outmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 4009d6bb3c2dSHong Zhang ierr = MatAssemblyEnd(*outmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 401051dd7536SBarry Smith 4011c5d6d63eSBarry 
Smith PetscFunctionReturn(0); 4012c5d6d63eSBarry Smith } 4013c5d6d63eSBarry Smith 4014c5d6d63eSBarry Smith #undef __FUNCT__ 4015c5d6d63eSBarry Smith #define __FUNCT__ "MatFileSplit" 4016dfbe8321SBarry Smith PetscErrorCode MatFileSplit(Mat A,char *outfile) 4017c5d6d63eSBarry Smith { 4018dfbe8321SBarry Smith PetscErrorCode ierr; 401932dcc486SBarry Smith PetscMPIInt rank; 4020b1d57f15SBarry Smith PetscInt m,N,i,rstart,nnz; 4021de4209c5SBarry Smith size_t len; 4022b1d57f15SBarry Smith const PetscInt *indx; 4023c5d6d63eSBarry Smith PetscViewer out; 4024c5d6d63eSBarry Smith char *name; 4025c5d6d63eSBarry Smith Mat B; 4026b3cc6726SBarry Smith const PetscScalar *values; 4027c5d6d63eSBarry Smith 4028c5d6d63eSBarry Smith PetscFunctionBegin; 4029c5d6d63eSBarry Smith ierr = MatGetLocalSize(A,&m,0);CHKERRQ(ierr); 4030c5d6d63eSBarry Smith ierr = MatGetSize(A,0,&N);CHKERRQ(ierr); 4031f204ca49SKris Buschelman /* Should this be the type of the diagonal block of A? */ 4032f69a0ea3SMatthew Knepley ierr = MatCreate(PETSC_COMM_SELF,&B);CHKERRQ(ierr); 4033f69a0ea3SMatthew Knepley ierr = MatSetSizes(B,m,N,m,N);CHKERRQ(ierr); 4034f204ca49SKris Buschelman ierr = MatSetType(B,MATSEQAIJ);CHKERRQ(ierr); 4035f204ca49SKris Buschelman ierr = MatSeqAIJSetPreallocation(B,0,PETSC_NULL);CHKERRQ(ierr); 4036c5d6d63eSBarry Smith ierr = MatGetOwnershipRange(A,&rstart,0);CHKERRQ(ierr); 4037c5d6d63eSBarry Smith for (i=0;i<m;i++) { 4038c5d6d63eSBarry Smith ierr = MatGetRow(A,i+rstart,&nnz,&indx,&values);CHKERRQ(ierr); 4039c5d6d63eSBarry Smith ierr = MatSetValues(B,1,&i,nnz,indx,values,INSERT_VALUES);CHKERRQ(ierr); 4040c5d6d63eSBarry Smith ierr = MatRestoreRow(A,i+rstart,&nnz,&indx,&values);CHKERRQ(ierr); 4041c5d6d63eSBarry Smith } 4042c5d6d63eSBarry Smith ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 4043c5d6d63eSBarry Smith ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 4044c5d6d63eSBarry Smith 40457adad957SLisandro Dalcin ierr = 
MPI_Comm_rank(((PetscObject)A)->comm,&rank);CHKERRQ(ierr); 4046c5d6d63eSBarry Smith ierr = PetscStrlen(outfile,&len);CHKERRQ(ierr); 4047c5d6d63eSBarry Smith ierr = PetscMalloc((len+5)*sizeof(char),&name);CHKERRQ(ierr); 4048c5d6d63eSBarry Smith sprintf(name,"%s.%d",outfile,rank); 4049852598b0SBarry Smith ierr = PetscViewerBinaryOpen(PETSC_COMM_SELF,name,FILE_MODE_APPEND,&out);CHKERRQ(ierr); 4050c5d6d63eSBarry Smith ierr = PetscFree(name); 4051c5d6d63eSBarry Smith ierr = MatView(B,out);CHKERRQ(ierr); 4052c5d6d63eSBarry Smith ierr = PetscViewerDestroy(out);CHKERRQ(ierr); 4053c5d6d63eSBarry Smith ierr = MatDestroy(B);CHKERRQ(ierr); 4054c5d6d63eSBarry Smith PetscFunctionReturn(0); 4055c5d6d63eSBarry Smith } 4056e5f2cdd8SHong Zhang 405751a7d1a8SHong Zhang EXTERN PetscErrorCode MatDestroy_MPIAIJ(Mat); 405851a7d1a8SHong Zhang #undef __FUNCT__ 405951a7d1a8SHong Zhang #define __FUNCT__ "MatDestroy_MPIAIJ_SeqsToMPI" 4060be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatDestroy_MPIAIJ_SeqsToMPI(Mat A) 406151a7d1a8SHong Zhang { 406251a7d1a8SHong Zhang PetscErrorCode ierr; 4063671beff6SHong Zhang Mat_Merge_SeqsToMPI *merge; 4064776b82aeSLisandro Dalcin PetscContainer container; 406551a7d1a8SHong Zhang 406651a7d1a8SHong Zhang PetscFunctionBegin; 4067671beff6SHong Zhang ierr = PetscObjectQuery((PetscObject)A,"MatMergeSeqsToMPI",(PetscObject *)&container);CHKERRQ(ierr); 4068671beff6SHong Zhang if (container) { 4069776b82aeSLisandro Dalcin ierr = PetscContainerGetPointer(container,(void **)&merge);CHKERRQ(ierr); 407051a7d1a8SHong Zhang ierr = PetscFree(merge->id_r);CHKERRQ(ierr); 40713e06a4e6SHong Zhang ierr = PetscFree(merge->len_s);CHKERRQ(ierr); 40723e06a4e6SHong Zhang ierr = PetscFree(merge->len_r);CHKERRQ(ierr); 407351a7d1a8SHong Zhang ierr = PetscFree(merge->bi);CHKERRQ(ierr); 407451a7d1a8SHong Zhang ierr = PetscFree(merge->bj);CHKERRQ(ierr); 4075533163c2SBarry Smith ierr = PetscFree(merge->buf_ri[0]);CHKERRQ(ierr); 407602c68681SHong Zhang ierr = 
PetscFree(merge->buf_ri);CHKERRQ(ierr); 4077533163c2SBarry Smith ierr = PetscFree(merge->buf_rj[0]);CHKERRQ(ierr); 407802c68681SHong Zhang ierr = PetscFree(merge->buf_rj);CHKERRQ(ierr); 407905b42c5fSBarry Smith ierr = PetscFree(merge->coi);CHKERRQ(ierr); 408005b42c5fSBarry Smith ierr = PetscFree(merge->coj);CHKERRQ(ierr); 408105b42c5fSBarry Smith ierr = PetscFree(merge->owners_co);CHKERRQ(ierr); 408226283091SBarry Smith ierr = PetscLayoutDestroy(merge->rowmap);CHKERRQ(ierr); 4083671beff6SHong Zhang 4084776b82aeSLisandro Dalcin ierr = PetscContainerDestroy(container);CHKERRQ(ierr); 4085671beff6SHong Zhang ierr = PetscObjectCompose((PetscObject)A,"MatMergeSeqsToMPI",0);CHKERRQ(ierr); 4086671beff6SHong Zhang } 408751a7d1a8SHong Zhang ierr = PetscFree(merge);CHKERRQ(ierr); 408851a7d1a8SHong Zhang 408951a7d1a8SHong Zhang ierr = MatDestroy_MPIAIJ(A);CHKERRQ(ierr); 409051a7d1a8SHong Zhang PetscFunctionReturn(0); 409151a7d1a8SHong Zhang } 409251a7d1a8SHong Zhang 40937c4f633dSBarry Smith #include "../src/mat/utils/freespace.h" 4094be0fcf8dSHong Zhang #include "petscbt.h" 40954ebed01fSBarry Smith 4096e5f2cdd8SHong Zhang #undef __FUNCT__ 409738f152feSBarry Smith #define __FUNCT__ "MatMerge_SeqsToMPINumeric" 4098e5f2cdd8SHong Zhang /*@C 4099f08fae4eSHong Zhang MatMerge_SeqsToMPI - Creates a MPIAIJ matrix by adding sequential 4100e5f2cdd8SHong Zhang matrices from each processor 4101e5f2cdd8SHong Zhang 4102e5f2cdd8SHong Zhang Collective on MPI_Comm 4103e5f2cdd8SHong Zhang 4104e5f2cdd8SHong Zhang Input Parameters: 4105e5f2cdd8SHong Zhang + comm - the communicators the parallel matrix will live on 4106f08fae4eSHong Zhang . seqmat - the input sequential matrices 41070e36024fSHong Zhang . m - number of local rows (or PETSC_DECIDE) 41080e36024fSHong Zhang . n - number of local columns (or PETSC_DECIDE) 4109e5f2cdd8SHong Zhang - scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX 4110e5f2cdd8SHong Zhang 4111e5f2cdd8SHong Zhang Output Parameter: 4112f08fae4eSHong Zhang . 
mpimat - the parallel matrix generated 4113e5f2cdd8SHong Zhang 4114e5f2cdd8SHong Zhang Level: advanced 4115e5f2cdd8SHong Zhang 4116affca5deSHong Zhang Notes: 4117affca5deSHong Zhang The dimensions of the sequential matrix in each processor MUST be the same. 4118affca5deSHong Zhang The input seqmat is included into the container "Mat_Merge_SeqsToMPI", and will be 4119affca5deSHong Zhang destroyed when mpimat is destroyed. Call PetscObjectQuery() to access seqmat. 4120e5f2cdd8SHong Zhang @*/ 4121be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatMerge_SeqsToMPINumeric(Mat seqmat,Mat mpimat) 412255d1abb9SHong Zhang { 412355d1abb9SHong Zhang PetscErrorCode ierr; 41247adad957SLisandro Dalcin MPI_Comm comm=((PetscObject)mpimat)->comm; 412555d1abb9SHong Zhang Mat_SeqAIJ *a=(Mat_SeqAIJ*)seqmat->data; 4126b1d57f15SBarry Smith PetscMPIInt size,rank,taga,*len_s; 4127d0f46423SBarry Smith PetscInt N=mpimat->cmap->N,i,j,*owners,*ai=a->i,*aj=a->j; 4128b1d57f15SBarry Smith PetscInt proc,m; 4129b1d57f15SBarry Smith PetscInt **buf_ri,**buf_rj; 4130b1d57f15SBarry Smith PetscInt k,anzi,*bj_i,*bi,*bj,arow,bnzi,nextaj; 4131b1d57f15SBarry Smith PetscInt nrows,**buf_ri_k,**nextrow,**nextai; 413255d1abb9SHong Zhang MPI_Request *s_waits,*r_waits; 413355d1abb9SHong Zhang MPI_Status *status; 4134a77337e4SBarry Smith MatScalar *aa=a->a; 4135dd6ea824SBarry Smith MatScalar **abuf_r,*ba_i; 413655d1abb9SHong Zhang Mat_Merge_SeqsToMPI *merge; 4137776b82aeSLisandro Dalcin PetscContainer container; 413855d1abb9SHong Zhang 413955d1abb9SHong Zhang PetscFunctionBegin; 41404ebed01fSBarry Smith ierr = PetscLogEventBegin(MAT_Seqstompinum,seqmat,0,0,0);CHKERRQ(ierr); 41413c2c1871SHong Zhang 414255d1abb9SHong Zhang ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 414355d1abb9SHong Zhang ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 414455d1abb9SHong Zhang 414555d1abb9SHong Zhang ierr = PetscObjectQuery((PetscObject)mpimat,"MatMergeSeqsToMPI",(PetscObject *)&container);CHKERRQ(ierr); 
414655d1abb9SHong Zhang if (container) { 4147776b82aeSLisandro Dalcin ierr = PetscContainerGetPointer(container,(void **)&merge);CHKERRQ(ierr); 414855d1abb9SHong Zhang } 414955d1abb9SHong Zhang bi = merge->bi; 415055d1abb9SHong Zhang bj = merge->bj; 415155d1abb9SHong Zhang buf_ri = merge->buf_ri; 415255d1abb9SHong Zhang buf_rj = merge->buf_rj; 415355d1abb9SHong Zhang 415455d1abb9SHong Zhang ierr = PetscMalloc(size*sizeof(MPI_Status),&status);CHKERRQ(ierr); 41557a2fc3feSBarry Smith owners = merge->rowmap->range; 415655d1abb9SHong Zhang len_s = merge->len_s; 415755d1abb9SHong Zhang 415855d1abb9SHong Zhang /* send and recv matrix values */ 415955d1abb9SHong Zhang /*-----------------------------*/ 4160357abbc8SBarry Smith ierr = PetscObjectGetNewTag((PetscObject)mpimat,&taga);CHKERRQ(ierr); 416155d1abb9SHong Zhang ierr = PetscPostIrecvScalar(comm,taga,merge->nrecv,merge->id_r,merge->len_r,&abuf_r,&r_waits);CHKERRQ(ierr); 416255d1abb9SHong Zhang 416355d1abb9SHong Zhang ierr = PetscMalloc((merge->nsend+1)*sizeof(MPI_Request),&s_waits);CHKERRQ(ierr); 416455d1abb9SHong Zhang for (proc=0,k=0; proc<size; proc++){ 416555d1abb9SHong Zhang if (!len_s[proc]) continue; 416655d1abb9SHong Zhang i = owners[proc]; 416755d1abb9SHong Zhang ierr = MPI_Isend(aa+ai[i],len_s[proc],MPIU_MATSCALAR,proc,taga,comm,s_waits+k);CHKERRQ(ierr); 416855d1abb9SHong Zhang k++; 416955d1abb9SHong Zhang } 417055d1abb9SHong Zhang 41710c468ba9SBarry Smith if (merge->nrecv) {ierr = MPI_Waitall(merge->nrecv,r_waits,status);CHKERRQ(ierr);} 41720c468ba9SBarry Smith if (merge->nsend) {ierr = MPI_Waitall(merge->nsend,s_waits,status);CHKERRQ(ierr);} 417355d1abb9SHong Zhang ierr = PetscFree(status);CHKERRQ(ierr); 417455d1abb9SHong Zhang 417555d1abb9SHong Zhang ierr = PetscFree(s_waits);CHKERRQ(ierr); 417655d1abb9SHong Zhang ierr = PetscFree(r_waits);CHKERRQ(ierr); 417755d1abb9SHong Zhang 417855d1abb9SHong Zhang /* insert mat values of mpimat */ 417955d1abb9SHong Zhang /*----------------------------*/ 
4180a77337e4SBarry Smith ierr = PetscMalloc(N*sizeof(PetscScalar),&ba_i);CHKERRQ(ierr); 41810572522cSBarry Smith ierr = PetscMalloc3(merge->nrecv,PetscInt*,&buf_ri_k,merge->nrecv,PetscInt*,&nextrow,merge->nrecv,PetscInt*,&nextai);CHKERRQ(ierr); 418255d1abb9SHong Zhang 418355d1abb9SHong Zhang for (k=0; k<merge->nrecv; k++){ 418455d1abb9SHong Zhang buf_ri_k[k] = buf_ri[k]; /* beginning of k-th recved i-structure */ 418555d1abb9SHong Zhang nrows = *(buf_ri_k[k]); 418655d1abb9SHong Zhang nextrow[k] = buf_ri_k[k]+1; /* next row number of k-th recved i-structure */ 418755d1abb9SHong Zhang nextai[k] = buf_ri_k[k] + (nrows + 1);/* poins to the next i-structure of k-th recved i-structure */ 418855d1abb9SHong Zhang } 418955d1abb9SHong Zhang 419055d1abb9SHong Zhang /* set values of ba */ 41917a2fc3feSBarry Smith m = merge->rowmap->n; 419255d1abb9SHong Zhang for (i=0; i<m; i++) { 419355d1abb9SHong Zhang arow = owners[rank] + i; 419455d1abb9SHong Zhang bj_i = bj+bi[i]; /* col indices of the i-th row of mpimat */ 419555d1abb9SHong Zhang bnzi = bi[i+1] - bi[i]; 4196a77337e4SBarry Smith ierr = PetscMemzero(ba_i,bnzi*sizeof(PetscScalar));CHKERRQ(ierr); 419755d1abb9SHong Zhang 419855d1abb9SHong Zhang /* add local non-zero vals of this proc's seqmat into ba */ 419955d1abb9SHong Zhang anzi = ai[arow+1] - ai[arow]; 420055d1abb9SHong Zhang aj = a->j + ai[arow]; 420155d1abb9SHong Zhang aa = a->a + ai[arow]; 420255d1abb9SHong Zhang nextaj = 0; 420355d1abb9SHong Zhang for (j=0; nextaj<anzi; j++){ 420455d1abb9SHong Zhang if (*(bj_i + j) == aj[nextaj]){ /* bcol == acol */ 420555d1abb9SHong Zhang ba_i[j] += aa[nextaj++]; 420655d1abb9SHong Zhang } 420755d1abb9SHong Zhang } 420855d1abb9SHong Zhang 420955d1abb9SHong Zhang /* add received vals into ba */ 421055d1abb9SHong Zhang for (k=0; k<merge->nrecv; k++){ /* k-th received message */ 421155d1abb9SHong Zhang /* i-th row */ 421255d1abb9SHong Zhang if (i == *nextrow[k]) { 421355d1abb9SHong Zhang anzi = *(nextai[k]+1) - *nextai[k]; 
421455d1abb9SHong Zhang aj = buf_rj[k] + *(nextai[k]); 421555d1abb9SHong Zhang aa = abuf_r[k] + *(nextai[k]); 421655d1abb9SHong Zhang nextaj = 0; 421755d1abb9SHong Zhang for (j=0; nextaj<anzi; j++){ 421855d1abb9SHong Zhang if (*(bj_i + j) == aj[nextaj]){ /* bcol == acol */ 421955d1abb9SHong Zhang ba_i[j] += aa[nextaj++]; 422055d1abb9SHong Zhang } 422155d1abb9SHong Zhang } 422255d1abb9SHong Zhang nextrow[k]++; nextai[k]++; 422355d1abb9SHong Zhang } 422455d1abb9SHong Zhang } 422555d1abb9SHong Zhang ierr = MatSetValues(mpimat,1,&arow,bnzi,bj_i,ba_i,INSERT_VALUES);CHKERRQ(ierr); 422655d1abb9SHong Zhang } 422755d1abb9SHong Zhang ierr = MatAssemblyBegin(mpimat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 422855d1abb9SHong Zhang ierr = MatAssemblyEnd(mpimat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 422955d1abb9SHong Zhang 4230533163c2SBarry Smith ierr = PetscFree(abuf_r[0]);CHKERRQ(ierr); 423155d1abb9SHong Zhang ierr = PetscFree(abuf_r);CHKERRQ(ierr); 423255d1abb9SHong Zhang ierr = PetscFree(ba_i);CHKERRQ(ierr); 42331d79065fSBarry Smith ierr = PetscFree3(buf_ri_k,nextrow,nextai);CHKERRQ(ierr); 42344ebed01fSBarry Smith ierr = PetscLogEventEnd(MAT_Seqstompinum,seqmat,0,0,0);CHKERRQ(ierr); 423555d1abb9SHong Zhang PetscFunctionReturn(0); 423655d1abb9SHong Zhang } 423738f152feSBarry Smith 423838f152feSBarry Smith #undef __FUNCT__ 423938f152feSBarry Smith #define __FUNCT__ "MatMerge_SeqsToMPISymbolic" 4240be1d678aSKris Buschelman PetscErrorCode PETSCMAT_DLLEXPORT MatMerge_SeqsToMPISymbolic(MPI_Comm comm,Mat seqmat,PetscInt m,PetscInt n,Mat *mpimat) 4241e5f2cdd8SHong Zhang { 4242f08fae4eSHong Zhang PetscErrorCode ierr; 424355a3bba9SHong Zhang Mat B_mpi; 4244c2234fe3SHong Zhang Mat_SeqAIJ *a=(Mat_SeqAIJ*)seqmat->data; 4245b1d57f15SBarry Smith PetscMPIInt size,rank,tagi,tagj,*len_s,*len_si,*len_ri; 4246b1d57f15SBarry Smith PetscInt **buf_rj,**buf_ri,**buf_ri_k; 4247d0f46423SBarry Smith PetscInt M=seqmat->rmap->n,N=seqmat->cmap->n,i,*owners,*ai=a->i,*aj=a->j; 4248b1d57f15SBarry Smith PetscInt 
len,proc,*dnz,*onz; 4249b1d57f15SBarry Smith PetscInt k,anzi,*bi,*bj,*lnk,nlnk,arow,bnzi,nspacedouble=0; 4250b1d57f15SBarry Smith PetscInt nrows,*buf_s,*buf_si,*buf_si_i,**nextrow,**nextai; 425155d1abb9SHong Zhang MPI_Request *si_waits,*sj_waits,*ri_waits,*rj_waits; 425258cb9c82SHong Zhang MPI_Status *status; 4253a1a86e44SBarry Smith PetscFreeSpaceList free_space=PETSC_NULL,current_space=PETSC_NULL; 4254be0fcf8dSHong Zhang PetscBT lnkbt; 425551a7d1a8SHong Zhang Mat_Merge_SeqsToMPI *merge; 4256776b82aeSLisandro Dalcin PetscContainer container; 425702c68681SHong Zhang 4258e5f2cdd8SHong Zhang PetscFunctionBegin; 42594ebed01fSBarry Smith ierr = PetscLogEventBegin(MAT_Seqstompisym,seqmat,0,0,0);CHKERRQ(ierr); 42603c2c1871SHong Zhang 426138f152feSBarry Smith /* make sure it is a PETSc comm */ 426238f152feSBarry Smith ierr = PetscCommDuplicate(comm,&comm,PETSC_NULL);CHKERRQ(ierr); 4263e5f2cdd8SHong Zhang ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr); 4264e5f2cdd8SHong Zhang ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 426555d1abb9SHong Zhang 426651a7d1a8SHong Zhang ierr = PetscNew(Mat_Merge_SeqsToMPI,&merge);CHKERRQ(ierr); 4267c2234fe3SHong Zhang ierr = PetscMalloc(size*sizeof(MPI_Status),&status);CHKERRQ(ierr); 4268e5f2cdd8SHong Zhang 42696abd8857SHong Zhang /* determine row ownership */ 4270f08fae4eSHong Zhang /*---------------------------------------------------------*/ 427126283091SBarry Smith ierr = PetscLayoutCreate(comm,&merge->rowmap);CHKERRQ(ierr); 427226283091SBarry Smith ierr = PetscLayoutSetLocalSize(merge->rowmap,m);CHKERRQ(ierr); 427326283091SBarry Smith ierr = PetscLayoutSetSize(merge->rowmap,M);CHKERRQ(ierr); 427426283091SBarry Smith ierr = PetscLayoutSetBlockSize(merge->rowmap,1);CHKERRQ(ierr); 427526283091SBarry Smith ierr = PetscLayoutSetUp(merge->rowmap);CHKERRQ(ierr); 4276b1d57f15SBarry Smith ierr = PetscMalloc(size*sizeof(PetscMPIInt),&len_si);CHKERRQ(ierr); 4277b1d57f15SBarry Smith ierr = 
PetscMalloc(size*sizeof(PetscMPIInt),&merge->len_s);CHKERRQ(ierr); 427855d1abb9SHong Zhang 42797a2fc3feSBarry Smith m = merge->rowmap->n; 42807a2fc3feSBarry Smith M = merge->rowmap->N; 42817a2fc3feSBarry Smith owners = merge->rowmap->range; 42826abd8857SHong Zhang 42836abd8857SHong Zhang /* determine the number of messages to send, their lengths */ 42846abd8857SHong Zhang /*---------------------------------------------------------*/ 42853e06a4e6SHong Zhang len_s = merge->len_s; 428651a7d1a8SHong Zhang 42872257cef7SHong Zhang len = 0; /* length of buf_si[] */ 4288c2234fe3SHong Zhang merge->nsend = 0; 4289409913e3SHong Zhang for (proc=0; proc<size; proc++){ 42902257cef7SHong Zhang len_si[proc] = 0; 42913e06a4e6SHong Zhang if (proc == rank){ 42926abd8857SHong Zhang len_s[proc] = 0; 42933e06a4e6SHong Zhang } else { 429402c68681SHong Zhang len_si[proc] = owners[proc+1] - owners[proc] + 1; 42953e06a4e6SHong Zhang len_s[proc] = ai[owners[proc+1]] - ai[owners[proc]]; /* num of rows to be sent to [proc] */ 42963e06a4e6SHong Zhang } 42973e06a4e6SHong Zhang if (len_s[proc]) { 4298c2234fe3SHong Zhang merge->nsend++; 42992257cef7SHong Zhang nrows = 0; 43002257cef7SHong Zhang for (i=owners[proc]; i<owners[proc+1]; i++){ 43012257cef7SHong Zhang if (ai[i+1] > ai[i]) nrows++; 43022257cef7SHong Zhang } 43032257cef7SHong Zhang len_si[proc] = 2*(nrows+1); 43042257cef7SHong Zhang len += len_si[proc]; 4305409913e3SHong Zhang } 430658cb9c82SHong Zhang } 4307409913e3SHong Zhang 43082257cef7SHong Zhang /* determine the number and length of messages to receive for ij-structure */ 43092257cef7SHong Zhang /*-------------------------------------------------------------------------*/ 431051a7d1a8SHong Zhang ierr = PetscGatherNumberOfMessages(comm,PETSC_NULL,len_s,&merge->nrecv);CHKERRQ(ierr); 431155d1abb9SHong Zhang ierr = PetscGatherMessageLengths2(comm,merge->nsend,merge->nrecv,len_s,len_si,&merge->id_r,&merge->len_r,&len_ri);CHKERRQ(ierr); 4312671beff6SHong Zhang 43133e06a4e6SHong Zhang /* 
post the Irecv of j-structure */ 43143e06a4e6SHong Zhang /*-------------------------------*/ 43152c72b5baSSatish Balay ierr = PetscCommGetNewTag(comm,&tagj);CHKERRQ(ierr); 43163e06a4e6SHong Zhang ierr = PetscPostIrecvInt(comm,tagj,merge->nrecv,merge->id_r,merge->len_r,&buf_rj,&rj_waits);CHKERRQ(ierr); 431702c68681SHong Zhang 43183e06a4e6SHong Zhang /* post the Isend of j-structure */ 4319affca5deSHong Zhang /*--------------------------------*/ 43201d79065fSBarry Smith ierr = PetscMalloc2(merge->nsend,MPI_Request,&si_waits,merge->nsend,MPI_Request,&sj_waits);CHKERRQ(ierr); 43213e06a4e6SHong Zhang 43222257cef7SHong Zhang for (proc=0, k=0; proc<size; proc++){ 4323409913e3SHong Zhang if (!len_s[proc]) continue; 432402c68681SHong Zhang i = owners[proc]; 4325b1d57f15SBarry Smith ierr = MPI_Isend(aj+ai[i],len_s[proc],MPIU_INT,proc,tagj,comm,sj_waits+k);CHKERRQ(ierr); 432651a7d1a8SHong Zhang k++; 432751a7d1a8SHong Zhang } 432851a7d1a8SHong Zhang 43293e06a4e6SHong Zhang /* receives and sends of j-structure are complete */ 43303e06a4e6SHong Zhang /*------------------------------------------------*/ 43310c468ba9SBarry Smith if (merge->nrecv) {ierr = MPI_Waitall(merge->nrecv,rj_waits,status);CHKERRQ(ierr);} 43320c468ba9SBarry Smith if (merge->nsend) {ierr = MPI_Waitall(merge->nsend,sj_waits,status);CHKERRQ(ierr);} 433302c68681SHong Zhang 433402c68681SHong Zhang /* send and recv i-structure */ 433502c68681SHong Zhang /*---------------------------*/ 43362c72b5baSSatish Balay ierr = PetscCommGetNewTag(comm,&tagi);CHKERRQ(ierr); 433702c68681SHong Zhang ierr = PetscPostIrecvInt(comm,tagi,merge->nrecv,merge->id_r,len_ri,&buf_ri,&ri_waits);CHKERRQ(ierr); 433802c68681SHong Zhang 4339b1d57f15SBarry Smith ierr = PetscMalloc((len+1)*sizeof(PetscInt),&buf_s);CHKERRQ(ierr); 43403e06a4e6SHong Zhang buf_si = buf_s; /* points to the beginning of k-th msg to be sent */ 43412257cef7SHong Zhang for (proc=0,k=0; proc<size; proc++){ 434202c68681SHong Zhang if (!len_s[proc]) continue; 
43433e06a4e6SHong Zhang /* form outgoing message for i-structure: 43443e06a4e6SHong Zhang buf_si[0]: nrows to be sent 43453e06a4e6SHong Zhang [1:nrows]: row index (global) 43463e06a4e6SHong Zhang [nrows+1:2*nrows+1]: i-structure index 43473e06a4e6SHong Zhang */ 43483e06a4e6SHong Zhang /*-------------------------------------------*/ 43492257cef7SHong Zhang nrows = len_si[proc]/2 - 1; 43503e06a4e6SHong Zhang buf_si_i = buf_si + nrows+1; 43513e06a4e6SHong Zhang buf_si[0] = nrows; 43523e06a4e6SHong Zhang buf_si_i[0] = 0; 43533e06a4e6SHong Zhang nrows = 0; 43543e06a4e6SHong Zhang for (i=owners[proc]; i<owners[proc+1]; i++){ 43553e06a4e6SHong Zhang anzi = ai[i+1] - ai[i]; 43563e06a4e6SHong Zhang if (anzi) { 43573e06a4e6SHong Zhang buf_si_i[nrows+1] = buf_si_i[nrows] + anzi; /* i-structure */ 43583e06a4e6SHong Zhang buf_si[nrows+1] = i-owners[proc]; /* local row index */ 43593e06a4e6SHong Zhang nrows++; 43603e06a4e6SHong Zhang } 43613e06a4e6SHong Zhang } 4362b1d57f15SBarry Smith ierr = MPI_Isend(buf_si,len_si[proc],MPIU_INT,proc,tagi,comm,si_waits+k);CHKERRQ(ierr); 436302c68681SHong Zhang k++; 43642257cef7SHong Zhang buf_si += len_si[proc]; 436502c68681SHong Zhang } 43662257cef7SHong Zhang 43670c468ba9SBarry Smith if (merge->nrecv) {ierr = MPI_Waitall(merge->nrecv,ri_waits,status);CHKERRQ(ierr);} 43680c468ba9SBarry Smith if (merge->nsend) {ierr = MPI_Waitall(merge->nsend,si_waits,status);CHKERRQ(ierr);} 436902c68681SHong Zhang 4370ae15b995SBarry Smith ierr = PetscInfo2(seqmat,"nsend: %D, nrecv: %D\n",merge->nsend,merge->nrecv);CHKERRQ(ierr); 43713e06a4e6SHong Zhang for (i=0; i<merge->nrecv; i++){ 4372ae15b995SBarry Smith ierr = PetscInfo3(seqmat,"recv len_ri=%D, len_rj=%D from [%D]\n",len_ri[i],merge->len_r[i],merge->id_r[i]);CHKERRQ(ierr); 43733e06a4e6SHong Zhang } 43743e06a4e6SHong Zhang 43753e06a4e6SHong Zhang ierr = PetscFree(len_si);CHKERRQ(ierr); 437602c68681SHong Zhang ierr = PetscFree(len_ri);CHKERRQ(ierr); 437702c68681SHong Zhang ierr = 
PetscFree(rj_waits);CHKERRQ(ierr); 43781d79065fSBarry Smith ierr = PetscFree2(si_waits,sj_waits);CHKERRQ(ierr); 43792257cef7SHong Zhang ierr = PetscFree(ri_waits);CHKERRQ(ierr); 43803e06a4e6SHong Zhang ierr = PetscFree(buf_s);CHKERRQ(ierr); 4381bcc1bcd5SHong Zhang ierr = PetscFree(status);CHKERRQ(ierr); 438258cb9c82SHong Zhang 4383bcc1bcd5SHong Zhang /* compute a local seq matrix in each processor */ 4384bcc1bcd5SHong Zhang /*----------------------------------------------*/ 438558cb9c82SHong Zhang /* allocate bi array and free space for accumulating nonzero column info */ 4386b1d57f15SBarry Smith ierr = PetscMalloc((m+1)*sizeof(PetscInt),&bi);CHKERRQ(ierr); 438758cb9c82SHong Zhang bi[0] = 0; 438858cb9c82SHong Zhang 4389be0fcf8dSHong Zhang /* create and initialize a linked list */ 4390be0fcf8dSHong Zhang nlnk = N+1; 4391be0fcf8dSHong Zhang ierr = PetscLLCreate(N,N,nlnk,lnk,lnkbt);CHKERRQ(ierr); 439258cb9c82SHong Zhang 4393bcc1bcd5SHong Zhang /* initial FreeSpace size is 2*(num of local nnz(seqmat)) */ 439458cb9c82SHong Zhang len = 0; 4395bcc1bcd5SHong Zhang len = ai[owners[rank+1]] - ai[owners[rank]]; 4396a1a86e44SBarry Smith ierr = PetscFreeSpaceGet((PetscInt)(2*len+1),&free_space);CHKERRQ(ierr); 439758cb9c82SHong Zhang current_space = free_space; 439858cb9c82SHong Zhang 4399bcc1bcd5SHong Zhang /* determine symbolic info for each local row */ 44000572522cSBarry Smith ierr = PetscMalloc3(merge->nrecv,PetscInt*,&buf_ri_k,merge->nrecv,PetscInt*,&nextrow,merge->nrecv,PetscInt*,&nextai);CHKERRQ(ierr); 44011d79065fSBarry Smith 44023e06a4e6SHong Zhang for (k=0; k<merge->nrecv; k++){ 44032257cef7SHong Zhang buf_ri_k[k] = buf_ri[k]; /* beginning of k-th recved i-structure */ 44043e06a4e6SHong Zhang nrows = *buf_ri_k[k]; 44053e06a4e6SHong Zhang nextrow[k] = buf_ri_k[k] + 1; /* next row number of k-th recved i-structure */ 44062257cef7SHong Zhang nextai[k] = buf_ri_k[k] + (nrows + 1);/* poins to the next i-structure of k-th recved i-structure */ 44073e06a4e6SHong Zhang } 
44082257cef7SHong Zhang 4409bcc1bcd5SHong Zhang ierr = MatPreallocateInitialize(comm,m,n,dnz,onz);CHKERRQ(ierr); 4410bcc1bcd5SHong Zhang len = 0; 441158cb9c82SHong Zhang for (i=0;i<m;i++) { 441258cb9c82SHong Zhang bnzi = 0; 441358cb9c82SHong Zhang /* add local non-zero cols of this proc's seqmat into lnk */ 441458cb9c82SHong Zhang arow = owners[rank] + i; 441558cb9c82SHong Zhang anzi = ai[arow+1] - ai[arow]; 441658cb9c82SHong Zhang aj = a->j + ai[arow]; 4417be0fcf8dSHong Zhang ierr = PetscLLAdd(anzi,aj,N,nlnk,lnk,lnkbt);CHKERRQ(ierr); 441858cb9c82SHong Zhang bnzi += nlnk; 441958cb9c82SHong Zhang /* add received col data into lnk */ 442051a7d1a8SHong Zhang for (k=0; k<merge->nrecv; k++){ /* k-th received message */ 442155d1abb9SHong Zhang if (i == *nextrow[k]) { /* i-th row */ 44223e06a4e6SHong Zhang anzi = *(nextai[k]+1) - *nextai[k]; 44233e06a4e6SHong Zhang aj = buf_rj[k] + *nextai[k]; 44243e06a4e6SHong Zhang ierr = PetscLLAdd(anzi,aj,N,nlnk,lnk,lnkbt);CHKERRQ(ierr); 44253e06a4e6SHong Zhang bnzi += nlnk; 44263e06a4e6SHong Zhang nextrow[k]++; nextai[k]++; 44273e06a4e6SHong Zhang } 442858cb9c82SHong Zhang } 4429bcc1bcd5SHong Zhang if (len < bnzi) len = bnzi; /* =max(bnzi) */ 443058cb9c82SHong Zhang 443158cb9c82SHong Zhang /* if free space is not available, make more free space */ 443258cb9c82SHong Zhang if (current_space->local_remaining<bnzi) { 44334238b7adSHong Zhang ierr = PetscFreeSpaceGet(bnzi+current_space->total_array_size,¤t_space);CHKERRQ(ierr); 443458cb9c82SHong Zhang nspacedouble++; 443558cb9c82SHong Zhang } 443658cb9c82SHong Zhang /* copy data into free space, then initialize lnk */ 4437be0fcf8dSHong Zhang ierr = PetscLLClean(N,N,bnzi,lnk,current_space->array,lnkbt);CHKERRQ(ierr); 4438bcc1bcd5SHong Zhang ierr = MatPreallocateSet(i+owners[rank],bnzi,current_space->array,dnz,onz);CHKERRQ(ierr); 4439bcc1bcd5SHong Zhang 444058cb9c82SHong Zhang current_space->array += bnzi; 444158cb9c82SHong Zhang current_space->local_used += bnzi; 444258cb9c82SHong Zhang 
current_space->local_remaining -= bnzi; 444358cb9c82SHong Zhang 444458cb9c82SHong Zhang bi[i+1] = bi[i] + bnzi; 444558cb9c82SHong Zhang } 4446bcc1bcd5SHong Zhang 44471d79065fSBarry Smith ierr = PetscFree3(buf_ri_k,nextrow,nextai);CHKERRQ(ierr); 4448bcc1bcd5SHong Zhang 4449b1d57f15SBarry Smith ierr = PetscMalloc((bi[m]+1)*sizeof(PetscInt),&bj);CHKERRQ(ierr); 4450a1a86e44SBarry Smith ierr = PetscFreeSpaceContiguous(&free_space,bj);CHKERRQ(ierr); 4451be0fcf8dSHong Zhang ierr = PetscLLDestroy(lnk,lnkbt);CHKERRQ(ierr); 4452409913e3SHong Zhang 4453bcc1bcd5SHong Zhang /* create symbolic parallel matrix B_mpi */ 4454bcc1bcd5SHong Zhang /*---------------------------------------*/ 4455f69a0ea3SMatthew Knepley ierr = MatCreate(comm,&B_mpi);CHKERRQ(ierr); 445654b84b50SHong Zhang if (n==PETSC_DECIDE) { 4457f69a0ea3SMatthew Knepley ierr = MatSetSizes(B_mpi,m,n,PETSC_DETERMINE,N);CHKERRQ(ierr); 445854b84b50SHong Zhang } else { 4459f69a0ea3SMatthew Knepley ierr = MatSetSizes(B_mpi,m,n,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr); 446054b84b50SHong Zhang } 4461bcc1bcd5SHong Zhang ierr = MatSetType(B_mpi,MATMPIAIJ);CHKERRQ(ierr); 4462bcc1bcd5SHong Zhang ierr = MatMPIAIJSetPreallocation(B_mpi,0,dnz,0,onz);CHKERRQ(ierr); 4463bcc1bcd5SHong Zhang ierr = MatPreallocateFinalize(dnz,onz);CHKERRQ(ierr); 446458cb9c82SHong Zhang 44656abd8857SHong Zhang /* B_mpi is not ready for use - assembly will be done by MatMerge_SeqsToMPINumeric() */ 44666abd8857SHong Zhang B_mpi->assembled = PETSC_FALSE; 4467affca5deSHong Zhang B_mpi->ops->destroy = MatDestroy_MPIAIJ_SeqsToMPI; 4468affca5deSHong Zhang merge->bi = bi; 4469affca5deSHong Zhang merge->bj = bj; 447002c68681SHong Zhang merge->buf_ri = buf_ri; 447102c68681SHong Zhang merge->buf_rj = buf_rj; 4472de0260b3SHong Zhang merge->coi = PETSC_NULL; 4473de0260b3SHong Zhang merge->coj = PETSC_NULL; 4474de0260b3SHong Zhang merge->owners_co = PETSC_NULL; 4475affca5deSHong Zhang 4476affca5deSHong Zhang /* attach the supporting struct to B_mpi for reuse */ 
  ierr = PetscContainerCreate(PETSC_COMM_SELF,&container);CHKERRQ(ierr);
  ierr = PetscContainerSetPointer(container,merge);CHKERRQ(ierr);
  ierr = PetscObjectCompose((PetscObject)B_mpi,"MatMergeSeqsToMPI",(PetscObject)container);CHKERRQ(ierr);
  *mpimat = B_mpi;

  ierr = PetscCommDestroy(&comm);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(MAT_Seqstompisym,seqmat,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMerge_SeqsToMPI"
/*
   MatMerge_SeqsToMPI - driver that merges per-process SeqAIJ matrices into one
   parallel MPIAIJ matrix.

   For MAT_INITIAL_MATRIX it first runs the symbolic phase, which creates *mpimat
   and attaches the supporting merge structure to it; the numeric phase then fills
   in the values.  For any other MatReuse value only the numeric phase is run,
   refilling the previously created *mpimat.
*/
PetscErrorCode PETSCMAT_DLLEXPORT MatMerge_SeqsToMPI(MPI_Comm comm,Mat seqmat,PetscInt m,PetscInt n,MatReuse scall,Mat *mpimat)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(MAT_Seqstompi,seqmat,0,0,0);CHKERRQ(ierr);
  if (scall == MAT_INITIAL_MATRIX){
    ierr = MatMerge_SeqsToMPISymbolic(comm,seqmat,m,n,mpimat);CHKERRQ(ierr);
  }
  ierr = MatMerge_SeqsToMPINumeric(seqmat,*mpimat);CHKERRQ(ierr);
  ierr = PetscLogEventEnd(MAT_Seqstompi,seqmat,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetLocalMat"
/*@
    MatGetLocalMat - Creates a SeqAIJ matrix by taking all its local rows

    Not Collective

   Input Parameters:
+    A - the matrix
.    scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX

   Output Parameter:
.    A_loc - the local sequential matrix generated

    Level: developer

@*/
PetscErrorCode PETSCMAT_DLLEXPORT MatGetLocalMat(Mat A,MatReuse scall,Mat *A_loc)
{
  PetscErrorCode ierr;
  Mat_MPIAIJ     *mpimat=(Mat_MPIAIJ*)A->data;
  /* a = diagonal block, b = off-diagonal block of the MPIAIJ matrix */
  Mat_SeqAIJ     *mat,*a=(Mat_SeqAIJ*)(mpimat->A)->data,*b=(Mat_SeqAIJ*)(mpimat->B)->data;
  /* cmap = garray: maps a local column index of the off-diagonal block to its global column */
  PetscInt       *ai=a->i,*aj=a->j,*bi=b->i,*bj=b->j,*cmap=mpimat->garray;
  MatScalar      *aa=a->a,*ba=b->a,*cam;
  PetscScalar    *ca;
  PetscInt       am=A->rmap->n,i,j,k,cstart=A->cmap->rstart;
  PetscInt       *ci,*cj,col,ncols_d,ncols_o,jo;

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(MAT_Getlocalmat,A,0,0,0);CHKERRQ(ierr);
  if (scall == MAT_INITIAL_MATRIX){
    /* row i of the merged matrix holds all entries of row i of both blocks */
    ierr = PetscMalloc((1+am)*sizeof(PetscInt),&ci);CHKERRQ(ierr);
    ci[0] = 0;
    for (i=0; i<am; i++){
      ci[i+1] = ci[i] + (ai[i+1] - ai[i]) + (bi[i+1] - bi[i]);
    }
    ierr = PetscMalloc((1+ci[am])*sizeof(PetscInt),&cj);CHKERRQ(ierr);
    ierr = PetscMalloc((1+ci[am])*sizeof(PetscScalar),&ca);CHKERRQ(ierr);
    k = 0;
    for (i=0; i<am; i++) {
      ncols_o = bi[i+1] - bi[i];
      ncols_d = ai[i+1] - ai[i];
      /* off-diagonal portion of A: global columns left of the diagonal block
         (cmap[] is sorted per row, so stop at the first column >= cstart) */
      for (jo=0; jo<ncols_o; jo++) {
        col = cmap[*bj];
        if (col >= cstart) break;
        cj[k] = col; bj++;
        ca[k++] = *ba++;
      }
      /* diagonal portion of A: shift local column indices to global */
      for (j=0; j<ncols_d; j++) {
        cj[k] = cstart + *aj++;
        ca[k++] = *aa++;
      }
      /* off-diagonal portion of A: remaining columns right of the diagonal block */
      for (j=jo; j<ncols_o; j++) {
        cj[k] = cmap[*bj++];
        ca[k++] = *ba++;
      }
    }
    /* put together the new matrix */
    ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,am,A->cmap->N,ci,cj,ca,A_loc);CHKERRQ(ierr);
    /* MatCreateSeqAIJWithArrays flags matrix so PETSc doesn't free the user's arrays. */
    /* Since these are PETSc arrays, change flags to free them as necessary.
 */
    mat = (Mat_SeqAIJ*)(*A_loc)->data;
    mat->free_a  = PETSC_TRUE;
    mat->free_ij = PETSC_TRUE;
    mat->nonew   = 0;
  } else if (scall == MAT_REUSE_MATRIX){
    /* reuse the i/j structure of *A_loc and only refresh the values;
       assumes the nonzero pattern of A is unchanged since the initial call
       (the j pointers are walked in exactly the same order as above) */
    mat=(Mat_SeqAIJ*)(*A_loc)->data;
    ci = mat->i; cj = mat->j; cam = mat->a;
    for (i=0; i<am; i++) {
      /* off-diagonal portion of A */
      ncols_o = bi[i+1] - bi[i];
      for (jo=0; jo<ncols_o; jo++) {
        col = cmap[*bj];
        if (col >= cstart) break;
        *cam++ = *ba++; bj++;
      }
      /* diagonal portion of A */
      ncols_d = ai[i+1] - ai[i];
      for (j=0; j<ncols_d; j++) *cam++ = *aa++;
      /* off-diagonal portion of A */
      for (j=jo; j<ncols_o; j++) {
        *cam++ = *ba++; bj++;
      }
    }
  } else {
    SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Invalid MatReuse %d",(int)scall);
  }

  ierr = PetscLogEventEnd(MAT_Getlocalmat,A,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetLocalMatCondensed"
/*@C
    MatGetLocalMatCondensed - Creates a SeqAIJ matrix by taking all its local rows and NON-ZERO columns

    Not Collective

   Input Parameters:
+    A - the matrix
.
scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
-   row, col - index sets of rows and columns to extract (or PETSC_NULL)

   Output Parameter:
.    A_loc - the local sequential matrix generated

    Level: developer

@*/
PetscErrorCode PETSCMAT_DLLEXPORT MatGetLocalMatCondensed(Mat A,MatReuse scall,IS *row,IS *col,Mat *A_loc)
{
  Mat_MPIAIJ     *a=(Mat_MPIAIJ*)A->data;
  PetscErrorCode ierr;
  PetscInt       i,start,end,ncols,nzA,nzB,*cmap,imark,*idx;
  IS             isrowa,iscola;
  Mat            *aloc;

  PetscFunctionBegin;
  ierr = PetscLogEventBegin(MAT_Getlocalmatcondensed,A,0,0,0);CHKERRQ(ierr);
  /* default row IS: all locally owned rows */
  if (!row){
    start = A->rmap->rstart; end = A->rmap->rend;
    ierr = ISCreateStride(PETSC_COMM_SELF,end-start,start,1,&isrowa);CHKERRQ(ierr);
  } else {
    isrowa = *row;
  }
  /* default column IS: global columns with at least one local nonzero,
     built in ascending order: off-diagonal cols < cstart, the full
     diagonal block, then off-diagonal cols > local range */
  if (!col){
    start = A->cmap->rstart;
    cmap  = a->garray;
    nzA   = a->A->cmap->n;
    nzB   = a->B->cmap->n;
    ierr  = PetscMalloc((nzA+nzB)*sizeof(PetscInt), &idx);CHKERRQ(ierr);
    ncols = 0;
    for (i=0; i<nzB; i++) {
      if (cmap[i] < start) idx[ncols++] = cmap[i];
      else break;
    }
    imark = i;
    for (i=0; i<nzA; i++) idx[ncols++] = start + i;
    for (i=imark; i<nzB; i++) idx[ncols++] = cmap[i];
    /* ownership of idx transfers to the IS (PETSC_OWN_POINTER) */
    ierr = ISCreateGeneral(PETSC_COMM_SELF,ncols,idx,PETSC_OWN_POINTER,&iscola);CHKERRQ(ierr);
  } else {
    iscola = *col;
  }
  /* MatGetSubMatrices expects an array of Mat; on reuse it must contain the old matrix */
  if (scall != MAT_INITIAL_MATRIX){
    ierr = PetscMalloc(sizeof(Mat),&aloc);CHKERRQ(ierr);
    aloc[0] = *A_loc;
  }
  ierr = MatGetSubMatrices(A,1,&isrowa,&iscola,scall,&aloc);CHKERRQ(ierr);
  *A_loc = aloc[0];
  ierr = PetscFree(aloc);CHKERRQ(ierr);
  /* only destroy the index sets we created ourselves; caller-provided ones are untouched */
  if (!row){
    ierr = ISDestroy(isrowa);CHKERRQ(ierr);
  }
  if (!col){
    ierr = ISDestroy(iscola);CHKERRQ(ierr);
  }
  ierr = PetscLogEventEnd(MAT_Getlocalmatcondensed,A,0,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetBrowsOfAcols"
/*@C
    MatGetBrowsOfAcols - Creates a SeqAIJ matrix by taking rows of B that equal to nonzero columns of local A

    Collective on Mat

   Input Parameters:
+    A,B - the matrices in mpiaij format
.    scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX
-    rowb, colb - index sets of rows and columns of B to extract (or PETSC_NULL)

   Output Parameter:
+    rowb, colb - index sets of rows and columns of B to extract
.
brstart - row index of B_seq from which next B->rmap->n rows are taken from B's local rows
-    B_seq - the sequential matrix generated

    Level: developer

@*/
PetscErrorCode PETSCMAT_DLLEXPORT MatGetBrowsOfAcols(Mat A,Mat B,MatReuse scall,IS *rowb,IS *colb,PetscInt *brstart,Mat *B_seq)
{
  Mat_MPIAIJ     *a=(Mat_MPIAIJ*)A->data;
  PetscErrorCode ierr;
  PetscInt       *idx,i,start,ncols,nzA,nzB,*cmap,imark;
  IS             isrowb,iscolb;
  Mat            *bseq;

  PetscFunctionBegin;
  /* rows of B must be aligned with the columns of A for the product to make sense */
  if (A->cmap->rstart != B->rmap->rstart || A->cmap->rend != B->rmap->rend){
    SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Matrix local dimensions are incompatible, (%D, %D) != (%D,%D)",A->cmap->rstart,A->cmap->rend,B->rmap->rstart,B->rmap->rend);
  }
  ierr = PetscLogEventBegin(MAT_GetBrowsOfAcols,A,B,0,0);CHKERRQ(ierr);

  if (scall == MAT_INITIAL_MATRIX){
    /* build the ascending list of B-rows to fetch: nonzero columns of local A
       (off-diagonal ones via garray, plus the whole diagonal block) */
    start = A->cmap->rstart;
    cmap  = a->garray;
    nzA   = a->A->cmap->n;
    nzB   = a->B->cmap->n;
    ierr = PetscMalloc((nzA+nzB)*sizeof(PetscInt), &idx);CHKERRQ(ierr);
    ncols = 0;
    for (i=0; i<nzB; i++) {  /* row < local row index */
      if (cmap[i] < start) idx[ncols++] = cmap[i];
      else break;
    }
    imark = i;
    for (i=0; i<nzA; i++) idx[ncols++] = start + i;  /* local rows */
    for (i=imark; i<nzB; i++) idx[ncols++] = cmap[i]; /* row > local row index */
    ierr = ISCreateGeneral(PETSC_COMM_SELF,ncols,idx,PETSC_OWN_POINTER,&isrowb);CHKERRQ(ierr);
    /* NOTE(review): *brstart is set only on this branch; on MAT_REUSE_MATRIX the
       caller must retain the value from the initial call */
    *brstart = imark;
    ierr = ISCreateStride(PETSC_COMM_SELF,B->cmap->N,0,1,&iscolb);CHKERRQ(ierr);
  } else {
    if (!rowb || !colb) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_SUP,"IS rowb and colb must be provided for MAT_REUSE_MATRIX");
    isrowb = *rowb; iscolb = *colb;
    ierr = PetscMalloc(sizeof(Mat),&bseq);CHKERRQ(ierr);
    bseq[0] = *B_seq;
  }
  ierr = MatGetSubMatrices(B,1,&isrowb,&iscolb,scall,&bseq);CHKERRQ(ierr);
  *B_seq = bseq[0];
  ierr = PetscFree(bseq);CHKERRQ(ierr);
  /* hand the index sets back to the caller if requested, otherwise clean up */
  if (!rowb){
    ierr = ISDestroy(isrowb);CHKERRQ(ierr);
  } else {
    *rowb = isrowb;
  }
  if (!colb){
    ierr = ISDestroy(iscolb);CHKERRQ(ierr);
  } else {
    *colb = iscolb;
  }
  ierr = PetscLogEventEnd(MAT_GetBrowsOfAcols,A,B,0,0);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetBrowsOfAoCols"
/*@C
    MatGetBrowsOfAoCols - Creates a SeqAIJ matrix by taking rows of B that equal to nonzero columns
    of the OFF-DIAGONAL portion of local A

    Collective on Mat

   Input Parameters:
+    A,B - the matrices in mpiaij format
.
scall - either MAT_INITIAL_MATRIX or MAT_REUSE_MATRIX 475287025532SHong Zhang . startsj - starting point in B's sending and receiving j-arrays, saved for MAT_REUSE (or PETSC_NULL) 47531d79065fSBarry Smith . startsj_r - similar to startsj for receives 475487025532SHong Zhang - bufa_ptr - array for sending matrix values, saved for MAT_REUSE (or PETSC_NULL) 4755429d309bSHong Zhang 4756429d309bSHong Zhang Output Parameter: 475787025532SHong Zhang + B_oth - the sequential matrix generated 4758429d309bSHong Zhang 4759429d309bSHong Zhang Level: developer 4760429d309bSHong Zhang 4761429d309bSHong Zhang @*/ 47621d79065fSBarry Smith PetscErrorCode PETSCMAT_DLLEXPORT MatGetBrowsOfAoCols(Mat A,Mat B,MatReuse scall,PetscInt **startsj,PetscInt **startsj_r,MatScalar **bufa_ptr,Mat *B_oth) 4763429d309bSHong Zhang { 4764a6b2eed2SHong Zhang VecScatter_MPI_General *gen_to,*gen_from; 4765429d309bSHong Zhang PetscErrorCode ierr; 4766899cda47SBarry Smith Mat_MPIAIJ *a=(Mat_MPIAIJ*)A->data; 476787025532SHong Zhang Mat_SeqAIJ *b_oth; 4768a6b2eed2SHong Zhang VecScatter ctx=a->Mvctx; 47697adad957SLisandro Dalcin MPI_Comm comm=((PetscObject)ctx)->comm; 47707adad957SLisandro Dalcin PetscMPIInt *rprocs,*sprocs,tag=((PetscObject)ctx)->tag,rank; 4771d0f46423SBarry Smith PetscInt *rowlen,*bufj,*bufJ,ncols,aBn=a->B->cmap->n,row,*b_othi,*b_othj; 4772dd6ea824SBarry Smith PetscScalar *rvalues,*svalues; 4773dd6ea824SBarry Smith MatScalar *b_otha,*bufa,*bufA; 4774e42f35eeSHong Zhang PetscInt i,j,k,l,ll,nrecvs,nsends,nrows,*srow,*rstarts,*rstartsj = 0,*sstarts,*sstartsj,len; 4775910ba992SMatthew Knepley MPI_Request *rwaits = PETSC_NULL,*swaits = PETSC_NULL; 477687025532SHong Zhang MPI_Status *sstatus,rstatus; 4777aa5bb8c0SSatish Balay PetscMPIInt jj; 4778e42f35eeSHong Zhang PetscInt *cols,sbs,rbs; 4779ba8c8a56SBarry Smith PetscScalar *vals; 4780429d309bSHong Zhang 4781429d309bSHong Zhang PetscFunctionBegin; 4782d0f46423SBarry Smith if (A->cmap->rstart != B->rmap->rstart || A->cmap->rend != 
B->rmap->rend){ 4783e32f2f54SBarry Smith SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"Matrix local dimensions are incompatible, (%d, %d) != (%d,%d)",A->cmap->rstart,A->cmap->rend,B->rmap->rstart,B->rmap->rend); 4784429d309bSHong Zhang } 47854ebed01fSBarry Smith ierr = PetscLogEventBegin(MAT_GetBrowsOfAocols,A,B,0,0);CHKERRQ(ierr); 4786a6b2eed2SHong Zhang ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr); 4787a6b2eed2SHong Zhang 4788a6b2eed2SHong Zhang gen_to = (VecScatter_MPI_General*)ctx->todata; 4789a6b2eed2SHong Zhang gen_from = (VecScatter_MPI_General*)ctx->fromdata; 4790e42f35eeSHong Zhang rvalues = gen_from->values; /* holds the length of receiving row */ 4791e42f35eeSHong Zhang svalues = gen_to->values; /* holds the length of sending row */ 4792a6b2eed2SHong Zhang nrecvs = gen_from->n; 4793a6b2eed2SHong Zhang nsends = gen_to->n; 4794d7ee0231SBarry Smith 4795d7ee0231SBarry Smith ierr = PetscMalloc2(nrecvs,MPI_Request,&rwaits,nsends,MPI_Request,&swaits);CHKERRQ(ierr); 4796a6b2eed2SHong Zhang srow = gen_to->indices; /* local row index to be sent */ 4797a6b2eed2SHong Zhang sstarts = gen_to->starts; 4798a6b2eed2SHong Zhang sprocs = gen_to->procs; 4799a6b2eed2SHong Zhang sstatus = gen_to->sstatus; 4800e42f35eeSHong Zhang sbs = gen_to->bs; 4801e42f35eeSHong Zhang rstarts = gen_from->starts; 4802e42f35eeSHong Zhang rprocs = gen_from->procs; 4803e42f35eeSHong Zhang rbs = gen_from->bs; 4804429d309bSHong Zhang 4805dea91ad1SHong Zhang if (!startsj || !bufa_ptr) scall = MAT_INITIAL_MATRIX; 4806429d309bSHong Zhang if (scall == MAT_INITIAL_MATRIX){ 4807a6b2eed2SHong Zhang /* i-array */ 4808a6b2eed2SHong Zhang /*---------*/ 4809a6b2eed2SHong Zhang /* post receives */ 4810a6b2eed2SHong Zhang for (i=0; i<nrecvs; i++){ 4811e42f35eeSHong Zhang rowlen = (PetscInt*)rvalues + rstarts[i]*rbs; 4812e42f35eeSHong Zhang nrows = (rstarts[i+1]-rstarts[i])*rbs; /* num of indices to be received */ 481387025532SHong Zhang ierr = 
MPI_Irecv(rowlen,nrows,MPIU_INT,rprocs[i],tag,comm,rwaits+i);CHKERRQ(ierr); 4814429d309bSHong Zhang } 4815a6b2eed2SHong Zhang 4816a6b2eed2SHong Zhang /* pack the outgoing message */ 48171d79065fSBarry Smith ierr = PetscMalloc2(nsends+1,PetscInt,&sstartsj,nrecvs+1,PetscInt,&rstartsj);CHKERRQ(ierr); 4818a6b2eed2SHong Zhang sstartsj[0] = 0; rstartsj[0] = 0; 4819a6b2eed2SHong Zhang len = 0; /* total length of j or a array to be sent */ 4820a6b2eed2SHong Zhang k = 0; 4821a6b2eed2SHong Zhang for (i=0; i<nsends; i++){ 4822e42f35eeSHong Zhang rowlen = (PetscInt*)svalues + sstarts[i]*sbs; 4823e42f35eeSHong Zhang nrows = sstarts[i+1]-sstarts[i]; /* num of block rows */ 482487025532SHong Zhang for (j=0; j<nrows; j++) { 4825d0f46423SBarry Smith row = srow[k] + B->rmap->range[rank]; /* global row idx */ 4826e42f35eeSHong Zhang for (l=0; l<sbs; l++){ 4827e42f35eeSHong Zhang ierr = MatGetRow_MPIAIJ(B,row+l,&ncols,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr); /* rowlength */ 4828e42f35eeSHong Zhang rowlen[j*sbs+l] = ncols; 4829e42f35eeSHong Zhang len += ncols; 4830e42f35eeSHong Zhang ierr = MatRestoreRow_MPIAIJ(B,row+l,&ncols,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr); 4831e42f35eeSHong Zhang } 4832a6b2eed2SHong Zhang k++; 4833429d309bSHong Zhang } 4834e42f35eeSHong Zhang ierr = MPI_Isend(rowlen,nrows*sbs,MPIU_INT,sprocs[i],tag,comm,swaits+i);CHKERRQ(ierr); 4835dea91ad1SHong Zhang sstartsj[i+1] = len; /* starting point of (i+1)-th outgoing msg in bufj and bufa */ 4836429d309bSHong Zhang } 483787025532SHong Zhang /* recvs and sends of i-array are completed */ 483887025532SHong Zhang i = nrecvs; 483987025532SHong Zhang while (i--) { 4840aa5bb8c0SSatish Balay ierr = MPI_Waitany(nrecvs,rwaits,&jj,&rstatus);CHKERRQ(ierr); 484187025532SHong Zhang } 48420c468ba9SBarry Smith if (nsends) {ierr = MPI_Waitall(nsends,swaits,sstatus);CHKERRQ(ierr);} 4843e42f35eeSHong Zhang 4844a6b2eed2SHong Zhang /* allocate buffers for sending j and a arrays */ 4845a6b2eed2SHong Zhang ierr = 
PetscMalloc((len+1)*sizeof(PetscInt),&bufj);CHKERRQ(ierr); 4846a6b2eed2SHong Zhang ierr = PetscMalloc((len+1)*sizeof(PetscScalar),&bufa);CHKERRQ(ierr); 4847a6b2eed2SHong Zhang 484887025532SHong Zhang /* create i-array of B_oth */ 484987025532SHong Zhang ierr = PetscMalloc((aBn+2)*sizeof(PetscInt),&b_othi);CHKERRQ(ierr); 485087025532SHong Zhang b_othi[0] = 0; 4851a6b2eed2SHong Zhang len = 0; /* total length of j or a array to be received */ 4852a6b2eed2SHong Zhang k = 0; 4853a6b2eed2SHong Zhang for (i=0; i<nrecvs; i++){ 4854fd0ff01cSHong Zhang rowlen = (PetscInt*)rvalues + rstarts[i]*rbs; 4855e42f35eeSHong Zhang nrows = rbs*(rstarts[i+1]-rstarts[i]); /* num of rows to be recieved */ 485687025532SHong Zhang for (j=0; j<nrows; j++) { 485787025532SHong Zhang b_othi[k+1] = b_othi[k] + rowlen[j]; 4858a6b2eed2SHong Zhang len += rowlen[j]; k++; 4859a6b2eed2SHong Zhang } 4860dea91ad1SHong Zhang rstartsj[i+1] = len; /* starting point of (i+1)-th incoming msg in bufj and bufa */ 4861a6b2eed2SHong Zhang } 4862a6b2eed2SHong Zhang 486387025532SHong Zhang /* allocate space for j and a arrrays of B_oth */ 486487025532SHong Zhang ierr = PetscMalloc((b_othi[aBn]+1)*sizeof(PetscInt),&b_othj);CHKERRQ(ierr); 4865dd6ea824SBarry Smith ierr = PetscMalloc((b_othi[aBn]+1)*sizeof(MatScalar),&b_otha);CHKERRQ(ierr); 4866a6b2eed2SHong Zhang 486787025532SHong Zhang /* j-array */ 486887025532SHong Zhang /*---------*/ 4869a6b2eed2SHong Zhang /* post receives of j-array */ 4870a6b2eed2SHong Zhang for (i=0; i<nrecvs; i++){ 487187025532SHong Zhang nrows = rstartsj[i+1]-rstartsj[i]; /* length of the msg received */ 487287025532SHong Zhang ierr = MPI_Irecv(b_othj+rstartsj[i],nrows,MPIU_INT,rprocs[i],tag,comm,rwaits+i);CHKERRQ(ierr); 4873a6b2eed2SHong Zhang } 4874e42f35eeSHong Zhang 4875e42f35eeSHong Zhang /* pack the outgoing message j-array */ 4876a6b2eed2SHong Zhang k = 0; 4877a6b2eed2SHong Zhang for (i=0; i<nsends; i++){ 4878e42f35eeSHong Zhang nrows = sstarts[i+1]-sstarts[i]; /* num of block rows 
*/ 4879a6b2eed2SHong Zhang bufJ = bufj+sstartsj[i]; 488087025532SHong Zhang for (j=0; j<nrows; j++) { 4881d0f46423SBarry Smith row = srow[k++] + B->rmap->range[rank]; /* global row idx */ 4882e42f35eeSHong Zhang for (ll=0; ll<sbs; ll++){ 4883e42f35eeSHong Zhang ierr = MatGetRow_MPIAIJ(B,row+ll,&ncols,&cols,PETSC_NULL);CHKERRQ(ierr); 4884a6b2eed2SHong Zhang for (l=0; l<ncols; l++){ 4885a6b2eed2SHong Zhang *bufJ++ = cols[l]; 488687025532SHong Zhang } 4887e42f35eeSHong Zhang ierr = MatRestoreRow_MPIAIJ(B,row+ll,&ncols,&cols,PETSC_NULL);CHKERRQ(ierr); 4888e42f35eeSHong Zhang } 488987025532SHong Zhang } 489087025532SHong Zhang ierr = MPI_Isend(bufj+sstartsj[i],sstartsj[i+1]-sstartsj[i],MPIU_INT,sprocs[i],tag,comm,swaits+i);CHKERRQ(ierr); 489187025532SHong Zhang } 489287025532SHong Zhang 489387025532SHong Zhang /* recvs and sends of j-array are completed */ 489487025532SHong Zhang i = nrecvs; 489587025532SHong Zhang while (i--) { 4896aa5bb8c0SSatish Balay ierr = MPI_Waitany(nrecvs,rwaits,&jj,&rstatus);CHKERRQ(ierr); 489787025532SHong Zhang } 48980c468ba9SBarry Smith if (nsends) {ierr = MPI_Waitall(nsends,swaits,sstatus);CHKERRQ(ierr);} 489987025532SHong Zhang } else if (scall == MAT_REUSE_MATRIX){ 490087025532SHong Zhang sstartsj = *startsj; 49011d79065fSBarry Smith rstartsj = *startsj_r; 490287025532SHong Zhang bufa = *bufa_ptr; 490387025532SHong Zhang b_oth = (Mat_SeqAIJ*)(*B_oth)->data; 490487025532SHong Zhang b_otha = b_oth->a; 490587025532SHong Zhang } else { 4906e32f2f54SBarry Smith SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE, "Matrix P does not posses an object container"); 490787025532SHong Zhang } 490887025532SHong Zhang 490987025532SHong Zhang /* a-array */ 491087025532SHong Zhang /*---------*/ 491187025532SHong Zhang /* post receives of a-array */ 491287025532SHong Zhang for (i=0; i<nrecvs; i++){ 491387025532SHong Zhang nrows = rstartsj[i+1]-rstartsj[i]; /* length of the msg received */ 491487025532SHong Zhang ierr = 
MPI_Irecv(b_otha+rstartsj[i],nrows,MPIU_SCALAR,rprocs[i],tag,comm,rwaits+i);CHKERRQ(ierr); 491587025532SHong Zhang } 4916e42f35eeSHong Zhang 4917e42f35eeSHong Zhang /* pack the outgoing message a-array */ 491887025532SHong Zhang k = 0; 491987025532SHong Zhang for (i=0; i<nsends; i++){ 4920e42f35eeSHong Zhang nrows = sstarts[i+1]-sstarts[i]; /* num of block rows */ 492187025532SHong Zhang bufA = bufa+sstartsj[i]; 492287025532SHong Zhang for (j=0; j<nrows; j++) { 4923d0f46423SBarry Smith row = srow[k++] + B->rmap->range[rank]; /* global row idx */ 4924e42f35eeSHong Zhang for (ll=0; ll<sbs; ll++){ 4925e42f35eeSHong Zhang ierr = MatGetRow_MPIAIJ(B,row+ll,&ncols,PETSC_NULL,&vals);CHKERRQ(ierr); 492687025532SHong Zhang for (l=0; l<ncols; l++){ 4927a6b2eed2SHong Zhang *bufA++ = vals[l]; 4928a6b2eed2SHong Zhang } 4929e42f35eeSHong Zhang ierr = MatRestoreRow_MPIAIJ(B,row+ll,&ncols,PETSC_NULL,&vals);CHKERRQ(ierr); 4930e42f35eeSHong Zhang } 4931a6b2eed2SHong Zhang } 493287025532SHong Zhang ierr = MPI_Isend(bufa+sstartsj[i],sstartsj[i+1]-sstartsj[i],MPIU_SCALAR,sprocs[i],tag,comm,swaits+i);CHKERRQ(ierr); 4933a6b2eed2SHong Zhang } 493487025532SHong Zhang /* recvs and sends of a-array are completed */ 493587025532SHong Zhang i = nrecvs; 493687025532SHong Zhang while (i--) { 4937aa5bb8c0SSatish Balay ierr = MPI_Waitany(nrecvs,rwaits,&jj,&rstatus);CHKERRQ(ierr); 493887025532SHong Zhang } 49390c468ba9SBarry Smith if (nsends) {ierr = MPI_Waitall(nsends,swaits,sstatus);CHKERRQ(ierr);} 4940d7ee0231SBarry Smith ierr = PetscFree2(rwaits,swaits);CHKERRQ(ierr); 4941a6b2eed2SHong Zhang 494287025532SHong Zhang if (scall == MAT_INITIAL_MATRIX){ 4943a6b2eed2SHong Zhang /* put together the new matrix */ 4944d0f46423SBarry Smith ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,aBn,B->cmap->N,b_othi,b_othj,b_otha,B_oth);CHKERRQ(ierr); 4945a6b2eed2SHong Zhang 4946a6b2eed2SHong Zhang /* MatCreateSeqAIJWithArrays flags matrix so PETSc doesn't free the user's arrays. 
*/ 4947a6b2eed2SHong Zhang /* Since these are PETSc arrays, change flags to free them as necessary. */ 494887025532SHong Zhang b_oth = (Mat_SeqAIJ *)(*B_oth)->data; 4949e6b907acSBarry Smith b_oth->free_a = PETSC_TRUE; 4950e6b907acSBarry Smith b_oth->free_ij = PETSC_TRUE; 495187025532SHong Zhang b_oth->nonew = 0; 4952a6b2eed2SHong Zhang 4953a6b2eed2SHong Zhang ierr = PetscFree(bufj);CHKERRQ(ierr); 4954dea91ad1SHong Zhang if (!startsj || !bufa_ptr){ 49551d79065fSBarry Smith ierr = PetscFree2(sstartsj,rstartsj);CHKERRQ(ierr); 4956dea91ad1SHong Zhang ierr = PetscFree(bufa_ptr);CHKERRQ(ierr); 4957dea91ad1SHong Zhang } else { 495887025532SHong Zhang *startsj = sstartsj; 49591d79065fSBarry Smith *startsj_r = rstartsj; 496087025532SHong Zhang *bufa_ptr = bufa; 496187025532SHong Zhang } 4962dea91ad1SHong Zhang } 49634ebed01fSBarry Smith ierr = PetscLogEventEnd(MAT_GetBrowsOfAocols,A,B,0,0);CHKERRQ(ierr); 4964429d309bSHong Zhang PetscFunctionReturn(0); 4965429d309bSHong Zhang } 4966ccd8e176SBarry Smith 496743eb5e2fSMatthew Knepley #undef __FUNCT__ 496843eb5e2fSMatthew Knepley #define __FUNCT__ "MatGetCommunicationStructs" 496943eb5e2fSMatthew Knepley /*@C 497043eb5e2fSMatthew Knepley MatGetCommunicationStructs - Provides access to the communication structures used in matrix-vector multiplication. 497143eb5e2fSMatthew Knepley 497243eb5e2fSMatthew Knepley Not Collective 497343eb5e2fSMatthew Knepley 497443eb5e2fSMatthew Knepley Input Parameters: 497543eb5e2fSMatthew Knepley . A - The matrix in mpiaij format 497643eb5e2fSMatthew Knepley 497743eb5e2fSMatthew Knepley Output Parameter: 497843eb5e2fSMatthew Knepley + lvec - The local vector holding off-process values from the argument to a matrix-vector product 497943eb5e2fSMatthew Knepley . 
colmap - A map from global column index to local index into lvec 498043eb5e2fSMatthew Knepley - multScatter - A scatter from the argument of a matrix-vector product to lvec 498143eb5e2fSMatthew Knepley 498243eb5e2fSMatthew Knepley Level: developer 498343eb5e2fSMatthew Knepley 498443eb5e2fSMatthew Knepley @*/ 498543eb5e2fSMatthew Knepley #if defined (PETSC_USE_CTABLE) 498643eb5e2fSMatthew Knepley PetscErrorCode PETSCMAT_DLLEXPORT MatGetCommunicationStructs(Mat A, Vec *lvec, PetscTable *colmap, VecScatter *multScatter) 498743eb5e2fSMatthew Knepley #else 498843eb5e2fSMatthew Knepley PetscErrorCode PETSCMAT_DLLEXPORT MatGetCommunicationStructs(Mat A, Vec *lvec, PetscInt *colmap[], VecScatter *multScatter) 498943eb5e2fSMatthew Knepley #endif 499043eb5e2fSMatthew Knepley { 499143eb5e2fSMatthew Knepley Mat_MPIAIJ *a; 499243eb5e2fSMatthew Knepley 499343eb5e2fSMatthew Knepley PetscFunctionBegin; 49940700a824SBarry Smith PetscValidHeaderSpecific(A, MAT_CLASSID, 1); 4995e414b56bSJed Brown PetscValidPointer(lvec, 2); 4996e414b56bSJed Brown PetscValidPointer(colmap, 3); 4997e414b56bSJed Brown PetscValidPointer(multScatter, 4); 499843eb5e2fSMatthew Knepley a = (Mat_MPIAIJ *) A->data; 499943eb5e2fSMatthew Knepley if (lvec) *lvec = a->lvec; 500043eb5e2fSMatthew Knepley if (colmap) *colmap = a->colmap; 500143eb5e2fSMatthew Knepley if (multScatter) *multScatter = a->Mvctx; 500243eb5e2fSMatthew Knepley PetscFunctionReturn(0); 500343eb5e2fSMatthew Knepley } 500443eb5e2fSMatthew Knepley 500517667f90SBarry Smith EXTERN_C_BEGIN 50065a11e1b2SBarry Smith extern PetscErrorCode PETSCMAT_DLLEXPORT MatConvert_MPIAIJ_MPIAIJCRL(Mat,const MatType,MatReuse,Mat*); 50075a11e1b2SBarry Smith extern PetscErrorCode PETSCMAT_DLLEXPORT MatConvert_MPIAIJ_MPIAIJPERM(Mat,const MatType,MatReuse,Mat*); 5008c4688eafSJed Brown extern PetscErrorCode PETSCMAT_DLLEXPORT MatConvert_MPIAIJ_MPISBAIJ(Mat,const MatType,MatReuse,Mat*); 500917667f90SBarry Smith EXTERN_C_END 501017667f90SBarry Smith 5011fc4dec0aSBarry 
Smith #undef __FUNCT__ 5012fc4dec0aSBarry Smith #define __FUNCT__ "MatMatMultNumeric_MPIDense_MPIAIJ" 5013fc4dec0aSBarry Smith /* 5014fc4dec0aSBarry Smith Computes (B'*A')' since computing B*A directly is untenable 5015fc4dec0aSBarry Smith 5016fc4dec0aSBarry Smith n p p 5017fc4dec0aSBarry Smith ( ) ( ) ( ) 5018fc4dec0aSBarry Smith m ( A ) * n ( B ) = m ( C ) 5019fc4dec0aSBarry Smith ( ) ( ) ( ) 5020fc4dec0aSBarry Smith 5021fc4dec0aSBarry Smith */ 5022fc4dec0aSBarry Smith PetscErrorCode MatMatMultNumeric_MPIDense_MPIAIJ(Mat A,Mat B,Mat C) 5023fc4dec0aSBarry Smith { 5024fc4dec0aSBarry Smith PetscErrorCode ierr; 5025fc4dec0aSBarry Smith Mat At,Bt,Ct; 5026fc4dec0aSBarry Smith 5027fc4dec0aSBarry Smith PetscFunctionBegin; 5028fc4dec0aSBarry Smith ierr = MatTranspose(A,MAT_INITIAL_MATRIX,&At);CHKERRQ(ierr); 5029fc4dec0aSBarry Smith ierr = MatTranspose(B,MAT_INITIAL_MATRIX,&Bt);CHKERRQ(ierr); 5030fc4dec0aSBarry Smith ierr = MatMatMult(Bt,At,MAT_INITIAL_MATRIX,1.0,&Ct);CHKERRQ(ierr); 5031fc4dec0aSBarry Smith ierr = MatDestroy(At);CHKERRQ(ierr); 5032fc4dec0aSBarry Smith ierr = MatDestroy(Bt);CHKERRQ(ierr); 5033fc4dec0aSBarry Smith ierr = MatTranspose(Ct,MAT_REUSE_MATRIX,&C);CHKERRQ(ierr); 5034e5e4356aSBarry Smith ierr = MatDestroy(Ct);CHKERRQ(ierr); 5035fc4dec0aSBarry Smith PetscFunctionReturn(0); 5036fc4dec0aSBarry Smith } 5037fc4dec0aSBarry Smith 5038fc4dec0aSBarry Smith #undef __FUNCT__ 5039fc4dec0aSBarry Smith #define __FUNCT__ "MatMatMultSymbolic_MPIDense_MPIAIJ" 5040fc4dec0aSBarry Smith PetscErrorCode MatMatMultSymbolic_MPIDense_MPIAIJ(Mat A,Mat B,PetscReal fill,Mat *C) 5041fc4dec0aSBarry Smith { 5042fc4dec0aSBarry Smith PetscErrorCode ierr; 5043d0f46423SBarry Smith PetscInt m=A->rmap->n,n=B->cmap->n; 5044fc4dec0aSBarry Smith Mat Cmat; 5045fc4dec0aSBarry Smith 5046fc4dec0aSBarry Smith PetscFunctionBegin; 5047e32f2f54SBarry Smith if (A->cmap->n != B->rmap->n) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_SIZ,"A->cmap->n %d != B->rmap->n %d\n",A->cmap->n,B->rmap->n); 
504839804f7cSBarry Smith ierr = MatCreate(((PetscObject)A)->comm,&Cmat);CHKERRQ(ierr); 5049fc4dec0aSBarry Smith ierr = MatSetSizes(Cmat,m,n,PETSC_DETERMINE,PETSC_DETERMINE);CHKERRQ(ierr); 5050fc4dec0aSBarry Smith ierr = MatSetType(Cmat,MATMPIDENSE);CHKERRQ(ierr); 5051fc4dec0aSBarry Smith ierr = MatMPIDenseSetPreallocation(Cmat,PETSC_NULL);CHKERRQ(ierr); 505238556019SBarry Smith ierr = MatAssemblyBegin(Cmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 505338556019SBarry Smith ierr = MatAssemblyEnd(Cmat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 5054fc4dec0aSBarry Smith *C = Cmat; 5055fc4dec0aSBarry Smith PetscFunctionReturn(0); 5056fc4dec0aSBarry Smith } 5057fc4dec0aSBarry Smith 5058fc4dec0aSBarry Smith /* ----------------------------------------------------------------*/ 5059fc4dec0aSBarry Smith #undef __FUNCT__ 5060fc4dec0aSBarry Smith #define __FUNCT__ "MatMatMult_MPIDense_MPIAIJ" 5061fc4dec0aSBarry Smith PetscErrorCode MatMatMult_MPIDense_MPIAIJ(Mat A,Mat B,MatReuse scall,PetscReal fill,Mat *C) 5062fc4dec0aSBarry Smith { 5063fc4dec0aSBarry Smith PetscErrorCode ierr; 5064fc4dec0aSBarry Smith 5065fc4dec0aSBarry Smith PetscFunctionBegin; 5066fc4dec0aSBarry Smith if (scall == MAT_INITIAL_MATRIX){ 5067fc4dec0aSBarry Smith ierr = MatMatMultSymbolic_MPIDense_MPIAIJ(A,B,fill,C);CHKERRQ(ierr); 5068fc4dec0aSBarry Smith } 5069fc4dec0aSBarry Smith ierr = MatMatMultNumeric_MPIDense_MPIAIJ(A,B,*C);CHKERRQ(ierr); 5070fc4dec0aSBarry Smith PetscFunctionReturn(0); 5071fc4dec0aSBarry Smith } 5072fc4dec0aSBarry Smith 50735c9eb25fSBarry Smith EXTERN_C_BEGIN 5074611f576cSBarry Smith #if defined(PETSC_HAVE_MUMPS) 5075bccb9932SShri Abhyankar extern PetscErrorCode MatGetFactor_aij_mumps(Mat,MatFactorType,Mat*); 5076611f576cSBarry Smith #endif 50773bf14a46SMatthew Knepley #if defined(PETSC_HAVE_PASTIX) 50783bf14a46SMatthew Knepley extern PetscErrorCode MatGetFactor_mpiaij_pastix(Mat,MatFactorType,Mat*); 50793bf14a46SMatthew Knepley #endif 5080611f576cSBarry Smith #if defined(PETSC_HAVE_SUPERLU_DIST) 
extern PetscErrorCode MatGetFactor_mpiaij_superlu_dist(Mat,MatFactorType,Mat*);
#endif
#if defined(PETSC_HAVE_SPOOLES)
extern PetscErrorCode MatGetFactor_mpiaij_spooles(Mat,MatFactorType,Mat*);
#endif
EXTERN_C_END

/*MC
   MATMPIAIJ - MATMPIAIJ = "mpiaij" - A matrix type to be used for parallel sparse matrices.

   Options Database Keys:
. -mat_type mpiaij - sets the matrix type to "mpiaij" during a call to MatSetFromOptions()

  Level: beginner

.seealso: MatCreateMPIAIJ()
M*/

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatCreate_MPIAIJ"
/*
   Constructor for the MATMPIAIJ type: allocates the Mat_MPIAIJ context,
   installs the function table, and registers the composed operations
   (factorizations, conversions, matrix-matrix products) on the object.
   Called by MatSetType()/MatCreate(); not by users directly.
*/
PetscErrorCode PETSCMAT_DLLEXPORT MatCreate_MPIAIJ(Mat B)
{
  Mat_MPIAIJ     *b;
  PetscErrorCode ierr;
  PetscMPIInt    size;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(((PetscObject)B)->comm,&size);CHKERRQ(ierr);

  /* allocate the type-specific context and install the MPIAIJ method table */
  ierr = PetscNewLog(B,Mat_MPIAIJ,&b);CHKERRQ(ierr);
  B->data = (void*)b;
  ierr = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
  B->rmap->bs = 1;  /* scalar (non-blocked) AIJ format */
  B->assembled = PETSC_FALSE;
  B->mapping = 0;

  B->insertmode = NOT_SET_VALUES;
  b->size = size;
  ierr = MPI_Comm_rank(((PetscObject)B)->comm,&b->rank);CHKERRQ(ierr);

  /* build cache for off array entries formed */
  ierr = MatStashCreate_Private(((PetscObject)B)->comm,1,&B->stash);CHKERRQ(ierr);
  b->donotstash = PETSC_FALSE;
  b->colmap = 0;
  b->garray = 0;
  b->roworiented = PETSC_TRUE;

  /* stuff used for matrix vector multiply */
  b->lvec = PETSC_NULL;
  b->Mvctx = PETSC_NULL;

  /* stuff for MatGetRow() */
  b->rowindices = 0;
  b->rowvalues = 0;
  b->getrowactive = PETSC_FALSE;

  /* register external direct-solver factorizations available at configure time */
#if defined(PETSC_HAVE_SPOOLES)
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatGetFactor_spooles_C",
                                           "MatGetFactor_mpiaij_spooles",
                                           MatGetFactor_mpiaij_spooles);CHKERRQ(ierr);
#endif
#if defined(PETSC_HAVE_MUMPS)
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatGetFactor_mumps_C",
                                           "MatGetFactor_aij_mumps",
                                           MatGetFactor_aij_mumps);CHKERRQ(ierr);
#endif
#if defined(PETSC_HAVE_PASTIX)
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatGetFactor_pastix_C",
                                           "MatGetFactor_mpiaij_pastix",
                                           MatGetFactor_mpiaij_pastix);CHKERRQ(ierr);
#endif
#if defined(PETSC_HAVE_SUPERLU_DIST)
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatGetFactor_superlu_dist_C",
                                           "MatGetFactor_mpiaij_superlu_dist",
                                           MatGetFactor_mpiaij_superlu_dist);CHKERRQ(ierr);
#endif
  /* register type-specific operations looked up by name via PetscObjectQueryFunction() */
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatStoreValues_C",
                                           "MatStoreValues_MPIAIJ",
                                           MatStoreValues_MPIAIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatRetrieveValues_C",
                                           "MatRetrieveValues_MPIAIJ",
                                           MatRetrieveValues_MPIAIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatGetDiagonalBlock_C",
                                           "MatGetDiagonalBlock_MPIAIJ",
                                           MatGetDiagonalBlock_MPIAIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatIsTranspose_C",
                                           "MatIsTranspose_MPIAIJ",
                                           MatIsTranspose_MPIAIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAIJSetPreallocation_C",
                                           "MatMPIAIJSetPreallocation_MPIAIJ",
                                           MatMPIAIJSetPreallocation_MPIAIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAIJSetPreallocationCSR_C",
                                           "MatMPIAIJSetPreallocationCSR_MPIAIJ",
                                           MatMPIAIJSetPreallocationCSR_MPIAIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatDiagonalScaleLocal_C",
                                           "MatDiagonalScaleLocal_MPIAIJ",
                                           MatDiagonalScaleLocal_MPIAIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatConvert_mpiaij_mpiaijperm_C",
                                           "MatConvert_MPIAIJ_MPIAIJPERM",
                                           MatConvert_MPIAIJ_MPIAIJPERM);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatConvert_mpiaij_mpiaijcrl_C",
                                           "MatConvert_MPIAIJ_MPIAIJCRL",
                                           MatConvert_MPIAIJ_MPIAIJCRL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatConvert_mpiaij_mpisbaij_C",
                                           "MatConvert_MPIAIJ_MPISBAIJ",
                                           MatConvert_MPIAIJ_MPISBAIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMatMult_mpidense_mpiaij_C",
                                           "MatMatMult_MPIDense_MPIAIJ",
                                           MatMatMult_MPIDense_MPIAIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMatMultSymbolic_mpidense_mpiaij_C",
                                           "MatMatMultSymbolic_MPIDense_MPIAIJ",
                                           MatMatMultSymbolic_MPIDense_MPIAIJ);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMatMultNumeric_mpidense_mpiaij_C",
                                           "MatMatMultNumeric_MPIDense_MPIAIJ",
                                           MatMatMultNumeric_MPIDense_MPIAIJ);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)B,MATMPIAIJ);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "MatCreateMPIAIJWithSplitArrays"
/*@
   MatCreateMPIAIJWithSplitArrays - creates a MPI AIJ matrix using arrays that contain the "diagonal"
   and "off-diagonal" part of the matrix in CSR format.
520703bfb495SBarry Smith 520803bfb495SBarry Smith Collective on MPI_Comm 520903bfb495SBarry Smith 521003bfb495SBarry Smith Input Parameters: 521103bfb495SBarry Smith + comm - MPI communicator 521203bfb495SBarry Smith . m - number of local rows (Cannot be PETSC_DECIDE) 521303bfb495SBarry Smith . n - This value should be the same as the local size used in creating the 521403bfb495SBarry Smith x vector for the matrix-vector product y = Ax. (or PETSC_DECIDE to have 521503bfb495SBarry Smith calculated if N is given) For square matrices n is almost always m. 521603bfb495SBarry Smith . M - number of global rows (or PETSC_DETERMINE to have calculated if m is given) 521703bfb495SBarry Smith . N - number of global columns (or PETSC_DETERMINE to have calculated if n is given) 521803bfb495SBarry Smith . i - row indices for "diagonal" portion of matrix 521903bfb495SBarry Smith . j - column indices 522003bfb495SBarry Smith . a - matrix values 522103bfb495SBarry Smith . oi - row indices for "off-diagonal" portion of matrix 522203bfb495SBarry Smith . oj - column indices 522303bfb495SBarry Smith - oa - matrix values 522403bfb495SBarry Smith 522503bfb495SBarry Smith Output Parameter: 522603bfb495SBarry Smith . mat - the matrix 522703bfb495SBarry Smith 522803bfb495SBarry Smith Level: advanced 522903bfb495SBarry Smith 523003bfb495SBarry Smith Notes: 523103bfb495SBarry Smith The i, j, and a arrays ARE NOT copied by this routine into the internal format used by PETSc. 523203bfb495SBarry Smith 523303bfb495SBarry Smith The i and j indices are 0 based 523403bfb495SBarry Smith 523503bfb495SBarry Smith See MatCreateMPIAIJ() for the definition of "diagonal" and "off-diagonal" portion of the matrix 523603bfb495SBarry Smith 52377b55108eSBarry Smith This sets local rows and cannot be used to set off-processor values. 52387b55108eSBarry Smith 52397b55108eSBarry Smith You cannot later use MatSetValues() to change values in this matrix. 
524003bfb495SBarry Smith 524103bfb495SBarry Smith .keywords: matrix, aij, compressed row, sparse, parallel 524203bfb495SBarry Smith 524303bfb495SBarry Smith .seealso: MatCreate(), MatCreateSeqAIJ(), MatSetValues(), MatMPIAIJSetPreallocation(), MatMPIAIJSetPreallocationCSR(), 52448d7a6e47SBarry Smith MPIAIJ, MatCreateMPIAIJ(), MatCreateMPIAIJWithArrays() 524503bfb495SBarry Smith @*/ 52468d7a6e47SBarry Smith PetscErrorCode PETSCMAT_DLLEXPORT MatCreateMPIAIJWithSplitArrays(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt M,PetscInt N,PetscInt i[],PetscInt j[],PetscScalar a[], 524703bfb495SBarry Smith PetscInt oi[], PetscInt oj[],PetscScalar oa[],Mat *mat) 524803bfb495SBarry Smith { 524903bfb495SBarry Smith PetscErrorCode ierr; 525003bfb495SBarry Smith Mat_MPIAIJ *maij; 525103bfb495SBarry Smith 525203bfb495SBarry Smith PetscFunctionBegin; 5253e32f2f54SBarry Smith if (m < 0) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"local number of rows (m) cannot be PETSC_DECIDE, or negative"); 525403bfb495SBarry Smith if (i[0]) { 5255e32f2f54SBarry Smith SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i (row indices) must start with 0"); 525603bfb495SBarry Smith } 525703bfb495SBarry Smith if (oi[0]) { 5258e32f2f54SBarry Smith SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"oi (row indices) must start with 0"); 525903bfb495SBarry Smith } 526003bfb495SBarry Smith ierr = MatCreate(comm,mat);CHKERRQ(ierr); 526103bfb495SBarry Smith ierr = MatSetSizes(*mat,m,n,M,N);CHKERRQ(ierr); 526203bfb495SBarry Smith ierr = MatSetType(*mat,MATMPIAIJ);CHKERRQ(ierr); 526303bfb495SBarry Smith maij = (Mat_MPIAIJ*) (*mat)->data; 52648d7a6e47SBarry Smith maij->donotstash = PETSC_TRUE; 52658d7a6e47SBarry Smith (*mat)->preallocated = PETSC_TRUE; 526603bfb495SBarry Smith 526726283091SBarry Smith ierr = PetscLayoutSetBlockSize((*mat)->rmap,1);CHKERRQ(ierr); 526826283091SBarry Smith ierr = PetscLayoutSetBlockSize((*mat)->cmap,1);CHKERRQ(ierr); 526926283091SBarry Smith ierr = 
PetscLayoutSetUp((*mat)->rmap);CHKERRQ(ierr); 527026283091SBarry Smith ierr = PetscLayoutSetUp((*mat)->cmap);CHKERRQ(ierr); 527103bfb495SBarry Smith 527203bfb495SBarry Smith ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,m,n,i,j,a,&maij->A);CHKERRQ(ierr); 5273d0f46423SBarry Smith ierr = MatCreateSeqAIJWithArrays(PETSC_COMM_SELF,m,(*mat)->cmap->N,oi,oj,oa,&maij->B);CHKERRQ(ierr); 527403bfb495SBarry Smith 52758d7a6e47SBarry Smith ierr = MatAssemblyBegin(maij->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 52768d7a6e47SBarry Smith ierr = MatAssemblyEnd(maij->A,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 52778d7a6e47SBarry Smith ierr = MatAssemblyBegin(maij->B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 52788d7a6e47SBarry Smith ierr = MatAssemblyEnd(maij->B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 52798d7a6e47SBarry Smith 528003bfb495SBarry Smith ierr = MatAssemblyBegin(*mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 528103bfb495SBarry Smith ierr = MatAssemblyEnd(*mat,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr); 528203bfb495SBarry Smith PetscFunctionReturn(0); 528303bfb495SBarry Smith } 528403bfb495SBarry Smith 528581824310SBarry Smith /* 528681824310SBarry Smith Special version for direct calls from Fortran 528781824310SBarry Smith */ 528881824310SBarry Smith #if defined(PETSC_HAVE_FORTRAN_CAPS) 528981824310SBarry Smith #define matsetvaluesmpiaij_ MATSETVALUESMPIAIJ 529081824310SBarry Smith #elif !defined(PETSC_HAVE_FORTRAN_UNDERSCORE) 529181824310SBarry Smith #define matsetvaluesmpiaij_ matsetvaluesmpiaij 529281824310SBarry Smith #endif 529381824310SBarry Smith 529481824310SBarry Smith /* Change these macros so can be used in void function */ 529581824310SBarry Smith #undef CHKERRQ 5296e32f2f54SBarry Smith #define CHKERRQ(ierr) CHKERRABORT(PETSC_COMM_WORLD,ierr) 529781824310SBarry Smith #undef SETERRQ2 5298e32f2f54SBarry Smith #define SETERRQ2(comm,ierr,b,c,d) CHKERRABORT(comm,ierr) 529981824310SBarry Smith #undef SETERRQ 5300e32f2f54SBarry Smith #define SETERRQ(c,ierr,b) CHKERRABORT(c,ierr) 530181824310SBarry Smith 
530281824310SBarry Smith EXTERN_C_BEGIN 530381824310SBarry Smith #undef __FUNCT__ 530481824310SBarry Smith #define __FUNCT__ "matsetvaluesmpiaij_" 53051f6cc5b2SSatish Balay void PETSC_STDCALL matsetvaluesmpiaij_(Mat *mmat,PetscInt *mm,const PetscInt im[],PetscInt *mn,const PetscInt in[],const PetscScalar v[],InsertMode *maddv,PetscErrorCode *_ierr) 530681824310SBarry Smith { 530781824310SBarry Smith Mat mat = *mmat; 530881824310SBarry Smith PetscInt m = *mm, n = *mn; 530981824310SBarry Smith InsertMode addv = *maddv; 531081824310SBarry Smith Mat_MPIAIJ *aij = (Mat_MPIAIJ*)mat->data; 531181824310SBarry Smith PetscScalar value; 531281824310SBarry Smith PetscErrorCode ierr; 5313899cda47SBarry Smith 5314d9e2c085SLisandro Dalcin ierr = MatPreallocated(mat);CHKERRQ(ierr); 531581824310SBarry Smith if (mat->insertmode == NOT_SET_VALUES) { 531681824310SBarry Smith mat->insertmode = addv; 531781824310SBarry Smith } 531881824310SBarry Smith #if defined(PETSC_USE_DEBUG) 531981824310SBarry Smith else if (mat->insertmode != addv) { 5320e32f2f54SBarry Smith SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Cannot mix add values and insert values"); 532181824310SBarry Smith } 532281824310SBarry Smith #endif 532381824310SBarry Smith { 5324d0f46423SBarry Smith PetscInt i,j,rstart = mat->rmap->rstart,rend = mat->rmap->rend; 5325d0f46423SBarry Smith PetscInt cstart = mat->cmap->rstart,cend = mat->cmap->rend,row,col; 5326ace3abfcSBarry Smith PetscBool roworiented = aij->roworiented; 532781824310SBarry Smith 532881824310SBarry Smith /* Some Variables required in the macro */ 532981824310SBarry Smith Mat A = aij->A; 533081824310SBarry Smith Mat_SeqAIJ *a = (Mat_SeqAIJ*)A->data; 533181824310SBarry Smith PetscInt *aimax = a->imax,*ai = a->i,*ailen = a->ilen,*aj = a->j; 5332dd6ea824SBarry Smith MatScalar *aa = a->a; 5333ace3abfcSBarry Smith PetscBool ignorezeroentries = (((a->ignorezeroentries)&&(addv==ADD_VALUES))?PETSC_TRUE:PETSC_FALSE); 533481824310SBarry Smith Mat B = aij->B; 
533581824310SBarry Smith Mat_SeqAIJ *b = (Mat_SeqAIJ*)B->data; 5336d0f46423SBarry Smith PetscInt *bimax = b->imax,*bi = b->i,*bilen = b->ilen,*bj = b->j,bm = aij->B->rmap->n,am = aij->A->rmap->n; 5337dd6ea824SBarry Smith MatScalar *ba = b->a; 533881824310SBarry Smith 533981824310SBarry Smith PetscInt *rp1,*rp2,ii,nrow1,nrow2,_i,rmax1,rmax2,N,low1,high1,low2,high2,t,lastcol1,lastcol2; 534081824310SBarry Smith PetscInt nonew = a->nonew; 5341dd6ea824SBarry Smith MatScalar *ap1,*ap2; 534281824310SBarry Smith 534381824310SBarry Smith PetscFunctionBegin; 534481824310SBarry Smith for (i=0; i<m; i++) { 534581824310SBarry Smith if (im[i] < 0) continue; 534681824310SBarry Smith #if defined(PETSC_USE_DEBUG) 5347e32f2f54SBarry Smith if (im[i] >= mat->rmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row too large: row %D max %D",im[i],mat->rmap->N-1); 534881824310SBarry Smith #endif 534981824310SBarry Smith if (im[i] >= rstart && im[i] < rend) { 535081824310SBarry Smith row = im[i] - rstart; 535181824310SBarry Smith lastcol1 = -1; 535281824310SBarry Smith rp1 = aj + ai[row]; 535381824310SBarry Smith ap1 = aa + ai[row]; 535481824310SBarry Smith rmax1 = aimax[row]; 535581824310SBarry Smith nrow1 = ailen[row]; 535681824310SBarry Smith low1 = 0; 535781824310SBarry Smith high1 = nrow1; 535881824310SBarry Smith lastcol2 = -1; 535981824310SBarry Smith rp2 = bj + bi[row]; 536081824310SBarry Smith ap2 = ba + bi[row]; 536181824310SBarry Smith rmax2 = bimax[row]; 536281824310SBarry Smith nrow2 = bilen[row]; 536381824310SBarry Smith low2 = 0; 536481824310SBarry Smith high2 = nrow2; 536581824310SBarry Smith 536681824310SBarry Smith for (j=0; j<n; j++) { 536781824310SBarry Smith if (roworiented) value = v[i*n+j]; else value = v[i+j*m]; 536881824310SBarry Smith if (ignorezeroentries && value == 0.0 && (addv == ADD_VALUES)) continue; 536981824310SBarry Smith if (in[j] >= cstart && in[j] < cend){ 537081824310SBarry Smith col = in[j] - cstart; 537181824310SBarry Smith 
MatSetValues_SeqAIJ_A_Private(row,col,value,addv); 537281824310SBarry Smith } else if (in[j] < 0) continue; 537381824310SBarry Smith #if defined(PETSC_USE_DEBUG) 5374cb9801acSJed Brown else if (in[j] >= mat->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column too large: col %D max %D",in[j],mat->cmap->N-1); 537581824310SBarry Smith #endif 537681824310SBarry Smith else { 537781824310SBarry Smith if (mat->was_assembled) { 537881824310SBarry Smith if (!aij->colmap) { 537981824310SBarry Smith ierr = CreateColmap_MPIAIJ_Private(mat);CHKERRQ(ierr); 538081824310SBarry Smith } 538181824310SBarry Smith #if defined (PETSC_USE_CTABLE) 538281824310SBarry Smith ierr = PetscTableFind(aij->colmap,in[j]+1,&col);CHKERRQ(ierr); 538381824310SBarry Smith col--; 538481824310SBarry Smith #else 538581824310SBarry Smith col = aij->colmap[in[j]] - 1; 538681824310SBarry Smith #endif 538781824310SBarry Smith if (col < 0 && !((Mat_SeqAIJ*)(aij->A->data))->nonew) { 538881824310SBarry Smith ierr = DisAssemble_MPIAIJ(mat);CHKERRQ(ierr); 538981824310SBarry Smith col = in[j]; 539081824310SBarry Smith /* Reinitialize the variables required by MatSetValues_SeqAIJ_B_Private() */ 539181824310SBarry Smith B = aij->B; 539281824310SBarry Smith b = (Mat_SeqAIJ*)B->data; 539381824310SBarry Smith bimax = b->imax; bi = b->i; bilen = b->ilen; bj = b->j; 539481824310SBarry Smith rp2 = bj + bi[row]; 539581824310SBarry Smith ap2 = ba + bi[row]; 539681824310SBarry Smith rmax2 = bimax[row]; 539781824310SBarry Smith nrow2 = bilen[row]; 539881824310SBarry Smith low2 = 0; 539981824310SBarry Smith high2 = nrow2; 5400d0f46423SBarry Smith bm = aij->B->rmap->n; 540181824310SBarry Smith ba = b->a; 540281824310SBarry Smith } 540381824310SBarry Smith } else col = in[j]; 540481824310SBarry Smith MatSetValues_SeqAIJ_B_Private(row,col,value,addv); 540581824310SBarry Smith } 540681824310SBarry Smith } 540781824310SBarry Smith } else { 540881824310SBarry Smith if (!aij->donotstash) { 540981824310SBarry Smith if 
(roworiented) { 5410ace3abfcSBarry Smith ierr = MatStashValuesRow_Private(&mat->stash,im[i],n,in,v+i*n,(PetscBool)(ignorezeroentries && (addv == ADD_VALUES)));CHKERRQ(ierr); 541181824310SBarry Smith } else { 5412ace3abfcSBarry Smith ierr = MatStashValuesCol_Private(&mat->stash,im[i],n,in,v+i,m,(PetscBool)(ignorezeroentries && (addv == ADD_VALUES)));CHKERRQ(ierr); 541381824310SBarry Smith } 541481824310SBarry Smith } 541581824310SBarry Smith } 541681824310SBarry Smith }} 541781824310SBarry Smith PetscFunctionReturnVoid(); 541881824310SBarry Smith } 541981824310SBarry Smith EXTERN_C_END 542003bfb495SBarry Smith 5421