xref: /petsc/src/mat/impls/adj/mpi/mpiadj.c (revision 0170919e3807ff82287a31d6cf5edc59527b75e2)
1 
2 /*
3     Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
4 */
5 #include <../src/mat/impls/adj/mpi/mpiadj.h>    /*I "petscmat.h" I*/
6 #include <petscsf.h>
7 
8 /*
9  * This interface is designed to serve both MatCreateSubMatrix (parallel sub-matrix) and MatCreateSubMatrices (sequential sub-matrices)
10  */
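/*
 * Illustrative sketch (editorial addition, not part of the original source): the three optional outputs
 * form a CSR triple in the global column numbering.  For two selected rows whose retained neighbours are
 * {0,2} and {1}, with unit weights, one would expect
 *
 *     sadj_xadj   = {0, 2, 3}
 *     sadj_adjncy = {0, 2, 1}
 *     sadj_values = {1, 1, 1}   (returned only when the parent matrix carries edge weights)
 *
 * Passing NULL for any of the three output pointers causes the corresponding array to be freed here instead.
 */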
11 static PetscErrorCode MatCreateSubMatrix_MPIAdj_data(Mat adj,IS irows, IS icols, PetscInt **sadj_xadj,PetscInt **sadj_adjncy,PetscInt **sadj_values)
12 {
13   PetscInt           nlrows_is,icols_n,i,j,nroots,nleaves,owner,rlocalindex,*ncols_send,*ncols_recv;
14   PetscInt           nlrows_mat,*adjncy_recv,Ncols_recv,Ncols_send,*xadj_recv,*values_recv;
15   PetscInt          *ncols_recv_offsets,loc,rnclos,*sadjncy,*sxadj,*svalues,isvalue;
16   const PetscInt    *irows_indices,*icols_indices,*xadj, *adjncy;
17   Mat_MPIAdj        *a = (Mat_MPIAdj*)adj->data;
18   PetscLayout        rmap;
19   MPI_Comm           comm;
20   PetscSF            sf;
21   PetscSFNode       *iremote;
22   PetscBool          done;
23   PetscErrorCode     ierr;
24 
25   PetscFunctionBegin;
26   ierr = PetscObjectGetComm((PetscObject)adj,&comm);CHKERRQ(ierr);
27   ierr = MatGetLayouts(adj,&rmap,NULL);CHKERRQ(ierr);
28   ierr = ISGetLocalSize(irows,&nlrows_is);CHKERRQ(ierr);
29   ierr = ISGetIndices(irows,&irows_indices);CHKERRQ(ierr);
30   ierr = PetscCalloc1(nlrows_is,&iremote);CHKERRQ(ierr);
31   /* Construct the star-forest graph: each selected row in irows is a leaf rooted at the process that owns that row of adj */
32   nleaves = nlrows_is;
33   for (i=0; i<nlrows_is; i++){
34     owner = -1;
35     rlocalindex = -1;
36     ierr = PetscLayoutFindOwnerIndex(rmap,irows_indices[i],&owner,&rlocalindex);CHKERRQ(ierr);
37     iremote[i].rank  = owner;
38     iremote[i].index = rlocalindex;
39   }
40   ierr = MatGetRowIJ(adj,0,PETSC_FALSE,PETSC_FALSE,&nlrows_mat,&xadj,&adjncy,&done);CHKERRQ(ierr);
41   ierr = PetscCalloc4(nlrows_mat,&ncols_send,nlrows_is,&xadj_recv,nlrows_is+1,&ncols_recv_offsets,nlrows_is,&ncols_recv);CHKERRQ(ierr);
42   nroots = nlrows_mat;
43   for (i=0; i<nlrows_mat; i++){
44     ncols_send[i] = xadj[i+1]-xadj[i];
45   }
46   ierr = PetscSFCreate(comm,&sf);CHKERRQ(ierr);
47   ierr = PetscSFSetGraph(sf,nroots,nleaves,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);CHKERRQ(ierr);
48   ierr = PetscSFSetType(sf,PETSCSFBASIC);CHKERRQ(ierr);
49   ierr = PetscSFSetFromOptions(sf);CHKERRQ(ierr);
50   ierr = PetscSFBcastBegin(sf,MPIU_INT,ncols_send,ncols_recv);CHKERRQ(ierr);
51   ierr = PetscSFBcastEnd(sf,MPIU_INT,ncols_send,ncols_recv);CHKERRQ(ierr);
52   ierr = PetscSFBcastBegin(sf,MPIU_INT,xadj,xadj_recv);CHKERRQ(ierr);
53   ierr = PetscSFBcastEnd(sf,MPIU_INT,xadj,xadj_recv);CHKERRQ(ierr);
54   ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
55   Ncols_recv =0;
56   for (i=0; i<nlrows_is; i++){
57     Ncols_recv             += ncols_recv[i];
58     ncols_recv_offsets[i+1] = ncols_recv[i]+ncols_recv_offsets[i];
59   }
60   Ncols_send = 0;
61   for(i=0; i<nlrows_mat; i++){
62     Ncols_send += ncols_send[i];
63   }
64   ierr = PetscCalloc1(Ncols_recv,&iremote);CHKERRQ(ierr);
65   ierr = PetscCalloc1(Ncols_recv,&adjncy_recv);CHKERRQ(ierr);
66   nleaves = Ncols_recv;
67   Ncols_recv = 0;
68   for (i=0; i<nlrows_is; i++){
69     ierr = PetscLayoutFindOwner(rmap,irows_indices[i],&owner);CHKERRQ(ierr);
70     for (j=0; j<ncols_recv[i]; j++){
71       iremote[Ncols_recv].rank    = owner;
72       iremote[Ncols_recv++].index = xadj_recv[i]+j;
73     }
74   }
75   ierr = ISRestoreIndices(irows,&irows_indices);CHKERRQ(ierr);
76   /* Determine whether the edge weights also need to be communicated */
77   if (a->values){isvalue=1;} else {isvalue=0;}
78   /* A global reduction (the commented-out call below) would be needed if processes could disagree about the presence of weights */
79   /*ierr = MPIU_Allreduce(&isvalue,&isvalue,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);*/
80   if (isvalue){ierr = PetscCalloc1(Ncols_recv,&values_recv);CHKERRQ(ierr);}
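  /* Build a second star forest with one leaf per received nonzero so that the column indices
     (and, when present, the edge weights) of the selected rows can be gathered from their owners */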
81   nroots = Ncols_send;
82   ierr = PetscSFCreate(comm,&sf);CHKERRQ(ierr);
83   ierr = PetscSFSetGraph(sf,nroots,nleaves,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);CHKERRQ(ierr);
84   ierr = PetscSFSetType(sf,PETSCSFBASIC);CHKERRQ(ierr);
85   ierr = PetscSFSetFromOptions(sf);CHKERRQ(ierr);
86   ierr = PetscSFBcastBegin(sf,MPIU_INT,adjncy,adjncy_recv);CHKERRQ(ierr);
87   ierr = PetscSFBcastEnd(sf,MPIU_INT,adjncy,adjncy_recv);CHKERRQ(ierr);
88   if (isvalue){
89     ierr = PetscSFBcastBegin(sf,MPIU_INT,a->values,values_recv);CHKERRQ(ierr);
90     ierr = PetscSFBcastEnd(sf,MPIU_INT,a->values,values_recv);CHKERRQ(ierr);
91   }
92   ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
93   ierr = MatRestoreRowIJ(adj,0,PETSC_FALSE,PETSC_FALSE,&nlrows_mat,&xadj,&adjncy,&done);CHKERRQ(ierr);
94   ierr = ISGetLocalSize(icols,&icols_n);CHKERRQ(ierr);
95   ierr = ISGetIndices(icols,&icols_indices);CHKERRQ(ierr);
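  /* Discard received columns that are not in icols: mark them with -1 so the compaction pass below
     can skip them, and count the entries that survive */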
96   rnclos = 0;
97   for (i=0; i<nlrows_is; i++){
98     for (j=ncols_recv_offsets[i]; j<ncols_recv_offsets[i+1]; j++){
99       ierr = PetscFindInt(adjncy_recv[j], icols_n, icols_indices, &loc);CHKERRQ(ierr);
100       if (loc<0){
101         adjncy_recv[j] = -1;
102         if (isvalue) values_recv[j] = -1;
103         ncols_recv[i]--;
104       } else {
105         rnclos++;
106       }
107     }
108   }
109   ierr = ISRestoreIndices(icols,&icols_indices);CHKERRQ(ierr);
110   ierr = PetscCalloc1(rnclos,&sadjncy);CHKERRQ(ierr);
111   if (isvalue) {ierr = PetscCalloc1(rnclos,&svalues);CHKERRQ(ierr);}
112   ierr = PetscCalloc1(nlrows_is+1,&sxadj);CHKERRQ(ierr);
113   rnclos = 0;
114   for(i=0; i<nlrows_is; i++){
115     for(j=ncols_recv_offsets[i]; j<ncols_recv_offsets[i+1]; j++){
116       if (adjncy_recv[j]<0) continue;
117       sadjncy[rnclos] = adjncy_recv[j];
118       if (isvalue) svalues[rnclos] = values_recv[j];
119       rnclos++;
120     }
121   }
122   for(i=0; i<nlrows_is; i++){
123     sxadj[i+1] = sxadj[i]+ncols_recv[i];
124   }
125   if (sadj_xadj)  { *sadj_xadj = sxadj;} else    { ierr = PetscFree(sxadj);CHKERRQ(ierr);}
126   if (sadj_adjncy){ *sadj_adjncy = sadjncy;} else { ierr = PetscFree(sadjncy);CHKERRQ(ierr);}
127   if (sadj_values){
128     if (isvalue) *sadj_values = svalues; else *sadj_values=0;
129   } else {
130     if (isvalue) {ierr = PetscFree(svalues);CHKERRQ(ierr);}
131   }
132   ierr = PetscFree4(ncols_send,xadj_recv,ncols_recv_offsets,ncols_recv);CHKERRQ(ierr);
133   ierr = PetscFree(adjncy_recv);CHKERRQ(ierr);
134   if (isvalue) {ierr = PetscFree(values_recv);CHKERRQ(ierr);}
135   PetscFunctionReturn(0);
136 }
137 
138 static PetscErrorCode MatCreateSubMatrices_MPIAdj_Private(Mat mat,PetscInt n,const IS irow[],const IS icol[],PetscBool subcomm,MatReuse scall,Mat *submat[])
139 {
140   PetscInt           i,irow_n,icol_n,*sxadj,*sadjncy,*svalues;
141   PetscInt          *indices,nindx,j,k,loc;
142   PetscMPIInt        issame;
143   const PetscInt    *irow_indices,*icol_indices;
144   MPI_Comm           scomm_row,scomm_col,scomm_mat;
145   PetscErrorCode     ierr;
146 
147   PetscFunctionBegin;
148   nindx = 0;
149   /*
150    * Estimate an upper bound (largest irow_n+icol_n over all requested sub-matrices) for the scratch index array
151    */
152   for(i=0; i<n; i++){
153     ierr = ISGetLocalSize(irow[i],&irow_n);CHKERRQ(ierr);
154     ierr = ISGetLocalSize(icol[i],&icol_n);CHKERRQ(ierr);
155     nindx = nindx>(irow_n+icol_n)? nindx:(irow_n+icol_n);
156   }
157   ierr = PetscCalloc1(nindx,&indices);CHKERRQ(ierr);
158   /* construct the sub-matrices one at a time */
159   for(i=0; i<n; i++){
160     if (subcomm){
161       ierr = PetscObjectGetComm((PetscObject)irow[i],&scomm_row);CHKERRQ(ierr);
162       ierr = PetscObjectGetComm((PetscObject)icol[i],&scomm_col);CHKERRQ(ierr);
163       ierr = MPI_Comm_compare(scomm_row,scomm_col,&issame);CHKERRQ(ierr);
164       if (issame != MPI_IDENT) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Row index set must have the same comm as the col index set");
165       ierr = MPI_Comm_compare(scomm_row,PETSC_COMM_SELF,&issame);CHKERRQ(ierr);
166       if (issame == MPI_IDENT) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Cannot use PETSC_COMM_SELF as comm when extracting a parallel submatrix");
167     } else {
168       scomm_row = PETSC_COMM_SELF;
169     }
170     /* get the sub-matrix rows in CSR form (columns still in the global numbering) */
171     sxadj=0; sadjncy=0; svalues=0;
172     ierr = MatCreateSubMatrix_MPIAdj_data(mat,irow[i],icol[i],&sxadj,&sadjncy,&svalues);CHKERRQ(ierr);
173     ierr = ISGetLocalSize(irow[i],&irow_n);CHKERRQ(ierr);
174     ierr = ISGetLocalSize(icol[i],&icol_n);CHKERRQ(ierr);
175     ierr = ISGetIndices(irow[i],&irow_indices);CHKERRQ(ierr);
176     ierr = PetscMemcpy(indices,irow_indices,sizeof(PetscInt)*irow_n);CHKERRQ(ierr);
177     ierr = ISRestoreIndices(irow[i],&irow_indices);CHKERRQ(ierr);
178     ierr = ISGetIndices(icol[i],&icol_indices);CHKERRQ(ierr);
179     ierr = PetscMemcpy(indices+irow_n,icol_indices,sizeof(PetscInt)*icol_n);CHKERRQ(ierr);
180     ierr = ISRestoreIndices(icol[i],&icol_indices);CHKERRQ(ierr);
181     nindx = irow_n+icol_n;
182     ierr = PetscSortRemoveDupsInt(&nindx,indices);CHKERRQ(ierr);
183     /* renumber each column to its position in the sorted, deduplicated index array */
184     for(j=0; j<irow_n; j++){
185       for(k=sxadj[j]; k<sxadj[j+1]; k++){
186         ierr = PetscFindInt(sadjncy[k],nindx,indices,&loc);CHKERRQ(ierr);
187 #if defined(PETSC_USE_DEBUG)
188         if (loc<0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Cannot find column %D",sadjncy[k]);
189 #endif
190         sadjncy[k] = loc;
191       }
192     }
193     if (scall==MAT_INITIAL_MATRIX){
194       ierr = MatCreateMPIAdj(scomm_row,irow_n,icol_n,sxadj,sadjncy,svalues,submat[i]);CHKERRQ(ierr);
195     } else {
196        Mat                sadj = *(submat[i]);
197        Mat_MPIAdj         *sa  = (Mat_MPIAdj*)((sadj)->data);
198        ierr = PetscObjectGetComm((PetscObject)sadj,&scomm_mat);CHKERRQ(ierr);
199        ierr = MPI_Comm_compare(scomm_row,scomm_mat,&issame);CHKERRQ(ierr);
200        if (issame != MPI_IDENT) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Submatrix must have the same comm as the index sets");
201        ierr = PetscMemcpy(sa->i,sxadj,sizeof(PetscInt)*(irow_n+1));CHKERRQ(ierr);
202        ierr = PetscMemcpy(sa->j,sadjncy,sizeof(PetscInt)*sxadj[irow_n]);CHKERRQ(ierr);
203        if (svalues){ierr = PetscMemcpy(sa->values,svalues,sizeof(PetscInt)*sxadj[irow_n]);CHKERRQ(ierr);}
204        ierr = PetscFree(sxadj);CHKERRQ(ierr);
205        ierr = PetscFree(sadjncy);CHKERRQ(ierr);
206        if (svalues) {ierr = PetscFree(svalues);CHKERRQ(ierr);}
207     }
208   }
209   ierr = PetscFree(indices);CHKERRQ(ierr);
210   PetscFunctionReturn(0);
211 }
212 
213 static PetscErrorCode MatCreateSubMatricesMPI_MPIAdj(Mat mat,PetscInt n, const IS irow[],const IS icol[],MatReuse scall,Mat *submat[])
214 {
215   PetscErrorCode     ierr;
216   /*get sub-matrices across a sub communicator */
217   PetscFunctionBegin;
218   ierr = MatCreateSubMatrices_MPIAdj_Private(mat,n,irow,icol,PETSC_TRUE,scall,submat);CHKERRQ(ierr);
219   PetscFunctionReturn(0);
220 }
221 
222 static PetscErrorCode MatCreateSubMatrices_MPIAdj(Mat mat,PetscInt n,const IS irow[],const IS icol[],MatReuse scall,Mat *submat[])
223 {
224   PetscErrorCode     ierr;
225 
226   PetscFunctionBegin;
227   /*get sub-matrices based on PETSC_COMM_SELF */
228   ierr = MatCreateSubMatrices_MPIAdj_Private(mat,n,irow,icol,PETSC_FALSE,scall,submat);CHKERRQ(ierr);
229   PetscFunctionReturn(0);
230 }
231 
232 static PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
233 {
234   Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
235   PetscErrorCode    ierr;
236   PetscInt          i,j,m = A->rmap->n;
237   const char        *name;
238   PetscViewerFormat format;
239 
240   PetscFunctionBegin;
241   ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
242   ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
243   if (format == PETSC_VIEWER_ASCII_INFO) {
244     PetscFunctionReturn(0);
245   } else if (format == PETSC_VIEWER_ASCII_MATLAB) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MATLAB format not supported");
246   else {
247     ierr = PetscViewerASCIIUseTabs(viewer,PETSC_FALSE);CHKERRQ(ierr);
248     ierr = PetscViewerASCIIPushSynchronized(viewer);CHKERRQ(ierr);
249     for (i=0; i<m; i++) {
250       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"row %D:",i+A->rmap->rstart);CHKERRQ(ierr);
251       for (j=a->i[i]; j<a->i[i+1]; j++) {
252         if (a->values) {
253           ierr = PetscViewerASCIISynchronizedPrintf(viewer," (%D, %D) ",a->j[j], a->values[j]);CHKERRQ(ierr);
254         } else {
255           ierr = PetscViewerASCIISynchronizedPrintf(viewer," %D ",a->j[j]);CHKERRQ(ierr);
256         }
257       }
258       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
259     }
260     ierr = PetscViewerASCIIUseTabs(viewer,PETSC_TRUE);CHKERRQ(ierr);
261     ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
262     ierr = PetscViewerASCIIPopSynchronized(viewer);CHKERRQ(ierr);
263   }
264   PetscFunctionReturn(0);
265 }
266 
267 static PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
268 {
269   PetscErrorCode ierr;
270   PetscBool      iascii;
271 
272   PetscFunctionBegin;
273   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
274   if (iascii) {
275     ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
276   }
277   PetscFunctionReturn(0);
278 }
279 
280 static PetscErrorCode MatDestroy_MPIAdj(Mat mat)
281 {
282   Mat_MPIAdj     *a = (Mat_MPIAdj*)mat->data;
283   PetscErrorCode ierr;
284 
285   PetscFunctionBegin;
286 #if defined(PETSC_USE_LOG)
287   PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D, NZ=%D",mat->rmap->n,mat->cmap->n,a->nz);
288 #endif
289   ierr = PetscFree(a->diag);CHKERRQ(ierr);
290   if (a->freeaij) {
291     if (a->freeaijwithfree) {
292       if (a->i) free(a->i);
293       if (a->j) free(a->j);
294     } else {
295       ierr = PetscFree(a->i);CHKERRQ(ierr);
296       ierr = PetscFree(a->j);CHKERRQ(ierr);
297       ierr = PetscFree(a->values);CHKERRQ(ierr);
298     }
299   }
300   ierr = PetscFree(a->rowvalues);CHKERRQ(ierr);
301   ierr = PetscFree(mat->data);CHKERRQ(ierr);
302   ierr = PetscObjectChangeTypeName((PetscObject)mat,0);CHKERRQ(ierr);
303   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjSetPreallocation_C",NULL);CHKERRQ(ierr);
304   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjCreateNonemptySubcommMat_C",NULL);CHKERRQ(ierr);
305   PetscFunctionReturn(0);
306 }
307 
308 static PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op,PetscBool flg)
309 {
310   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
311   PetscErrorCode ierr;
312 
313   PetscFunctionBegin;
314   switch (op) {
315   case MAT_SYMMETRIC:
316   case MAT_STRUCTURALLY_SYMMETRIC:
317   case MAT_HERMITIAN:
318     a->symmetric = flg;
319     break;
320   case MAT_SYMMETRY_ETERNAL:
321     break;
322   default:
323     ierr = PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr);
324     break;
325   }
326   PetscFunctionReturn(0);
327 }
328 
329 static PetscErrorCode MatGetRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
330 {
331   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
332   PetscErrorCode ierr;
333 
334   PetscFunctionBegin;
335   row -= A->rmap->rstart;
336   if (row < 0 || row >= A->rmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");
337   *nz = a->i[row+1] - a->i[row];
338   if (v) {
339     PetscInt j;
340     if (a->rowvalues_alloc < *nz) {
341       ierr = PetscFree(a->rowvalues);CHKERRQ(ierr);
342       a->rowvalues_alloc = PetscMax(a->rowvalues_alloc*2, *nz);
343       ierr = PetscMalloc1(a->rowvalues_alloc,&a->rowvalues);CHKERRQ(ierr);
344     }
345     for (j=0; j<*nz; j++){
346       a->rowvalues[j] = a->values ? a->values[a->i[row]+j]:1.0;
347     }
348     *v = (*nz) ? a->rowvalues : NULL;
349   }
350   if (idx) *idx = (*nz) ? a->j + a->i[row] : NULL;
351   PetscFunctionReturn(0);
352 }
353 
354 static PetscErrorCode MatRestoreRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
355 {
356   PetscFunctionBegin;
357   PetscFunctionReturn(0);
358 }
359 
360 static PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscBool * flg)
361 {
362   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data,*b = (Mat_MPIAdj*)B->data;
363   PetscErrorCode ierr;
364   PetscBool      flag;
365 
366   PetscFunctionBegin;
367   /* the matrices differ locally if the local dimensions or the numbers of nonzeros differ */
368   if ((A->rmap->n != B->rmap->n) || (a->nz != b->nz)) {
369     flag = PETSC_FALSE;
370   } else {
371     /* otherwise compare the row offsets a->i ... */
372     ierr = PetscMemcmp(a->i,b->i,(A->rmap->n+1)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
373     if (flag) {
374       /* ... and then the column indices a->j */
375       ierr = PetscMemcmp(a->j,b->j,(a->nz)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
376     }
377   }
378   ierr = MPIU_Allreduce(&flag,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
379   PetscFunctionReturn(0);
380 }
381 
382 static PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool  *done)
383 {
384   PetscInt   i;
385   Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
386   PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;
387 
388   PetscFunctionBegin;
389   *m    = A->rmap->n;
390   *ia   = a->i;
391   *ja   = a->j;
392   *done = PETSC_TRUE;
393   if (oshift) {
394     for (i=0; i<(*ia)[*m]; i++) {
395       (*ja)[i]++;
396     }
397     for (i=0; i<=(*m); i++) (*ia)[i]++;
398   }
399   PetscFunctionReturn(0);
400 }
401 
402 static PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool  *done)
403 {
404   PetscInt   i;
405   Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
406   PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;
407 
408   PetscFunctionBegin;
409   if (ia && a->i != *ia) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"ia passed back is not one obtained with MatGetRowIJ()");
410   if (ja && a->j != *ja) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"ja passed back is not one obtained with MatGetRowIJ()");
411   if (oshift) {
412     if (!ia) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"If oshift is nonzero you must pass in the inia[] argument");
413     if (!ja) SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"If oshift is nonzero you must pass in the inja[] argument");
414     for (i=0; i<=(*m); i++) (*ia)[i]--;
415     for (i=0; i<(*ia)[*m]; i++) {
416       (*ja)[i]--;
417     }
418   }
419   PetscFunctionReturn(0);
420 }
421 
422 PetscErrorCode  MatConvertFrom_MPIAdj(Mat A,MatType type,MatReuse reuse,Mat *newmat)
423 {
424   Mat               B;
425   PetscErrorCode    ierr;
426   PetscInt          i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
427   const PetscInt    *rj;
428   const PetscScalar *ra;
429   MPI_Comm          comm;
430 
431   PetscFunctionBegin;
432   ierr = MatGetSize(A,NULL,&N);CHKERRQ(ierr);
433   ierr = MatGetLocalSize(A,&m,NULL);CHKERRQ(ierr);
434   ierr = MatGetOwnershipRange(A,&rstart,NULL);CHKERRQ(ierr);
435 
436   /* count the number of nonzeros per row */
437   for (i=0; i<m; i++) {
438     ierr = MatGetRow(A,i+rstart,&len,&rj,NULL);CHKERRQ(ierr);
439     for (j=0; j<len; j++) {
440       if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
441     }
442     nzeros += len;
443     ierr    = MatRestoreRow(A,i+rstart,&len,&rj,NULL);CHKERRQ(ierr);
444   }
445 
446   /* malloc space for nonzeros */
447   ierr = PetscMalloc1(nzeros+1,&a);CHKERRQ(ierr);
448   ierr = PetscMalloc1(N+1,&ia);CHKERRQ(ierr);
449   ierr = PetscMalloc1(nzeros+1,&ja);CHKERRQ(ierr);
450 
451   nzeros = 0;
452   ia[0]  = 0;
453   for (i=0; i<m; i++) {
454     ierr = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
455     cnt  = 0;
456     for (j=0; j<len; j++) {
457       if (rj[j] != i+rstart) { /* if not diagonal */
458         a[nzeros+cnt]    = (PetscInt) PetscAbsScalar(ra[j]);
459         ja[nzeros+cnt++] = rj[j];
460       }
461     }
462     ierr    = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
463     nzeros += cnt;
464     ia[i+1] = nzeros;
465   }
466 
467   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
468   ierr = MatCreate(comm,&B);CHKERRQ(ierr);
469   ierr = MatSetSizes(B,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
470   ierr = MatSetType(B,type);CHKERRQ(ierr);
471   ierr = MatMPIAdjSetPreallocation(B,ia,ja,a);CHKERRQ(ierr);
472 
473   if (reuse == MAT_INPLACE_MATRIX) {
474     ierr = MatHeaderReplace(A,&B);CHKERRQ(ierr);
475   } else {
476     *newmat = B;
477   }
478   PetscFunctionReturn(0);
479 }
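/*
   Illustrative sketch (editorial addition, hypothetical variable names): the usual way to reach the
   routine above is through MatConvert(), as noted in the MatCreateMPIAdj() documentation; the resulting
   graph drops diagonal entries and stores |a_ij| cast to PetscInt as edge weights.

     Mat adj;
     ierr = MatConvert(A,MATMPIADJ,MAT_INITIAL_MATRIX,&adj);CHKERRQ(ierr);
     ierr = MatDestroy(&adj);CHKERRQ(ierr);
*/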
480 
481 /* -------------------------------------------------------------------*/
482 static struct _MatOps MatOps_Values = {0,
483                                        MatGetRow_MPIAdj,
484                                        MatRestoreRow_MPIAdj,
485                                        0,
486                                 /* 4*/ 0,
487                                        0,
488                                        0,
489                                        0,
490                                        0,
491                                        0,
492                                 /*10*/ 0,
493                                        0,
494                                        0,
495                                        0,
496                                        0,
497                                 /*15*/ 0,
498                                        MatEqual_MPIAdj,
499                                        0,
500                                        0,
501                                        0,
502                                 /*20*/ 0,
503                                        0,
504                                        MatSetOption_MPIAdj,
505                                        0,
506                                 /*24*/ 0,
507                                        0,
508                                        0,
509                                        0,
510                                        0,
511                                 /*29*/ 0,
512                                        0,
513                                        0,
514                                        0,
515                                        0,
516                                 /*34*/ 0,
517                                        0,
518                                        0,
519                                        0,
520                                        0,
521                                 /*39*/ 0,
522                                        MatCreateSubMatrices_MPIAdj,
523                                        0,
524                                        0,
525                                        0,
526                                 /*44*/ 0,
527                                        0,
528                                        MatShift_Basic,
529                                        0,
530                                        0,
531                                 /*49*/ 0,
532                                        MatGetRowIJ_MPIAdj,
533                                        MatRestoreRowIJ_MPIAdj,
534                                        0,
535                                        0,
536                                 /*54*/ 0,
537                                        0,
538                                        0,
539                                        0,
540                                        0,
541                                 /*59*/ 0,
542                                        MatDestroy_MPIAdj,
543                                        MatView_MPIAdj,
544                                        MatConvertFrom_MPIAdj,
545                                        0,
546                                 /*64*/ 0,
547                                        0,
548                                        0,
549                                        0,
550                                        0,
551                                 /*69*/ 0,
552                                        0,
553                                        0,
554                                        0,
555                                        0,
556                                 /*74*/ 0,
557                                        0,
558                                        0,
559                                        0,
560                                        0,
561                                 /*79*/ 0,
562                                        0,
563                                        0,
564                                        0,
565                                        0,
566                                 /*84*/ 0,
567                                        0,
568                                        0,
569                                        0,
570                                        0,
571                                 /*89*/ 0,
572                                        0,
573                                        0,
574                                        0,
575                                        0,
576                                 /*94*/ 0,
577                                        0,
578                                        0,
579                                        0,
580                                        0,
581                                 /*99*/ 0,
582                                        0,
583                                        0,
584                                        0,
585                                        0,
586                                /*104*/ 0,
587                                        0,
588                                        0,
589                                        0,
590                                        0,
591                                /*109*/ 0,
592                                        0,
593                                        0,
594                                        0,
595                                        0,
596                                /*114*/ 0,
597                                        0,
598                                        0,
599                                        0,
600                                        0,
601                                /*119*/ 0,
602                                        0,
603                                        0,
604                                        0,
605                                        0,
606                                /*124*/ 0,
607                                        0,
608                                        0,
609                                        0,
610                                        MatCreateSubMatricesMPI_MPIAdj,
611                                /*129*/ 0,
612                                        0,
613                                        0,
614                                        0,
615                                        0,
616                                /*134*/ 0,
617                                        0,
618                                        0,
619                                        0,
620                                        0,
621                                /*139*/ 0,
622                                        0,
623                                        0
624 };
625 
626 static PetscErrorCode  MatMPIAdjSetPreallocation_MPIAdj(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
627 {
628   Mat_MPIAdj     *b = (Mat_MPIAdj*)B->data;
629   PetscErrorCode ierr;
630 #if defined(PETSC_USE_DEBUG)
631   PetscInt ii;
632 #endif
633 
634   PetscFunctionBegin;
635   ierr = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr);
636   ierr = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr);
637 
638 #if defined(PETSC_USE_DEBUG)
639   if (i[0] != 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"First i[] index must be zero, instead it is %D",i[0]);
640   for (ii=1; ii<=B->rmap->n; ii++) {
641     if (i[ii] < 0 || i[ii] < i[ii-1]) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row offsets must be non-negative and non-decreasing: i[%D]=%D is invalid given i[%D]=%D",ii,i[ii],ii-1,i[ii-1]);
642   }
643   for (ii=0; ii<i[B->rmap->n]; ii++) {
644     if (j[ii] < 0 || j[ii] >= B->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column index j[%D]=%D is out of range",ii,j[ii]);
645   }
646 #endif
647   B->preallocated = PETSC_TRUE;
648 
649   b->j      = j;
650   b->i      = i;
651   b->values = values;
652 
653   b->nz        = i[B->rmap->n];
654   b->diag      = 0;
655   b->symmetric = PETSC_FALSE;
656   b->freeaij   = PETSC_TRUE;
657 
658   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
659   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
660   PetscFunctionReturn(0);
661 }
662 
663 static PetscErrorCode MatMPIAdjCreateNonemptySubcommMat_MPIAdj(Mat A,Mat *B)
664 {
665   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
666   PetscErrorCode ierr;
667   const PetscInt *ranges;
668   MPI_Comm       acomm,bcomm;
669   MPI_Group      agroup,bgroup;
670   PetscMPIInt    i,rank,size,nranks,*ranks;
671 
672   PetscFunctionBegin;
673   *B    = NULL;
674   ierr  = PetscObjectGetComm((PetscObject)A,&acomm);CHKERRQ(ierr);
675   ierr  = MPI_Comm_size(acomm,&size);CHKERRQ(ierr);
676   ierr  = MPI_Comm_rank(acomm,&rank);CHKERRQ(ierr);
677   ierr  = MatGetOwnershipRanges(A,&ranges);CHKERRQ(ierr);
678   for (i=0,nranks=0; i<size; i++) {
679     if (ranges[i+1] - ranges[i] > 0) nranks++;
680   }
681   if (nranks == size) {         /* All ranks have a positive number of rows, so we do not need to create a subcomm; */
682     ierr = PetscObjectReference((PetscObject)A);CHKERRQ(ierr);
683     *B   = A;
684     PetscFunctionReturn(0);
685   }
686 
687   ierr = PetscMalloc1(nranks,&ranks);CHKERRQ(ierr);
688   for (i=0,nranks=0; i<size; i++) {
689     if (ranges[i+1] - ranges[i] > 0) ranks[nranks++] = i;
690   }
691   ierr = MPI_Comm_group(acomm,&agroup);CHKERRQ(ierr);
692   ierr = MPI_Group_incl(agroup,nranks,ranks,&bgroup);CHKERRQ(ierr);
693   ierr = PetscFree(ranks);CHKERRQ(ierr);
694   ierr = MPI_Comm_create(acomm,bgroup,&bcomm);CHKERRQ(ierr);
695   ierr = MPI_Group_free(&agroup);CHKERRQ(ierr);
696   ierr = MPI_Group_free(&bgroup);CHKERRQ(ierr);
697   if (bcomm != MPI_COMM_NULL) {
698     PetscInt   m,N;
699     Mat_MPIAdj *b;
700     ierr       = MatGetLocalSize(A,&m,NULL);CHKERRQ(ierr);
701     ierr       = MatGetSize(A,NULL,&N);CHKERRQ(ierr);
702     ierr       = MatCreateMPIAdj(bcomm,m,N,a->i,a->j,a->values,B);CHKERRQ(ierr);
703     b          = (Mat_MPIAdj*)(*B)->data;
704     b->freeaij = PETSC_FALSE;
705     ierr       = MPI_Comm_free(&bcomm);CHKERRQ(ierr);
706   }
707   PetscFunctionReturn(0);
708 }
709 
710 PetscErrorCode  MatMPIAdjToSeq_MPIAdj(Mat A,Mat *B)
711 {
712   PetscErrorCode ierr;
713   PetscInt       M,N,*II,*J,NZ,nz,m,nzstart,i;
714   PetscInt       *Values = NULL;
715   Mat_MPIAdj     *adj = (Mat_MPIAdj*)A->data;
716   PetscMPIInt    mnz,mm,*allnz,*allm,size,*dispnz,*dispm;
717 
718   PetscFunctionBegin;
719   ierr = MPI_Comm_size(PetscObjectComm((PetscObject)A),&size);CHKERRQ(ierr);
720   ierr = MatGetSize(A,&M,&N);CHKERRQ(ierr);
721   ierr = MatGetLocalSize(A,&m,NULL);CHKERRQ(ierr);
722   nz   = adj->nz;
723   if (adj->i[m] != nz) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"nz %D does not match i[m] %D",nz,adj->i[m]);
724   ierr = MPI_Allreduce(&nz,&NZ,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
725 
726   ierr = PetscMPIIntCast(nz,&mnz);CHKERRQ(ierr);
727   ierr = PetscCalloc2(size,&allnz,size,&dispnz);CHKERRQ(ierr);
728   ierr = MPI_Allgather(&mnz,1,MPI_INT,allnz,1,MPI_INT,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
729   dispnz[0] = 0; for (i=1; i<size; i++) dispnz[i] = dispnz[i-1]+ allnz[i-1];
730   if (adj->values) {
731     ierr = PetscMalloc1(NZ,&Values);CHKERRQ(ierr);
732     ierr = MPI_Allgatherv(adj->values,mnz,MPIU_INT,Values,allnz,dispnz,MPIU_INT,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
733   }
734   ierr = PetscMalloc1(NZ,&J);CHKERRQ(ierr);
735   ierr = MPI_Allgatherv(adj->j,mnz,MPIU_INT,J,allnz,dispnz,MPIU_INT,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
736   ierr = PetscFree2(allnz,dispnz);CHKERRQ(ierr);
737   ierr = MPI_Scan(&nz,&nzstart,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
738   nzstart -= nz;
739   /* shift the i[] values so they will be correct after being received */
740   for (i=0; i<m; i++) adj->i[i] += nzstart;
741   ierr = PetscMalloc1(M+1,&II);CHKERRQ(ierr);
742   ierr = PetscMPIIntCast(m,&mm);CHKERRQ(ierr);
743   ierr = PetscMalloc2(size,&allm,size,&dispm);CHKERRQ(ierr);
744   ierr = MPI_Allgather(&mm,1,MPI_INT,allm,1,MPI_INT,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
745   dispm[0] = 0; for (i=1; i<size; i++) dispm[i] = dispm[i-1]+ allm[i-1];
746   ierr = MPI_Allgatherv(adj->i,mm,MPIU_INT,II,allm,dispm,MPIU_INT,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
747   ierr = PetscFree2(allm,dispm);CHKERRQ(ierr);
748   II[M] = NZ;
749   /* shift the i[] values back */
750   for (i=0; i<m; i++) adj->i[i] -= nzstart;
751   ierr = MatCreateMPIAdj(PETSC_COMM_SELF,M,N,II,J,Values,B);CHKERRQ(ierr);
752   PetscFunctionReturn(0);
753 }
754 
755 /*@
756    MatMPIAdjCreateNonemptySubcommMat - creates the same MPIAdj matrix on a subcommunicator containing only the processes that own a positive number of rows
757 
758    Collective
759 
760    Input Arguments:
761 .  A - original MPIAdj matrix
762 
763    Output Arguments:
764 .  B - matrix on subcommunicator, NULL on ranks that owned zero rows of A
765 
766    Level: developer
767 
768    Note:
769    This function is mostly useful for internal use by mesh partitioning packages that require that every process owns at least one row.
770 
771    The matrix B should be destroyed with MatDestroy(). The arrays are not copied, so B should be destroyed before A is destroyed.
772 
773 .seealso: MatCreateMPIAdj()
774 @*/
775 PetscErrorCode MatMPIAdjCreateNonemptySubcommMat(Mat A,Mat *B)
776 {
777   PetscErrorCode ierr;
778 
779   PetscFunctionBegin;
780   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
781   ierr = PetscUseMethod(A,"MatMPIAdjCreateNonemptySubcommMat_C",(Mat,Mat*),(A,B));CHKERRQ(ierr);
782   PetscFunctionReturn(0);
783 }
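/*
   Illustrative sketch (editorial addition, hypothetical variable names): typical guarded use, since B
   is NULL on ranks that own no rows of A.  B must be destroyed before A because the two matrices share
   the underlying i/j/values arrays.

     Mat Bsub;
     ierr = MatMPIAdjCreateNonemptySubcommMat(A,&Bsub);CHKERRQ(ierr);
     if (Bsub) {
       ierr = MatDestroy(&Bsub);CHKERRQ(ierr);
     }
     ierr = MatDestroy(&A);CHKERRQ(ierr);
*/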
784 
785 /*MC
786    MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
787    intended for use in constructing orderings and partitionings.
788 
789   Level: beginner
790 
791 .seealso: MatCreateMPIAdj
792 M*/
793 
794 PETSC_EXTERN PetscErrorCode MatCreate_MPIAdj(Mat B)
795 {
796   Mat_MPIAdj     *b;
797   PetscErrorCode ierr;
798 
799   PetscFunctionBegin;
800   ierr         = PetscNewLog(B,&b);CHKERRQ(ierr);
801   B->data      = (void*)b;
802   ierr         = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
803   B->assembled = PETSC_FALSE;
804 
805   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C",MatMPIAdjSetPreallocation_MPIAdj);CHKERRQ(ierr);
806   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjCreateNonemptySubcommMat_C",MatMPIAdjCreateNonemptySubcommMat_MPIAdj);CHKERRQ(ierr);
807   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjToSeq_C",MatMPIAdjToSeq_MPIAdj);CHKERRQ(ierr);
808   ierr = PetscObjectChangeTypeName((PetscObject)B,MATMPIADJ);CHKERRQ(ierr);
809   PetscFunctionReturn(0);
810 }
811 
812 /*@C
813    MatMPIAdjToSeq - Converts a parallel MPIAdj matrix to a complete sequential MPIAdj matrix on each process (needed by sequential preconditioners)
814 
815    Logically Collective on MPI_Comm
816 
817    Input Parameter:
818 .  A - the matrix
819 
820    Output Parameter:
821 .  B - the same matrix on all processes
822 
823    Level: intermediate
824 
825 .seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
826 @*/
827 PetscErrorCode  MatMPIAdjToSeq(Mat A,Mat *B)
828 {
829   PetscErrorCode ierr;
830 
831   PetscFunctionBegin;
832   ierr = PetscUseMethod(A,"MatMPIAdjToSeq_C",(Mat,Mat*),(A,B));CHKERRQ(ierr);
833   PetscFunctionReturn(0);
834 }
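/*
   Illustrative sketch (editorial addition, hypothetical variable names): gathering the complete graph
   onto every process, for example before handing it to a sequential ordering or partitioning package.
   The result lives on PETSC_COMM_SELF and holds all M rows.

     Mat seqadj;
     ierr = MatMPIAdjToSeq(adj,&seqadj);CHKERRQ(ierr);
     ierr = MatDestroy(&seqadj);CHKERRQ(ierr);
*/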
835 
836 /*@C
837    MatMPIAdjSetPreallocation - Sets the arrays used for storing the matrix structure and optional edge weights
838 
839    Logically Collective on MPI_Comm
840 
841    Input Parameters:
842 +  B - the matrix
843 .  i - the indices into j for the start of each row
844 .  j - the column indices for each row (sorted for each row).
845        The indices in i and j start with zero (NOT with one).
846 -  values - [optional] edge weights
847 
848    Level: intermediate
849 
850 .seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
851 @*/
852 PetscErrorCode  MatMPIAdjSetPreallocation(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
853 {
854   PetscErrorCode ierr;
855 
856   PetscFunctionBegin;
857   ierr = PetscTryMethod(B,"MatMPIAdjSetPreallocation_C",(Mat,PetscInt*,PetscInt*,PetscInt*),(B,i,j,values));CHKERRQ(ierr);
858   PetscFunctionReturn(0);
859 }
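/*
   Illustrative sketch (editorial addition): the long-form creation sequence that MatCreateMPIAdj()
   below wraps, here attaching optional integer edge weights.  The variables comm, m, N, ii, jj, and
   wts are assumed to have been set up by the caller, with all three arrays coming from PetscMalloc()
   because the matrix takes ownership of them.

     Mat            B;
     PetscErrorCode ierr;

     ierr = MatCreate(comm,&B);CHKERRQ(ierr);
     ierr = MatSetSizes(B,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
     ierr = MatSetType(B,MATMPIADJ);CHKERRQ(ierr);
     ierr = MatMPIAdjSetPreallocation(B,ii,jj,wts);CHKERRQ(ierr);
*/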
860 
861 /*@C
862    MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
863    The matrix does not have numerical values associated with it, but is
864    intended for ordering (to reduce bandwidth, etc.) and partitioning.
865 
866    Collective on MPI_Comm
867 
868    Input Parameters:
869 +  comm - MPI communicator
870 .  m - number of local rows
871 .  N - number of global columns
872 .  i - the indices into j for the start of each row
873 .  j - the column indices for each row (sorted for each row).
874        The indices in i and j start with zero (NOT with one).
875 -  values -[optional] edge weights
876 
877    Output Parameter:
878 .  A - the matrix
879 
880    Level: intermediate
881 
882    Notes:
883     This matrix object does not support most matrix operations, including
884    MatSetValues().
885    You must NOT free the i, j, and values arrays yourself. PETSc will free them
886    when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
887    call from Fortran you need not create the arrays with PetscMalloc().
888    The i and j arrays should not include entries for the matrix diagonal.
889 
890    If you already have a matrix, you can create its adjacency matrix by a call
891    to MatConvert, specifying a type of MATMPIADJ.
892 
893    Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC
894 
895 .seealso: MatCreate(), MatConvert(), MatGetOrdering()
896 @*/
897 PetscErrorCode  MatCreateMPIAdj(MPI_Comm comm,PetscInt m,PetscInt N,PetscInt *i,PetscInt *j,PetscInt *values,Mat *A)
898 {
899   PetscErrorCode ierr;
900 
901   PetscFunctionBegin;
902   ierr = MatCreate(comm,A);CHKERRQ(ierr);
903   ierr = MatSetSizes(*A,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
904   ierr = MatSetType(*A,MATMPIADJ);CHKERRQ(ierr);
905   ierr = MatMPIAdjSetPreallocation(*A,i,j,values);CHKERRQ(ierr);
906   PetscFunctionReturn(0);
907 }
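/*
   Illustrative sketch (editorial addition, not part of the PETSc source): creating the adjacency
   matrix of the 4-vertex path graph 0-1-2-3 on a single process.  The arrays are 0-based CSR, contain
   no diagonal entries, and must come from PetscMalloc() since the matrix takes ownership of them and
   frees them when it is destroyed.

     PetscErrorCode ierr;
     PetscInt       *ii,*jj;
     Mat            adj;

     ierr = PetscMalloc1(5,&ii);CHKERRQ(ierr);
     ierr = PetscMalloc1(6,&jj);CHKERRQ(ierr);
     ii[0] = 0; ii[1] = 1; ii[2] = 3; ii[3] = 5; ii[4] = 6;
     jj[0] = 1;
     jj[1] = 0; jj[2] = 2;
     jj[3] = 1; jj[4] = 3;
     jj[5] = 2;
     ierr = MatCreateMPIAdj(PETSC_COMM_SELF,4,4,ii,jj,NULL,&adj);CHKERRQ(ierr);
     ierr = MatDestroy(&adj);CHKERRQ(ierr);
*/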
908 
909 
910 
911 
912 
913 
914 
915