/* /petsc/src/mat/impls/adj/mpi/mpiadj.c (revision 6a7b62d20411728549af5aecf0d2c6f16e43c92a) */

/*
    Defines the basic matrix operations for the ADJ adjacency list matrix data structure.
*/
#include <../src/mat/impls/adj/mpi/mpiadj.h>    /*I "petscmat.h" I*/
#include <petscsf.h>

/*
 * The interface should be easy to use for both MatCreateSubMatrix (parallel sub-matrix) and MatCreateSubMatrices (sequential sub-matrices)
 */
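/*
 * A minimal usage sketch for orientation (the names comm, nlocalrows, ncols and subadj below are
 * illustrative only, they are not taken from this file): the routine returns a CSR description of
 * the rows selected by irows, restricted to the columns in icols, which the callers in this file
 * hand directly to MatCreateMPIAdj(), e.g.
 *
 *   PetscInt *xadj = NULL, *adjncy = NULL, *values = NULL;
 *   PetscCall(MatCreateSubMatrix_MPIAdj_data(adj, irows, icols, &xadj, &adjncy, &values));
 *   PetscCall(MatCreateMPIAdj(comm, nlocalrows, ncols, xadj, adjncy, values, &subadj));
 *
 * where MatCreateMPIAdj() takes ownership of the three arrays and frees them when subadj is destroyed.
 */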
static PetscErrorCode MatCreateSubMatrix_MPIAdj_data(Mat adj,IS irows, IS icols, PetscInt **sadj_xadj,PetscInt **sadj_adjncy,PetscInt **sadj_values)
{
  PetscInt           nlrows_is,icols_n,i,j,nroots,nleaves,rlocalindex,*ncols_send,*ncols_recv;
  PetscInt           nlrows_mat,*adjncy_recv,Ncols_recv,Ncols_send,*xadj_recv,*values_recv;
  PetscInt          *ncols_recv_offsets,loc,rnclos,*sadjncy,*sxadj,*svalues;
  const PetscInt    *irows_indices,*icols_indices,*xadj, *adjncy;
  PetscMPIInt        owner;
  Mat_MPIAdj        *a = (Mat_MPIAdj*)adj->data;
  PetscLayout        rmap;
  MPI_Comm           comm;
  PetscSF            sf;
  PetscSFNode       *iremote;
  PetscBool          done;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetComm((PetscObject)adj,&comm));
  PetscCall(MatGetLayouts(adj,&rmap,NULL));
  PetscCall(ISGetLocalSize(irows,&nlrows_is));
  PetscCall(ISGetIndices(irows,&irows_indices));
  PetscCall(PetscMalloc1(nlrows_is,&iremote));
  /* construct the SF graph: one leaf per requested row, rooted at the owning process */
  nleaves = nlrows_is;
  for (i=0; i<nlrows_is; i++) {
    owner = -1;
    rlocalindex = -1;
    PetscCall(PetscLayoutFindOwnerIndex(rmap,irows_indices[i],&owner,&rlocalindex));
    iremote[i].rank  = owner;
    iremote[i].index = rlocalindex;
  }
  PetscCall(MatGetRowIJ(adj,0,PETSC_FALSE,PETSC_FALSE,&nlrows_mat,&xadj,&adjncy,&done));
  PetscCall(PetscCalloc4(nlrows_mat,&ncols_send,nlrows_is,&xadj_recv,nlrows_is+1,&ncols_recv_offsets,nlrows_is,&ncols_recv));
  nroots = nlrows_mat;
  for (i=0; i<nlrows_mat; i++) {
    ncols_send[i] = xadj[i+1]-xadj[i];
  }
  PetscCall(PetscSFCreate(comm,&sf));
  PetscCall(PetscSFSetGraph(sf,nroots,nleaves,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER));
  PetscCall(PetscSFSetType(sf,PETSCSFBASIC));
  PetscCall(PetscSFSetFromOptions(sf));
  PetscCall(PetscSFBcastBegin(sf,MPIU_INT,ncols_send,ncols_recv,MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf,MPIU_INT,ncols_send,ncols_recv,MPI_REPLACE));
  PetscCall(PetscSFBcastBegin(sf,MPIU_INT,xadj,xadj_recv,MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf,MPIU_INT,xadj,xadj_recv,MPI_REPLACE));
  PetscCall(PetscSFDestroy(&sf));
  Ncols_recv = 0;
  for (i=0; i<nlrows_is; i++) {
    Ncols_recv             += ncols_recv[i];
    ncols_recv_offsets[i+1] = ncols_recv[i]+ncols_recv_offsets[i];
  }
  Ncols_send = 0;
  for (i=0; i<nlrows_mat; i++) {
    Ncols_send += ncols_send[i];
  }
  PetscCall(PetscCalloc1(Ncols_recv,&iremote));
  PetscCall(PetscCalloc1(Ncols_recv,&adjncy_recv));
  nleaves = Ncols_recv;
  Ncols_recv = 0;
  for (i=0; i<nlrows_is; i++) {
    PetscCall(PetscLayoutFindOwner(rmap,irows_indices[i],&owner));
    for (j=0; j<ncols_recv[i]; j++) {
      iremote[Ncols_recv].rank    = owner;
      iremote[Ncols_recv++].index = xadj_recv[i]+j;
    }
  }
  PetscCall(ISRestoreIndices(irows,&irows_indices));
  /* allocate a receive buffer for the edge weights only if they are in use */
  if (a->useedgeweights) PetscCall(PetscCalloc1(Ncols_recv,&values_recv));
  nroots = Ncols_send;
  PetscCall(PetscSFCreate(comm,&sf));
  PetscCall(PetscSFSetGraph(sf,nroots,nleaves,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER));
  PetscCall(PetscSFSetType(sf,PETSCSFBASIC));
  PetscCall(PetscSFSetFromOptions(sf));
  PetscCall(PetscSFBcastBegin(sf,MPIU_INT,adjncy,adjncy_recv,MPI_REPLACE));
  PetscCall(PetscSFBcastEnd(sf,MPIU_INT,adjncy,adjncy_recv,MPI_REPLACE));
  if (a->useedgeweights) {
    PetscCall(PetscSFBcastBegin(sf,MPIU_INT,a->values,values_recv,MPI_REPLACE));
    PetscCall(PetscSFBcastEnd(sf,MPIU_INT,a->values,values_recv,MPI_REPLACE));
  }
  PetscCall(PetscSFDestroy(&sf));
  PetscCall(MatRestoreRowIJ(adj,0,PETSC_FALSE,PETSC_FALSE,&nlrows_mat,&xadj,&adjncy,&done));
  PetscCall(ISGetLocalSize(icols,&icols_n));
  PetscCall(ISGetIndices(icols,&icols_indices));
  rnclos = 0;
  for (i=0; i<nlrows_is; i++) {
    for (j=ncols_recv_offsets[i]; j<ncols_recv_offsets[i+1]; j++) {
      PetscCall(PetscFindInt(adjncy_recv[j], icols_n, icols_indices, &loc));
      if (loc<0) {
        adjncy_recv[j] = -1;
        if (a->useedgeweights) values_recv[j] = -1;
        ncols_recv[i]--;
      } else {
        rnclos++;
      }
    }
  }
  PetscCall(ISRestoreIndices(icols,&icols_indices));
  PetscCall(PetscCalloc1(rnclos,&sadjncy));
  if (a->useedgeweights) PetscCall(PetscCalloc1(rnclos,&svalues));
  PetscCall(PetscCalloc1(nlrows_is+1,&sxadj));
  rnclos = 0;
  for (i=0; i<nlrows_is; i++) {
    for (j=ncols_recv_offsets[i]; j<ncols_recv_offsets[i+1]; j++) {
      if (adjncy_recv[j]<0) continue;
      sadjncy[rnclos] = adjncy_recv[j];
      if (a->useedgeweights) svalues[rnclos] = values_recv[j];
      rnclos++;
    }
  }
  for (i=0; i<nlrows_is; i++) {
    sxadj[i+1] = sxadj[i]+ncols_recv[i];
  }
  if (sadj_xadj)   { *sadj_xadj = sxadj; }     else PetscCall(PetscFree(sxadj));
  if (sadj_adjncy) { *sadj_adjncy = sadjncy; } else PetscCall(PetscFree(sadjncy));
  if (sadj_values) {
    if (a->useedgeweights) *sadj_values = svalues; else *sadj_values = NULL;
  } else {
    if (a->useedgeweights) PetscCall(PetscFree(svalues));
  }
  PetscCall(PetscFree4(ncols_send,xadj_recv,ncols_recv_offsets,ncols_recv));
  PetscCall(PetscFree(adjncy_recv));
  if (a->useedgeweights) PetscCall(PetscFree(values_recv));
  PetscFunctionReturn(0);
}

static PetscErrorCode MatCreateSubMatrices_MPIAdj_Private(Mat mat,PetscInt n,const IS irow[],const IS icol[],PetscBool subcomm,MatReuse scall,Mat *submat[])
{
  PetscInt           i,irow_n,icol_n,*sxadj,*sadjncy,*svalues;
  PetscInt          *indices,nindx,j,k,loc;
  PetscMPIInt        issame;
  const PetscInt    *irow_indices,*icol_indices;
  MPI_Comm           scomm_row,scomm_col,scomm_mat;

  PetscFunctionBegin;
  nindx = 0;
  /*
   * Estimate a maximum number for allocating memory
   */
  for (i=0; i<n; i++) {
    PetscCall(ISGetLocalSize(irow[i],&irow_n));
    PetscCall(ISGetLocalSize(icol[i],&icol_n));
    nindx = nindx > (irow_n+icol_n) ? nindx : (irow_n+icol_n);
  }
  PetscCall(PetscMalloc1(nindx,&indices));
  /* construct a submat */
  if (scall == MAT_INITIAL_MATRIX) PetscCall(PetscMalloc1(n,submat));

  for (i=0; i<n; i++) {
    if (subcomm) {
      PetscCall(PetscObjectGetComm((PetscObject)irow[i],&scomm_row));
      PetscCall(PetscObjectGetComm((PetscObject)icol[i],&scomm_col));
      PetscCallMPI(MPI_Comm_compare(scomm_row,scomm_col,&issame));
      PetscCheck(issame == MPI_IDENT,PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Row index set must have the same comm as the col index set");
      PetscCallMPI(MPI_Comm_compare(scomm_row,PETSC_COMM_SELF,&issame));
      PetscCheck(issame != MPI_IDENT,PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Cannot use PETSC_COMM_SELF as comm when extracting a parallel submatrix");
    } else {
      scomm_row = PETSC_COMM_SELF;
    }
    /* get sub-matrix data */
    sxadj = NULL; sadjncy = NULL; svalues = NULL;
    PetscCall(MatCreateSubMatrix_MPIAdj_data(mat,irow[i],icol[i],&sxadj,&sadjncy,&svalues));
    PetscCall(ISGetLocalSize(irow[i],&irow_n));
    PetscCall(ISGetLocalSize(icol[i],&icol_n));
    PetscCall(ISGetIndices(irow[i],&irow_indices));
    PetscCall(PetscArraycpy(indices,irow_indices,irow_n));
    PetscCall(ISRestoreIndices(irow[i],&irow_indices));
    PetscCall(ISGetIndices(icol[i],&icol_indices));
    PetscCall(PetscArraycpy(indices+irow_n,icol_indices,icol_n));
    PetscCall(ISRestoreIndices(icol[i],&icol_indices));
    nindx = irow_n+icol_n;
    PetscCall(PetscSortRemoveDupsInt(&nindx,indices));
    /* renumber columns */
    for (j=0; j<irow_n; j++) {
      for (k=sxadj[j]; k<sxadj[j+1]; k++) {
        PetscCall(PetscFindInt(sadjncy[k],nindx,indices,&loc));
        PetscCheck(loc>=0,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Cannot find col %" PetscInt_FMT,sadjncy[k]);
        sadjncy[k] = loc;
      }
    }
    if (scall==MAT_INITIAL_MATRIX) {
      PetscCall(MatCreateMPIAdj(scomm_row,irow_n,icol_n,sxadj,sadjncy,svalues,submat[i]));
    } else {
       Mat                sadj = *(submat[i]);
       Mat_MPIAdj         *sa  = (Mat_MPIAdj*)((sadj)->data);
       PetscCall(PetscObjectGetComm((PetscObject)sadj,&scomm_mat));
       PetscCallMPI(MPI_Comm_compare(scomm_row,scomm_mat,&issame));
       PetscCheck(issame == MPI_IDENT,PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Submatrix must have the same comm as the row index set");
       PetscCall(PetscArraycpy(sa->i,sxadj,irow_n+1));
       PetscCall(PetscArraycpy(sa->j,sadjncy,sxadj[irow_n]));
       if (svalues) PetscCall(PetscArraycpy(sa->values,svalues,sxadj[irow_n]));
       PetscCall(PetscFree(sxadj));
       PetscCall(PetscFree(sadjncy));
       if (svalues) PetscCall(PetscFree(svalues));
    }
  }
  PetscCall(PetscFree(indices));
  PetscFunctionReturn(0);
}

static PetscErrorCode MatCreateSubMatricesMPI_MPIAdj(Mat mat,PetscInt n, const IS irow[],const IS icol[],MatReuse scall,Mat *submat[])
{
  /* get sub-matrices across a sub-communicator */
  PetscFunctionBegin;
  PetscCall(MatCreateSubMatrices_MPIAdj_Private(mat,n,irow,icol,PETSC_TRUE,scall,submat));
  PetscFunctionReturn(0);
}

static PetscErrorCode MatCreateSubMatrices_MPIAdj(Mat mat,PetscInt n,const IS irow[],const IS icol[],MatReuse scall,Mat *submat[])
{
  PetscFunctionBegin;
  /* get sub-matrices based on PETSC_COMM_SELF */
  PetscCall(MatCreateSubMatrices_MPIAdj_Private(mat,n,irow,icol,PETSC_FALSE,scall,submat));
  PetscFunctionReturn(0);
}

static PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
{
  Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
  PetscInt          i,j,m = A->rmap->n;
  const char        *name;
  PetscViewerFormat format;

  PetscFunctionBegin;
  PetscCall(PetscObjectGetName((PetscObject)A,&name));
  PetscCall(PetscViewerGetFormat(viewer,&format));
  if (format == PETSC_VIEWER_ASCII_INFO) {
    PetscFunctionReturn(0);
  } else {
    PetscCheck(format != PETSC_VIEWER_ASCII_MATLAB,PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MATLAB format not supported");
    PetscCall(PetscViewerASCIIUseTabs(viewer,PETSC_FALSE));
    PetscCall(PetscViewerASCIIPushSynchronized(viewer));
    for (i=0; i<m; i++) {
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer,"row %" PetscInt_FMT ":",i+A->rmap->rstart));
      for (j=a->i[i]; j<a->i[i+1]; j++) {
        if (a->values) {
          PetscCall(PetscViewerASCIISynchronizedPrintf(viewer," (%" PetscInt_FMT ", %" PetscInt_FMT ") ",a->j[j], a->values[j]));
        } else {
          PetscCall(PetscViewerASCIISynchronizedPrintf(viewer," %" PetscInt_FMT " ",a->j[j]));
        }
      }
      PetscCall(PetscViewerASCIISynchronizedPrintf(viewer,"\n"));
    }
    PetscCall(PetscViewerASCIIUseTabs(viewer,PETSC_TRUE));
    PetscCall(PetscViewerFlush(viewer));
    PetscCall(PetscViewerASCIIPopSynchronized(viewer));
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
{
  PetscBool      iascii;

  PetscFunctionBegin;
  PetscCall(PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii));
  if (iascii) PetscCall(MatView_MPIAdj_ASCII(A,viewer));
  PetscFunctionReturn(0);
}

static PetscErrorCode MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)mat->data;

  PetscFunctionBegin;
#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)mat,"Rows=%" PetscInt_FMT ", Cols=%" PetscInt_FMT ", NZ=%" PetscInt_FMT,mat->rmap->n,mat->cmap->n,a->nz);
#endif
  PetscCall(PetscFree(a->diag));
  if (a->freeaij) {
    if (a->freeaijwithfree) {
      if (a->i) free(a->i);
      if (a->j) free(a->j);
    } else {
      PetscCall(PetscFree(a->i));
      PetscCall(PetscFree(a->j));
      PetscCall(PetscFree(a->values));
    }
  }
  PetscCall(PetscFree(a->rowvalues));
  PetscCall(PetscFree(mat->data));
  PetscCall(PetscObjectChangeTypeName((PetscObject)mat,NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjSetPreallocation_C",NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjCreateNonemptySubcommMat_C",NULL));
  PetscCall(PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjToSeq_C",NULL));
  PetscFunctionReturn(0);
}

static PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op,PetscBool flg)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;

  PetscFunctionBegin;
  switch (op) {
  case MAT_SYMMETRIC:
  case MAT_STRUCTURALLY_SYMMETRIC:
  case MAT_HERMITIAN:
    a->symmetric = flg;
    break;
  case MAT_SYMMETRY_ETERNAL:
    break;
  default:
    PetscCall(PetscInfo(A,"Option %s ignored\n",MatOptions[op]));
    break;
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode MatGetRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;

  PetscFunctionBegin;
  row -= A->rmap->rstart;
  PetscCheck(row >= 0 && row < A->rmap->n,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");
  *nz = a->i[row+1] - a->i[row];
  if (v) {
    PetscInt j;
    if (a->rowvalues_alloc < *nz) {
      PetscCall(PetscFree(a->rowvalues));
      a->rowvalues_alloc = PetscMax(a->rowvalues_alloc*2, *nz);
      PetscCall(PetscMalloc1(a->rowvalues_alloc,&a->rowvalues));
    }
    for (j=0; j<*nz; j++) {
      a->rowvalues[j] = a->values ? a->values[a->i[row]+j] : 1.0;
    }
    *v = (*nz) ? a->rowvalues : NULL;
  }
  if (idx) *idx = (*nz) ? a->j + a->i[row] : NULL;
  PetscFunctionReturn(0);
}

static PetscErrorCode MatRestoreRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

static PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscBool *flg)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data,*b = (Mat_MPIAdj*)B->data;
  PetscBool      flag;

  PetscFunctionBegin;
  /* the matrices differ if the local dimensions or the numbers of nonzeros do not match */
  flag = (PetscBool)(A->rmap->n == B->rmap->n && a->nz == b->nz);

  /* check whether the row pointers a->i are the same */
  if (flag) PetscCall(PetscArraycmp(a->i,b->i,A->rmap->n+1,&flag));

  /* check whether the column indices a->j are the same */
  if (flag) PetscCall(PetscMemcmp(a->j,b->j,(a->nz)*sizeof(PetscInt),&flag));

  PetscCall(MPIU_Allreduce(&flag,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A)));
  PetscFunctionReturn(0);
}

static PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool  *done)
{
  PetscInt   i;
  Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
  PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;

  PetscFunctionBegin;
  *m    = A->rmap->n;
  *ia   = a->i;
  *ja   = a->j;
  *done = PETSC_TRUE;
  if (oshift) {
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]++;
    }
    for (i=0; i<=(*m); i++) (*ia)[i]++;
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool  *done)
{
  PetscInt   i;
  Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
  PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;

  PetscFunctionBegin;
  PetscCheck(!ia || a->i == *ia,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"ia passed back is not one obtained with MatGetRowIJ()");
  PetscCheck(!ja || a->j == *ja,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"ja passed back is not one obtained with MatGetRowIJ()");
  if (oshift) {
    PetscCheck(ia,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"If oshift is nonzero you must pass in the inia[] argument");
    PetscCheck(ja,PetscObjectComm((PetscObject)A),PETSC_ERR_ARG_WRONGSTATE,"If oshift is nonzero you must pass in the inja[] argument");
    for (i=0; i<=(*m); i++) (*ia)[i]--;
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]--;
    }
  }
  PetscFunctionReturn(0);
}

PetscErrorCode  MatConvertFrom_MPIAdj(Mat A,MatType type,MatReuse reuse,Mat *newmat)
{
  Mat               B;
  PetscInt          i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
  const PetscInt    *rj;
  const PetscScalar *ra;
  MPI_Comm          comm;

  PetscFunctionBegin;
  PetscCall(MatGetSize(A,NULL,&N));
  PetscCall(MatGetLocalSize(A,&m,NULL));
  PetscCall(MatGetOwnershipRange(A,&rstart,NULL));

  /* count the number of nonzeros per row */
  for (i=0; i<m; i++) {
    PetscCall(MatGetRow(A,i+rstart,&len,&rj,NULL));
    for (j=0; j<len; j++) {
      if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
    }
    nzeros += len;
    PetscCall(MatRestoreRow(A,i+rstart,&len,&rj,NULL));
  }

  /* malloc space for nonzeros */
  PetscCall(PetscMalloc1(nzeros+1,&a));
  PetscCall(PetscMalloc1(N+1,&ia));
  PetscCall(PetscMalloc1(nzeros+1,&ja));

  nzeros = 0;
  ia[0]  = 0;
  for (i=0; i<m; i++) {
    PetscCall(MatGetRow(A,i+rstart,&len,&rj,&ra));
    cnt  = 0;
    for (j=0; j<len; j++) {
      if (rj[j] != i+rstart) { /* if not diagonal */
        a[nzeros+cnt]    = (PetscInt) PetscAbsScalar(ra[j]);
        ja[nzeros+cnt++] = rj[j];
      }
    }
    PetscCall(MatRestoreRow(A,i+rstart,&len,&rj,&ra));
    nzeros += cnt;
    ia[i+1] = nzeros;
  }

  PetscCall(PetscObjectGetComm((PetscObject)A,&comm));
  PetscCall(MatCreate(comm,&B));
  PetscCall(MatSetSizes(B,m,PETSC_DETERMINE,PETSC_DETERMINE,N));
  PetscCall(MatSetType(B,type));
  PetscCall(MatMPIAdjSetPreallocation(B,ia,ja,a));

  if (reuse == MAT_INPLACE_MATRIX) {
    PetscCall(MatHeaderReplace(A,&B));
  } else {
    *newmat = B;
  }
  PetscFunctionReturn(0);
}

PetscErrorCode MatSetValues_MPIAdj(Mat A,PetscInt m,const PetscInt *rows,PetscInt n,const PetscInt *cols,const PetscScalar *values,InsertMode im)
{
  Mat_MPIAdj *adj = (Mat_MPIAdj*)A->data;
  PetscInt   rStart, rEnd, cStart, cEnd;

  PetscFunctionBegin;
  PetscCheck(!adj->i,PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"Matrix is already assembled, cannot change its values");
  PetscCall(MatGetOwnershipRange(A, &rStart, &rEnd));
  PetscCall(MatGetOwnershipRangeColumn(A, &cStart, &cEnd));
  if (!adj->ht) {
    PetscCall(PetscHSetIJCreate(&adj->ht));
    PetscCall(MatStashCreate_Private(PetscObjectComm((PetscObject) A), 1, &A->stash));
    PetscCall(PetscLayoutSetUp(A->rmap));
    PetscCall(PetscLayoutSetUp(A->cmap));
  }
  for (PetscInt r = 0; r < m; ++r) {
    PetscHashIJKey key;

    key.i = rows[r];
    if (key.i < 0) continue;
    if ((key.i < rStart) || (key.i >= rEnd)) {
      PetscCall(MatStashValuesRow_Private(&A->stash, key.i, n, cols, values, PETSC_FALSE));
    } else {
      for (PetscInt c = 0; c < n; ++c) {
        key.j = cols[c];
        if (key.j < 0 || key.i == key.j) continue;
        PetscCall(PetscHSetIJAdd(adj->ht, key));
      }
    }
  }
  PetscFunctionReturn(0);
}

PetscErrorCode MatAssemblyBegin_MPIAdj(Mat A, MatAssemblyType type)
{
  PetscInt   nstash, reallocs;
  Mat_MPIAdj *adj = (Mat_MPIAdj*)A->data;

  PetscFunctionBegin;
  if (!adj->ht) {
    PetscCall(PetscHSetIJCreate(&adj->ht));
    PetscCall(PetscLayoutSetUp(A->rmap));
    PetscCall(PetscLayoutSetUp(A->cmap));
  }
  PetscCall(MatStashScatterBegin_Private(A, &A->stash, A->rmap->range));
  PetscCall(MatStashGetInfo_Private(&A->stash, &nstash, &reallocs));
  PetscCall(PetscInfo(A, "Stash has %" PetscInt_FMT " entries, uses %" PetscInt_FMT " mallocs.\n", nstash, reallocs));
  PetscFunctionReturn(0);
}

PetscErrorCode MatAssemblyEnd_MPIAdj(Mat A, MatAssemblyType type)
{
  PetscScalar    *val;
  PetscInt       *row, *col,m,rstart,*rowstarts;
  PetscInt       i, j, ncols, flg, nz;
  PetscMPIInt    n;
  Mat_MPIAdj     *adj = (Mat_MPIAdj*)A->data;
  PetscHashIter  hi;
  PetscHashIJKey key;
  PetscHSetIJ    ht = adj->ht;

  PetscFunctionBegin;
  while (1) {
    PetscCall(MatStashScatterGetMesg_Private(&A->stash, &n, &row, &col, &val, &flg));
    if (!flg) break;

    for (i = 0; i < n;) {
      /* Identify the consecutive vals belonging to the same row */
      for (j = i, rstart = row[j]; j < n; j++) {
        if (row[j] != rstart) break;
      }
      if (j < n) ncols = j-i;
      else       ncols = n-i;
      /* Set all these values with a single function call */
      PetscCall(MatSetValues_MPIAdj(A, 1, row+i, ncols, col+i, val+i, INSERT_VALUES));
      i = j;
    }
  }
  PetscCall(MatStashScatterEnd_Private(&A->stash));
  PetscCall(MatStashDestroy_Private(&A->stash));

  PetscCall(MatGetLocalSize(A,&m,NULL));
  PetscCall(MatGetOwnershipRange(A,&rstart,NULL));
  PetscCall(PetscCalloc1(m+1,&rowstarts));
  PetscHashIterBegin(ht,hi);
  for (; !PetscHashIterAtEnd(ht,hi);) {
    PetscHashIterGetKey(ht,hi,key);
    rowstarts[key.i - rstart + 1]++;
    PetscHashIterNext(ht,hi);
  }
  for (i=1; i<m+1; i++) {
    rowstarts[i] = rowstarts[i-1] + rowstarts[i];
  }

  PetscCall(PetscHSetIJGetSize(ht,&nz));
  PetscCall(PetscMalloc1(nz,&col));
  PetscHashIterBegin(ht,hi);
  for (; !PetscHashIterAtEnd(ht,hi);) {
    PetscHashIterGetKey(ht,hi,key);
    col[rowstarts[key.i - rstart]++] = key.j;
    PetscHashIterNext(ht,hi);
  }
  PetscCall(PetscHSetIJDestroy(&ht));
  for (i=m; i>0; i--) {
    rowstarts[i] = rowstarts[i-1];
  }
  rowstarts[0] = 0;

  for (i=0; i<m; i++) {
    PetscCall(PetscSortInt(rowstarts[i+1]-rowstarts[i],&col[rowstarts[i]]));
  }

  adj->i       = rowstarts;
  adj->j       = col;
  adj->freeaij = PETSC_TRUE;
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------*/
static struct _MatOps MatOps_Values = {MatSetValues_MPIAdj,
                                       MatGetRow_MPIAdj,
                                       MatRestoreRow_MPIAdj,
                                       NULL,
                                /* 4*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*10*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*15*/ NULL,
                                       MatEqual_MPIAdj,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*20*/ MatAssemblyBegin_MPIAdj,
                                       MatAssemblyEnd_MPIAdj,
                                       MatSetOption_MPIAdj,
                                       NULL,
                                /*24*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*29*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*34*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*39*/ NULL,
                                       MatCreateSubMatrices_MPIAdj,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*44*/ NULL,
                                       NULL,
                                       MatShift_Basic,
                                       NULL,
                                       NULL,
                                /*49*/ NULL,
                                       MatGetRowIJ_MPIAdj,
                                       MatRestoreRowIJ_MPIAdj,
                                       NULL,
                                       NULL,
                                /*54*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*59*/ NULL,
                                       MatDestroy_MPIAdj,
                                       MatView_MPIAdj,
                                       MatConvertFrom_MPIAdj,
                                       NULL,
                                /*64*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*69*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*74*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*79*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*84*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*89*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*94*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                /*99*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                               /*104*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                               /*109*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                               /*114*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                               /*119*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                               /*124*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       MatCreateSubMatricesMPI_MPIAdj,
                               /*129*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                               /*134*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                               /*139*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                               /*144*/ NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL,
                                       NULL
};

static PetscErrorCode  MatMPIAdjSetPreallocation_MPIAdj(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{
  Mat_MPIAdj     *b = (Mat_MPIAdj*)B->data;
  PetscBool       useedgeweights;

  PetscFunctionBegin;
  PetscCall(PetscLayoutSetUp(B->rmap));
  PetscCall(PetscLayoutSetUp(B->cmap));
  if (values) useedgeweights = PETSC_TRUE; else useedgeweights = PETSC_FALSE;
  /* make sure every process knows whether edge weights are being used */
  PetscCall(MPIU_Allreduce((int*)&useedgeweights,(int*)&b->useedgeweights,1,MPI_INT,MPI_MAX,PetscObjectComm((PetscObject)B)));

  if (PetscDefined(USE_DEBUG)) {
    PetscInt ii;

    PetscCheck(i[0] == 0,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"First i[] index must be zero, instead it is %" PetscInt_FMT,i[0]);
    for (ii=1; ii<B->rmap->n; ii++) {
      PetscCheck(i[ii] >= 0 && i[ii] >= i[ii-1],PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i[%" PetscInt_FMT "]=%" PetscInt_FMT " is out of range: it must be nonnegative and no smaller than i[%" PetscInt_FMT "]=%" PetscInt_FMT,ii,i[ii],ii-1,i[ii-1]);
    }
    for (ii=0; ii<i[B->rmap->n]; ii++) {
      PetscCheck(j[ii] >= 0 && j[ii] < B->cmap->N,PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column index j[%" PetscInt_FMT "]=%" PetscInt_FMT " is out of range",ii,j[ii]);
    }
  }
  b->j      = j;
  b->i      = i;
  b->values = values;

  b->nz        = i[B->rmap->n];
  b->diag      = NULL;
  b->symmetric = PETSC_FALSE;
  b->freeaij   = PETSC_TRUE;

  B->ops->assemblybegin = NULL;
  B->ops->assemblyend   = NULL;
  PetscCall(MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY));
  PetscCall(MatStashDestroy_Private(&B->stash));
  PetscFunctionReturn(0);
}

static PetscErrorCode MatMPIAdjCreateNonemptySubcommMat_MPIAdj(Mat A,Mat *B)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  const PetscInt *ranges;
  MPI_Comm       acomm,bcomm;
  MPI_Group      agroup,bgroup;
  PetscMPIInt    i,rank,size,nranks,*ranks;

  PetscFunctionBegin;
  *B    = NULL;
  PetscCallMPI(MPI_Comm_size(acomm,&size));
  PetscCall(PetscObjectGetComm((PetscObject)A,&acomm));
  PetscCallMPI(MPI_Comm_size(acomm,&size));
  PetscCallMPI(MPI_Comm_rank(acomm,&rank));
  PetscCall(MatGetOwnershipRanges(A,&ranges));
  for (i=0,nranks=0; i<size; i++) {
    if (ranges[i+1] - ranges[i] > 0) nranks++;
  }
  if (nranks == size) {         /* All ranks have a positive number of rows, so we do not need to create a subcomm */
    PetscCall(PetscObjectReference((PetscObject)A));
    *B   = A;
    PetscFunctionReturn(0);
  }

  PetscCall(PetscMalloc1(nranks,&ranks));
  for (i=0,nranks=0; i<size; i++) {
    if (ranges[i+1] - ranges[i] > 0) ranks[nranks++] = i;
  }
  PetscCallMPI(MPI_Comm_group(acomm,&agroup));
  PetscCallMPI(MPI_Group_incl(agroup,nranks,ranks,&bgroup));
  PetscCall(PetscFree(ranks));
  PetscCallMPI(MPI_Comm_create(acomm,bgroup,&bcomm));
  PetscCallMPI(MPI_Group_free(&agroup));
  PetscCallMPI(MPI_Group_free(&bgroup));
  if (bcomm != MPI_COMM_NULL) {
    PetscInt   m,N;
    Mat_MPIAdj *b;
    PetscCall(MatGetLocalSize(A,&m,NULL));
    PetscCall(MatGetSize(A,NULL,&N));
    PetscCall(MatCreateMPIAdj(bcomm,m,N,a->i,a->j,a->values,B));
    b          = (Mat_MPIAdj*)(*B)->data;
    b->freeaij = PETSC_FALSE;
    PetscCallMPI(MPI_Comm_free(&bcomm));
  }
  PetscFunctionReturn(0);
}

PetscErrorCode  MatMPIAdjToSeq_MPIAdj(Mat A,Mat *B)
{
  PetscInt       M,N,*II,*J,NZ,nz,m,nzstart,i;
  PetscInt       *Values = NULL;
  Mat_MPIAdj     *adj = (Mat_MPIAdj*)A->data;
  PetscMPIInt    mnz,mm,*allnz,*allm,size,*dispnz,*dispm;

  PetscFunctionBegin;
  PetscCallMPI(MPI_Comm_size(PetscObjectComm((PetscObject)A),&size));
  PetscCall(MatGetSize(A,&M,&N));
  PetscCall(MatGetLocalSize(A,&m,NULL));
  nz   = adj->nz;
  PetscCheck(adj->i[m] == nz,PETSC_COMM_SELF,PETSC_ERR_PLIB,"nz %" PetscInt_FMT " not correct i[m] %" PetscInt_FMT,nz,adj->i[m]);
  PetscCallMPI(MPI_Allreduce(&nz,&NZ,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)A)));

  PetscCall(PetscMPIIntCast(nz,&mnz));
  PetscCall(PetscMalloc2(size,&allnz,size,&dispnz));
  PetscCallMPI(MPI_Allgather(&mnz,1,MPI_INT,allnz,1,MPI_INT,PetscObjectComm((PetscObject)A)));
  dispnz[0] = 0; for (i=1; i<size; i++) dispnz[i] = dispnz[i-1] + allnz[i-1];
  if (adj->values) {
    PetscCall(PetscMalloc1(NZ,&Values));
    PetscCallMPI(MPI_Allgatherv(adj->values,mnz,MPIU_INT,Values,allnz,dispnz,MPIU_INT,PetscObjectComm((PetscObject)A)));
  }
  PetscCall(PetscMalloc1(NZ,&J));
  PetscCallMPI(MPI_Allgatherv(adj->j,mnz,MPIU_INT,J,allnz,dispnz,MPIU_INT,PetscObjectComm((PetscObject)A)));
  PetscCall(PetscFree2(allnz,dispnz));
  PetscCallMPI(MPI_Scan(&nz,&nzstart,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)A)));
  nzstart -= nz;
  /* shift the i[] values so they will be correct after being received */
  for (i=0; i<m; i++) adj->i[i] += nzstart;
  PetscCall(PetscMalloc1(M+1,&II));
  PetscCall(PetscMPIIntCast(m,&mm));
  PetscCall(PetscMalloc2(size,&allm,size,&dispm));
  PetscCallMPI(MPI_Allgather(&mm,1,MPI_INT,allm,1,MPI_INT,PetscObjectComm((PetscObject)A)));
  dispm[0] = 0; for (i=1; i<size; i++) dispm[i] = dispm[i-1] + allm[i-1];
  PetscCallMPI(MPI_Allgatherv(adj->i,mm,MPIU_INT,II,allm,dispm,MPIU_INT,PetscObjectComm((PetscObject)A)));
  PetscCall(PetscFree2(allm,dispm));
  II[M] = NZ;
  /* shift the i[] values back */
  for (i=0; i<m; i++) adj->i[i] -= nzstart;
  PetscCall(MatCreateMPIAdj(PETSC_COMM_SELF,M,N,II,J,Values,B));
  PetscFunctionReturn(0);
}

/*@
   MatMPIAdjCreateNonemptySubcommMat - creates the same MPIAdj matrix on a subcommunicator containing only processes owning a positive number of rows

   Collective

   Input Parameter:
.  A - original MPIAdj matrix

   Output Parameter:
.  B - matrix on subcommunicator, NULL on ranks that owned zero rows of A

   Level: developer

   Note:
   This function is mostly useful for internal use by mesh partitioning packages that require that every process owns at least one row.

   The matrix B should be destroyed with MatDestroy(). The arrays are not copied, so B should be destroyed before A is destroyed.

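   Example Usage:
   A minimal sketch; the MatPartitioning object part is assumed to exist and is not created here:
.vb
   Mat B;
   PetscCall(MatMPIAdjCreateNonemptySubcommMat(A,&B));
   if (B) PetscCall(MatPartitioningSetAdjacency(part,B));   /* only ranks owning rows of A get a matrix */
   /* ... apply the partitioner ... */
   if (B) PetscCall(MatDestroy(&B));                        /* destroy B before A; the arrays are shared */
.ve
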
.seealso: `MatCreateMPIAdj()`
@*/
PetscErrorCode MatMPIAdjCreateNonemptySubcommMat(Mat A,Mat *B)
{
  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  PetscUseMethod(A,"MatMPIAdjCreateNonemptySubcommMat_C",(Mat,Mat*),(A,B));
  PetscFunctionReturn(0);
}

/*MC
   MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
   intended for use in constructing orderings and partitionings.

  Level: beginner

  Notes:
    You can provide values to the matrix using MatMPIAdjSetPreallocation(), MatCreateMPIAdj(), or
    by calling MatSetValues() and MatAssemblyBegin() followed by MatAssemblyEnd().

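  Example Usage:
  A minimal sketch of the MatSetValues() path described above; comm, N, row and col are assumed to be
  supplied by the caller, and the numerical value passed in is ignored (only the nonzero pattern is kept):
.vb
  Mat A;
  PetscCall(MatCreate(comm,&A));
  PetscCall(MatSetSizes(A,2,PETSC_DETERMINE,PETSC_DETERMINE,N));
  PetscCall(MatSetType(A,MATMPIADJ));
  PetscCall(MatSetValue(A,row,col,1.0,INSERT_VALUES));
  PetscCall(MatAssemblyBegin(A,MAT_FINAL_ASSEMBLY));
  PetscCall(MatAssemblyEnd(A,MAT_FINAL_ASSEMBLY));
.ve
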
.seealso: `MatCreateMPIAdj()`, `MatMPIAdjSetPreallocation()`, `MatSetValues()`
M*/

PETSC_EXTERN PetscErrorCode MatCreate_MPIAdj(Mat B)
{
  Mat_MPIAdj     *b;

  PetscFunctionBegin;
  PetscCall(PetscNewLog(B,&b));
  B->data      = (void*)b;
  PetscCall(PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps)));
  B->assembled = PETSC_FALSE;
  B->preallocated = PETSC_TRUE; /* so that MatSetValues() may be used */

  PetscCall(PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C",MatMPIAdjSetPreallocation_MPIAdj));
  PetscCall(PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjCreateNonemptySubcommMat_C",MatMPIAdjCreateNonemptySubcommMat_MPIAdj));
  PetscCall(PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjToSeq_C",MatMPIAdjToSeq_MPIAdj));
  PetscCall(PetscObjectChangeTypeName((PetscObject)B,MATMPIADJ));
  PetscFunctionReturn(0);
}

/*@C
   MatMPIAdjToSeq - Converts a parallel MPIAdj matrix to a complete MPIAdj matrix on each process (needed by sequential preconditioners)

   Logically Collective

   Input Parameter:
.  A - the matrix

   Output Parameter:
.  B - the same matrix on all processes

   Level: intermediate

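   Example Usage:
   A minimal sketch (names other than the routine itself are illustrative):
.vb
   Mat Aseq;
   PetscCall(MatMPIAdjToSeq(A,&Aseq));   /* every rank now holds the complete adjacency graph */
   PetscCall(MatDestroy(&Aseq));
.ve
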
.seealso: `MatCreate()`, `MatCreateMPIAdj()`, `MatSetValues()`
@*/
PetscErrorCode  MatMPIAdjToSeq(Mat A,Mat *B)
{
  PetscFunctionBegin;
  PetscUseMethod(A,"MatMPIAdjToSeq_C",(Mat,Mat*),(A,B));
  PetscFunctionReturn(0);
}

/*@C
   MatMPIAdjSetPreallocation - Sets the arrays used for storing the matrix entries

   Logically Collective

   Input Parameters:
+  B - the matrix
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Level: intermediate

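   Example Usage:
   A minimal sketch for a rank owning two rows of a graph with at least four global columns, assuming B has
   already been created with MatCreate(), MatSetSizes() and MatSetType(B,MATMPIADJ); the indices are
   illustrative only, and the arrays must come from PetscMalloc() because the matrix takes ownership of them:
.vb
   PetscInt *ia,*ja;
   PetscCall(PetscMalloc1(3,&ia));   /* number of local rows + 1 */
   PetscCall(PetscMalloc1(4,&ja));   /* number of local nonzeros */
   ia[0] = 0; ia[1] = 2; ia[2] = 4;
   ja[0] = 1; ja[1] = 3; ja[2] = 0; ja[3] = 2;   /* sorted within each row, no diagonal entries */
   PetscCall(MatMPIAdjSetPreallocation(B,ia,ja,NULL));
.ve
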
.seealso: `MatCreate()`, `MatCreateMPIAdj()`, `MatSetValues()`, `MATMPIADJ`
@*/
PetscErrorCode  MatMPIAdjSetPreallocation(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{
  PetscFunctionBegin;
  PetscTryMethod(B,"MatMPIAdjSetPreallocation_C",(Mat,PetscInt*,PetscInt*,PetscInt*),(B,i,j,values));
  PetscFunctionReturn(0);
}

/*@C
   MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
   The matrix does not have numerical values associated with it, but is
   intended for ordering (to reduce bandwidth etc) and partitioning.

   Collective

   Input Parameters:
+  comm - MPI communicator
.  m - number of local rows
.  N - number of global columns
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Output Parameter:
.  A - the matrix

   Level: intermediate

   Notes:
   You must NOT free the i, j, and values arrays yourself. PETSc will free them
   when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
   call from Fortran you need not create the arrays with PetscMalloc().

   You should not include the matrix diagonal entries.

   If you already have a matrix, you can create its adjacency matrix by a call
   to MatConvert(), specifying a type of MATMPIADJ.

   Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC

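   Example Usage:
   A minimal sketch for two MPI ranks, each owning two rows of a four-vertex graph (the same illustrative
   indices are used on every rank; the arrays are allocated with PetscMalloc1() because A takes ownership of them):
.vb
   Mat      A;
   PetscInt *ia,*ja;
   PetscCall(PetscMalloc1(3,&ia));
   PetscCall(PetscMalloc1(4,&ja));
   ia[0] = 0; ia[1] = 2; ia[2] = 4;               /* row pointers */
   ja[0] = 1; ja[1] = 3; ja[2] = 0; ja[3] = 2;    /* sorted column indices, no diagonal entries */
   PetscCall(MatCreateMPIAdj(PETSC_COMM_WORLD,2,4,ia,ja,NULL,&A));
.ve
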
.seealso: `MatCreate()`, `MatConvert()`, `MatGetOrdering()`, `MATMPIADJ`, `MatMPIAdjSetPreallocation()`
@*/
PetscErrorCode  MatCreateMPIAdj(MPI_Comm comm,PetscInt m,PetscInt N,PetscInt *i,PetscInt *j,PetscInt *values,Mat *A)
{
  PetscFunctionBegin;
  PetscCall(MatCreate(comm,A));
  PetscCall(MatSetSizes(*A,m,PETSC_DETERMINE,PETSC_DETERMINE,N));
  PetscCall(MatSetType(*A,MATMPIADJ));
  PetscCall(MatMPIAdjSetPreallocation(*A,i,j,values));
  PetscFunctionReturn(0);
}