xref: /petsc/src/mat/impls/adj/mpi/mpiadj.c (revision 2b1d87635c5979121742b260db309414b85f9c32)
1 
2 /*
3     Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
4 */
5 #include <../src/mat/impls/adj/mpi/mpiadj.h>    /*I "petscmat.h" I*/
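/*
    Overview sketch (added for illustration; the authoritative definition is in mpiadj.h):
    the Mat_MPIAdj structure stores the local rows in compressed sparse row (CSR) form,
    which is the layout assumed throughout this file:

       a->i[r]     - offset into a->j at which the column indices of local row r begin
       a->j[]      - column indices (global numbering), sorted within each row
       a->values[] - optional integer edge weights, parallel to a->j
       a->nz       - number of stored entries, i.e. a->i[(number of local rows)]

    For example, two local rows with edges {0 -> 1, 0 -> 3, 1 -> 2} are stored as
       a->i = {0, 2, 3}   and   a->j = {1, 3, 2}.
*/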
6 
7 #undef __FUNCT__
8 #define __FUNCT__ "MatView_MPIAdj_ASCII"
9 PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
10 {
11   Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
12   PetscErrorCode    ierr;
13   PetscInt          i,j,m = A->rmap->n;
14   const char        *name;
15   PetscViewerFormat format;
16 
17   PetscFunctionBegin;
18   ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
19   ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
20   if (format == PETSC_VIEWER_ASCII_INFO) {
21     PetscFunctionReturn(0);
22   } else if (format == PETSC_VIEWER_ASCII_MATLAB) {
23     SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MATLAB format not supported");
24   } else {
25     ierr = PetscViewerASCIIUseTabs(viewer,PETSC_FALSE);CHKERRQ(ierr);
26     ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
27     for (i=0; i<m; i++) {
28       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"row %D:",i+A->rmap->rstart);CHKERRQ(ierr);
29       for (j=a->i[i]; j<a->i[i+1]; j++) {
30         ierr = PetscViewerASCIISynchronizedPrintf(viewer," %D ",a->j[j]);CHKERRQ(ierr);
31       }
32       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
33     }
34     ierr = PetscViewerASCIIUseTabs(viewer,PETSC_TRUE);CHKERRQ(ierr);
35     ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
36     ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
37   }
38   PetscFunctionReturn(0);
39 }
40 
41 #undef __FUNCT__
42 #define __FUNCT__ "MatView_MPIAdj"
43 PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
44 {
45   PetscErrorCode ierr;
46   PetscBool      iascii;
47 
48   PetscFunctionBegin;
49   ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
50   if (iascii) {
51     ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
52   }
53   PetscFunctionReturn(0);
54 }
55 
56 #undef __FUNCT__
57 #define __FUNCT__ "MatDestroy_MPIAdj"
58 PetscErrorCode MatDestroy_MPIAdj(Mat mat)
59 {
60   Mat_MPIAdj     *a = (Mat_MPIAdj*)mat->data;
61   PetscErrorCode ierr;
62 
63   PetscFunctionBegin;
64 #if defined(PETSC_USE_LOG)
65   PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D, NZ=%D",mat->rmap->n,mat->cmap->n,a->nz);
66 #endif
67   ierr = PetscFree(a->diag);CHKERRQ(ierr);
68   if (a->freeaij) {
69     if (a->freeaijwithfree) {
70       if (a->i) free(a->i);
71       if (a->j) free(a->j);
72     } else {
73       ierr = PetscFree(a->i);CHKERRQ(ierr);
74       ierr = PetscFree(a->j);CHKERRQ(ierr);
75       ierr = PetscFree(a->values);CHKERRQ(ierr);
76     }
77   }
78   ierr = PetscFree(a->rowvalues);CHKERRQ(ierr);
79   ierr = PetscFree(mat->data);CHKERRQ(ierr);
80   ierr = PetscObjectChangeTypeName((PetscObject)mat,0);CHKERRQ(ierr);
81   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjSetPreallocation_C",NULL);CHKERRQ(ierr);
82   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjCreateNonemptySubcommMat_C",NULL);CHKERRQ(ierr);
83   PetscFunctionReturn(0);
84 }
85 
86 #undef __FUNCT__
87 #define __FUNCT__ "MatSetOption_MPIAdj"
88 PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op,PetscBool flg)
89 {
90   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
91   PetscErrorCode ierr;
92 
93   PetscFunctionBegin;
94   switch (op) {
95   case MAT_SYMMETRIC:
96   case MAT_STRUCTURALLY_SYMMETRIC:
97   case MAT_HERMITIAN:
98     a->symmetric = flg;
99     break;
100   case MAT_SYMMETRY_ETERNAL:
101     break;
102   default:
103     ierr = PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr);
104     break;
105   }
106   PetscFunctionReturn(0);
107 }
108 
109 
110 /*
111      Adds diagonal pointers to sparse matrix structure.
112 */
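/*
   Illustrative example (added as a sketch): for a hypothetical process owning the
   first two global rows (rstart = 0) whose structure does store diagonal entries,

      a->i = {0, 3, 5},   a->j = {0, 2, 5,  1, 4}

   the loop below records the position of each diagonal entry inside a->j, giving

      a->diag = {0, 3}

   so that a->j[a->diag[i]] equals the global row number of local row i.
*/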
113 
114 #undef __FUNCT__
115 #define __FUNCT__ "MatMarkDiagonal_MPIAdj"
116 PetscErrorCode MatMarkDiagonal_MPIAdj(Mat A)
117 {
118   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
119   PetscErrorCode ierr;
120   PetscInt       i,j,m = A->rmap->n;
121 
122   PetscFunctionBegin;
123   ierr = PetscMalloc(m*sizeof(PetscInt),&a->diag);CHKERRQ(ierr);
124   ierr = PetscLogObjectMemory(A,m*sizeof(PetscInt));CHKERRQ(ierr);
125   for (i=0; i<A->rmap->n; i++) {
126     for (j=a->i[i]; j<a->i[i+1]; j++) {
127       if (a->j[j] == i + A->rmap->rstart) {   /* column indices are global, so compare with the global row number */
128         a->diag[i] = j;
129         break;
130       }
131     }
132   }
133   PetscFunctionReturn(0);
134 }
135 
136 #undef __FUNCT__
137 #define __FUNCT__ "MatGetRow_MPIAdj"
138 PetscErrorCode MatGetRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
139 {
140   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
141   PetscErrorCode ierr;
142 
143   PetscFunctionBegin;
144   row -= A->rmap->rstart;
145 
146   if (row < 0 || row >= A->rmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");
147 
148   *nz = a->i[row+1] - a->i[row];
149   if (v) {
150     PetscInt j;
151     if (a->rowvalues_alloc < *nz) {
152       ierr = PetscFree(a->rowvalues);CHKERRQ(ierr);
153       a->rowvalues_alloc = PetscMax(a->rowvalues_alloc*2, *nz);
154       ierr = PetscMalloc(a->rowvalues_alloc*sizeof(PetscScalar),&a->rowvalues);CHKERRQ(ierr);
155     }
156     if (a->values) {for (j=0; j<*nz; j++) a->rowvalues[j] = a->values[a->i[row]+j];}
157     *v = (*nz && a->values) ? a->rowvalues : NULL;  /* edge weights are optional and may not have been provided */
158   }
159   if (idx) *idx = (*nz) ? a->j + a->i[row] : NULL;
160   PetscFunctionReturn(0);
161 }
162 
163 #undef __FUNCT__
164 #define __FUNCT__ "MatRestoreRow_MPIAdj"
165 PetscErrorCode MatRestoreRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
166 {
167   PetscFunctionBegin;
168   PetscFunctionReturn(0);
169 }
170 
171 #undef __FUNCT__
172 #define __FUNCT__ "MatEqual_MPIAdj"
173 PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscBool * flg)
174 {
175   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data,*b = (Mat_MPIAdj*)B->data;
176   PetscErrorCode ierr;
177   PetscBool      flag;
178 
179   PetscFunctionBegin;
180   /* If the matrix dimensions or the number of nonzeros are not equal, the matrices cannot be equal */
181   if ((A->rmap->n != B->rmap->n) || (a->nz != b->nz)) {
182     flag = PETSC_FALSE;
183   } else {
184     /* otherwise check that the row offsets a->i match ... */
185     ierr = PetscMemcmp(a->i,b->i,(A->rmap->n+1)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
186     /* ... and, only if they do, that the column indices a->j match */
187     if (flag) {
188       ierr = PetscMemcmp(a->j,b->j,(a->nz)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
189     }
190   }
191   ierr = MPI_Allreduce(&flag,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
192   PetscFunctionReturn(0);
193 }
194 
195 #undef __FUNCT__
196 #define __FUNCT__ "MatGetRowIJ_MPIAdj"
197 PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool  *done)
198 {
199   PetscInt   i;
200   Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
201   PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;
202 
203   PetscFunctionBegin;
204   *m    = A->rmap->n;
205   *ia   = a->i;
206   *ja   = a->j;
207   *done = PETSC_TRUE;
208   if (oshift) {
209     for (i=0; i<(*ia)[*m]; i++) {
210       (*ja)[i]++;
211     }
212     for (i=0; i<=(*m); i++) (*ia)[i]++;
213   }
214   PetscFunctionReturn(0);
215 }
216 
217 #undef __FUNCT__
218 #define __FUNCT__ "MatRestoreRowIJ_MPIAdj"
219 PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool  *done)
220 {
221   PetscInt   i;
222   Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
223   PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;
224 
225   PetscFunctionBegin;
226   if (ia && a->i != *ia) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"ia passed back is not one obtained with MatGetRowIJ()");
227   if (ja && a->j != *ja) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"ja passed back is not one obtained with MatGetRowIJ()");
228   if (oshift) {
229     for (i=0; i<=(*m); i++) (*ia)[i]--;
230     for (i=0; i<(*ia)[*m]; i++) {
231       (*ja)[i]--;
232     }
233   }
234   PetscFunctionReturn(0);
235 }
236 
237 #undef __FUNCT__
238 #define __FUNCT__ "MatConvertFrom_MPIAdj"
239 PetscErrorCode  MatConvertFrom_MPIAdj(Mat A,MatType type,MatReuse reuse,Mat *newmat)
240 {
241   Mat               B;
242   PetscErrorCode    ierr;
243   PetscInt          i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
244   const PetscInt    *rj;
245   const PetscScalar *ra;
246   MPI_Comm          comm;
247 
248   PetscFunctionBegin;
249   ierr = MatGetSize(A,NULL,&N);CHKERRQ(ierr);
250   ierr = MatGetLocalSize(A,&m,NULL);CHKERRQ(ierr);
251   ierr = MatGetOwnershipRange(A,&rstart,NULL);CHKERRQ(ierr);
252 
253   /* count the number of nonzeros per row */
254   for (i=0; i<m; i++) {
255     ierr = MatGetRow(A,i+rstart,&len,&rj,NULL);CHKERRQ(ierr);
256     for (j=0; j<len; j++) {
257       if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
258     }
259     nzeros += len;
260     ierr    = MatRestoreRow(A,i+rstart,&len,&rj,NULL);CHKERRQ(ierr);
261   }
262 
263   /* malloc space for nonzeros */
264   ierr = PetscMalloc((nzeros+1)*sizeof(PetscInt),&a);CHKERRQ(ierr);
265   ierr = PetscMalloc((m+1)*sizeof(PetscInt),&ia);CHKERRQ(ierr);   /* row offsets: one entry per local row plus one */
266   ierr = PetscMalloc((nzeros+1)*sizeof(PetscInt),&ja);CHKERRQ(ierr);
267 
268   nzeros = 0;
269   ia[0]  = 0;
270   for (i=0; i<m; i++) {
271     ierr = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
272     cnt  = 0;
273     for (j=0; j<len; j++) {
274       if (rj[j] != i+rstart) { /* if not diagonal */
275         a[nzeros+cnt]    = (PetscInt) PetscAbsScalar(ra[j]);
276         ja[nzeros+cnt++] = rj[j];
277       }
278     }
279     ierr    = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
280     nzeros += cnt;
281     ia[i+1] = nzeros;
282   }
283 
284   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
285   ierr = MatCreate(comm,&B);CHKERRQ(ierr);
286   ierr = MatSetSizes(B,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
287   ierr = MatSetType(B,type);CHKERRQ(ierr);
288   ierr = MatMPIAdjSetPreallocation(B,ia,ja,a);CHKERRQ(ierr);
289 
290   if (reuse == MAT_REUSE_MATRIX) {
291     ierr = MatHeaderReplace(A,B);CHKERRQ(ierr);
292   } else {
293     *newmat = B;
294   }
295   PetscFunctionReturn(0);
296 }
297 
298 /* -------------------------------------------------------------------*/
299 static struct _MatOps MatOps_Values = {0,
300                                        MatGetRow_MPIAdj,
301                                        MatRestoreRow_MPIAdj,
302                                        0,
303                                 /* 4*/ 0,
304                                        0,
305                                        0,
306                                        0,
307                                        0,
308                                        0,
309                                 /*10*/ 0,
310                                        0,
311                                        0,
312                                        0,
313                                        0,
314                                 /*15*/ 0,
315                                        MatEqual_MPIAdj,
316                                        0,
317                                        0,
318                                        0,
319                                 /*20*/ 0,
320                                        0,
321                                        MatSetOption_MPIAdj,
322                                        0,
323                                 /*24*/ 0,
324                                        0,
325                                        0,
326                                        0,
327                                        0,
328                                 /*29*/ 0,
329                                        0,
330                                        0,
331                                        0,
332                                        0,
333                                 /*34*/ 0,
334                                        0,
335                                        0,
336                                        0,
337                                        0,
338                                 /*39*/ 0,
339                                        0,
340                                        0,
341                                        0,
342                                        0,
343                                 /*44*/ 0,
344                                        0,
345                                        0,
346                                        0,
347                                        0,
348                                 /*49*/ 0,
349                                        MatGetRowIJ_MPIAdj,
350                                        MatRestoreRowIJ_MPIAdj,
351                                        0,
352                                        0,
353                                 /*54*/ 0,
354                                        0,
355                                        0,
356                                        0,
357                                        0,
358                                 /*59*/ 0,
359                                        MatDestroy_MPIAdj,
360                                        MatView_MPIAdj,
361                                        MatConvertFrom_MPIAdj,
362                                        0,
363                                 /*64*/ 0,
364                                        0,
365                                        0,
366                                        0,
367                                        0,
368                                 /*69*/ 0,
369                                        0,
370                                        0,
371                                        0,
372                                        0,
373                                 /*74*/ 0,
374                                        0,
375                                        0,
376                                        0,
377                                        0,
378                                 /*79*/ 0,
379                                        0,
380                                        0,
381                                        0,
382                                        0,
383                                 /*84*/ 0,
384                                        0,
385                                        0,
386                                        0,
387                                        0,
388                                 /*89*/ 0,
389                                        0,
390                                        0,
391                                        0,
392                                        0,
393                                 /*94*/ 0,
394                                        0,
395                                        0,
396                                        0,
397                                        0,
398                                 /*99*/ 0,
399                                        0,
400                                        0,
401                                        0,
402                                        0,
403                                /*104*/ 0,
404                                        0,
405                                        0,
406                                        0,
407                                        0,
408                                /*109*/ 0,
409                                        0,
410                                        0,
411                                        0,
412                                        0,
413                                /*114*/ 0,
414                                        0,
415                                        0,
416                                        0,
417                                        0,
418                                /*119*/ 0,
419                                        0,
420                                        0,
421                                        0,
422                                        0,
423                                /*124*/ 0,
424                                        0,
425                                        0,
426                                        0,
427                                        0,
428                                /*129*/ 0,
429                                        0,
430                                        0,
431                                        0,
432                                        0,
433                                /*134*/ 0,
434                                        0,
435                                        0,
436                                        0,
437                                        0,
438                                /*139*/ 0,
439                                        0
440 };
441 
442 #undef __FUNCT__
443 #define __FUNCT__ "MatMPIAdjSetPreallocation_MPIAdj"
444 static PetscErrorCode  MatMPIAdjSetPreallocation_MPIAdj(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
445 {
446   Mat_MPIAdj     *b = (Mat_MPIAdj*)B->data;
447   PetscErrorCode ierr;
448 #if defined(PETSC_USE_DEBUG)
449   PetscInt ii;
450 #endif
451 
452   PetscFunctionBegin;
453   ierr = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr);
454   ierr = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr);
455 
456 #if defined(PETSC_USE_DEBUG)
457   if (i[0] != 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"First i[] index must be zero, instead it is %D\n",i[0]);
458   for (ii=1; ii<=B->rmap->n; ii++) {
459     if (i[ii] < 0 || i[ii] < i[ii-1]) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i[%D]=%D index is out of range: i[%D]=%D",ii,i[ii],ii-1,i[ii-1]);
460   }
461   for (ii=0; ii<i[B->rmap->n]; ii++) {
462     if (j[ii] < 0 || j[ii] >= B->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column index %D at position %D is out of range\n",j[ii],ii);
463   }
464 #endif
465   B->preallocated = PETSC_TRUE;
466 
467   b->j      = j;
468   b->i      = i;
469   b->values = values;
470 
471   b->nz        = i[B->rmap->n];
472   b->diag      = 0;
473   b->symmetric = PETSC_FALSE;
474   b->freeaij   = PETSC_TRUE;
475 
476   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
477   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
478   PetscFunctionReturn(0);
479 }
480 
481 #undef __FUNCT__
482 #define __FUNCT__ "MatMPIAdjCreateNonemptySubcommMat_MPIAdj"
483 static PetscErrorCode MatMPIAdjCreateNonemptySubcommMat_MPIAdj(Mat A,Mat *B)
484 {
485   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
486   PetscErrorCode ierr;
487   const PetscInt *ranges;
488   MPI_Comm       acomm,bcomm;
489   MPI_Group      agroup,bgroup;
490   PetscMPIInt    i,rank,size,nranks,*ranks;
491 
492   PetscFunctionBegin;
493   *B    = NULL;
494   ierr  = PetscObjectGetComm((PetscObject)A,&acomm);CHKERRQ(ierr);
495   ierr  = MPI_Comm_size(acomm,&size);CHKERRQ(ierr);
496   ierr  = MPI_Comm_rank(acomm,&rank);CHKERRQ(ierr);
497   ierr  = MatGetOwnershipRanges(A,&ranges);CHKERRQ(ierr);
498   for (i=0,nranks=0; i<size; i++) {
499     if (ranges[i+1] - ranges[i] > 0) nranks++;
500   }
501   if (nranks == size) {         /* All ranks have a positive number of rows, so we do not need to create a subcomm; */
502     ierr = PetscObjectReference((PetscObject)A);CHKERRQ(ierr);
503     *B   = A;
504     PetscFunctionReturn(0);
505   }
506 
507   ierr = PetscMalloc(nranks*sizeof(PetscMPIInt),&ranks);CHKERRQ(ierr);
508   for (i=0,nranks=0; i<size; i++) {
509     if (ranges[i+1] - ranges[i] > 0) ranks[nranks++] = i;
510   }
511   ierr = MPI_Comm_group(acomm,&agroup);CHKERRQ(ierr);
512   ierr = MPI_Group_incl(agroup,nranks,ranks,&bgroup);CHKERRQ(ierr);
513   ierr = PetscFree(ranks);CHKERRQ(ierr);
514   ierr = MPI_Comm_create(acomm,bgroup,&bcomm);CHKERRQ(ierr);
515   ierr = MPI_Group_free(&agroup);CHKERRQ(ierr);
516   ierr = MPI_Group_free(&bgroup);CHKERRQ(ierr);
517   if (bcomm != MPI_COMM_NULL) {
518     PetscInt   m,N;
519     Mat_MPIAdj *b;
520     ierr       = MatGetLocalSize(A,&m,NULL);CHKERRQ(ierr);
521     ierr       = MatGetSize(A,NULL,&N);CHKERRQ(ierr);
522     ierr       = MatCreateMPIAdj(bcomm,m,N,a->i,a->j,a->values,B);CHKERRQ(ierr);
523     b          = (Mat_MPIAdj*)(*B)->data;
524     b->freeaij = PETSC_FALSE;
525     ierr       = MPI_Comm_free(&bcomm);CHKERRQ(ierr);
526   }
527   PetscFunctionReturn(0);
528 }
529 
530 #undef __FUNCT__
531 #define __FUNCT__ "MatMPIAdjCreateNonemptySubcommMat"
532 /*@
533    MatMPIAdjCreateNonemptySubcommMat - create the same MPIAdj matrix on a subcommunicator containing only processes owning a positive number of rows
534 
535    Collective
536 
537    Input Arguments:
538 .  A - original MPIAdj matrix
539 
540    Output Arguments:
541 .  B - matrix on subcommunicator, NULL on ranks that owned zero rows of A
542 
543    Level: developer
544 
545    Note:
546    This function is mostly useful for internal use by mesh partitioning packages that require that every process owns at least one row.
547 
548    The matrix B should be destroyed with MatDestroy(). The arrays are not copied, so B should be destroyed before A is destroyed.
549 
550 .seealso: MatCreateMPIAdj()
551 @*/
552 PetscErrorCode MatMPIAdjCreateNonemptySubcommMat(Mat A,Mat *B)
553 {
554   PetscErrorCode ierr;
555 
556   PetscFunctionBegin;
557   PetscValidHeaderSpecific(A,MAT_CLASSID,1);
558   ierr = PetscUseMethod(A,"MatMPIAdjCreateNonemptySubcommMat_C",(Mat,Mat*),(A,B));CHKERRQ(ierr);
559   PetscFunctionReturn(0);
560 }
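/*
   Usage sketch (illustrative only): B is returned only on ranks that own at least one
   row of A, and because the i/j arrays are shared rather than copied, B must be
   destroyed before A:

      Mat B;
      ierr = MatMPIAdjCreateNonemptySubcommMat(A,&B);CHKERRQ(ierr);
      if (B) {
        .... hand B to a partitioning package that requires nonempty local row sets ....
        ierr = MatDestroy(&B);CHKERRQ(ierr);
      }
      ierr = MatDestroy(&A);CHKERRQ(ierr);
*/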
561 
562 /*MC
563    MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
564    intended for use in constructing orderings and partitionings.
565 
566   Level: beginner
567 
568 .seealso: MatCreateMPIAdj
569 M*/
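/*
   Illustrative sketch: an already assembled matrix C of another type can be converted
   to this type to obtain its adjacency structure (see MatConvertFrom_MPIAdj() above),
   e.g. before calling a partitioner:

      Mat adj;
      ierr = MatConvert(C,MATMPIADJ,MAT_INITIAL_MATRIX,&adj);CHKERRQ(ierr);
      ierr = MatDestroy(&adj);CHKERRQ(ierr);
*/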
570 
571 #undef __FUNCT__
572 #define __FUNCT__ "MatCreate_MPIAdj"
573 PETSC_EXTERN PetscErrorCode MatCreate_MPIAdj(Mat B)
574 {
575   Mat_MPIAdj     *b;
576   PetscErrorCode ierr;
577 
578   PetscFunctionBegin;
579   ierr         = PetscNewLog(B,Mat_MPIAdj,&b);CHKERRQ(ierr);
580   B->data      = (void*)b;
581   ierr         = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
582   B->assembled = PETSC_FALSE;
583 
584   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C",MatMPIAdjSetPreallocation_MPIAdj);CHKERRQ(ierr);
585   ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjCreateNonemptySubcommMat_C",MatMPIAdjCreateNonemptySubcommMat_MPIAdj);CHKERRQ(ierr);
586   ierr = PetscObjectChangeTypeName((PetscObject)B,MATMPIADJ);CHKERRQ(ierr);
587   PetscFunctionReturn(0);
588 }
589 
590 #undef __FUNCT__
591 #define __FUNCT__ "MatMPIAdjSetPreallocation"
592 /*@C
593    MatMPIAdjSetPreallocation - Sets the arrays used for storing the matrix nonzero structure and optional edge weights
594 
595    Logically Collective on MPI_Comm
596 
597    Input Parameters:
598 +  B - the matrix
599 .  i - the indices into j for the start of each row
600 .  j - the column indices for each row (sorted for each row).
601        The indices in i and j start with zero (NOT with one).
602 -  values - [optional] edge weights
603 
604    Level: intermediate
605 
606 .seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
607 @*/
608 PetscErrorCode  MatMPIAdjSetPreallocation(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
609 {
610   PetscErrorCode ierr;
611 
612   PetscFunctionBegin;
613   ierr = PetscTryMethod(B,"MatMPIAdjSetPreallocation_C",(Mat,PetscInt*,PetscInt*,PetscInt*),(B,i,j,values));CHKERRQ(ierr);
614   PetscFunctionReturn(0);
615 }
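/*
   Usage sketch (illustrative; the variable names are hypothetical): the i, j and values
   arrays must be allocated with PetscMalloc() because the matrix takes ownership of them
   and frees them when it is destroyed.  A one-process example with two rows, row 0
   adjacent to column 1 and row 1 adjacent to column 0:

      Mat      B;
      PetscInt *i,*j;
      ierr = PetscMalloc(3*sizeof(PetscInt),&i);CHKERRQ(ierr);
      ierr = PetscMalloc(2*sizeof(PetscInt),&j);CHKERRQ(ierr);
      i[0] = 0; i[1] = 1; i[2] = 2;
      j[0] = 1; j[1] = 0;
      ierr = MatCreate(PETSC_COMM_SELF,&B);CHKERRQ(ierr);
      ierr = MatSetSizes(B,2,PETSC_DETERMINE,PETSC_DETERMINE,2);CHKERRQ(ierr);
      ierr = MatSetType(B,MATMPIADJ);CHKERRQ(ierr);
      ierr = MatMPIAdjSetPreallocation(B,i,j,NULL);CHKERRQ(ierr);
      ierr = MatDestroy(&B);CHKERRQ(ierr);
*/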
616 
617 #undef __FUNCT__
618 #define __FUNCT__ "MatCreateMPIAdj"
619 /*@C
620    MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
621    The matrix does not have numerical values associated with it, but is
622    intended for ordering (to reduce bandwidth etc) and partitioning.
623 
624    Collective on MPI_Comm
625 
626    Input Parameters:
627 +  comm - MPI communicator
628 .  m - number of local rows
629 .  N - number of global columns
630 .  i - the indices into j for the start of each row
631 .  j - the column indices for each row (sorted for each row).
632        The indices in i and j start with zero (NOT with one).
633 -  values - [optional] edge weights
634 
635    Output Parameter:
636 .  A - the matrix
637 
638    Level: intermediate
639 
640    Notes: This matrix object does not support most matrix operations, including
641    MatSetValues().
642    You must NOT free the i, values and j arrays yourself. PETSc will free them
643    when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
644    call from Fortran you need not create the arrays with PetscMalloc().
645    The adjacency structure should not include the matrix diagonal entries.
646 
647    If you already have a matrix, you can create its adjacency matrix by a call
648    to MatConvert, specifying a type of MATMPIADJ.
649 
650    Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC
651 
652 .seealso: MatCreate(), MatConvert(), MatGetOrdering()
653 @*/
654 PetscErrorCode  MatCreateMPIAdj(MPI_Comm comm,PetscInt m,PetscInt N,PetscInt *i,PetscInt *j,PetscInt *values,Mat *A)
655 {
656   PetscErrorCode ierr;
657 
658   PetscFunctionBegin;
659   ierr = MatCreate(comm,A);CHKERRQ(ierr);
660   ierr = MatSetSizes(*A,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
661   ierr = MatSetType(*A,MATMPIADJ);CHKERRQ(ierr);
662   ierr = MatMPIAdjSetPreallocation(*A,i,j,values);CHKERRQ(ierr);
663   PetscFunctionReturn(0);
664 }
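/*
   Worked sketch (illustrative only): a 4-vertex ring graph 0-1-2-3-0 on a single process.
   Vertex 0 is adjacent to 1 and 3, vertex 1 to 0 and 2, vertex 2 to 1 and 3, and
   vertex 3 to 0 and 2; the diagonal is not stored and no edge weights are given:

      Mat      A;
      PetscInt *ia,*ja;
      ierr = PetscMalloc(5*sizeof(PetscInt),&ia);CHKERRQ(ierr);
      ierr = PetscMalloc(8*sizeof(PetscInt),&ja);CHKERRQ(ierr);
      ia[0] = 0; ia[1] = 2; ia[2] = 4; ia[3] = 6; ia[4] = 8;
      ja[0] = 1; ja[1] = 3;
      ja[2] = 0; ja[3] = 2;
      ja[4] = 1; ja[5] = 3;
      ja[6] = 0; ja[7] = 2;
      ierr = MatCreateMPIAdj(PETSC_COMM_SELF,4,4,ia,ja,NULL,&A);CHKERRQ(ierr);
      ierr = MatDestroy(&A);CHKERRQ(ierr);
*/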
665 
666 
667 
668 
669 
670 
671 
672