
/*
    Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
*/
#include <../src/mat/impls/adj/mpi/mpiadj.h>    /*I "petscmat.h" I*/

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIAdj_ASCII"
PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
{
  Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode    ierr;
  PetscInt          i,j,m = A->rmap->n;
  const char        *name;
  PetscViewerFormat format;

  PetscFunctionBegin;
  ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
  ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
  if (format == PETSC_VIEWER_ASCII_INFO) {
    PetscFunctionReturn(0);
  } else if (format == PETSC_VIEWER_ASCII_MATLAB) {
    SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MATLAB format not supported");
  } else {
    ierr = PetscViewerASCIIUseTabs(viewer,PETSC_FALSE);CHKERRQ(ierr);
    ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
    for (i=0; i<m; i++) {
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"row %D:",i+A->rmap->rstart);CHKERRQ(ierr);
      for (j=a->i[i]; j<a->i[i+1]; j++) {
        ierr = PetscViewerASCIISynchronizedPrintf(viewer," %D ",a->j[j]);CHKERRQ(ierr);
      }
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
    }
    ierr = PetscViewerASCIIUseTabs(viewer,PETSC_TRUE);CHKERRQ(ierr);
    ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIAdj"
PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscBool      iascii;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
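
/*
   Illustrative sketch (not part of the implementation; error checking
   omitted): since only ASCII viewers are handled above, a typical way to
   inspect an MPIAdj matrix is

       Mat A;                                  assumed already created as MATMPIADJ
       MatView(A,PETSC_VIEWER_STDOUT_WORLD);   prints "row r: c0 c1 ..." per local row

   Viewers of any other type are silently ignored by MatView_MPIAdj().
*/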

#undef __FUNCT__
#define __FUNCT__ "MatDestroy_MPIAdj"
PetscErrorCode MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)mat->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D, NZ=%D",mat->rmap->n,mat->cmap->n,a->nz);
#endif
  ierr = PetscFree(a->diag);CHKERRQ(ierr);
  if (a->freeaij) {
    if (a->freeaijwithfree) {
      if (a->i) free(a->i);
      if (a->j) free(a->j);
    } else {
      ierr = PetscFree(a->i);CHKERRQ(ierr);
      ierr = PetscFree(a->j);CHKERRQ(ierr);
      ierr = PetscFree(a->values);CHKERRQ(ierr);
    }
  }
  ierr = PetscFree(mat->data);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)mat,0);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjSetPreallocation_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjCreateNonemptySubcommMat_C",NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetOption_MPIAdj"
PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op,PetscBool flg)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  switch (op) {
  case MAT_SYMMETRIC:
  case MAT_STRUCTURALLY_SYMMETRIC:
  case MAT_HERMITIAN:
    a->symmetric = flg;
    break;
  case MAT_SYMMETRY_ETERNAL:
    break;
  default:
    ierr = PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr);
    break;
  }
  PetscFunctionReturn(0);
}

/*
     Adds diagonal pointers to sparse matrix structure.
*/

#undef __FUNCT__
#define __FUNCT__ "MatMarkDiagonal_MPIAdj"
PetscErrorCode MatMarkDiagonal_MPIAdj(Mat A)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;
  PetscInt       i,j,m = A->rmap->n;

  PetscFunctionBegin;
  ierr = PetscMalloc(m*sizeof(PetscInt),&a->diag);CHKERRQ(ierr);
  ierr = PetscLogObjectMemory(A,m*sizeof(PetscInt));CHKERRQ(ierr);
  for (i=0; i<m; i++) {
    for (j=a->i[i]; j<a->i[i+1]; j++) {
      if (a->j[j] == i) {
        a->diag[i] = j;
        break;
      }
    }
  }
  PetscFunctionReturn(0);
}
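
/*
   Worked example of the diag[] convention set above: with the local CSR
   arrays i = {0,2,5} and j = {0,3, 1,2,4}, row 0 stores its diagonal at
   position 0 (j[0] == 0) so diag[0] = 0, and row 1 stores it at position 2
   (j[2] == 1) so diag[1] = 2. A row with no stored diagonal leaves its
   diag[] entry unset.
*/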

#undef __FUNCT__
#define __FUNCT__ "MatGetRow_MPIAdj"
PetscErrorCode MatGetRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;

  PetscFunctionBegin;
  row -= A->rmap->rstart;

  if (row < 0 || row >= A->rmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");

  *nz = a->i[row+1] - a->i[row];
  if (v) *v = NULL;
  if (idx) *idx = (*nz) ? a->j + a->i[row] : NULL;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatRestoreRow_MPIAdj"
PetscErrorCode MatRestoreRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
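
/*
   Illustrative sketch (error checking omitted): MatGetRow_MPIAdj() takes
   global row numbers and returns a pointer into the matrix's own j array,
   so the usual get/restore pairing applies:

       PetscInt       rstart,rend,row,nz;
       const PetscInt *cols;
       MatGetOwnershipRange(A,&rstart,&rend);
       for (row=rstart; row<rend; row++) {
         MatGetRow(A,row,&nz,&cols,NULL);
         (use the nz column indices in cols here)
         MatRestoreRow(A,row,&nz,&cols,NULL);
       }

   The values pointer is always returned as NULL since MPIAdj stores no
   numerical values.
*/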

#undef __FUNCT__
#define __FUNCT__ "MatEqual_MPIAdj"
PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscBool *flg)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data,*b = (Mat_MPIAdj*)B->data;
  PetscErrorCode ierr;
  PetscBool      flag;

  PetscFunctionBegin;
  /* If the matrix dimensions are not equal, or the numbers of nonzeros differ */
  if ((A->rmap->n != B->rmap->n) || (a->nz != b->nz)) {
    flag = PETSC_FALSE;
  } else {
    /* if the a->i are the same */
    ierr = PetscMemcmp(a->i,b->i,(A->rmap->n+1)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
    if (flag) {
      /* if a->j are the same */
      ierr = PetscMemcmp(a->j,b->j,(a->nz)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
    }
  }
  ierr = MPI_Allreduce(&flag,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetRowIJ_MPIAdj"
PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool *done)
{
  PetscInt   i;
  Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
  PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;

  PetscFunctionBegin;
  *m    = A->rmap->n;
  *ia   = a->i;
  *ja   = a->j;
  *done = PETSC_TRUE;
  if (oshift) {
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]++;
    }
    for (i=0; i<=(*m); i++) (*ia)[i]++;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatRestoreRowIJ_MPIAdj"
PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool *done)
{
  PetscInt   i;
  Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
  PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;

  PetscFunctionBegin;
  if (ia && a->i != *ia) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"ia passed back is not one obtained with MatGetRowIJ()");
  if (ja && a->j != *ja) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"ja passed back is not one obtained with MatGetRowIJ()");
  if (oshift) {
    for (i=0; i<=(*m); i++) (*ia)[i]--;
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]--;
    }
  }
  PetscFunctionReturn(0);
}
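
/*
   Illustrative sketch: packages that expect Fortran-style 1-based CSR
   arrays can request the shift through the public interface:

       PetscInt       n;
       const PetscInt *ia,*ja;
       PetscBool      done;
       MatGetRowIJ(A,1,PETSC_FALSE,PETSC_FALSE,&n,&ia,&ja,&done);
       (ia/ja are now 1-based views of the matrix's own arrays)
       MatRestoreRowIJ(A,1,PETSC_FALSE,PETSC_FALSE,&n,&ia,&ja,&done);

   Because a nonzero oshift increments the matrix's own arrays in place,
   every such MatGetRowIJ() must be paired with a matching
   MatRestoreRowIJ() before the matrix is used for anything else.
*/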

#undef __FUNCT__
#define __FUNCT__ "MatConvertFrom_MPIAdj"
PetscErrorCode  MatConvertFrom_MPIAdj(Mat A,MatType type,MatReuse reuse,Mat *newmat)
{
  Mat               B;
  PetscErrorCode    ierr;
  PetscInt          i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
  const PetscInt    *rj;
  const PetscScalar *ra;
  MPI_Comm          comm;

  PetscFunctionBegin;
  ierr = MatGetSize(A,NULL,&N);CHKERRQ(ierr);
  ierr = MatGetLocalSize(A,&m,NULL);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(A,&rstart,NULL);CHKERRQ(ierr);

  /* count the number of nonzeros per row */
  for (i=0; i<m; i++) {
    ierr = MatGetRow(A,i+rstart,&len,&rj,NULL);CHKERRQ(ierr);
    for (j=0; j<len; j++) {
      if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
    }
    nzeros += len;
    ierr    = MatRestoreRow(A,i+rstart,&len,&rj,NULL);CHKERRQ(ierr);
  }

  /* malloc space for nonzeros */
  ierr = PetscMalloc((nzeros+1)*sizeof(PetscInt),&a);CHKERRQ(ierr);
  ierr = PetscMalloc((m+1)*sizeof(PetscInt),&ia);CHKERRQ(ierr);    /* m+1 row pointers for m local rows */
  ierr = PetscMalloc((nzeros+1)*sizeof(PetscInt),&ja);CHKERRQ(ierr);

  nzeros = 0;
  ia[0]  = 0;
  for (i=0; i<m; i++) {
    ierr = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    cnt  = 0;
    for (j=0; j<len; j++) {
      if (rj[j] != i+rstart) { /* if not diagonal */
        a[nzeros+cnt]    = (PetscInt) PetscAbsScalar(ra[j]);
        ja[nzeros+cnt++] = rj[j];
      }
    }
    ierr    = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    nzeros += cnt;
    ia[i+1] = nzeros;
  }

  ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
  ierr = MatCreate(comm,&B);CHKERRQ(ierr);
  ierr = MatSetSizes(B,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
  ierr = MatSetType(B,type);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(B,ia,ja,a);CHKERRQ(ierr);

  if (reuse == MAT_REUSE_MATRIX) {
    ierr = MatHeaderReplace(A,B);CHKERRQ(ierr);
  } else {
    *newmat = B;
  }
  PetscFunctionReturn(0);
}
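
/*
   Illustrative sketch: an existing assembled matrix (e.g. a MATMPIAIJ) can
   be converted to its adjacency structure, with the diagonal dropped and
   |value| truncated to an integer edge weight, via

       Mat adj;
       MatConvert(A,MATMPIADJ,MAT_INITIAL_MATRIX,&adj);
*/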

/* -------------------------------------------------------------------*/
static struct _MatOps MatOps_Values = {0,
                                       MatGetRow_MPIAdj,
                                       MatRestoreRow_MPIAdj,
                                       0,
                                /* 4*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*10*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*15*/ 0,
                                       MatEqual_MPIAdj,
                                       0,
                                       0,
                                       0,
                                /*20*/ 0,
                                       0,
                                       MatSetOption_MPIAdj,
                                       0,
                                /*24*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*29*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*34*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*39*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*44*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*49*/ 0,
                                       MatGetRowIJ_MPIAdj,
                                       MatRestoreRowIJ_MPIAdj,
                                       0,
                                       0,
                                /*54*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*59*/ 0,
                                       MatDestroy_MPIAdj,
                                       MatView_MPIAdj,
                                       MatConvertFrom_MPIAdj,
                                       0,
                                /*64*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*69*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*74*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*79*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*84*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*89*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*94*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*99*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*104*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*109*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*114*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*119*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*124*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*129*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*134*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*139*/ 0,
                                       0
};

#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjSetPreallocation_MPIAdj"
static PetscErrorCode  MatMPIAdjSetPreallocation_MPIAdj(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{
  Mat_MPIAdj     *b = (Mat_MPIAdj*)B->data;
  PetscErrorCode ierr;
#if defined(PETSC_USE_DEBUG)
  PetscInt ii;
#endif

  PetscFunctionBegin;
  ierr = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr);

#if defined(PETSC_USE_DEBUG)
  if (i[0] != 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"First i[] index must be zero, instead it is %D\n",i[0]);
  for (ii=1; ii<B->rmap->n; ii++) {
    if (i[ii] < 0 || i[ii] < i[ii-1]) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i[%D]=%D cannot be negative or less than i[%D]=%D",ii,i[ii],ii-1,i[ii-1]);
  }
  for (ii=0; ii<i[B->rmap->n]; ii++) {
    if (j[ii] < 0 || j[ii] >= B->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column index j[%D]=%D out of range\n",ii,j[ii]);
  }
#endif
  B->preallocated = PETSC_TRUE;

  b->j      = j;
  b->i      = i;
  b->values = values;

  b->nz        = i[B->rmap->n];
  b->diag      = 0;
  b->symmetric = PETSC_FALSE;
  b->freeaij   = PETSC_TRUE;

  ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjCreateNonemptySubcommMat_MPIAdj"
static PetscErrorCode MatMPIAdjCreateNonemptySubcommMat_MPIAdj(Mat A,Mat *B)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;
  const PetscInt *ranges;
  MPI_Comm       acomm,bcomm;
  MPI_Group      agroup,bgroup;
  PetscMPIInt    i,rank,size,nranks,*ranks;

  PetscFunctionBegin;
  *B    = NULL;
  ierr  = PetscObjectGetComm((PetscObject)A,&acomm);CHKERRQ(ierr);
  ierr  = MPI_Comm_size(acomm,&size);CHKERRQ(ierr);
  ierr  = MPI_Comm_rank(acomm,&rank);CHKERRQ(ierr);
  ierr  = MatGetOwnershipRanges(A,&ranges);CHKERRQ(ierr);
  for (i=0,nranks=0; i<size; i++) {
    if (ranges[i+1] - ranges[i] > 0) nranks++;
  }
  if (nranks == size) {         /* All ranks have a positive number of rows, so we do not need to create a subcomm */
    ierr = PetscObjectReference((PetscObject)A);CHKERRQ(ierr);
    *B   = A;
    PetscFunctionReturn(0);
  }

  ierr = PetscMalloc(nranks*sizeof(PetscMPIInt),&ranks);CHKERRQ(ierr);
  for (i=0,nranks=0; i<size; i++) {
    if (ranges[i+1] - ranges[i] > 0) ranks[nranks++] = i;
  }
  ierr = MPI_Comm_group(acomm,&agroup);CHKERRQ(ierr);
  ierr = MPI_Group_incl(agroup,nranks,ranks,&bgroup);CHKERRQ(ierr);
  ierr = PetscFree(ranks);CHKERRQ(ierr);
  ierr = MPI_Comm_create(acomm,bgroup,&bcomm);CHKERRQ(ierr);
  ierr = MPI_Group_free(&agroup);CHKERRQ(ierr);
  ierr = MPI_Group_free(&bgroup);CHKERRQ(ierr);
  if (bcomm != MPI_COMM_NULL) {
    PetscInt   m,N;
    Mat_MPIAdj *b;
    ierr       = MatGetLocalSize(A,&m,NULL);CHKERRQ(ierr);
    ierr       = MatGetSize(A,NULL,&N);CHKERRQ(ierr);
    ierr       = MatCreateMPIAdj(bcomm,m,N,a->i,a->j,a->values,B);CHKERRQ(ierr);
    b          = (Mat_MPIAdj*)(*B)->data;
    b->freeaij = PETSC_FALSE;
    ierr       = MPI_Comm_free(&bcomm);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjCreateNonemptySubcommMat"
/*@
   MatMPIAdjCreateNonemptySubcommMat - create the same MPIAdj matrix on a subcommunicator containing only processes owning a positive number of rows

   Collective

   Input Arguments:
.  A - original MPIAdj matrix

   Output Arguments:
.  B - matrix on subcommunicator, NULL on ranks that owned zero rows of A

   Level: developer

   Note:
   This function is mostly useful for internal use by mesh partitioning packages that require that every process owns at least one row.

   The matrix B should be destroyed with MatDestroy(). The arrays are not copied, so B should be destroyed before A is destroyed.

.seealso: MatCreateMPIAdj()
@*/
PetscErrorCode MatMPIAdjCreateNonemptySubcommMat(Mat A,Mat *B)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  ierr = PetscUseMethod(A,"MatMPIAdjCreateNonemptySubcommMat_C",(Mat,Mat*),(A,B));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
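
/*
   Illustrative sketch (error checking omitted): a partitioning package
   that cannot tolerate empty processes might use

       Mat B;
       MatMPIAdjCreateNonemptySubcommMat(A,&B);
       if (B) {
         (partition B on its subcommunicator)
         MatDestroy(&B);    destroy B before A, since the arrays are shared
       }
*/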

/*MC
   MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
   intended for use in constructing orderings and partitionings.

  Level: beginner

.seealso: MatCreateMPIAdj()
M*/

#undef __FUNCT__
#define __FUNCT__ "MatCreate_MPIAdj"
PETSC_EXTERN PetscErrorCode MatCreate_MPIAdj(Mat B)
{
  Mat_MPIAdj     *b;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr         = PetscNewLog(B,Mat_MPIAdj,&b);CHKERRQ(ierr);
  B->data      = (void*)b;
  ierr         = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
  B->assembled = PETSC_FALSE;

  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C",MatMPIAdjSetPreallocation_MPIAdj);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjCreateNonemptySubcommMat_C",MatMPIAdjCreateNonemptySubcommMat_MPIAdj);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)B,MATMPIADJ);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjSetPreallocation"
/*@C
   MatMPIAdjSetPreallocation - Sets the arrays used for storing the matrix entries

   Logically Collective on MPI_Comm

   Input Parameters:
+  B - the matrix
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Level: intermediate

.seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
@*/
PetscErrorCode  MatMPIAdjSetPreallocation(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscTryMethod(B,"MatMPIAdjSetPreallocation_C",(Mat,PetscInt*,PetscInt*,PetscInt*),(B,i,j,values));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatCreateMPIAdj"
/*@C
   MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
   The matrix does not have numerical values associated with it, but is
   intended for ordering (to reduce bandwidth etc) and partitioning.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of local rows
.  N - number of global columns
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Output Parameter:
.  A - the matrix

   Level: intermediate

   Notes: This matrix object does not support most matrix operations, including
   MatSetValues().
   You must NOT free the i, j, and values arrays yourself. PETSc will free them
   when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
   call from Fortran you need not create the arrays with PetscMalloc().
   The arrays should not include the matrix diagonal entries.

   If you already have a matrix, you can create its adjacency matrix by a call
   to MatConvert(), specifying a type of MATMPIADJ.

   Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC

.seealso: MatCreate(), MatConvert(), MatGetOrdering()
@*/
PetscErrorCode  MatCreateMPIAdj(MPI_Comm comm,PetscInt m,PetscInt N,PetscInt *i,PetscInt *j,PetscInt *values,Mat *A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatCreate(comm,A);CHKERRQ(ierr);
  ierr = MatSetSizes(*A,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATMPIADJ);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(*A,i,j,values);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
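
/*
   Illustrative sketch (serial for brevity; error checking omitted): the
   4-vertex cycle 0-1-2-3-0 has two neighbors per vertex, so

       PetscInt *ii,*jj;
       Mat      A;
       PetscMalloc(5*sizeof(PetscInt),&ii);    nrows+1 row pointers
       PetscMalloc(8*sizeof(PetscInt),&jj);    one entry per neighbor
       ii[0]=0; ii[1]=2; ii[2]=4; ii[3]=6; ii[4]=8;
       jj[0]=1; jj[1]=3;  jj[2]=0; jj[3]=2;  jj[4]=1; jj[5]=3;  jj[6]=0; jj[7]=2;
       MatCreateMPIAdj(PETSC_COMM_SELF,4,4,ii,jj,NULL,&A);
       (use A with MatGetOrdering() or a MatPartitioning)
       MatDestroy(&A);                         also frees ii and jj

   The arrays must come from PetscMalloc() (when called from C) because the
   matrix takes ownership and frees them in MatDestroy().
*/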