
/*
    Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
*/
#include <../src/mat/impls/adj/mpi/mpiadj.h>    /*I "petscmat.h" I*/
#include <petscsf.h>

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIAdj_ASCII"
PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
{
  Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode    ierr;
  PetscInt          i,j,m = A->rmap->n;
  const char        *name;
  PetscViewerFormat format;

  PetscFunctionBegin;
  ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
  ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
  if (format == PETSC_VIEWER_ASCII_INFO) {
    PetscFunctionReturn(0);
  } else if (format == PETSC_VIEWER_ASCII_MATLAB) {
    SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MATLAB format not supported");
  } else {
    ierr = PetscViewerASCIIUseTabs(viewer,PETSC_FALSE);CHKERRQ(ierr);
    ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
    for (i=0; i<m; i++) {
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"row %D:",i+A->rmap->rstart);CHKERRQ(ierr);
      for (j=a->i[i]; j<a->i[i+1]; j++) {
        ierr = PetscViewerASCIISynchronizedPrintf(viewer," %D ",a->j[j]);CHKERRQ(ierr);
      }
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
    }
    ierr = PetscViewerASCIIUseTabs(viewer,PETSC_TRUE);CHKERRQ(ierr);
    ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIAdj"
PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscBool      iascii;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatDestroy_MPIAdj"
PetscErrorCode MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)mat->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D, NZ=%D",mat->rmap->n,mat->cmap->n,a->nz);
#endif
  ierr = PetscFree(a->diag);CHKERRQ(ierr);
  if (a->freeaij) {
    if (a->freeaijwithfree) {
      if (a->i) free(a->i);
      if (a->j) free(a->j);
    } else {
      ierr = PetscFree(a->i);CHKERRQ(ierr);
      ierr = PetscFree(a->j);CHKERRQ(ierr);
      ierr = PetscFree(a->values);CHKERRQ(ierr);
    }
  }
  ierr = PetscFree(a->rowvalues);CHKERRQ(ierr);
  ierr = PetscFree(mat->data);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)mat,0);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjSetPreallocation_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjCreateNonemptySubcommMat_C",NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetOption_MPIAdj"
PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op,PetscBool flg)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  switch (op) {
  case MAT_SYMMETRIC:
  case MAT_STRUCTURALLY_SYMMETRIC:
  case MAT_HERMITIAN:
    a->symmetric = flg;
    break;
  case MAT_SYMMETRY_ETERNAL:
    break;
  default:
    ierr = PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr);
    break;
  }
  PetscFunctionReturn(0);
}

/*
     Adds diagonal pointers to sparse matrix structure.
*/

#undef __FUNCT__
#define __FUNCT__ "MatMarkDiagonal_MPIAdj"
PetscErrorCode MatMarkDiagonal_MPIAdj(Mat A)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;
  PetscInt       i,j,m = A->rmap->n;

  PetscFunctionBegin;
  ierr = PetscMalloc1(m,&a->diag);CHKERRQ(ierr);
  ierr = PetscLogObjectMemory((PetscObject)A,m*sizeof(PetscInt));CHKERRQ(ierr);
  for (i=0; i<m; i++) {
    for (j=a->i[i]; j<a->i[i+1]; j++) {
      if (a->j[j] == i) {
        a->diag[i] = j;
        break;
      }
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetRow_MPIAdj"
PetscErrorCode MatGetRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  row -= A->rmap->rstart;
  if (row < 0 || row >= A->rmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");

  *nz = a->i[row+1] - a->i[row];
  if (v) {
    *v = NULL;
    /* the values (edge weights) are optional; return NULL if they were never provided */
    if (a->values) {
      PetscInt j;
      if (a->rowvalues_alloc < *nz) {
        ierr = PetscFree(a->rowvalues);CHKERRQ(ierr);
        a->rowvalues_alloc = PetscMax(a->rowvalues_alloc*2,*nz);
        ierr = PetscMalloc1(a->rowvalues_alloc,&a->rowvalues);CHKERRQ(ierr);
      }
      for (j=0; j<*nz; j++) a->rowvalues[j] = a->values[a->i[row]+j];
      *v = (*nz) ? a->rowvalues : NULL;
    }
  }
  if (idx) *idx = (*nz) ? a->j + a->i[row] : NULL;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatRestoreRow_MPIAdj"
PetscErrorCode MatRestoreRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatEqual_MPIAdj"
PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscBool *flg)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data,*b = (Mat_MPIAdj*)B->data;
  PetscErrorCode ierr;
  PetscBool      flag;

  PetscFunctionBegin;
  /* the matrices differ if the local dimensions or numbers of nonzeros are not equal */
  if ((A->rmap->n != B->rmap->n) || (a->nz != b->nz)) {
    flag = PETSC_FALSE;
  } else {
    /* ... or if the row offsets a->i differ */
    ierr = PetscMemcmp(a->i,b->i,(A->rmap->n+1)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
    /* ... or if the column indices a->j differ */
    if (flag) {
      ierr = PetscMemcmp(a->j,b->j,(a->nz)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
    }
  }
  ierr = MPI_Allreduce(&flag,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetRowIJ_MPIAdj"
PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool *done)
{
  PetscInt   i;
  Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
  PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;

  PetscFunctionBegin;
  *m    = A->rmap->n;
  *ia   = a->i;
  *ja   = a->j;
  *done = PETSC_TRUE;
  if (oshift) {
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]++;
    }
    for (i=0; i<=(*m); i++) (*ia)[i]++;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatRestoreRowIJ_MPIAdj"
PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool *done)
{
  PetscInt   i;
  Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
  PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;

  PetscFunctionBegin;
  if (ia && a->i != *ia) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"ia passed back is not one obtained with MatGetRowIJ()");
  if (ja && a->j != *ja) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"ja passed back is not one obtained with MatGetRowIJ()");
  if (oshift) {
    for (i=0; i<=(*m); i++) (*ia)[i]--;
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]--;
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatConvertFrom_MPIAdj"
PetscErrorCode  MatConvertFrom_MPIAdj(Mat A,MatType type,MatReuse reuse,Mat *newmat)
{
  Mat               B;
  PetscErrorCode    ierr;
  PetscInt          i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
  const PetscInt    *rj;
  const PetscScalar *ra;
  MPI_Comm          comm;

  PetscFunctionBegin;
  ierr = MatGetSize(A,NULL,&N);CHKERRQ(ierr);
  ierr = MatGetLocalSize(A,&m,NULL);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(A,&rstart,NULL);CHKERRQ(ierr);

  /* count the number of nonzeros per row */
  for (i=0; i<m; i++) {
    ierr = MatGetRow(A,i+rstart,&len,&rj,NULL);CHKERRQ(ierr);
    for (j=0; j<len; j++) {
      if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
    }
    nzeros += len;
    ierr    = MatRestoreRow(A,i+rstart,&len,&rj,NULL);CHKERRQ(ierr);
  }

  /* malloc space for the nonzeros; ia needs one entry per local row plus one */
  ierr = PetscMalloc1(nzeros+1,&a);CHKERRQ(ierr);
  ierr = PetscMalloc1(m+1,&ia);CHKERRQ(ierr);
  ierr = PetscMalloc1(nzeros+1,&ja);CHKERRQ(ierr);

  nzeros = 0;
  ia[0]  = 0;
  for (i=0; i<m; i++) {
    ierr = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    cnt  = 0;
    for (j=0; j<len; j++) {
      if (rj[j] != i+rstart) { /* if not diagonal */
        a[nzeros+cnt]    = (PetscInt) PetscAbsScalar(ra[j]);
        ja[nzeros+cnt++] = rj[j];
      }
    }
    ierr    = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    nzeros += cnt;
    ia[i+1] = nzeros;
  }

  ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
  ierr = MatCreate(comm,&B);CHKERRQ(ierr);
  ierr = MatSetSizes(B,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
  ierr = MatSetType(B,type);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(B,ia,ja,a);CHKERRQ(ierr);

  if (reuse == MAT_REUSE_MATRIX) {
    ierr = MatHeaderReplace(A,B);CHKERRQ(ierr);
  } else {
    *newmat = B;
  }
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------*/
/* forward declarations: the function table below references these before they are defined */
PetscErrorCode MatGetSubMatrices_MPIAdj(Mat,PetscInt,const IS[],const IS[],MatReuse,Mat*[]);
PetscErrorCode MatGetSubMatricesMPI_MPIAdj(Mat,PetscInt,const IS[],const IS[],MatReuse,Mat*[]);

static struct _MatOps MatOps_Values = {0,
                                       MatGetRow_MPIAdj,
                                       MatRestoreRow_MPIAdj,
                                       0,
                                /* 4*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*10*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*15*/ 0,
                                       MatEqual_MPIAdj,
                                       0,
                                       0,
                                       0,
                                /*20*/ 0,
                                       0,
                                       MatSetOption_MPIAdj,
                                       0,
                                /*24*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*29*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*34*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*39*/ 0,
                                       MatGetSubMatrices_MPIAdj,
                                       0,
                                       0,
                                       0,
                                /*44*/ 0,
                                       0,
                                       MatShift_Basic,
                                       0,
                                       0,
                                /*49*/ 0,
                                       MatGetRowIJ_MPIAdj,
                                       MatRestoreRowIJ_MPIAdj,
                                       0,
                                       0,
                                /*54*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*59*/ 0,
                                       MatDestroy_MPIAdj,
                                       MatView_MPIAdj,
                                       MatConvertFrom_MPIAdj,
                                       0,
                                /*64*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*69*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*74*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*79*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*84*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*89*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*94*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*99*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*104*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*109*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*114*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*119*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*124*/ 0,
                                       0,
                                       0,
                                       0,
                                       MatGetSubMatricesMPI_MPIAdj,
                               /*129*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*134*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*139*/ 0,
                                       0,
                                       0
};

#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjSetPreallocation_MPIAdj"
static PetscErrorCode  MatMPIAdjSetPreallocation_MPIAdj(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{
  Mat_MPIAdj     *b = (Mat_MPIAdj*)B->data;
  PetscErrorCode ierr;
#if defined(PETSC_USE_DEBUG)
  PetscInt ii;
#endif

  PetscFunctionBegin;
  ierr = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr);

#if defined(PETSC_USE_DEBUG)
  if (i[0] != 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"First i[] index must be zero, instead it is %D",i[0]);
  for (ii=1; ii<=B->rmap->n; ii++) {
    if (i[ii] < 0 || i[ii] < i[ii-1]) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i[%D]=%D index is out of range: i[%D]=%D",ii,i[ii],ii-1,i[ii-1]);
  }
  for (ii=0; ii<i[B->rmap->n]; ii++) {
    if (j[ii] < 0 || j[ii] >= B->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column index j[%D]=%D is out of range",ii,j[ii]);
  }
#endif
  B->preallocated = PETSC_TRUE;

  b->j      = j;
  b->i      = i;
  b->values = values;

  b->nz        = i[B->rmap->n];
  b->diag      = 0;
  b->symmetric = PETSC_FALSE;
  b->freeaij   = PETSC_TRUE;

  ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode MatGetSubMatrix_MPIAdj_data(Mat adj,IS irows,IS icols,PetscInt **sadj_xadj,PetscInt **sadj_adjncy,PetscInt **sadj_values);
static PetscErrorCode MatGetSubMatrices_MPIAdj_Private(Mat mat,PetscInt n,const IS irow[],const IS icol[],PetscBool subcomm,MatReuse scall,Mat *submat[]);

#undef __FUNCT__
#define __FUNCT__ "MatGetSubMatricesMPI_MPIAdj"
PetscErrorCode MatGetSubMatricesMPI_MPIAdj(Mat mat,PetscInt n,const IS irow[],const IS icol[],MatReuse scall,Mat *submat[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* get sub-matrices across a sub-communicator */
  ierr = MatGetSubMatrices_MPIAdj_Private(mat,n,irow,icol,PETSC_TRUE,scall,submat);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetSubMatrices_MPIAdj"
PetscErrorCode MatGetSubMatrices_MPIAdj(Mat mat,PetscInt n,const IS irow[],const IS icol[],MatReuse scall,Mat *submat[])
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  /* get sub-matrices based on PETSC_COMM_SELF */
  ierr = MatGetSubMatrices_MPIAdj_Private(mat,n,irow,icol,PETSC_FALSE,scall,submat);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetSubMatrices_MPIAdj_Private"
static PetscErrorCode MatGetSubMatrices_MPIAdj_Private(Mat mat,PetscInt n,const IS irow[],const IS icol[],PetscBool subcomm,MatReuse scall,Mat *submat[])
{
  PetscInt        i,irow_n,icol_n,*sxadj,*sadjncy,*svalues;
  PetscInt       *indices,nindx,j,k,loc;
  const PetscInt *irow_indices,*icol_indices;
  PetscMPIInt     issame;
  MPI_Comm        scomm_row,scomm_col,scomm_mat;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  nindx = 0;
  /* estimate an upper bound on the work space needed */
  for (i=0; i<n; i++) {
    ierr = ISGetLocalSize(irow[i],&irow_n);CHKERRQ(ierr);
    ierr = ISGetLocalSize(icol[i],&icol_n);CHKERRQ(ierr);
    nindx = PetscMax(nindx,irow_n+icol_n);
  }
  ierr = PetscCalloc1(nindx,&indices);CHKERRQ(ierr);
  /* the output array is allocated here for MAT_INITIAL_MATRIX, following the
     MatGetSubMatrices() convention that the implementation allocates *submat */
  if (scall == MAT_INITIAL_MATRIX) {
    ierr = PetscCalloc1(n+1,submat);CHKERRQ(ierr);
  }
  /* construct each submat */
  for (i=0; i<n; i++) {
    /* communicators */
    if (subcomm) {
      ierr = PetscObjectGetComm((PetscObject)irow[i],&scomm_row);CHKERRQ(ierr);
      ierr = PetscObjectGetComm((PetscObject)icol[i],&scomm_col);CHKERRQ(ierr);
      ierr = MPI_Comm_compare(scomm_row,scomm_col,&issame);CHKERRQ(ierr);
      if (issame != MPI_IDENT) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Row index set must have the same comm as the col index set");
      ierr = MPI_Comm_compare(scomm_row,PETSC_COMM_SELF,&issame);CHKERRQ(ierr);
      if (issame == MPI_IDENT) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Cannot use PETSC_COMM_SELF as comm when extracting a parallel submatrix");
    } else {
      scomm_row = PETSC_COMM_SELF;
    }
    /* get sub-matrix data */
    ierr = MatGetSubMatrix_MPIAdj_data(mat,irow[i],icol[i],&sxadj,&sadjncy,&svalues);CHKERRQ(ierr);
    ierr = ISGetLocalSize(irow[i],&irow_n);CHKERRQ(ierr);
    ierr = ISGetLocalSize(icol[i],&icol_n);CHKERRQ(ierr);
    ierr = ISGetIndices(irow[i],&irow_indices);CHKERRQ(ierr);
    ierr = PetscMemcpy(indices,irow_indices,sizeof(PetscInt)*irow_n);CHKERRQ(ierr);
    ierr = ISRestoreIndices(irow[i],&irow_indices);CHKERRQ(ierr);
    ierr = ISGetIndices(icol[i],&icol_indices);CHKERRQ(ierr);
    ierr = PetscMemcpy(indices+irow_n,icol_indices,sizeof(PetscInt)*icol_n);CHKERRQ(ierr);
    ierr = ISRestoreIndices(icol[i],&icol_indices);CHKERRQ(ierr);
    nindx = irow_n+icol_n;
    ierr = PetscSortRemoveDupsInt(&nindx,indices);CHKERRQ(ierr);
    /* renumber columns into the local numbering of the submatrix */
    for (j=0; j<irow_n; j++) {
      for (k=sxadj[j]; k<sxadj[j+1]; k++) {
        ierr = PetscFindInt(sadjncy[k],nindx,indices,&loc);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
        if (loc < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Can not find col %D",sadjncy[k]);
#endif
        sadjncy[k] = loc;
      }
    }
    if (scall == MAT_INITIAL_MATRIX) {
      ierr = MatCreateMPIAdj(scomm_row,irow_n,icol_n,sxadj,sadjncy,svalues,(*submat)+i);CHKERRQ(ierr);
    } else {
      Mat        sadj = (*submat)[i];
      Mat_MPIAdj *sa;
      ierr = PetscObjectGetComm((PetscObject)sadj,&scomm_mat);CHKERRQ(ierr);
      ierr = MPI_Comm_compare(scomm_row,scomm_mat,&issame);CHKERRQ(ierr);
      if (issame != MPI_IDENT) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_INCOMP,"Submatrix must have the same comm as the row index set");
      sa   = (Mat_MPIAdj*)sadj->data;
      ierr = PetscMemcpy(sa->i,sxadj,sizeof(PetscInt)*(irow_n+1));CHKERRQ(ierr);
      ierr = PetscMemcpy(sa->j,sadjncy,sizeof(PetscInt)*sxadj[irow_n]);CHKERRQ(ierr);
      if (svalues) {ierr = PetscMemcpy(sa->values,svalues,sizeof(PetscInt)*sxadj[irow_n]);CHKERRQ(ierr);}
      ierr = PetscFree(sxadj);CHKERRQ(ierr);
      ierr = PetscFree(sadjncy);CHKERRQ(ierr);
      if (svalues) {ierr = PetscFree(svalues);CHKERRQ(ierr);}
    }
  }
  ierr = PetscFree(indices);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetSubMatrix_MPIAdj_data"
/*
 * The interface is designed to serve both MatGetSubMatrix (a parallel sub-matrix) and
 * MatGetSubMatrices (sequential sub-matrices).
 */
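/*
 * Sketch of the algorithm implemented below, using two PetscSF exchanges: the first SF
 * has one leaf per requested row and fetches that row's length and offset from the
 * owning rank; the second SF has one leaf per incoming column entry and fetches the
 * adjacency (and, when present, the edge weights). Entries whose columns are not in
 * icols are then dropped and the remainder is compressed into the CSR arrays
 * sxadj/sadjncy/svalues.
 */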
static PetscErrorCode MatGetSubMatrix_MPIAdj_data(Mat adj,IS irows,IS icols,PetscInt **sadj_xadj,PetscInt **sadj_adjncy,PetscInt **sadj_values)
{
  PetscInt           nlrows_is,icols_n,i,j,nroots,nleaves,owner,rlocalindex,*ncols_send,*ncols_recv;
  PetscInt           nlrows_mat,*adjncy_recv,Ncols_recv,Ncols_send,*xadj_recv,*values_recv = NULL;
  PetscInt          *ncols_recv_offsets,loc,rnclos,*sadjncy,*sxadj,*svalues = NULL,isvalue;
  const PetscInt    *irows_indices,*icols_indices,*xadj,*adjncy;
  Mat_MPIAdj        *a = (Mat_MPIAdj*)adj->data;
  PetscLayout        rmap;
  MPI_Comm           comm;
  PetscSF            sf;
  PetscSFNode       *iremote;
  PetscBool          done;
  PetscErrorCode     ierr;

  PetscFunctionBegin;
  /* communicator */
  ierr = PetscObjectGetComm((PetscObject)adj,&comm);CHKERRQ(ierr);
  /* layouts */
  ierr = MatGetLayouts(adj,&rmap,NULL);CHKERRQ(ierr);
  /* get row information */
  ierr = ISGetLocalSize(irows,&nlrows_is);CHKERRQ(ierr);
  ierr = ISGetIndices(irows,&irows_indices);CHKERRQ(ierr);
  ierr = PetscCalloc1(nlrows_is,&iremote);CHKERRQ(ierr);
  /* construct sf graph: one leaf per requested row, rooted at the owning rank */
  nleaves = nlrows_is;
  for (i=0; i<nlrows_is; i++) {
    ierr = PetscLayoutFindOwnerIndex(rmap,irows_indices[i],&owner,&rlocalindex);CHKERRQ(ierr);
    iremote[i].rank  = owner;
    iremote[i].index = rlocalindex;
  }
  ierr = MatGetRowIJ(adj,0,PETSC_FALSE,PETSC_FALSE,&nlrows_mat,&xadj,&adjncy,&done);CHKERRQ(ierr);
  ierr = PetscCalloc4(nlrows_mat,&ncols_send,nlrows_is,&xadj_recv,nlrows_is+1,&ncols_recv_offsets,nlrows_is,&ncols_recv);CHKERRQ(ierr);
  nroots = nlrows_mat;
  for (i=0; i<nlrows_mat; i++) {
    ncols_send[i] = xadj[i+1]-xadj[i];
  }
  ierr = PetscSFCreate(comm,&sf);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(sf,nroots,nleaves,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);CHKERRQ(ierr);
  ierr = PetscSFSetType(sf,PETSCSFBASIC);CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(sf);CHKERRQ(ierr);
  /* fetch the number of columns and the offset of each requested row */
  ierr = PetscSFBcastBegin(sf,MPIU_INT,ncols_send,ncols_recv);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf,MPIU_INT,ncols_send,ncols_recv);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(sf,MPIU_INT,xadj,xadj_recv);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf,MPIU_INT,xadj,xadj_recv);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
  Ncols_recv = 0;
  for (i=0; i<nlrows_is; i++) {
    Ncols_recv             += ncols_recv[i];
    ncols_recv_offsets[i+1] = ncols_recv[i]+ncols_recv_offsets[i];
  }
  Ncols_send = 0;
  for (i=0; i<nlrows_mat; i++) {
    Ncols_send += ncols_send[i];
  }
  ierr = PetscCalloc1(Ncols_recv,&iremote);CHKERRQ(ierr);
  ierr = PetscCalloc1(Ncols_recv,&adjncy_recv);CHKERRQ(ierr);
  /* second sf graph: one leaf per incoming column entry */
  nleaves = Ncols_recv;
  Ncols_recv = 0;
  for (i=0; i<nlrows_is; i++) {
    ierr = PetscLayoutFindOwner(rmap,irows_indices[i],&owner);CHKERRQ(ierr);
    for (j=0; j<ncols_recv[i]; j++) {
      iremote[Ncols_recv].rank    = owner;
      iremote[Ncols_recv++].index = xadj_recv[i]+j;
    }
  }
  ierr = ISRestoreIndices(irows,&irows_indices);CHKERRQ(ierr);
  /* does the matrix carry edge weights? Note this is a local test; a global
     reduction (e.g. an MPI_Allreduce on isvalue) would be needed if only some
     processes attached values */
  isvalue = a->values ? 1 : 0;
  if (isvalue) {ierr = PetscCalloc1(Ncols_recv,&values_recv);CHKERRQ(ierr);}
  nroots = Ncols_send;
  ierr = PetscSFCreate(comm,&sf);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(sf,nroots,nleaves,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);CHKERRQ(ierr);
  ierr = PetscSFSetType(sf,PETSCSFBASIC);CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(sf);CHKERRQ(ierr);
  /* fetch the adjacency (and optionally the weights) of each requested row */
  ierr = PetscSFBcastBegin(sf,MPIU_INT,adjncy,adjncy_recv);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf,MPIU_INT,adjncy,adjncy_recv);CHKERRQ(ierr);
  if (isvalue) {
    ierr = PetscSFBcastBegin(sf,MPIU_INT,a->values,values_recv);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(sf,MPIU_INT,a->values,values_recv);CHKERRQ(ierr);
  }
  ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
  ierr = MatRestoreRowIJ(adj,0,PETSC_FALSE,PETSC_FALSE,&nlrows_mat,&xadj,&adjncy,&done);CHKERRQ(ierr);
  /* drop entries whose columns are not in icols (PetscFindInt assumes icols is sorted) */
  ierr = ISGetLocalSize(icols,&icols_n);CHKERRQ(ierr);
  ierr = ISGetIndices(icols,&icols_indices);CHKERRQ(ierr);
  rnclos = 0;
  for (i=0; i<nlrows_is; i++) {
    for (j=ncols_recv_offsets[i]; j<ncols_recv_offsets[i+1]; j++) {
      ierr = PetscFindInt(adjncy_recv[j],icols_n,icols_indices,&loc);CHKERRQ(ierr);
      if (loc < 0) {
        adjncy_recv[j] = -1;
        if (isvalue) values_recv[j] = -1;
        ncols_recv[i]--;
      } else {
        rnclos++;
      }
    }
  }
  ierr = ISRestoreIndices(icols,&icols_indices);CHKERRQ(ierr);
  /* compress the surviving entries into CSR form */
  ierr = PetscCalloc1(rnclos,&sadjncy);CHKERRQ(ierr);
  if (isvalue) {ierr = PetscCalloc1(rnclos,&svalues);CHKERRQ(ierr);}
  ierr = PetscCalloc1(nlrows_is+1,&sxadj);CHKERRQ(ierr);
  rnclos = 0;
  for (i=0; i<nlrows_is; i++) {
    for (j=ncols_recv_offsets[i]; j<ncols_recv_offsets[i+1]; j++) {
      if (adjncy_recv[j] < 0) continue;
      sadjncy[rnclos] = adjncy_recv[j];
      if (isvalue) svalues[rnclos] = values_recv[j];
      rnclos++;
    }
  }
  for (i=0; i<nlrows_is; i++) {
    sxadj[i+1] = sxadj[i]+ncols_recv[i];
  }
  if (sadj_xadj)   {*sadj_xadj = sxadj;} else {ierr = PetscFree(sxadj);CHKERRQ(ierr);}
  if (sadj_adjncy) {*sadj_adjncy = sadjncy;} else {ierr = PetscFree(sadjncy);CHKERRQ(ierr);}
  if (sadj_values) {
    *sadj_values = isvalue ? svalues : NULL;
  } else {
    if (isvalue) {ierr = PetscFree(svalues);CHKERRQ(ierr);}
  }
  ierr = PetscFree4(ncols_send,xadj_recv,ncols_recv_offsets,ncols_recv);CHKERRQ(ierr);
  ierr = PetscFree(adjncy_recv);CHKERRQ(ierr);
  if (isvalue) {ierr = PetscFree(values_recv);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjCreateNonemptySubcommMat_MPIAdj"
static PetscErrorCode MatMPIAdjCreateNonemptySubcommMat_MPIAdj(Mat A,Mat *B)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;
  const PetscInt *ranges;
  MPI_Comm       acomm,bcomm;
  MPI_Group      agroup,bgroup;
  PetscMPIInt    i,size,nranks,*ranks;

  PetscFunctionBegin;
  *B    = NULL;
  ierr  = PetscObjectGetComm((PetscObject)A,&acomm);CHKERRQ(ierr);
  ierr  = MPI_Comm_size(acomm,&size);CHKERRQ(ierr);
  ierr  = MatGetOwnershipRanges(A,&ranges);CHKERRQ(ierr);
  for (i=0,nranks=0; i<size; i++) {
    if (ranges[i+1] - ranges[i] > 0) nranks++;
  }
  if (nranks == size) {         /* all ranks have a positive number of rows, so we do not need to create a subcomm */
    ierr = PetscObjectReference((PetscObject)A);CHKERRQ(ierr);
    *B   = A;
    PetscFunctionReturn(0);
  }

  ierr = PetscMalloc1(nranks,&ranks);CHKERRQ(ierr);
  for (i=0,nranks=0; i<size; i++) {
    if (ranges[i+1] - ranges[i] > 0) ranks[nranks++] = i;
  }
  ierr = MPI_Comm_group(acomm,&agroup);CHKERRQ(ierr);
  ierr = MPI_Group_incl(agroup,nranks,ranks,&bgroup);CHKERRQ(ierr);
  ierr = PetscFree(ranks);CHKERRQ(ierr);
  ierr = MPI_Comm_create(acomm,bgroup,&bcomm);CHKERRQ(ierr);
  ierr = MPI_Group_free(&agroup);CHKERRQ(ierr);
  ierr = MPI_Group_free(&bgroup);CHKERRQ(ierr);
  if (bcomm != MPI_COMM_NULL) {
    PetscInt   m,N;
    Mat_MPIAdj *b;
    ierr       = MatGetLocalSize(A,&m,NULL);CHKERRQ(ierr);
    ierr       = MatGetSize(A,NULL,&N);CHKERRQ(ierr);
    ierr       = MatCreateMPIAdj(bcomm,m,N,a->i,a->j,a->values,B);CHKERRQ(ierr);
    b          = (Mat_MPIAdj*)(*B)->data;
    b->freeaij = PETSC_FALSE;   /* the arrays are shared with A, which remains their owner */
    ierr       = MPI_Comm_free(&bcomm);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjCreateNonemptySubcommMat"
/*@
   MatMPIAdjCreateNonemptySubcommMat - create the same MPIAdj matrix on a subcommunicator containing only processes owning a positive number of rows

   Collective

   Input Arguments:
.  A - original MPIAdj matrix

   Output Arguments:
.  B - matrix on subcommunicator, NULL on ranks that own zero rows of A

   Level: developer

   Note:
   This function is mostly useful for internal use by mesh partitioning packages that require that every process owns at least one row.

   The matrix B should be destroyed with MatDestroy(). The arrays are not copied, so B should be destroyed before A is destroyed.

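   A typical call sequence, as a minimal sketch (the partitioner call is a placeholder):
.vb
   ierr = MatMPIAdjCreateNonemptySubcommMat(A,&B);CHKERRQ(ierr);
   if (B) {                                  /* NULL on ranks that own zero rows of A */
     /* ... hand B to the partitioning package here ... */
     ierr = MatDestroy(&B);CHKERRQ(ierr);    /* destroy B before A since they share arrays */
   }
.ve
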
.seealso: MatCreateMPIAdj()
@*/
PetscErrorCode MatMPIAdjCreateNonemptySubcommMat(Mat A,Mat *B)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  ierr = PetscUseMethod(A,"MatMPIAdjCreateNonemptySubcommMat_C",(Mat,Mat*),(A,B));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*MC
   MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
   intended for use in constructing orderings and partitionings.

  Level: beginner

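  A matrix of this type is normally obtained with MatCreateMPIAdj(), or by converting an
  existing matrix; a minimal conversion sketch:
.vb
  ierr = MatConvert(A,MATMPIADJ,MAT_INITIAL_MATRIX,&adj);CHKERRQ(ierr);
.ve
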
.seealso: MatCreateMPIAdj()
M*/

#undef __FUNCT__
#define __FUNCT__ "MatCreate_MPIAdj"
PETSC_EXTERN PetscErrorCode MatCreate_MPIAdj(Mat B)
{
  Mat_MPIAdj     *b;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr         = PetscNewLog(B,&b);CHKERRQ(ierr);
  B->data      = (void*)b;
  ierr         = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
  B->assembled = PETSC_FALSE;

  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C",MatMPIAdjSetPreallocation_MPIAdj);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjCreateNonemptySubcommMat_C",MatMPIAdjCreateNonemptySubcommMat_MPIAdj);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)B,MATMPIADJ);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjSetPreallocation"
/*@C
   MatMPIAdjSetPreallocation - Sets the arrays used for storing the matrix entries

   Logically Collective on MPI_Comm

   Input Parameters:
+  B - the matrix
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Level: intermediate

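   A minimal sketch of the create/set-type/preallocate sequence (the arrays must be
   obtained with PetscMalloc() since the matrix takes ownership of them):
.vb
   ierr = MatCreate(comm,&B);CHKERRQ(ierr);
   ierr = MatSetSizes(B,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
   ierr = MatSetType(B,MATMPIADJ);CHKERRQ(ierr);
   ierr = MatMPIAdjSetPreallocation(B,i,j,NULL);CHKERRQ(ierr);
.ve
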
.seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
@*/
PetscErrorCode  MatMPIAdjSetPreallocation(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscTryMethod(B,"MatMPIAdjSetPreallocation_C",(Mat,PetscInt*,PetscInt*,PetscInt*),(B,i,j,values));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatCreateMPIAdj"
/*@C
   MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
   The matrix does not have numerical values associated with it, but is
   intended for ordering (to reduce bandwidth etc) and partitioning.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of local rows
.  N - number of global columns
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Output Parameter:
.  A - the matrix

   Level: intermediate

   Notes: This matrix object does not support most matrix operations, including
   MatSetValues().
   You must NOT free the i, j, and values arrays yourself. PETSc will free them
   when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
   call from Fortran you need not create the arrays with PetscMalloc().
   The arrays should not include the matrix diagonal entries.

   If you already have a matrix, you can create its adjacency matrix by a call
   to MatConvert, specifying a type of MATMPIADJ.

   Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC
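
   Example:
   A minimal sketch building a 2 (local) by 4 (global) adjacency structure; the index
   values are illustrative only:
.vb
   PetscInt *ia,*ja;
   Mat      A;
   ierr  = PetscMalloc1(3,&ia);CHKERRQ(ierr);  /* m+1 = 3 row offsets      */
   ierr  = PetscMalloc1(4,&ja);CHKERRQ(ierr);  /* 4 nonzeros (edges) total */
   ia[0] = 0; ia[1] = 2; ia[2] = 4;            /* each local row has two neighbors  */
   ja[0] = 1; ja[1] = 2;                       /* neighbors of local row 0 (sorted) */
   ja[2] = 0; ja[3] = 3;                       /* neighbors of local row 1 (sorted) */
   ierr  = MatCreateMPIAdj(PETSC_COMM_WORLD,2,4,ia,ja,NULL,&A);CHKERRQ(ierr);
.ve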
.seealso: MatCreate(), MatConvert(), MatGetOrdering()
@*/
PetscErrorCode  MatCreateMPIAdj(MPI_Comm comm,PetscInt m,PetscInt N,PetscInt *i,PetscInt *j,PetscInt *values,Mat *A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatCreate(comm,A);CHKERRQ(ierr);
  ierr = MatSetSizes(*A,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATMPIADJ);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(*A,i,j,values);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}