/*
    Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
*/
#include <../src/mat/impls/adj/mpi/mpiadj.h>    /*I "petscmat.h" I*/
#include <petscsf.h>

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIAdj_ASCII"
PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
{
  Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode    ierr;
  PetscInt          i,j,m = A->rmap->n;
  const char        *name;
  PetscViewerFormat format;

  PetscFunctionBegin;
  ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
  ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
  if (format == PETSC_VIEWER_ASCII_INFO) {
    PetscFunctionReturn(0);
  } else if (format == PETSC_VIEWER_ASCII_MATLAB) {
    SETERRQ(PetscObjectComm((PetscObject)A),PETSC_ERR_SUP,"MATLAB format not supported");
  } else {
    ierr = PetscViewerASCIIUseTabs(viewer,PETSC_FALSE);CHKERRQ(ierr);
    ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_TRUE);CHKERRQ(ierr);
    for (i=0; i<m; i++) {
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"row %D:",i+A->rmap->rstart);CHKERRQ(ierr);
      for (j=a->i[i]; j<a->i[i+1]; j++) {
        ierr = PetscViewerASCIISynchronizedPrintf(viewer," %D ",a->j[j]);CHKERRQ(ierr);
      }
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
    }
    ierr = PetscViewerASCIIUseTabs(viewer,PETSC_TRUE);CHKERRQ(ierr);
    ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
    ierr = PetscViewerASCIISynchronizedAllow(viewer,PETSC_FALSE);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
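
/*
   For illustration: with the format strings above, a process owning rows 0 and 1
   of an adjacency matrix with edges 0->{1,3} and 1->{0,2} prints

     row 0: 1  3
     row 1: 0  2

   one synchronized line per local row, in the global row numbering.
*/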

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIAdj"
PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscBool      iascii;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatDestroy_MPIAdj"
PetscErrorCode MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)mat->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D, NZ=%D",mat->rmap->n,mat->cmap->n,a->nz);
#endif
  ierr = PetscFree(a->diag);CHKERRQ(ierr);
  if (a->freeaij) {
    if (a->freeaijwithfree) {
      /* i and j were obtained with raw malloc() (e.g. from an external package), so release them with free() */
      if (a->i) free(a->i);
      if (a->j) free(a->j);
    } else {
      ierr = PetscFree(a->i);CHKERRQ(ierr);
      ierr = PetscFree(a->j);CHKERRQ(ierr);
      ierr = PetscFree(a->values);CHKERRQ(ierr);
    }
  }
  ierr = PetscFree(a->rowvalues);CHKERRQ(ierr);
  ierr = PetscFree(mat->data);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)mat,0);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjSetPreallocation_C",NULL);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjCreateNonemptySubcommMat_C",NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetOption_MPIAdj"
PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op,PetscBool flg)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  switch (op) {
  case MAT_SYMMETRIC:
  case MAT_STRUCTURALLY_SYMMETRIC:
  case MAT_HERMITIAN:
    a->symmetric = flg;
    break;
  case MAT_SYMMETRY_ETERNAL:
    break;
  default:
    ierr = PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr);
    break;
  }
  PetscFunctionReturn(0);
}

/*
     Adds diagonal pointers to sparse matrix structure.
*/

#undef __FUNCT__
#define __FUNCT__ "MatMarkDiagonal_MPIAdj"
PetscErrorCode MatMarkDiagonal_MPIAdj(Mat A)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;
  PetscInt       i,j,m = A->rmap->n;

  PetscFunctionBegin;
  ierr = PetscMalloc1(m,&a->diag);CHKERRQ(ierr);
  ierr = PetscLogObjectMemory((PetscObject)A,m*sizeof(PetscInt));CHKERRQ(ierr);
  for (i=0; i<m; i++) {
    for (j=a->i[i]; j<a->i[i+1]; j++) {
      if (a->j[j] == i) {
        a->diag[i] = j;
        break;
      }
    }
  }
  PetscFunctionReturn(0);
}
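
/*
   For illustration (a sketch, on one process so local and global numbering
   coincide): after MatMarkDiagonal_MPIAdj(), a->diag[i] holds the position in
   a->j of row i's diagonal entry. With a->i = {0,2,5} and a->j = {1,3,0,1,4},
   row 1 spans positions 2..4 of a->j and a->j[3] == 1, so a->diag[1] == 3.
   Note that rows with no stored diagonal entry leave a->diag[i] unset.
*/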

#undef __FUNCT__
#define __FUNCT__ "MatGetRow_MPIAdj"
PetscErrorCode MatGetRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  row -= A->rmap->rstart;
  if (row < 0 || row >= A->rmap->n) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");

  *nz = a->i[row+1] - a->i[row];
  if (v) {
    PetscInt j;
    if (a->rowvalues_alloc < *nz) {
      ierr = PetscFree(a->rowvalues);CHKERRQ(ierr);
      a->rowvalues_alloc = PetscMax(a->rowvalues_alloc*2, *nz);
      ierr = PetscMalloc1(a->rowvalues_alloc,&a->rowvalues);CHKERRQ(ierr);
    }
    /* matrices without stored values are treated as having unit edge weights */
    for (j=0; j<*nz; j++) a->rowvalues[j] = a->values ? a->values[a->i[row]+j] : 1.0;
    *v = (*nz) ? a->rowvalues : NULL;
  }
  if (idx) *idx = (*nz) ? a->j + a->i[row] : NULL;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatRestoreRow_MPIAdj"
PetscErrorCode MatRestoreRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatEqual_MPIAdj"
PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscBool * flg)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data,*b = (Mat_MPIAdj*)B->data;
  PetscErrorCode ierr;
  PetscBool      flag;

  PetscFunctionBegin;
  /* if the matrix dimensions or numbers of nonzeros differ, the matrices cannot be equal */
  flag = (PetscBool)(A->rmap->n == B->rmap->n && a->nz == b->nz);

  /* if the a->i are the same */
  if (flag) {
    ierr = PetscMemcmp(a->i,b->i,(A->rmap->n+1)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
  }

  /* if a->j are the same */
  if (flag) {
    ierr = PetscMemcmp(a->j,b->j,(a->nz)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
  }

  ierr = MPI_Allreduce(&flag,flg,1,MPIU_BOOL,MPI_LAND,PetscObjectComm((PetscObject)A));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetRowIJ_MPIAdj"
PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool *done)
{
  PetscInt   i;
  Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
  PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;

  PetscFunctionBegin;
  *m    = A->rmap->n;
  *ia   = a->i;
  *ja   = a->j;
  *done = PETSC_TRUE;
  if (oshift) {
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]++;
    }
    for (i=0; i<=(*m); i++) (*ia)[i]++;
  }
  PetscFunctionReturn(0);
}
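
/*
   A sketch of the oshift behavior: callers that expect 1-based (Fortran-style)
   indexing pass oshift == 1, and the arrays are shifted in place, e.g.

     ia = {0,2,5}, ja = {1,3,0,1,4}   becomes   ia = {1,3,6}, ja = {2,4,1,2,5}

   MatRestoreRowIJ_MPIAdj() below undoes the shift.
*/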

#undef __FUNCT__
#define __FUNCT__ "MatRestoreRowIJ_MPIAdj"
PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscBool symmetric,PetscBool blockcompressed,PetscInt *m,const PetscInt *inia[],const PetscInt *inja[],PetscBool *done)
{
  PetscInt   i;
  Mat_MPIAdj *a   = (Mat_MPIAdj*)A->data;
  PetscInt   **ia = (PetscInt**)inia,**ja = (PetscInt**)inja;

  PetscFunctionBegin;
  if (ia && a->i != *ia) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"ia passed back is not one obtained with MatGetRowIJ()");
  if (ja && a->j != *ja) SETERRQ(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONGSTATE,"ja passed back is not one obtained with MatGetRowIJ()");
  if (oshift) {
    for (i=0; i<=(*m); i++) (*ia)[i]--;
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]--;
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatConvertFrom_MPIAdj"
PetscErrorCode  MatConvertFrom_MPIAdj(Mat A,MatType type,MatReuse reuse,Mat *newmat)
{
  Mat               B;
  PetscErrorCode    ierr;
  PetscInt          i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
  const PetscInt    *rj;
  const PetscScalar *ra;
  MPI_Comm          comm;

  PetscFunctionBegin;
  ierr = MatGetSize(A,NULL,&N);CHKERRQ(ierr);
  ierr = MatGetLocalSize(A,&m,NULL);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(A,&rstart,NULL);CHKERRQ(ierr);

  /* count the number of nonzeros per row */
  for (i=0; i<m; i++) {
    ierr = MatGetRow(A,i+rstart,&len,&rj,NULL);CHKERRQ(ierr);
    for (j=0; j<len; j++) {
      if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
    }
    nzeros += len;
    ierr    = MatRestoreRow(A,i+rstart,&len,&rj,NULL);CHKERRQ(ierr);
  }

  /* malloc space for nonzeros */
  ierr = PetscMalloc1(nzeros+1,&a);CHKERRQ(ierr);
  ierr = PetscMalloc1(m+1,&ia);CHKERRQ(ierr);    /* row offsets: one entry per local row plus one */
  ierr = PetscMalloc1(nzeros+1,&ja);CHKERRQ(ierr);

  nzeros = 0;
  ia[0]  = 0;
  for (i=0; i<m; i++) {
    ierr = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    cnt  = 0;
    for (j=0; j<len; j++) {
      if (rj[j] != i+rstart) { /* if not diagonal */
        a[nzeros+cnt]    = (PetscInt) PetscAbsScalar(ra[j]); /* edge weight is the magnitude of the matrix entry */
        ja[nzeros+cnt++] = rj[j];
      }
    }
    ierr    = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    nzeros += cnt;
    ia[i+1] = nzeros;
  }

  ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
  ierr = MatCreate(comm,&B);CHKERRQ(ierr);
  ierr = MatSetSizes(B,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
  ierr = MatSetType(B,type);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(B,ia,ja,a);CHKERRQ(ierr);

  if (reuse == MAT_REUSE_MATRIX) {
    ierr = MatHeaderReplace(A,B);CHKERRQ(ierr);
  } else {
    *newmat = B;
  }
  PetscFunctionReturn(0);
}
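
/*
   Example usage (a sketch): convert an existing matrix into an adjacency
   matrix, which drops the diagonal and takes |A_ij| as integer edge weights:

     Mat adj;
     ierr = MatConvert(A,MATMPIADJ,MAT_INITIAL_MATRIX,&adj);CHKERRQ(ierr);
     ierr = MatDestroy(&adj);CHKERRQ(ierr);
*/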

/* -------------------------------------------------------------------*/
static struct _MatOps MatOps_Values = {0,
                                       MatGetRow_MPIAdj,
                                       MatRestoreRow_MPIAdj,
                                       0,
                                /* 4*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*10*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*15*/ 0,
                                       MatEqual_MPIAdj,
                                       0,
                                       0,
                                       0,
                                /*20*/ 0,
                                       0,
                                       MatSetOption_MPIAdj,
                                       0,
                                /*24*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*29*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*34*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*39*/ 0,
                                       MatGetSubMatrices_MPIAdj,
                                       0,
                                       0,
                                       0,
                                /*44*/ 0,
                                       0,
                                       MatShift_Basic,
                                       0,
                                       0,
                                /*49*/ 0,
                                       MatGetRowIJ_MPIAdj,
                                       MatRestoreRowIJ_MPIAdj,
                                       0,
                                       0,
                                /*54*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*59*/ 0,
                                       MatDestroy_MPIAdj,
                                       MatView_MPIAdj,
                                       MatConvertFrom_MPIAdj,
                                       0,
                                /*64*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*69*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*74*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*79*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*84*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*89*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*94*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                                /*99*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*104*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*109*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*114*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*119*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*124*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*129*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*134*/ 0,
                                       0,
                                       0,
                                       0,
                                       0,
                               /*139*/ 0,
                                       0,
                                       0
};

#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjSetPreallocation_MPIAdj"
static PetscErrorCode  MatMPIAdjSetPreallocation_MPIAdj(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{
  Mat_MPIAdj     *b = (Mat_MPIAdj*)B->data;
  PetscErrorCode ierr;
#if defined(PETSC_USE_DEBUG)
  PetscInt ii;
#endif

  PetscFunctionBegin;
  ierr = PetscLayoutSetUp(B->rmap);CHKERRQ(ierr);
  ierr = PetscLayoutSetUp(B->cmap);CHKERRQ(ierr);

#if defined(PETSC_USE_DEBUG)
  if (i[0] != 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"First i[] index must be zero, instead it is %D\n",i[0]);
  for (ii=1; ii<=B->rmap->n; ii++) {
    if (i[ii] < 0 || i[ii] < i[ii-1]) SETERRQ4(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"i[%D]=%D index is out of range: i[%D]=%D",ii,i[ii],ii-1,i[ii-1]);
  }
  for (ii=0; ii<i[B->rmap->n]; ii++) {
    if (j[ii] < 0 || j[ii] >= B->cmap->N) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_ARG_OUTOFRANGE,"Column index %D (at position %D) is out of range\n",j[ii],ii);
  }
#endif
  B->preallocated = PETSC_TRUE;

  b->j      = j;
  b->i      = i;
  b->values = values;

  b->nz        = i[B->rmap->n];
  b->diag      = 0;
  b->symmetric = PETSC_FALSE;
  b->freeaij   = PETSC_TRUE;

  ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
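
/*
   The debug checks above enforce standard CSR layout. For example (a sketch),
   a valid input for 2 local rows with edges 0->{1,3} and 1->{0,2} is

     i = {0,2,4}     (monotone, with i[0] == 0)
     j = {1,3,0,2}   (each entry in the range [0, cmap->N))
*/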

static PetscErrorCode MatGetSubMatrix_MPIAdj_data(Mat adj,IS irows,IS icols,PetscInt **sadj_xadj,PetscInt **sadj_adjncy,PetscInt **sadj_values);

#undef __FUNCT__
#define __FUNCT__ "MatGetSubMatrices_MPIAdj"
PetscErrorCode MatGetSubMatrices_MPIAdj(Mat mat,PetscInt n,const IS irow[],const IS icol[],MatReuse scall,Mat *submat[])
{
  PetscInt        i,irow_n,icol_n,*sxadj,*sadjncy,*svalues;
  PetscInt       *indices,nindx,j,k,loc;
  const PetscInt *irow_indices,*icol_indices;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  /* find the size of the largest row/column index set, so one scratch array can serve every submatrix */
  nindx = 0;
  for (i=0; i<n; i++) {
    ierr  = ISGetLocalSize(irow[i],&irow_n);CHKERRQ(ierr);
    ierr  = ISGetLocalSize(icol[i],&icol_n);CHKERRQ(ierr);
    nindx = PetscMax(nindx,irow_n+icol_n);
  }
  ierr = PetscCalloc1(nindx,&indices);CHKERRQ(ierr);
  for (i=0; i<n; i++) {
    ierr = MatGetSubMatrix_MPIAdj_data(mat,irow[i],icol[i],&sxadj,&sadjncy,&svalues);CHKERRQ(ierr);
    ierr = ISGetLocalSize(irow[i],&irow_n);CHKERRQ(ierr);
    ierr = ISGetLocalSize(icol[i],&icol_n);CHKERRQ(ierr);
    ierr = ISGetIndices(irow[i],&irow_indices);CHKERRQ(ierr);
    ierr = PetscMemcpy(indices,irow_indices,sizeof(PetscInt)*irow_n);CHKERRQ(ierr);
    ierr = ISRestoreIndices(irow[i],&irow_indices);CHKERRQ(ierr);
    ierr = ISGetIndices(icol[i],&icol_indices);CHKERRQ(ierr);
    ierr = PetscMemcpy(indices+irow_n,icol_indices,sizeof(PetscInt)*icol_n);CHKERRQ(ierr);
    ierr = ISRestoreIndices(icol[i],&icol_indices);CHKERRQ(ierr);
    nindx = irow_n+icol_n;
    ierr = PetscSortRemoveDupsInt(&nindx,indices);CHKERRQ(ierr);
    /* renumber columns to the local numbering of the submatrix */
    for (j=0; j<irow_n; j++) {
      for (k=sxadj[j]; k<sxadj[j+1]; k++) {
        ierr = PetscFindInt(sadjncy[k],nindx,indices,&loc);CHKERRQ(ierr);
#if defined(PETSC_USE_DEBUG)
        if (loc < 0) SETERRQ1(PETSC_COMM_SELF,PETSC_ERR_ARG_WRONG,"Cannot find column %D\n",sadjncy[k]);
#endif
        sadjncy[k] = loc;
      }
    }
    if (scall == MAT_INITIAL_MATRIX) {
      ierr = MatCreateMPIAdj(PETSC_COMM_SELF,irow_n,icol_n,sxadj,sadjncy,svalues,submat[i]);CHKERRQ(ierr);
    } else {
      Mat         sadj = *(submat[i]);
      Mat_MPIAdj *sa   = (Mat_MPIAdj*)((sadj)->data);
      ierr = PetscMemcpy(sa->i,sxadj,sizeof(PetscInt)*(irow_n+1));CHKERRQ(ierr);
      ierr = PetscMemcpy(sa->j,sadjncy,sizeof(PetscInt)*sxadj[irow_n]);CHKERRQ(ierr);
      if (svalues) {ierr = PetscMemcpy(sa->values,svalues,sizeof(PetscInt)*sxadj[irow_n]);CHKERRQ(ierr);}
      ierr = PetscFree(sxadj);CHKERRQ(ierr);
      ierr = PetscFree(sadjncy);CHKERRQ(ierr);
      if (svalues) {ierr = PetscFree(svalues);CHKERRQ(ierr);}
    }
  }
  ierr = PetscFree(indices);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetSubMatrix_MPIAdj_data"
/*
 * The interface should be easy to use for both MatGetSubMatrix (parallel sub-matrix) and MatGetSubMatrices (sequential sub-matrices)
 */
static PetscErrorCode MatGetSubMatrix_MPIAdj_data(Mat adj,IS irows,IS icols,PetscInt **sadj_xadj,PetscInt **sadj_adjncy,PetscInt **sadj_values)
{
  PetscInt        nlrows_is,icols_n,i,j,nroots,nleaves,owner,rlocalindex,*ncols_send,*ncols_recv;
  PetscInt        nlrows_mat,*adjncy_recv,Ncols_recv,Ncols_send,*xadj_recv,*values_recv;
  PetscInt       *ncols_recv_offsets,loc,rnclos,*sadjncy,*sxadj,*svalues,isvalue;
  const PetscInt *irows_indices,*icols_indices,*xadj,*adjncy;
  Mat_MPIAdj     *a = (Mat_MPIAdj*)adj->data;
  PetscLayout     rmap;
  MPI_Comm        comm;
  PetscSF         sf;
  PetscSFNode    *iremote;
  PetscBool       done;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  /* communicator */
  ierr = PetscObjectGetComm((PetscObject)adj,&comm);CHKERRQ(ierr);
  /* layouts */
  ierr = MatGetLayouts(adj,&rmap,NULL);CHKERRQ(ierr);
  /* get rows information */
  ierr = ISGetLocalSize(irows,&nlrows_is);CHKERRQ(ierr);
  ierr = ISGetIndices(irows,&irows_indices);CHKERRQ(ierr);
  ierr = PetscCalloc1(nlrows_is,&iremote);CHKERRQ(ierr);
  /* construct sf graph: one leaf per requested row, rooted at the owning process */
  nleaves = nlrows_is;
  for (i=0; i<nlrows_is; i++) {
    owner       = -1;
    rlocalindex = -1;
    ierr = PetscLayoutFindOwnerIndex(rmap,irows_indices[i],&owner,&rlocalindex);CHKERRQ(ierr);
    iremote[i].rank  = owner;
    iremote[i].index = rlocalindex;
  }
  ierr = MatGetRowIJ(adj,0,PETSC_FALSE,PETSC_FALSE,&nlrows_mat,&xadj,&adjncy,&done);CHKERRQ(ierr);
  ierr = PetscCalloc4(nlrows_mat,&ncols_send,nlrows_is,&xadj_recv,nlrows_is+1,&ncols_recv_offsets,nlrows_is,&ncols_recv);CHKERRQ(ierr);
  nroots = nlrows_mat;
  for (i=0; i<nlrows_mat; i++) {
    ncols_send[i] = xadj[i+1]-xadj[i];
  }
  ierr = PetscSFCreate(comm,&sf);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(sf,nroots,nleaves,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);CHKERRQ(ierr);
  ierr = PetscSFSetType(sf,PETSCSFBASIC);CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(sf);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(sf,MPIU_INT,ncols_send,ncols_recv);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf,MPIU_INT,ncols_send,ncols_recv);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(sf,MPIU_INT,xadj,xadj_recv);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf,MPIU_INT,xadj,xadj_recv);CHKERRQ(ierr);
  ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
  Ncols_recv = 0;
  for (i=0; i<nlrows_is; i++) {
    Ncols_recv             += ncols_recv[i];
    ncols_recv_offsets[i+1] = ncols_recv[i]+ncols_recv_offsets[i];
  }
  Ncols_send = 0;
  for (i=0; i<nlrows_mat; i++) {
    Ncols_send += ncols_send[i];
  }
  ierr = PetscCalloc1(Ncols_recv,&iremote);CHKERRQ(ierr);
  ierr = PetscCalloc1(Ncols_recv,&adjncy_recv);CHKERRQ(ierr);
  nleaves    = Ncols_recv;
  Ncols_recv = 0;
  for (i=0; i<nlrows_is; i++) {
    ierr = PetscLayoutFindOwner(rmap,irows_indices[i],&owner);CHKERRQ(ierr);
    for (j=0; j<ncols_recv[i]; j++) {
      iremote[Ncols_recv].rank    = owner;
      iremote[Ncols_recv++].index = xadj_recv[i]+j;
    }
  }
  ierr = ISRestoreIndices(irows,&irows_indices);CHKERRQ(ierr);
  /* do we need to deal with edge weights? */
  if (a->values) {isvalue = 1;} else {isvalue = 0;}
  /* would involve a global communication */
  /*ierr = MPI_Allreduce(&isvalue,&isvalue,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);*/
  if (isvalue) {ierr = PetscCalloc1(Ncols_recv,&values_recv);CHKERRQ(ierr);}
  nroots = Ncols_send;
  ierr = PetscSFCreate(comm,&sf);CHKERRQ(ierr);
  ierr = PetscSFSetGraph(sf,nroots,nleaves,NULL,PETSC_OWN_POINTER,iremote,PETSC_OWN_POINTER);CHKERRQ(ierr);
  ierr = PetscSFSetType(sf,PETSCSFBASIC);CHKERRQ(ierr);
  ierr = PetscSFSetFromOptions(sf);CHKERRQ(ierr);
  ierr = PetscSFBcastBegin(sf,MPIU_INT,adjncy,adjncy_recv);CHKERRQ(ierr);
  ierr = PetscSFBcastEnd(sf,MPIU_INT,adjncy,adjncy_recv);CHKERRQ(ierr);
  if (isvalue) {
    ierr = PetscSFBcastBegin(sf,MPIU_INT,a->values,values_recv);CHKERRQ(ierr);
    ierr = PetscSFBcastEnd(sf,MPIU_INT,a->values,values_recv);CHKERRQ(ierr);
  }
  ierr = PetscSFDestroy(&sf);CHKERRQ(ierr);
  ierr = MatRestoreRowIJ(adj,0,PETSC_FALSE,PETSC_FALSE,&nlrows_mat,&xadj,&adjncy,&done);CHKERRQ(ierr);
  ierr = ISGetLocalSize(icols,&icols_n);CHKERRQ(ierr);
  ierr = ISGetIndices(icols,&icols_indices);CHKERRQ(ierr);
  /* drop received columns that are not in icols; count the columns that survive */
  rnclos = 0;
  for (i=0; i<nlrows_is; i++) {
    for (j=ncols_recv_offsets[i]; j<ncols_recv_offsets[i+1]; j++) {
      ierr = PetscFindInt(adjncy_recv[j],icols_n,icols_indices,&loc);CHKERRQ(ierr);
      if (loc < 0) {
        adjncy_recv[j] = -1;
        if (isvalue) values_recv[j] = -1;
        ncols_recv[i]--;
      } else {
        rnclos++;
      }
    }
  }
  ierr = ISRestoreIndices(icols,&icols_indices);CHKERRQ(ierr);
  ierr = PetscCalloc1(rnclos,&sadjncy);CHKERRQ(ierr);
  if (isvalue) {ierr = PetscCalloc1(rnclos,&svalues);CHKERRQ(ierr);}
  ierr = PetscCalloc1(nlrows_is+1,&sxadj);CHKERRQ(ierr);
  /* compact the surviving columns (and weights) into the output arrays */
  rnclos = 0;
  for (i=0; i<nlrows_is; i++) {
    for (j=ncols_recv_offsets[i]; j<ncols_recv_offsets[i+1]; j++) {
      if (adjncy_recv[j] < 0) continue;
      sadjncy[rnclos] = adjncy_recv[j];
      if (isvalue) svalues[rnclos] = values_recv[j];
      rnclos++;
    }
  }
  for (i=0; i<nlrows_is; i++) {
    sxadj[i+1] = sxadj[i]+ncols_recv[i];
  }
  if (sadj_xadj)   {*sadj_xadj = sxadj;}     else {ierr = PetscFree(sxadj);CHKERRQ(ierr);}
  if (sadj_adjncy) {*sadj_adjncy = sadjncy;} else {ierr = PetscFree(sadjncy);CHKERRQ(ierr);}
  if (sadj_values) {
    if (isvalue) *sadj_values = svalues; else *sadj_values = 0;
  } else {
    if (isvalue) {ierr = PetscFree(svalues);CHKERRQ(ierr);}
  }
  ierr = PetscFree4(ncols_send,xadj_recv,ncols_recv_offsets,ncols_recv);CHKERRQ(ierr);
  ierr = PetscFree(adjncy_recv);CHKERRQ(ierr);
  if (isvalue) {ierr = PetscFree(values_recv);CHKERRQ(ierr);}
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjCreateNonemptySubcommMat_MPIAdj"
static PetscErrorCode MatMPIAdjCreateNonemptySubcommMat_MPIAdj(Mat A,Mat *B)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;
  const PetscInt *ranges;
  MPI_Comm       acomm,bcomm;
  MPI_Group      agroup,bgroup;
  PetscMPIInt    i,size,nranks,*ranks;

  PetscFunctionBegin;
  *B   = NULL;
  ierr = PetscObjectGetComm((PetscObject)A,&acomm);CHKERRQ(ierr);
  ierr = MPI_Comm_size(acomm,&size);CHKERRQ(ierr);
  ierr = MatGetOwnershipRanges(A,&ranges);CHKERRQ(ierr);
  for (i=0,nranks=0; i<size; i++) {
    if (ranges[i+1] - ranges[i] > 0) nranks++;
  }
  if (nranks == size) {         /* All ranks have a positive number of rows, so we do not need to create a subcomm */
    ierr = PetscObjectReference((PetscObject)A);CHKERRQ(ierr);
    *B   = A;
    PetscFunctionReturn(0);
  }

  ierr = PetscMalloc1(nranks,&ranks);CHKERRQ(ierr);
  for (i=0,nranks=0; i<size; i++) {
    if (ranges[i+1] - ranges[i] > 0) ranks[nranks++] = i;
  }
  ierr = MPI_Comm_group(acomm,&agroup);CHKERRQ(ierr);
  ierr = MPI_Group_incl(agroup,nranks,ranks,&bgroup);CHKERRQ(ierr);
  ierr = PetscFree(ranks);CHKERRQ(ierr);
  ierr = MPI_Comm_create(acomm,bgroup,&bcomm);CHKERRQ(ierr);
  ierr = MPI_Group_free(&agroup);CHKERRQ(ierr);
  ierr = MPI_Group_free(&bgroup);CHKERRQ(ierr);
  if (bcomm != MPI_COMM_NULL) {
    PetscInt   m,N;
    Mat_MPIAdj *b;
    ierr       = MatGetLocalSize(A,&m,NULL);CHKERRQ(ierr);
    ierr       = MatGetSize(A,NULL,&N);CHKERRQ(ierr);
    ierr       = MatCreateMPIAdj(bcomm,m,N,a->i,a->j,a->values,B);CHKERRQ(ierr);
    b          = (Mat_MPIAdj*)(*B)->data;
    b->freeaij = PETSC_FALSE;   /* the i/j/values arrays are shared with A, so B must not free them */
    ierr       = MPI_Comm_free(&bcomm);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjCreateNonemptySubcommMat"
/*@
   MatMPIAdjCreateNonemptySubcommMat - create the same MPIAdj matrix on a subcommunicator containing only processes owning a positive number of rows

   Collective

   Input Arguments:
.  A - original MPIAdj matrix

   Output Arguments:
.  B - matrix on subcommunicator, NULL on ranks that own zero rows of A

   Level: developer

   Note:
   This function is mostly useful for internal use by mesh partitioning packages that require that every process owns at least one row.

   The matrix B should be destroyed with MatDestroy(). The arrays are not copied, so B should be destroyed before A is destroyed.

.seealso: MatCreateMPIAdj()
@*/
PetscErrorCode MatMPIAdjCreateNonemptySubcommMat(Mat A,Mat *B)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(A,MAT_CLASSID,1);
  ierr = PetscUseMethod(A,"MatMPIAdjCreateNonemptySubcommMat_C",(Mat,Mat*),(A,B));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
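
/*
   Typical use (a sketch): shrink the communicator before calling a
   partitioning package that requires every process to own at least one row.

     Mat B;
     ierr = MatMPIAdjCreateNonemptySubcommMat(A,&B);CHKERRQ(ierr);
     if (B) {                                  B is NULL on ranks owning no rows of A
       ierr = MatDestroy(&B);CHKERRQ(ierr);    destroy B before A; the arrays are shared
     }
*/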

/*MC
   MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
   intended for use in constructing orderings and partitionings.

  Level: beginner

.seealso: MatCreateMPIAdj()
M*/

#undef __FUNCT__
#define __FUNCT__ "MatCreate_MPIAdj"
PETSC_EXTERN PetscErrorCode MatCreate_MPIAdj(Mat B)
{
  Mat_MPIAdj     *b;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr         = PetscNewLog(B,&b);CHKERRQ(ierr);
  B->data      = (void*)b;
  ierr         = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
  B->assembled = PETSC_FALSE;

  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C",MatMPIAdjSetPreallocation_MPIAdj);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)B,"MatMPIAdjCreateNonemptySubcommMat_C",MatMPIAdjCreateNonemptySubcommMat_MPIAdj);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)B,MATMPIADJ);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjSetPreallocation"
/*@C
   MatMPIAdjSetPreallocation - Sets the arrays used to store the matrix structure (and optional edge weights)

   Logically Collective on MPI_Comm

   Input Parameters:
+  B - the matrix
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Level: intermediate

.seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
@*/
PetscErrorCode  MatMPIAdjSetPreallocation(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = PetscTryMethod(B,"MatMPIAdjSetPreallocation_C",(Mat,PetscInt*,PetscInt*,PetscInt*),(B,i,j,values));CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatCreateMPIAdj"
/*@C
   MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
   The matrix does not have numerical values associated with it, but is
   intended for ordering (to reduce bandwidth etc) and partitioning.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of local rows
.  N - number of global columns
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Output Parameter:
.  A - the matrix

   Level: intermediate

   Notes: This matrix object does not support most matrix operations, including
   MatSetValues().
   You must NOT free the i, j, and values arrays yourself. PETSc will free them
   when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
   call from Fortran you need not create the arrays with PetscMalloc().
   The arrays should not include the matrix diagonal.

   If you already have a matrix, you can create its adjacency matrix by a call
   to MatConvert(), specifying a type of MATMPIADJ.

   Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC

.seealso: MatCreate(), MatConvert(), MatGetOrdering()
@*/
PetscErrorCode  MatCreateMPIAdj(MPI_Comm comm,PetscInt m,PetscInt N,PetscInt *i,PetscInt *j,PetscInt *values,Mat *A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatCreate(comm,A);CHKERRQ(ierr);
  ierr = MatSetSizes(*A,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATMPIADJ);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(*A,i,j,values);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
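
/*
   Example usage (a sketch): the adjacency matrix of the undirected graph
   0 - 1 - 2 on one process, excluding the diagonal as required. The arrays
   must be obtained with PetscMalloc(); the matrix takes ownership of them.

     PetscInt *ii,*jj;
     Mat      adj;

     ierr  = PetscMalloc1(4,&ii);CHKERRQ(ierr);
     ierr  = PetscMalloc1(4,&jj);CHKERRQ(ierr);
     ii[0] = 0; ii[1] = 1; ii[2] = 3; ii[3] = 4;
     jj[0] = 1; jj[1] = 0; jj[2] = 2; jj[3] = 1;
     ierr  = MatCreateMPIAdj(PETSC_COMM_SELF,3,3,ii,jj,NULL,&adj);CHKERRQ(ierr);
     ierr  = MatDestroy(&adj);CHKERRQ(ierr);
*/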