/*
    Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
*/
#include "src/mat/impls/adj/mpi/mpiadj.h"
#include "petscsys.h"

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIAdj_ASCII"
int MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
{
  Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
  int               ierr,i,j,m = A->m;
  char              *name;
  PetscViewerFormat format;

  PetscFunctionBegin;
  ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
  ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
  if (format == PETSC_VIEWER_ASCII_INFO) {
    PetscFunctionReturn(0);
  } else if (format == PETSC_VIEWER_ASCII_MATLAB) {
    SETERRQ(PETSC_ERR_SUP,"Matlab format not supported");
  } else {
    ierr = PetscViewerASCIIUseTabs(viewer,PETSC_NO);CHKERRQ(ierr);
    for (i=0; i<m; i++) {
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"row %d:",i+a->rstart);CHKERRQ(ierr);
      for (j=a->i[i]; j<a->i[i+1]; j++) {
        ierr = PetscViewerASCIISynchronizedPrintf(viewer," %d ",a->j[j]);CHKERRQ(ierr);
      }
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
    }
    ierr = PetscViewerASCIIUseTabs(viewer,PETSC_YES);CHKERRQ(ierr);
  }
  ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIAdj"
int MatView_MPIAdj(Mat A,PetscViewer viewer)
{
  int        ierr;
  PetscTruth iascii;

  PetscFunctionBegin;
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
  } else {
    SETERRQ1(1,"Viewer type %s not supported by MPIAdj",((PetscObject)viewer)->type_name);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatDestroy_MPIAdj"
int MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)mat->data;
  int        ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)mat,"Rows=%d, Cols=%d, NZ=%d",mat->m,mat->n,a->nz);
#endif
  if (a->diag) {ierr = PetscFree(a->diag);CHKERRQ(ierr);}
  if (a->freeaij) {
    ierr = PetscFree(a->i);CHKERRQ(ierr);
    ierr = PetscFree(a->j);CHKERRQ(ierr);
    if (a->values) {ierr = PetscFree(a->values);CHKERRQ(ierr);}
  }
  ierr = PetscFree(a->rowners);CHKERRQ(ierr);
  ierr = PetscFree(a);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetOption_MPIAdj"
int MatSetOption_MPIAdj(Mat A,MatOption op)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;

  PetscFunctionBegin;
  switch (op) {
  case MAT_SYMMETRIC:
  case MAT_STRUCTURALLY_SYMMETRIC:
  case MAT_HERMITIAN:
    a->symmetric = PETSC_TRUE;
    break;
  case MAT_NOT_SYMMETRIC:
  case MAT_NOT_STRUCTURALLY_SYMMETRIC:
  case MAT_NOT_HERMITIAN:
    a->symmetric = PETSC_FALSE;
    break;
  case MAT_SYMMETRY_ETERNAL:
  case MAT_NOT_SYMMETRY_ETERNAL:
    break;
  default:
    PetscLogInfo(A,"MatSetOption_MPIAdj:Option ignored\n");
    break;
  }
  PetscFunctionReturn(0);
}


/*
     Adds diagonal pointers to the sparse matrix structure: for each local row, records
     the position in a->j of the diagonal entry, if one is stored.
*/

#undef __FUNCT__
#define __FUNCT__ "MatMarkDiagonal_MPIAdj"
int MatMarkDiagonal_MPIAdj(Mat A)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
  int        i,j,*diag,m = A->m,ierr;

  PetscFunctionBegin;
  ierr = PetscMalloc((m+1)*sizeof(int),&diag);CHKERRQ(ierr);
  PetscLogObjectMemory(A,(m+1)*sizeof(int));
  for (i=0; i<m; i++) {
    diag[i] = -1;                      /* rows that store no diagonal entry are marked with -1 */
    for (j=a->i[i]; j<a->i[i+1]; j++) {
      if (a->j[j] == i + a->rstart) {  /* column indices are global, so compare with the global row number */
        diag[i] = j;
        break;
      }
    }
  }
  a->diag = diag;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetRow_MPIAdj"
int MatGetRow_MPIAdj(Mat A,int row,int *nz,int **idx,PetscScalar **v)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
  int        *itmp;

  PetscFunctionBegin;
  row -= a->rstart;

  if (row < 0 || row >= A->m) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");

  *nz = a->i[row+1] - a->i[row];
  if (v) *v = PETSC_NULL;
  if (idx) {
    itmp = a->j + a->i[row];
    if (*nz) {
      *idx = itmp;
    }
    else *idx = 0;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatRestoreRow_MPIAdj"
int MatRestoreRow_MPIAdj(Mat A,int row,int *nz,int **idx,PetscScalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetBlockSize_MPIAdj"
int MatGetBlockSize_MPIAdj(Mat A,int *bs)
{
  PetscFunctionBegin;
  *bs = 1;
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "MatEqual_MPIAdj"
int MatEqual_MPIAdj(Mat A,Mat B,PetscTruth* flg)
{
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data,*b = (Mat_MPIAdj *)B->data;
  int         ierr;
  PetscTruth  flag = PETSC_TRUE;

  PetscFunctionBegin;
  /* if the local dimensions or the number of nonzeros differ, the matrices cannot be equal */
  if ((A->m != B->m) || (a->nz != b->nz)) {
    flag = PETSC_FALSE;
  } else {
    /* compare the row pointers a->i, and only if those match compare the column indices a->j */
    ierr = PetscMemcmp(a->i,b->i,(A->m+1)*sizeof(int),&flag);CHKERRQ(ierr);
    if (flag) {
      ierr = PetscMemcmp(a->j,b->j,(a->nz)*sizeof(int),&flag);CHKERRQ(ierr);
    }
  }

  /* the matrices are equal only if every process finds its local parts equal */
  ierr = MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,A->comm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetRowIJ_MPIAdj"
int MatGetRowIJ_MPIAdj(Mat A,int oshift,PetscTruth symmetric,int *m,int *ia[],int *ja[],PetscTruth *done)
{
  int        ierr,size,i;
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(A->comm,&size);CHKERRQ(ierr);
  if (size > 1) {*done = PETSC_FALSE; PetscFunctionReturn(0);}
  *m    = A->m;
  *ia   = a->i;
  *ja   = a->j;
  *done = PETSC_TRUE;
  if (oshift) {
    /* shift the stored indices in place to one-based numbering; MatRestoreRowIJ_MPIAdj() undoes the shift */
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]++;
    }
    for (i=0; i<=(*m); i++) (*ia)[i]++;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatRestoreRowIJ_MPIAdj"
int MatRestoreRowIJ_MPIAdj(Mat A,int oshift,PetscTruth symmetric,int *m,int *ia[],int *ja[],PetscTruth *done)
{
  int        i;
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;

  PetscFunctionBegin;
  if (ia && a->i != *ia) SETERRQ(1,"ia passed back is not one obtained with MatGetRowIJ()");
  if (ja && a->j != *ja) SETERRQ(1,"ja passed back is not one obtained with MatGetRowIJ()");
  if (oshift) {
    for (i=0; i<=(*m); i++) (*ia)[i]--;
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]--;
    }
  }
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------*/
static struct _MatOps MatOps_Values = {0,
       MatGetRow_MPIAdj,
       MatRestoreRow_MPIAdj,
       0,
/* 4*/ 0,
       0,
       0,
       0,
       0,
       0,
/*10*/ 0,
       0,
       0,
       0,
       0,
/*15*/ 0,
       MatEqual_MPIAdj,
       0,
       0,
       0,
/*20*/ 0,
       0,
       0,
       MatSetOption_MPIAdj,
       0,
/*25*/ 0,
       0,
       0,
       0,
       0,
/*30*/ 0,
       0,
       0,
       0,
       0,
/*35*/ 0,
       0,
       0,
       0,
       0,
/*40*/ 0,
       0,
       0,
       0,
       0,
/*45*/ 0,
       0,
       0,
       0,
       0,
/*50*/ MatGetBlockSize_MPIAdj,
       MatGetRowIJ_MPIAdj,
       MatRestoreRowIJ_MPIAdj,
       0,
       0,
/*55*/ 0,
       0,
       0,
       0,
       0,
/*60*/ 0,
       MatDestroy_MPIAdj,
       MatView_MPIAdj,
       MatGetPetscMaps_Petsc,
       0,
/*65*/ 0,
       0,
       0,
       0,
       0,
/*70*/ 0,
       0,
       0,
       0,
       0,
/*75*/ 0,
       0,
       0,
       0,
       0,
/*80*/ 0,
       0,
       0,
       0,
/*85*/ 0
};

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjSetPreallocation_MPIAdj"
int MatMPIAdjSetPreallocation_MPIAdj(Mat B,int *i,int *j,int *values)
{
  Mat_MPIAdj *b = (Mat_MPIAdj *)B->data;
  int        ierr;
#if defined(PETSC_USE_BOPT_g)
  int        ii;
#endif

  PetscFunctionBegin;
  B->preallocated = PETSC_TRUE;
#if defined(PETSC_USE_BOPT_g)
  if (i[0] != 0) SETERRQ1(1,"First i[] index must be zero, instead it is %d\n",i[0]);
  for (ii=1; ii<=B->m; ii++) {
    if (i[ii] < 0 || i[ii] < i[ii-1]) {
      SETERRQ4(1,"i[%d]=%d is negative or smaller than i[%d]=%d",ii,i[ii],ii-1,i[ii-1]);
    }
  }
  for (ii=0; ii<i[B->m]; ii++) {
    if (j[ii] < 0 || j[ii] >= B->N) {
      SETERRQ2(1,"Column index j[%d]=%d out of range",ii,j[ii]);
    }
  }
#endif

  /* the matrix takes ownership of the caller-provided arrays; they are freed in MatDestroy_MPIAdj() */
  b->j      = j;
  b->i      = i;
  b->values = values;

  b->nz               = i[B->m];
  b->diag             = 0;
  b->symmetric        = PETSC_FALSE;
  b->freeaij          = PETSC_TRUE;

  ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

/*MC
   MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
   intended for use in constructing orderings and partitionings.

  Level: beginner

.seealso: MatCreateMPIAdj()
M*/

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatCreate_MPIAdj"
int MatCreate_MPIAdj(Mat B)
{
  Mat_MPIAdj *b;
  int        ii,ierr,size,rank;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(B->comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(B->comm,&rank);CHKERRQ(ierr);

  ierr                = PetscNew(Mat_MPIAdj,&b);CHKERRQ(ierr);
  B->data             = (void*)b;
  ierr                = PetscMemzero(b,sizeof(Mat_MPIAdj));CHKERRQ(ierr);
  ierr                = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
  B->factor           = 0;
  B->lupivotthreshold = 1.0;
  B->mapping          = 0;
  B->assembled        = PETSC_FALSE;

  ierr = MPI_Allreduce(&B->m,&B->M,1,MPI_INT,MPI_SUM,B->comm);CHKERRQ(ierr);
  B->n = B->N;

  /* the information in the maps duplicates the information computed below, eventually
     we should remove the duplicate information that is not contained in the maps */
  ierr = PetscMapCreateMPI(B->comm,B->m,B->M,&B->rmap);CHKERRQ(ierr);
  /* we don't know the "local columns" so just use the row information :-( */
  ierr = PetscMapCreateMPI(B->comm,B->m,B->M,&B->cmap);CHKERRQ(ierr);

  /* gather the per-process row counts, then turn them into a prefix sum giving the row ownership ranges */
  ierr = PetscMalloc((size+1)*sizeof(int),&b->rowners);CHKERRQ(ierr);
  PetscLogObjectMemory(B,(size+1)*sizeof(int)+sizeof(struct _p_Mat)+sizeof(Mat_MPIAdj));
  ierr = MPI_Allgather(&B->m,1,MPI_INT,b->rowners+1,1,MPI_INT,B->comm);CHKERRQ(ierr);
  b->rowners[0] = 0;
  for (ii=2; ii<=size; ii++) {
    b->rowners[ii] += b->rowners[ii-1];
  }
  b->rstart = b->rowners[rank];
  b->rend   = b->rowners[rank+1];
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAdjSetPreallocation_C",
                                    "MatMPIAdjSetPreallocation_MPIAdj",
                                     MatMPIAdjSetPreallocation_MPIAdj);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjSetPreallocation"
/*@C
   MatMPIAdjSetPreallocation - Sets the arrays used for storing the matrix entries

   Collective on MPI_Comm

   Input Parameters:
+  B - the matrix
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Level: intermediate

   Notes: The matrix takes ownership of the i, j, and values arrays, which must be
   allocated with PetscMalloc(); they are freed when the matrix is destroyed.

.seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
@*/
int MatMPIAdjSetPreallocation(Mat B,int *i,int *j,int *values)
{
  int ierr,(*f)(Mat,int*,int*,int*);

  PetscFunctionBegin;
  ierr = PetscObjectQueryFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(B,i,j,values);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

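/*
   A minimal usage sketch, added for illustration and compiled out below: build the
   zero-based i[]/j[] arrays for a 4-vertex cycle on a single process and hand them to
   the preallocation routine above through the public interface.  The helper name and
   the array contents are illustrative only and are not part of the original source.
*/
#if 0
static int ExampleMPIAdjPreallocation(void)
{
  Mat B;
  int ierr,*ii,*jj;

  ierr = PetscMalloc(5*sizeof(int),&ii);CHKERRQ(ierr);   /* 4 rows + 1 */
  ierr = PetscMalloc(8*sizeof(int),&jj);CHKERRQ(ierr);   /* 2 neighbours per vertex */
  ii[0] = 0; ii[1] = 2; ii[2] = 4; ii[3] = 6; ii[4] = 8;
  jj[0] = 1; jj[1] = 3;   /* row 0: neighbours 1 and 3 */
  jj[2] = 0; jj[3] = 2;   /* row 1: neighbours 0 and 2 */
  jj[4] = 1; jj[5] = 3;   /* row 2: neighbours 1 and 3 */
  jj[6] = 0; jj[7] = 2;   /* row 3: neighbours 0 and 2 */

  ierr = MatCreate(PETSC_COMM_WORLD,4,4,PETSC_DETERMINE,4,&B);CHKERRQ(ierr);
  ierr = MatSetType(B,MATMPIADJ);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(B,ii,jj,PETSC_NULL);CHKERRQ(ierr);

  /* the matrix now owns ii and jj; they are freed by MatDestroy() */
  ierr = MatDestroy(B);CHKERRQ(ierr);
  return 0;
}
#endif
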
#undef __FUNCT__
#define __FUNCT__ "MatCreateMPIAdj"
/*@C
   MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
   The matrix does not have numerical values associated with it, but is
   intended for ordering (to reduce bandwidth etc) and partitioning.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of local rows
.  n - number of columns
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Output Parameter:
.  A - the matrix

   Level: intermediate

   Notes: This matrix object does not support most matrix operations, including
   MatSetValues().
   You must NOT free the i, j, and values arrays yourself. PETSc will free them
   when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
   call from Fortran, you do not need to allocate the arrays with PetscMalloc().
   The arrays should not include the matrix diagonal entries.

   If you already have a matrix, you can create its adjacency matrix by a call
   to MatConvert(), specifying a type of MATMPIADJ.

   Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC

.seealso: MatCreate(), MatConvert(), MatGetOrdering()
@*/
int MatCreateMPIAdj(MPI_Comm comm,int m,int n,int *i,int *j,int *values,Mat *A)
{
  int        ierr;

  PetscFunctionBegin;
  ierr = MatCreate(comm,m,n,PETSC_DETERMINE,n,A);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATMPIADJ);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(*A,i,j,values);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

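/*
   A second illustrative sketch, compiled out below and not part of the original source:
   the convenience routine MatCreateMPIAdj() used on exactly two processes, each supplying
   its local rows of a 4-vertex path 0-1-2-3 with global column indices and no diagonal
   entries, then handing the graph to the partitioning interface (the main consumer of
   MATMPIADJ).  The helper name and array contents are hypothetical.
*/
#if 0
static int ExampleMPIAdjPartitioning(void)
{
  Mat             A;
  MatPartitioning part;
  IS              is;
  int             ierr,rank,*ii,*jj;

  ierr = MPI_Comm_rank(PETSC_COMM_WORLD,&rank);CHKERRQ(ierr);
  ierr = PetscMalloc(3*sizeof(int),&ii);CHKERRQ(ierr);
  ierr = PetscMalloc(3*sizeof(int),&jj);CHKERRQ(ierr);
  if (!rank) {              /* rows 0 and 1 */
    ii[0] = 0; ii[1] = 1; ii[2] = 3;
    jj[0] = 1;              /* row 0: neighbour 1 */
    jj[1] = 0; jj[2] = 2;   /* row 1: neighbours 0 and 2 */
  } else {                  /* rows 2 and 3 */
    ii[0] = 0; ii[1] = 2; ii[2] = 3;
    jj[0] = 1; jj[1] = 3;   /* row 2: neighbours 1 and 3 */
    jj[2] = 2;              /* row 3: neighbour 2 */
  }
  ierr = MatCreateMPIAdj(PETSC_COMM_WORLD,2,4,ii,jj,PETSC_NULL,&A);CHKERRQ(ierr);

  ierr = MatPartitioningCreate(PETSC_COMM_WORLD,&part);CHKERRQ(ierr);
  ierr = MatPartitioningSetAdjacency(part,A);CHKERRQ(ierr);
  ierr = MatPartitioningSetFromOptions(part);CHKERRQ(ierr);
  ierr = MatPartitioningApply(part,&is);CHKERRQ(ierr);

  ierr = ISDestroy(is);CHKERRQ(ierr);
  ierr = MatPartitioningDestroy(part);CHKERRQ(ierr);
  ierr = MatDestroy(A);CHKERRQ(ierr);
  return 0;
}
#endif
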
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatConvertTo_MPIAdj"
int MatConvertTo_MPIAdj(Mat A,MatType type,Mat *newmat)
{
  Mat               B;
  int               i,ierr,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
  const int         *rj;
  const PetscScalar *ra;
  MPI_Comm          comm;

  PetscFunctionBegin;
  ierr = MatGetSize(A,PETSC_NULL,&N);CHKERRQ(ierr);
  ierr = MatGetLocalSize(A,&m,PETSC_NULL);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(A,&rstart,PETSC_NULL);CHKERRQ(ierr);

  /* count the number of nonzeros per row, excluding the diagonal */
  for (i=0; i<m; i++) {
    ierr    = MatGetRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
    nzeros += len;
    for (j=0; j<len; j++) {
      if (rj[j] == i+rstart) {nzeros--; break;}    /* don't count the diagonal */
    }
    ierr    = MatRestoreRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
  }

  /* malloc space for nonzeros; ia needs one entry per local row plus one */
  ierr = PetscMalloc((nzeros+1)*sizeof(int),&a);CHKERRQ(ierr);
  ierr = PetscMalloc((m+1)*sizeof(int),&ia);CHKERRQ(ierr);
  ierr = PetscMalloc((nzeros+1)*sizeof(int),&ja);CHKERRQ(ierr);

  /* fill the adjacency structure, dropping diagonal entries; the edge weights are the
     magnitudes of the original matrix entries */
  nzeros = 0;
  ia[0]  = 0;
  for (i=0; i<m; i++) {
    ierr    = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    cnt     = 0;
    for (j=0; j<len; j++) {
      if (rj[j] != i+rstart) { /* if not diagonal */
        a[nzeros+cnt]    = (int) PetscAbsScalar(ra[j]);
        ja[nzeros+cnt++] = rj[j];
      }
    }
    ierr    = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    nzeros += cnt;
    ia[i+1] = nzeros;
  }

  ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
  ierr = MatCreate(comm,m,N,PETSC_DETERMINE,N,&B);CHKERRQ(ierr);
  ierr = MatSetType(B,type);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(B,ia,ja,a);CHKERRQ(ierr);

  /* Fake support for "in-place" convert: if the caller requested an in-place
     conversion, destroy the original matrix and return the new one in its place. */
  if (*newmat == A) {
    ierr = MatDestroy(A);CHKERRQ(ierr);
  }
  *newmat = B;
  PetscFunctionReturn(0);
}
EXTERN_C_END
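
/*
   Sketch, compiled out below and not from the original file, of reaching the conversion
   routine above through the generic interface; it assumes the three-argument form of
   MatConvert() used by this version of PETSc.  A is any assembled matrix, for example a
   MATMPIAIJ operator; the helper name is hypothetical.
*/
#if 0
static int ExampleMPIAdjConvert(Mat A)
{
  Mat adj;
  int ierr;

  /* builds the adjacency graph of A, dropping diagonal entries */
  ierr = MatConvert(A,MATMPIADJ,&adj);CHKERRQ(ierr);
  /* adj can now be passed to MatPartitioningSetAdjacency() or MatGetOrdering() */
  ierr = MatDestroy(adj);CHKERRQ(ierr);
  return 0;
}
#endif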