xref: /petsc/src/mat/impls/adj/mpi/mpiadj.c (revision ec1065ed1313a9b9cd7de820fe29091efccf671c)
#define PETSCMAT_DLL

/*
    Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
*/
#include "../src/mat/impls/adj/mpi/mpiadj.h"
#include "petscsys.h"

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIAdj_ASCII"
PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
{
  Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode    ierr;
  PetscInt          i,j,m = A->rmap->n;
  const char        *name;
  PetscViewerFormat format;

  PetscFunctionBegin;
  ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
  ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
  if (format == PETSC_VIEWER_ASCII_INFO) {
    PetscFunctionReturn(0);
  } else if (format == PETSC_VIEWER_ASCII_MATLAB) {
    SETERRQ(PETSC_ERR_SUP,"Matlab format not supported");
  } else {
    ierr = PetscViewerASCIIUseTabs(viewer,PETSC_NO);CHKERRQ(ierr);
    for (i=0; i<m; i++) {
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"row %D:",i+A->rmap->rstart);CHKERRQ(ierr);
      for (j=a->i[i]; j<a->i[i+1]; j++) {
        ierr = PetscViewerASCIISynchronizedPrintf(viewer," %D ",a->j[j]);CHKERRQ(ierr);
      }
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
    }
    ierr = PetscViewerASCIIUseTabs(viewer,PETSC_YES);CHKERRQ(ierr);
  }
  ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIAdj"
PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscTruth     iascii;

  PetscFunctionBegin;
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
  } else {
    SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported by MPIAdj",((PetscObject)viewer)->type_name);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatDestroy_MPIAdj"
PetscErrorCode MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)mat->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D, NZ=%D",mat->rmap->n,mat->cmap->n,a->nz);
#endif
  ierr = PetscFree(a->diag);CHKERRQ(ierr);
  if (a->freeaij) {
    if (a->freeaijwithfree) {
      if (a->i) free(a->i);
      if (a->j) free(a->j);
    } else {
      ierr = PetscFree(a->i);CHKERRQ(ierr);
      ierr = PetscFree(a->j);CHKERRQ(ierr);
      ierr = PetscFree(a->values);CHKERRQ(ierr);
    }
  }
  ierr = PetscFree(a);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)mat,0);CHKERRQ(ierr);
  ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjSetPreallocation_C","",PETSC_NULL);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetOption_MPIAdj"
PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op,PetscTruth flg)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
  switch (op) {
  case MAT_SYMMETRIC:
  case MAT_STRUCTURALLY_SYMMETRIC:
  case MAT_HERMITIAN:
    a->symmetric = flg;
    break;
  case MAT_SYMMETRY_ETERNAL:
    break;
  default:
    ierr = PetscInfo1(A,"Option %s ignored\n",MatOptions[op]);CHKERRQ(ierr);
    break;
  }
  PetscFunctionReturn(0);
}

/*
     Adds diagonal pointers to sparse matrix structure.
*/

#undef __FUNCT__
#define __FUNCT__ "MatMarkDiagonal_MPIAdj"
PetscErrorCode MatMarkDiagonal_MPIAdj(Mat A)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;
  PetscInt       i,j,m = A->rmap->n;

  PetscFunctionBegin;
  ierr = PetscMalloc(m*sizeof(PetscInt),&a->diag);CHKERRQ(ierr);
  ierr = PetscLogObjectMemory(A,m*sizeof(PetscInt));CHKERRQ(ierr);
  for (i=0; i<A->rmap->n; i++) {
    for (j=a->i[i]; j<a->i[i+1]; j++) {
      if (a->j[j] == i) {
        a->diag[i] = j;
        break;
      }
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetRow_MPIAdj"
PetscErrorCode MatGetRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
  PetscInt   *itmp;

  PetscFunctionBegin;
  row -= A->rmap->rstart;

  if (row < 0 || row >= A->rmap->n) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");

  *nz = a->i[row+1] - a->i[row];
  if (v) *v = PETSC_NULL;
  if (idx) {
    itmp = a->j + a->i[row];
    if (*nz) {
      *idx = itmp;
    } else *idx = 0;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatRestoreRow_MPIAdj"
PetscErrorCode MatRestoreRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatEqual_MPIAdj"
PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscTruth* flg)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj *)A->data,*b = (Mat_MPIAdj *)B->data;
  PetscErrorCode ierr;
  PetscTruth     flag;

  PetscFunctionBegin;
  /* if the local dimensions or the number of nonzeros differ, the matrices cannot be equal */
  if ((A->rmap->n != B->rmap->n) || (a->nz != b->nz)) {
    flag = PETSC_FALSE;
  } else {
    /* check that the row pointers a->i are the same */
    ierr = PetscMemcmp(a->i,b->i,(A->rmap->n+1)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
    if (flag) {
      /* check that the column indices a->j are the same */
      ierr = PetscMemcmp(a->j,b->j,(a->nz)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
    }
  }

  ierr = MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,((PetscObject)A)->comm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetRowIJ_MPIAdj"
PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscTruth symmetric,PetscTruth blockcompressed,PetscInt *m,PetscInt *ia[],PetscInt *ja[],PetscTruth *done)
{
  PetscErrorCode ierr;
  PetscMPIInt    size;
  PetscInt       i;
  Mat_MPIAdj     *a = (Mat_MPIAdj *)A->data;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(((PetscObject)A)->comm,&size);CHKERRQ(ierr);
  if (size > 1) {*done = PETSC_FALSE; PetscFunctionReturn(0);}
  *m    = A->rmap->n;
  *ia   = a->i;
  *ja   = a->j;
  *done = PETSC_TRUE;
  if (oshift) {
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]++;
    }
    for (i=0; i<=(*m); i++) (*ia)[i]++;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatRestoreRowIJ_MPIAdj"
PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscTruth symmetric,PetscTruth blockcompressed,PetscInt *m,PetscInt *ia[],PetscInt *ja[],PetscTruth *done)
{
  PetscInt   i;
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;

  PetscFunctionBegin;
  if (ia && a->i != *ia) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"ia passed back is not one obtained with MatGetRowIJ()");
  if (ja && a->j != *ja) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"ja passed back is not one obtained with MatGetRowIJ()");
  if (oshift) {
    for (i=0; i<=(*m); i++) (*ia)[i]--;
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]--;
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatConvertFrom_MPIAdj"
PetscErrorCode PETSCMAT_DLLEXPORT MatConvertFrom_MPIAdj(Mat A,const MatType type,MatReuse reuse,Mat *newmat)
{
  Mat               B;
  PetscErrorCode    ierr;
  PetscInt          i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
  const PetscInt    *rj;
  const PetscScalar *ra;
  MPI_Comm          comm;

  PetscFunctionBegin;
  ierr = MatGetSize(A,PETSC_NULL,&N);CHKERRQ(ierr);
  ierr = MatGetLocalSize(A,&m,PETSC_NULL);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(A,&rstart,PETSC_NULL);CHKERRQ(ierr);

  /* count the number of nonzeros per row */
  for (i=0; i<m; i++) {
    ierr = MatGetRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
    for (j=0; j<len; j++) {
      if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
    }
    ierr = MatRestoreRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
    nzeros += len;
  }

  /* malloc space for nonzeros */
  ierr = PetscMalloc((nzeros+1)*sizeof(PetscInt),&a);CHKERRQ(ierr);
  ierr = PetscMalloc((m+1)*sizeof(PetscInt),&ia);CHKERRQ(ierr);
  ierr = PetscMalloc((nzeros+1)*sizeof(PetscInt),&ja);CHKERRQ(ierr);

  nzeros = 0;
  ia[0]  = 0;
  for (i=0; i<m; i++) {
    ierr = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    cnt  = 0;
    for (j=0; j<len; j++) {
      if (rj[j] != i+rstart) { /* if not diagonal */
        a[nzeros+cnt]    = (PetscInt) PetscAbsScalar(ra[j]);
        ja[nzeros+cnt++] = rj[j];
      }
    }
    ierr = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    nzeros += cnt;
    ia[i+1] = nzeros;
  }

  ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
  ierr = MatCreate(comm,&B);CHKERRQ(ierr);
  ierr = MatSetSizes(B,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
  ierr = MatSetType(B,type);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(B,ia,ja,a);CHKERRQ(ierr);

  if (reuse == MAT_REUSE_MATRIX) {
    ierr = MatHeaderReplace(A,B);CHKERRQ(ierr);
  } else {
    *newmat = B;
  }
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------*/
static struct _MatOps MatOps_Values = {0,
       MatGetRow_MPIAdj,
       MatRestoreRow_MPIAdj,
       0,
/* 4*/ 0,
       0,
       0,
       0,
       0,
       0,
/*10*/ 0,
       0,
       0,
       0,
       0,
/*15*/ 0,
       MatEqual_MPIAdj,
       0,
       0,
       0,
/*20*/ 0,
       0,
       MatSetOption_MPIAdj,
       0,
/*24*/ 0,
       0,
       0,
       0,
       0,
/*29*/ 0,
       0,
       0,
       0,
       0,
/*34*/ 0,
       0,
       0,
       0,
       0,
/*39*/ 0,
       0,
       0,
       0,
       0,
/*44*/ 0,
       0,
       0,
       0,
       0,
/*49*/ 0,
       MatGetRowIJ_MPIAdj,
       MatRestoreRowIJ_MPIAdj,
       0,
       0,
/*54*/ 0,
       0,
       0,
       0,
       0,
/*59*/ 0,
       MatDestroy_MPIAdj,
       MatView_MPIAdj,
       MatConvertFrom_MPIAdj,
       0,
/*64*/ 0,
       0,
       0,
       0,
       0,
/*69*/ 0,
       0,
       0,
       0,
       0,
/*74*/ 0,
       0,
       0,
       0,
       0,
/*79*/ 0,
       0,
       0,
       0,
       0,
/*84*/ 0,
       0,
       0,
       0,
       0,
/*89*/ 0,
       0,
       0,
       0,
       0,
/*94*/ 0,
       0,
       0,
       0};

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjSetPreallocation_MPIAdj"
PetscErrorCode PETSCMAT_DLLEXPORT MatMPIAdjSetPreallocation_MPIAdj(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{
  Mat_MPIAdj     *b = (Mat_MPIAdj *)B->data;
  PetscErrorCode ierr;
#if defined(PETSC_USE_DEBUG)
  PetscInt       ii;
#endif

  PetscFunctionBegin;
  ierr = PetscMapSetBlockSize(B->rmap,1);CHKERRQ(ierr);
  ierr = PetscMapSetBlockSize(B->cmap,1);CHKERRQ(ierr);
  ierr = PetscMapSetUp(B->rmap);CHKERRQ(ierr);
  ierr = PetscMapSetUp(B->cmap);CHKERRQ(ierr);

#if defined(PETSC_USE_DEBUG)
  if (i[0] != 0) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"First i[] index must be zero, instead it is %D\n",i[0]);
  for (ii=1; ii<B->rmap->n; ii++) {
    if (i[ii] < 0 || i[ii] < i[ii-1]) {
      SETERRQ4(PETSC_ERR_ARG_OUTOFRANGE,"Row offset i[%D]=%D is negative or smaller than the previous offset i[%D]=%D",ii,i[ii],ii-1,i[ii-1]);
    }
  }
  for (ii=0; ii<i[B->rmap->n]; ii++) {
    if (j[ii] < 0 || j[ii] >= B->cmap->N) {
      SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Column index j[%D]=%D is out of range\n",ii,j[ii]);
    }
  }
#endif
  B->preallocated = PETSC_TRUE;

  b->j      = j;
  b->i      = i;
  b->values = values;

  b->nz        = i[B->rmap->n];
  b->diag      = 0;
  b->symmetric = PETSC_FALSE;
  b->freeaij   = PETSC_TRUE;

  ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

/*MC
   MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
   intended for use constructing orderings and partitionings.

  Level: beginner

.seealso: MatCreateMPIAdj()
M*/
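
/*
   Example (illustrative sketch, not part of the library): the usual reason to build a
   MATMPIADJ matrix is to hand it to the MatPartitioning interface.  The fragment below
   assumes an already assembled adjacency matrix Adj, a calling function that declares
   PetscErrorCode ierr, and the MatPartitioning calls of this PETSc release; the
   variable names are invented for the example.

      MatPartitioning part;
      IS              is;

      ierr = MatPartitioningCreate(PETSC_COMM_WORLD,&part);CHKERRQ(ierr);
      ierr = MatPartitioningSetAdjacency(part,Adj);CHKERRQ(ierr);
      ierr = MatPartitioningSetFromOptions(part);CHKERRQ(ierr);
      ierr = MatPartitioningApply(part,&is);CHKERRQ(ierr);
      ierr = MatPartitioningDestroy(part);CHKERRQ(ierr);
      ierr = ISDestroy(is);CHKERRQ(ierr);

   MatPartitioningApply() returns an index set giving, for each local row (vertex),
   the process it is assigned to by the partitioner.
*/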

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatCreate_MPIAdj"
PetscErrorCode PETSCMAT_DLLEXPORT MatCreate_MPIAdj(Mat B)
{
  Mat_MPIAdj     *b;
  PetscErrorCode ierr;
  PetscMPIInt    size,rank;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(((PetscObject)B)->comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(((PetscObject)B)->comm,&rank);CHKERRQ(ierr);

  ierr         = PetscNewLog(B,Mat_MPIAdj,&b);CHKERRQ(ierr);
  B->data      = (void*)b;
  ierr         = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
  B->mapping   = 0;
  B->assembled = PETSC_FALSE;

  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAdjSetPreallocation_C",
                                    "MatMPIAdjSetPreallocation_MPIAdj",
                                     MatMPIAdjSetPreallocation_MPIAdj);CHKERRQ(ierr);
  ierr = PetscObjectChangeTypeName((PetscObject)B,MATMPIADJ);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjSetPreallocation"
/*@C
   MatMPIAdjSetPreallocation - Sets the arrays used for storing the matrix structure and (optional) edge weights

   Collective on MPI_Comm

   Input Parameters:
+  A - the matrix
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Level: intermediate

.seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
@*/
PetscErrorCode PETSCMAT_DLLEXPORT MatMPIAdjSetPreallocation(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
{
  PetscErrorCode ierr,(*f)(Mat,PetscInt*,PetscInt*,PetscInt*);

  PetscFunctionBegin;
  ierr = PetscObjectQueryFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(B,i,j,values);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}
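
/*
   Example (illustrative sketch): filling the i/j arrays by hand for the 3-vertex path
   graph 0 - 1 - 2 stored on a single process, with no edge weights.  The arrays must be
   obtained with PetscMalloc() because the matrix takes ownership of them and frees them
   on destruction.  The names Adj, ii and jj are invented for this example, and ierr is
   assumed to be declared in the calling function.

      Mat      Adj;
      PetscInt *ii,*jj;

      row pointers, length m+1 = 4, and column indices, length nz = 4
      ierr = PetscMalloc(4*sizeof(PetscInt),&ii);CHKERRQ(ierr);
      ierr = PetscMalloc(4*sizeof(PetscInt),&jj);CHKERRQ(ierr);
      ii[0] = 0; ii[1] = 1; ii[2] = 3; ii[3] = 4;
      jj[0] = 1; jj[1] = 0; jj[2] = 2; jj[3] = 1;

      ierr = MatCreate(PETSC_COMM_SELF,&Adj);CHKERRQ(ierr);
      ierr = MatSetSizes(Adj,3,PETSC_DETERMINE,PETSC_DETERMINE,3);CHKERRQ(ierr);
      ierr = MatSetType(Adj,MATMPIADJ);CHKERRQ(ierr);
      ierr = MatMPIAdjSetPreallocation(Adj,ii,jj,PETSC_NULL);CHKERRQ(ierr);

   MatCreateMPIAdj() below wraps the last four calls into a single convenience routine.
*/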

#undef __FUNCT__
#define __FUNCT__ "MatCreateMPIAdj"
/*@C
   MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
   The matrix does not have numerical values associated with it, but is
   intended for ordering (to reduce bandwidth etc) and partitioning.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of local rows
.  N - number of global columns
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Output Parameter:
.  A - the matrix

   Level: intermediate

   Notes: This matrix object does not support most matrix operations, including
   MatSetValues().
   You must NOT free the i, j, and values arrays yourself; PETSc will free them
   when the matrix is destroyed.  You must allocate them with PetscMalloc() (if you
   call from Fortran you need not create the arrays with PetscMalloc()).
   The arrays should not include the matrix diagonal entries.

   If you already have a matrix, you can create its adjacency matrix by a call
   to MatConvert(), specifying a type of MATMPIADJ.

   Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC

.seealso: MatCreate(), MatConvert(), MatGetOrdering()
@*/
PetscErrorCode PETSCMAT_DLLEXPORT MatCreateMPIAdj(MPI_Comm comm,PetscInt m,PetscInt N,PetscInt *i,PetscInt *j,PetscInt *values,Mat *A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatCreate(comm,A);CHKERRQ(ierr);
  ierr = MatSetSizes(*A,m,PETSC_DETERMINE,PETSC_DETERMINE,N);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATMPIADJ);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(*A,i,j,values);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
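
/*
   Example (illustrative sketch): as the manual page above notes, an adjacency matrix can
   also be obtained from an existing assembled matrix (for example a MATMPIAIJ matrix) by
   conversion; MatConvertFrom_MPIAdj() above implements this path.  The matrix C below is
   an assumed, already assembled matrix and ierr is assumed to be declared by the caller.

      Mat Adj;

      ierr = MatConvert(C,MATMPIADJ,MAT_INITIAL_MATRIX,&Adj);CHKERRQ(ierr);

   The conversion drops the diagonal entries and keeps only the nonzero structure (plus
   integer weights derived from the magnitudes of the off-diagonal entries).
*/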