xref: /petsc/src/mat/impls/adj/mpi/mpiadj.c (revision c8d321e0b9e4749378a4360baa6ac0a0180938ee)
1 /*
2     Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
3 */
4 #include "src/mat/impls/adj/mpi/mpiadj.h"
5 #include "petscsys.h"
6 
7 #undef __FUNCT__
8 #define __FUNCT__ "MatView_MPIAdj_ASCII"
9 PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
10 {
11   Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
12   PetscErrorCode    ierr;
13   PetscInt          i,j,m = A->m;
14   char              *name;
15   PetscViewerFormat format;
16 
17   PetscFunctionBegin;
18   ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
19   ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
20   if (format == PETSC_VIEWER_ASCII_INFO) {
21     PetscFunctionReturn(0);
22   } else if (format == PETSC_VIEWER_ASCII_MATLAB) {
23     SETERRQ(PETSC_ERR_SUP,"Matlab format not supported");
24   } else {
25     ierr = PetscViewerASCIIUseTabs(viewer,PETSC_NO);CHKERRQ(ierr);
26     for (i=0; i<m; i++) {
27       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"row %D:",i+a->rstart);CHKERRQ(ierr);
28       for (j=a->i[i]; j<a->i[i+1]; j++) {
29         ierr = PetscViewerASCIISynchronizedPrintf(viewer," %D ",a->j[j]);CHKERRQ(ierr);
30       }
31       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
32     }
33     ierr = PetscViewerASCIIUseTabs(viewer,PETSC_YES);CHKERRQ(ierr);
34   }
35   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
36   PetscFunctionReturn(0);
37 }
38 
39 #undef __FUNCT__
40 #define __FUNCT__ "MatView_MPIAdj"
41 PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
42 {
43   PetscErrorCode ierr;
44   PetscTruth     iascii;
45 
46   PetscFunctionBegin;
47   ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr);
48   if (iascii) {
49     ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
50   } else {
51     SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported by MPIAdj",((PetscObject)viewer)->type_name);
52   }
53   PetscFunctionReturn(0);
54 }
55 
56 #undef __FUNCT__
57 #define __FUNCT__ "MatDestroy_MPIAdj"
58 PetscErrorCode MatDestroy_MPIAdj(Mat mat)
59 {
60   Mat_MPIAdj     *a = (Mat_MPIAdj*)mat->data;
61   PetscErrorCode ierr;
62 
63   PetscFunctionBegin;
64 #if defined(PETSC_USE_LOG)
65   PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D, NZ=%D",mat->m,mat->n,a->nz);
66 #endif
67   if (a->diag) {ierr = PetscFree(a->diag);CHKERRQ(ierr);}
68   if (a->freeaij) {
69     ierr = PetscFree(a->i);CHKERRQ(ierr);
70     ierr = PetscFree(a->j);CHKERRQ(ierr);
71     if (a->values) {ierr = PetscFree(a->values);CHKERRQ(ierr);}
72   }
73   ierr = PetscFree(a->rowners);CHKERRQ(ierr);
74   ierr = PetscFree(a);CHKERRQ(ierr);
75 
76   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjSetPreallocation_C","",PETSC_NULL);CHKERRQ(ierr);
77   PetscFunctionReturn(0);
78 }
79 
80 #undef __FUNCT__
81 #define __FUNCT__ "MatSetOption_MPIAdj"
82 PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op)
83 {
84   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
85   PetscErrorCode ierr;
86 
87   PetscFunctionBegin;
88   switch (op) {
89   case MAT_SYMMETRIC:
90   case MAT_STRUCTURALLY_SYMMETRIC:
91   case MAT_HERMITIAN:
92     a->symmetric = PETSC_TRUE;
93     break;
94   case MAT_NOT_SYMMETRIC:
95   case MAT_NOT_STRUCTURALLY_SYMMETRIC:
96   case MAT_NOT_HERMITIAN:
97     a->symmetric = PETSC_FALSE;
98     break;
99   case MAT_SYMMETRY_ETERNAL:
100   case MAT_NOT_SYMMETRY_ETERNAL:
101     break;
102   default:
103     ierr = PetscLogInfo((A,"MatSetOption_MPIAdj:Option ignored\n"));CHKERRQ(ierr);
104     break;
105   }
106   PetscFunctionReturn(0);
107 }
108 
109 
110 /*
111      Adds diagonal pointers to sparse matrix structure.
112 */
113 
114 #undef __FUNCT__
115 #define __FUNCT__ "MatMarkDiagonal_MPIAdj"
116 PetscErrorCode MatMarkDiagonal_MPIAdj(Mat A)
117 {
118   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
119   PetscErrorCode ierr;
120   PetscInt       i,j,*diag,m = A->m;
121 
122   PetscFunctionBegin;
123   ierr = PetscMalloc((m+1)*sizeof(PetscInt),&diag);CHKERRQ(ierr);
124   ierr = PetscLogObjectMemory(A,(m+1)*sizeof(PetscInt));CHKERRQ(ierr);
125   for (i=0; i<A->m; i++) {
126     for (j=a->i[i]; j<a->i[i+1]; j++) {
127       if (a->j[j] == i) {
128         diag[i] = j;
129         break;
130       }
131     }
132   }
133   a->diag = diag;
134   PetscFunctionReturn(0);
135 }
136 
137 #undef __FUNCT__
138 #define __FUNCT__ "MatGetRow_MPIAdj"
139 PetscErrorCode MatGetRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
140 {
141   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
142   PetscInt   *itmp;
143 
144   PetscFunctionBegin;
145   row -= a->rstart;
146 
147   if (row < 0 || row >= A->m) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");
148 
149   *nz = a->i[row+1] - a->i[row];
150   if (v) *v = PETSC_NULL;
151   if (idx) {
152     itmp = a->j + a->i[row];
153     if (*nz) {
154       *idx = itmp;
155     }
156     else *idx = 0;
157   }
158   PetscFunctionReturn(0);
159 }
160 
161 #undef __FUNCT__
162 #define __FUNCT__ "MatRestoreRow_MPIAdj"
/*
   MatRestoreRow_MPIAdj - Intentionally a no-op: MatGetRow_MPIAdj() hands out
   pointers directly into the stored a->j array rather than copies, so there
   is nothing to free or restore here.
*/
PetscErrorCode MatRestoreRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}
168 
169 #undef __FUNCT__
170 #define __FUNCT__ "MatEqual_MPIAdj"
171 PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscTruth* flg)
172 {
173   Mat_MPIAdj     *a = (Mat_MPIAdj *)A->data,*b = (Mat_MPIAdj *)B->data;
174   PetscErrorCode ierr;
175   PetscTruth     flag;
176 
177   PetscFunctionBegin;
178   /* If the  matrix dimensions are not equal,or no of nonzeros */
179   if ((A->m != B->m) ||(a->nz != b->nz)) {
180     flag = PETSC_FALSE;
181   }
182 
183   /* if the a->i are the same */
184   ierr = PetscMemcmp(a->i,b->i,(A->m+1)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
185 
186   /* if a->j are the same */
187   ierr = PetscMemcmp(a->j,b->j,(a->nz)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
188 
189   ierr = MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,A->comm);CHKERRQ(ierr);
190   PetscFunctionReturn(0);
191 }
192 
193 #undef __FUNCT__
194 #define __FUNCT__ "MatGetRowIJ_MPIAdj"
195 PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscTruth symmetric,PetscInt *m,PetscInt *ia[],PetscInt *ja[],PetscTruth *done)
196 {
197   PetscErrorCode ierr;
198   PetscMPIInt    size;
199   PetscInt       i;
200   Mat_MPIAdj     *a = (Mat_MPIAdj *)A->data;
201 
202   PetscFunctionBegin;
203   ierr = MPI_Comm_size(A->comm,&size);CHKERRQ(ierr);
204   if (size > 1) {*done = PETSC_FALSE; PetscFunctionReturn(0);}
205   *m    = A->m;
206   *ia   = a->i;
207   *ja   = a->j;
208   *done = PETSC_TRUE;
209   if (oshift) {
210     for (i=0; i<(*ia)[*m]; i++) {
211       (*ja)[i]++;
212     }
213     for (i=0; i<=(*m); i++) (*ia)[i]++;
214   }
215   PetscFunctionReturn(0);
216 }
217 
218 #undef __FUNCT__
219 #define __FUNCT__ "MatRestoreRowIJ_MPIAdj"
220 PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscTruth symmetric,PetscInt *m,PetscInt *ia[],PetscInt *ja[],PetscTruth *done)
221 {
222   PetscInt   i;
223   Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;
224 
225   PetscFunctionBegin;
226   if (ia && a->i != *ia) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"ia passed back is not one obtained with MatGetRowIJ()");
227   if (ja && a->j != *ja) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"ja passed back is not one obtained with MatGetRowIJ()");
228   if (oshift) {
229     for (i=0; i<=(*m); i++) (*ia)[i]--;
230     for (i=0; i<(*ia)[*m]; i++) {
231       (*ja)[i]--;
232     }
233   }
234   PetscFunctionReturn(0);
235 }
236 
237 /* -------------------------------------------------------------------*/
/* Function table for MATMPIADJ.  Slots are positional (the /\*NN*\/ markers
   give the slot index); a 0 entry means that operation is not supported by
   this matrix type.  Copied into B->ops by MatCreate_MPIAdj(). */
static struct _MatOps MatOps_Values = {0,
       MatGetRow_MPIAdj,
       MatRestoreRow_MPIAdj,
       0,
/* 4*/ 0,
       0,
       0,
       0,
       0,
       0,
/*10*/ 0,
       0,
       0,
       0,
       0,
/*15*/ 0,
       MatEqual_MPIAdj,
       0,
       0,
       0,
/*20*/ 0,
       0,
       0,
       MatSetOption_MPIAdj,
       0,
/*25*/ 0,
       0,
       0,
       0,
       0,
/*30*/ 0,
       0,
       0,
       0,
       0,
/*35*/ 0,
       0,
       0,
       0,
       0,
/*40*/ 0,
       0,
       0,
       0,
       0,
/*45*/ 0,
       0,
       0,
       0,
       0,
/*50*/ 0,
       MatGetRowIJ_MPIAdj,
       MatRestoreRowIJ_MPIAdj,
       0,
       0,
/*55*/ 0,
       0,
       0,
       0,
       0,
/*60*/ 0,
       MatDestroy_MPIAdj,
       MatView_MPIAdj,
       MatGetPetscMaps_Petsc,
       0,
/*65*/ 0,
       0,
       0,
       0,
       0,
/*70*/ 0,
       0,
       0,
       0,
       0,
/*75*/ 0,
       0,
       0,
       0,
       0,
/*80*/ 0,
       0,
       0,
       0,
       0,
/*85*/ 0,
       0,
       0,
       0,
       0,
/*90*/ 0,
       0,
       0,
       0,
       0,
/*95*/ 0,
       0,
       0,
       0};
337 
338 EXTERN_C_BEGIN
339 #undef __FUNCT__
340 #define __FUNCT__ "MatMPIAdjSetPreallocation_MPIAdj"
341 PetscErrorCode MatMPIAdjSetPreallocation_MPIAdj(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
342 {
343   Mat_MPIAdj     *b = (Mat_MPIAdj *)B->data;
344   PetscErrorCode ierr;
345 #if defined(PETSC_USE_DEBUG)
346   PetscInt       ii;
347 #endif
348 
349   PetscFunctionBegin;
350   B->preallocated = PETSC_TRUE;
351 #if defined(PETSC_USE_DEBUG)
352   if (i[0] != 0) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"First i[] index must be zero, instead it is %D\n",i[0]);
353   for (ii=1; ii<B->m; ii++) {
354     if (i[ii] < 0 || i[ii] < i[ii-1]) {
355       SETERRQ4(PETSC_ERR_ARG_OUTOFRANGE,"i[%D]=%D index is out of range: i[%D]=%D",ii,i[ii],ii-1,i[ii-1]);
356     }
357   }
358   for (ii=0; ii<i[B->m]; ii++) {
359     if (j[ii] < 0 || j[ii] >= B->N) {
360       SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Column index %D out of range %D\n",ii,j[ii]);
361     }
362   }
363 #endif
364 
365   b->j      = j;
366   b->i      = i;
367   b->values = values;
368 
369   b->nz               = i[B->m];
370   b->diag             = 0;
371   b->symmetric        = PETSC_FALSE;
372   b->freeaij          = PETSC_TRUE;
373 
374   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
375   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
376   PetscFunctionReturn(0);
377 }
378 EXTERN_C_END
379 
380 /*MC
381    MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
382    intended for use constructing orderings and partitionings.
383 
384   Level: beginner
385 
386 .seealso: MatCreateMPIAdj()
387 M*/
388 
389 EXTERN_C_BEGIN
390 #undef __FUNCT__
391 #define __FUNCT__ "MatCreate_MPIAdj"
392 PetscErrorCode MatCreate_MPIAdj(Mat B)
393 {
394   Mat_MPIAdj     *b;
395   PetscErrorCode ierr;
396   PetscInt       ii;
397   PetscMPIInt    size,rank;
398 
399   PetscFunctionBegin;
400   ierr = MPI_Comm_size(B->comm,&size);CHKERRQ(ierr);
401   ierr = MPI_Comm_rank(B->comm,&rank);CHKERRQ(ierr);
402 
403   ierr                = PetscNew(Mat_MPIAdj,&b);CHKERRQ(ierr);
404   B->data             = (void*)b;
405   ierr                = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
406   B->factor           = 0;
407   B->mapping          = 0;
408   B->assembled        = PETSC_FALSE;
409 
410   ierr = PetscSplitOwnership(B->comm,&B->m,&B->M);CHKERRQ(ierr);
411   B->n = B->N = PetscMax(B->N,B->n);CHKERRQ(ierr);
412 
413   /* the information in the maps duplicates the information computed below, eventually
414      we should remove the duplicate information that is not contained in the maps */
415   ierr = PetscMapCreateMPI(B->comm,B->m,B->M,&B->rmap);CHKERRQ(ierr);
416   /* we don't know the "local columns" so just use the row information :-(*/
417   ierr = PetscMapCreateMPI(B->comm,B->m,B->M,&B->cmap);CHKERRQ(ierr);
418 
419   ierr = PetscMalloc((size+1)*sizeof(PetscInt),&b->rowners);CHKERRQ(ierr);
420   ierr = PetscLogObjectMemory(B,(size+2)*sizeof(PetscInt)+sizeof(struct _p_Mat)+sizeof(Mat_MPIAdj));CHKERRQ(ierr);
421   ierr = MPI_Allgather(&B->m,1,MPI_INT,b->rowners+1,1,MPI_INT,B->comm);CHKERRQ(ierr);
422   b->rowners[0] = 0;
423   for (ii=2; ii<=size; ii++) {
424     b->rowners[ii] += b->rowners[ii-1];
425   }
426   b->rstart = b->rowners[rank];
427   b->rend   = b->rowners[rank+1];
428   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAdjSetPreallocation_C",
429                                     "MatMPIAdjSetPreallocation_MPIAdj",
430                                      MatMPIAdjSetPreallocation_MPIAdj);CHKERRQ(ierr);
431   PetscFunctionReturn(0);
432 }
433 EXTERN_C_END
434 
435 #undef __FUNCT__
436 #define __FUNCT__ "MatMPIAdjSetPreallocation"
437 /*@C
438    MatMPIAdjSetPreallocation - Sets the array used for storing the matrix elements
439 
440    Collective on MPI_Comm
441 
442    Input Parameters:
443 +  A - the matrix
444 .  i - the indices into j for the start of each row
445 .  j - the column indices for each row (sorted for each row).
446        The indices in i and j start with zero (NOT with one).
447 -  values - [optional] edge weights
448 
449    Level: intermediate
450 
451 .seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
452 @*/
453 PetscErrorCode MatMPIAdjSetPreallocation(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
454 {
455   PetscErrorCode ierr,(*f)(Mat,PetscInt*,PetscInt*,PetscInt*);
456 
457   PetscFunctionBegin;
458   ierr = PetscObjectQueryFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C",(void (**)(void))&f);CHKERRQ(ierr);
459   if (f) {
460     ierr = (*f)(B,i,j,values);CHKERRQ(ierr);
461   }
462   PetscFunctionReturn(0);
463 }
464 
465 #undef __FUNCT__
466 #define __FUNCT__ "MatCreateMPIAdj"
467 /*@C
468    MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
469    The matrix does not have numerical values associated with it, but is
470    intended for ordering (to reduce bandwidth etc) and partitioning.
471 
472    Collective on MPI_Comm
473 
474    Input Parameters:
475 +  comm - MPI communicator
476 .  m - number of local rows
477 .  n - number of columns
478 .  i - the indices into j for the start of each row
479 .  j - the column indices for each row (sorted for each row).
480        The indices in i and j start with zero (NOT with one).
481 -  values -[optional] edge weights
482 
483    Output Parameter:
484 .  A - the matrix
485 
486    Level: intermediate
487 
488    Notes: This matrix object does not support most matrix operations, including
489    MatSetValues().
490    You must NOT free the i, j and values arrays yourself. PETSc will free them
491    when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
492     call from Fortran you need not create the arrays with PetscMalloc().
493    Should not include the matrix diagonals.
494 
495    If you already have a matrix, you can create its adjacency matrix by a call
496    to MatConvert, specifying a type of MATMPIADJ.
497 
498    Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC
499 
500 .seealso: MatCreate(), MatConvert(), MatGetOrdering()
501 @*/
502 PetscErrorCode MatCreateMPIAdj(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt *i,PetscInt *j,PetscInt *values,Mat *A)
503 {
504   PetscErrorCode ierr;
505 
506   PetscFunctionBegin;
507   ierr = MatCreate(comm,m,n,PETSC_DETERMINE,n,A);CHKERRQ(ierr);
508   ierr = MatSetType(*A,MATMPIADJ);CHKERRQ(ierr);
509   ierr = MatMPIAdjSetPreallocation(*A,i,j,values);CHKERRQ(ierr);
510   PetscFunctionReturn(0);
511 }
512 
513 EXTERN_C_BEGIN
514 #undef __FUNCT__
515 #define __FUNCT__ "MatConvertTo_MPIAdj"
516 PetscErrorCode MatConvertTo_MPIAdj(Mat A,MatType type,MatReuse reuse,Mat *newmat)
517 {
518   Mat               B;
519   PetscErrorCode    ierr;
520   PetscInt          i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
521   const PetscInt    *rj;
522   const PetscScalar *ra;
523   MPI_Comm          comm;
524 
525   PetscFunctionBegin;
526   ierr = MatGetSize(A,PETSC_NULL,&N);CHKERRQ(ierr);
527   ierr = MatGetLocalSize(A,&m,PETSC_NULL);CHKERRQ(ierr);
528   ierr = MatGetOwnershipRange(A,&rstart,PETSC_NULL);CHKERRQ(ierr);
529 
530   /* count the number of nonzeros per row */
531   for (i=0; i<m; i++) {
532     ierr   = MatGetRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
533     for (j=0; j<len; j++) {
534       if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
535     }
536     ierr   = MatRestoreRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
537     nzeros += len;
538   }
539 
540   /* malloc space for nonzeros */
541   ierr = PetscMalloc((nzeros+1)*sizeof(PetscInt),&a);CHKERRQ(ierr);
542   ierr = PetscMalloc((N+1)*sizeof(PetscInt),&ia);CHKERRQ(ierr);
543   ierr = PetscMalloc((nzeros+1)*sizeof(PetscInt),&ja);CHKERRQ(ierr);
544 
545   nzeros = 0;
546   ia[0]  = 0;
547   for (i=0; i<m; i++) {
548     ierr    = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
549     cnt     = 0;
550     for (j=0; j<len; j++) {
551       if (rj[j] != i+rstart) { /* if not diagonal */
552         a[nzeros+cnt]    = (PetscInt) PetscAbsScalar(ra[j]);
553         ja[nzeros+cnt++] = rj[j];
554       }
555     }
556     ierr    = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
557     nzeros += cnt;
558     ia[i+1] = nzeros;
559   }
560 
561   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
562   ierr = MatCreate(comm,m,N,PETSC_DETERMINE,N,&B);CHKERRQ(ierr);
563   ierr = MatSetType(B,type);CHKERRQ(ierr);
564   ierr = MatMPIAdjSetPreallocation(B,ia,ja,a);CHKERRQ(ierr);
565 
566   if (reuse == MAT_REUSE_MATRIX) {
567     ierr = MatHeaderReplace(A,B);CHKERRQ(ierr);
568   } else {
569     *newmat = B;
570   }
571   PetscFunctionReturn(0);
572 }
573 EXTERN_C_END
574 
575 
576 
577 
578 
579