xref: /petsc/src/mat/impls/adj/mpi/mpiadj.c (revision 63ba0a88a373a65bb5c631b46cccd6707d280002)
1 /*
2     Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
3 */
4 #include "src/mat/impls/adj/mpi/mpiadj.h"
5 #include "petscsys.h"
6 
7 #undef __FUNCT__
8 #define __FUNCT__ "MatView_MPIAdj_ASCII"
9 PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
10 {
11   Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
12   PetscErrorCode    ierr;
13   PetscInt          i,j,m = A->m;
14   char              *name;
15   PetscViewerFormat format;
16 
17   PetscFunctionBegin;
18   ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
19   ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
20   if (format == PETSC_VIEWER_ASCII_INFO) {
21     PetscFunctionReturn(0);
22   } else if (format == PETSC_VIEWER_ASCII_MATLAB) {
23     SETERRQ(PETSC_ERR_SUP,"Matlab format not supported");
24   } else {
25     ierr = PetscViewerASCIIUseTabs(viewer,PETSC_NO);CHKERRQ(ierr);
26     for (i=0; i<m; i++) {
27       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"row %D:",i+a->rstart);CHKERRQ(ierr);
28       for (j=a->i[i]; j<a->i[i+1]; j++) {
29         ierr = PetscViewerASCIISynchronizedPrintf(viewer," %D ",a->j[j]);CHKERRQ(ierr);
30       }
31       ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
32     }
33     ierr = PetscViewerASCIIUseTabs(viewer,PETSC_YES);CHKERRQ(ierr);
34   }
35   ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
36   PetscFunctionReturn(0);
37 }
38 
39 #undef __FUNCT__
40 #define __FUNCT__ "MatView_MPIAdj"
41 PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
42 {
43   PetscErrorCode ierr;
44   PetscTruth     iascii;
45 
46   PetscFunctionBegin;
47   ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr);
48   if (iascii) {
49     ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
50   } else {
51     SETERRQ1(PETSC_ERR_SUP,"Viewer type %s not supported by MPIAdj",((PetscObject)viewer)->type_name);
52   }
53   PetscFunctionReturn(0);
54 }
55 
56 #undef __FUNCT__
57 #define __FUNCT__ "MatDestroy_MPIAdj"
58 PetscErrorCode MatDestroy_MPIAdj(Mat mat)
59 {
60   Mat_MPIAdj     *a = (Mat_MPIAdj*)mat->data;
61   PetscErrorCode ierr;
62 
63   PetscFunctionBegin;
64 #if defined(PETSC_USE_LOG)
65   PetscLogObjectState((PetscObject)mat,"Rows=%D, Cols=%D, NZ=%D",mat->m,mat->n,a->nz);
66 #endif
67   if (a->diag) {ierr = PetscFree(a->diag);CHKERRQ(ierr);}
68   if (a->freeaij) {
69     ierr = PetscFree(a->i);CHKERRQ(ierr);
70     ierr = PetscFree(a->j);CHKERRQ(ierr);
71     if (a->values) {ierr = PetscFree(a->values);CHKERRQ(ierr);}
72   }
73   ierr = PetscFree(a->rowners);CHKERRQ(ierr);
74   ierr = PetscFree(a);CHKERRQ(ierr);
75 
76   ierr = PetscObjectComposeFunction((PetscObject)mat,"MatMPIAdjSetPreallocation_C","",PETSC_NULL);CHKERRQ(ierr);
77   PetscFunctionReturn(0);
78 }
79 
80 #undef __FUNCT__
81 #define __FUNCT__ "MatSetOption_MPIAdj"
82 PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op)
83 {
84   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
85   PetscErrorCode ierr;
86 
87   PetscFunctionBegin;
88   switch (op) {
89   case MAT_SYMMETRIC:
90   case MAT_STRUCTURALLY_SYMMETRIC:
91   case MAT_HERMITIAN:
92     a->symmetric = PETSC_TRUE;
93     break;
94   case MAT_NOT_SYMMETRIC:
95   case MAT_NOT_STRUCTURALLY_SYMMETRIC:
96   case MAT_NOT_HERMITIAN:
97     a->symmetric = PETSC_FALSE;
98     break;
99   case MAT_SYMMETRY_ETERNAL:
100   case MAT_NOT_SYMMETRY_ETERNAL:
101     break;
102   default:
103     ierr = PetscLogInfo((A,"MatSetOption_MPIAdj:Option ignored\n"));CHKERRQ(ierr);
104     break;
105   }
106   PetscFunctionReturn(0);
107 }
108 
109 
110 /*
111      Adds diagonal pointers to sparse matrix structure.
112 */
113 
114 #undef __FUNCT__
115 #define __FUNCT__ "MatMarkDiagonal_MPIAdj"
116 PetscErrorCode MatMarkDiagonal_MPIAdj(Mat A)
117 {
118   Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
119   PetscErrorCode ierr;
120   PetscInt       i,j,*diag,m = A->m;
121 
122   PetscFunctionBegin;
123   ierr = PetscMalloc((m+1)*sizeof(PetscInt),&diag);CHKERRQ(ierr);
124   ierr = PetscLogObjectMemory(A,(m+1)*sizeof(PetscInt));CHKERRQ(ierr);
125   for (i=0; i<m; i++) { diag[i] = -1;    /* -1 if the row has no stored diagonal entry */
126     for (j=a->i[i]; j<a->i[i+1]; j++) {
127       if (a->j[j] == i) {
128         diag[i] = j;
129         break;
130       }
131     }
132   }
133   a->diag = diag;
134   PetscFunctionReturn(0);
135 }
136 
137 #undef __FUNCT__
138 #define __FUNCT__ "MatGetRow_MPIAdj"
139 PetscErrorCode MatGetRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
140 {
141   Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
142   PetscInt   *itmp;
143 
144   PetscFunctionBegin;
145   row -= a->rstart;
146 
147   if (row < 0 || row >= A->m) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");
148 
149   *nz = a->i[row+1] - a->i[row];
150   if (v) *v = PETSC_NULL;
151   if (idx) {
152     itmp = a->j + a->i[row];
153     if (*nz) {
154       *idx = itmp;
155     }
156     else *idx = 0;
157   }
158   PetscFunctionReturn(0);
159 }
160 
161 #undef __FUNCT__
162 #define __FUNCT__ "MatRestoreRow_MPIAdj"
163 PetscErrorCode MatRestoreRow_MPIAdj(Mat A,PetscInt row,PetscInt *nz,PetscInt **idx,PetscScalar **v)
164 {
165   PetscFunctionBegin;
166   PetscFunctionReturn(0);
167 }
168 
169 #undef __FUNCT__
170 #define __FUNCT__ "MatEqual_MPIAdj"
171 PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscTruth* flg)
172 {
173   Mat_MPIAdj     *a = (Mat_MPIAdj *)A->data,*b = (Mat_MPIAdj *)B->data;
174   PetscErrorCode ierr;
175   PetscTruth     flag;
176 
177   PetscFunctionBegin;
178   /* the matrices differ if their dimensions or numbers of nonzeros differ */
179   flag = (PetscTruth)((A->m == B->m) && (a->nz == b->nz));
180 
181   /* compare the row offsets only when the sizes and nonzero counts match */
182   if (flag) {
183     ierr = PetscMemcmp(a->i,b->i,(A->m+1)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
184   }
185   /* compare the column indices only when the row offsets match */
186   if (flag) {
187     ierr = PetscMemcmp(a->j,b->j,(a->nz)*sizeof(PetscInt),&flag);CHKERRQ(ierr);
188   }
189   ierr = MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,A->comm);CHKERRQ(ierr);
190   PetscFunctionReturn(0);
191 }
192 
193 #undef __FUNCT__
194 #define __FUNCT__ "MatGetRowIJ_MPIAdj"
195 PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscTruth symmetric,PetscInt *m,PetscInt *ia[],PetscInt *ja[],PetscTruth *done)
196 {
197   PetscErrorCode ierr;
198   PetscMPIInt    size;
199   PetscInt       i;
200   Mat_MPIAdj     *a = (Mat_MPIAdj *)A->data;
201 
202   PetscFunctionBegin;
203   ierr = MPI_Comm_size(A->comm,&size);CHKERRQ(ierr);
204   if (size > 1) {*done = PETSC_FALSE; PetscFunctionReturn(0);}
205   *m    = A->m;
206   *ia   = a->i;
207   *ja   = a->j;
208   *done = PETSC_TRUE;
209   if (oshift) {
210     for (i=0; i<(*ia)[*m]; i++) {
211       (*ja)[i]++;
212     }
213     for (i=0; i<=(*m); i++) (*ia)[i]++;
214   }
215   PetscFunctionReturn(0);
216 }
217 
218 #undef __FUNCT__
219 #define __FUNCT__ "MatRestoreRowIJ_MPIAdj"
220 PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,PetscInt oshift,PetscTruth symmetric,PetscInt *m,PetscInt *ia[],PetscInt *ja[],PetscTruth *done)
221 {
222   PetscInt   i;
223   Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;
224 
225   PetscFunctionBegin;
226   if (ia && a->i != *ia) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"ia passed back is not one obtained with MatGetRowIJ()");
227   if (ja && a->j != *ja) SETERRQ(PETSC_ERR_ARG_WRONGSTATE,"ja passed back is not one obtained with MatGetRowIJ()");
228   if (oshift) {
229     for (i=0; i<=(*m); i++) (*ia)[i]--;
230     for (i=0; i<(*ia)[*m]; i++) {
231       (*ja)[i]--;
232     }
233   }
234   PetscFunctionReturn(0);
235 }
236 
237 /* -------------------------------------------------------------------*/
238 static struct _MatOps MatOps_Values = {0,
239        MatGetRow_MPIAdj,
240        MatRestoreRow_MPIAdj,
241        0,
242 /* 4*/ 0,
243        0,
244        0,
245        0,
246        0,
247        0,
248 /*10*/ 0,
249        0,
250        0,
251        0,
252        0,
253 /*15*/ 0,
254        MatEqual_MPIAdj,
255        0,
256        0,
257        0,
258 /*20*/ 0,
259        0,
260        0,
261        MatSetOption_MPIAdj,
262        0,
263 /*25*/ 0,
264        0,
265        0,
266        0,
267        0,
268 /*30*/ 0,
269        0,
270        0,
271        0,
272        0,
273 /*35*/ 0,
274        0,
275        0,
276        0,
277        0,
278 /*40*/ 0,
279        0,
280        0,
281        0,
282        0,
283 /*45*/ 0,
284        0,
285        0,
286        0,
287        0,
288 /*50*/ 0,
289        MatGetRowIJ_MPIAdj,
290        MatRestoreRowIJ_MPIAdj,
291        0,
292        0,
293 /*55*/ 0,
294        0,
295        0,
296        0,
297        0,
298 /*60*/ 0,
299        MatDestroy_MPIAdj,
300        MatView_MPIAdj,
301        MatGetPetscMaps_Petsc,
302        0,
303 /*65*/ 0,
304        0,
305        0,
306        0,
307        0,
308 /*70*/ 0,
309        0,
310        0,
311        0,
312        0,
313 /*75*/ 0,
314        0,
315        0,
316        0,
317        0,
318 /*80*/ 0,
319        0,
320        0,
321        0,
322        0,
323 /*85*/ 0,
324        0,
325        0,
326        0,
327        0,
328 /*90*/ 0,
329        0,
330        0,
331        0,
332        0,
333 /*95*/ 0,
334        0,
335        0,
336        0};
337 
338 EXTERN_C_BEGIN
339 #undef __FUNCT__
340 #define __FUNCT__ "MatMPIAdjSetPreallocation_MPIAdj"
341 PetscErrorCode MatMPIAdjSetPreallocation_MPIAdj(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
342 {
343   Mat_MPIAdj     *b = (Mat_MPIAdj *)B->data;
344   PetscErrorCode ierr;
345 #if defined(PETSC_USE_DEBUG)
346   PetscInt       ii;
347 #endif
348 
349   PetscFunctionBegin;
350   B->preallocated = PETSC_TRUE;
351 #if defined(PETSC_USE_DEBUG)
352   if (i[0] != 0) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"First i[] index must be zero, instead it is %D\n",i[0]);
353   for (ii=1; ii<B->m; ii++) {
354     if (i[ii] < 0 || i[ii] < i[ii-1]) {
355       SETERRQ4(PETSC_ERR_ARG_OUTOFRANGE,"i[%D]=%D index is out of range: i[%D]=%D",ii,i[ii],ii-1,i[ii-1]);
356     }
357   }
358   for (ii=0; ii<i[B->m]; ii++) {
359     if (j[ii] < 0 || j[ii] >= B->N) {
359       SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Column index j[%D]=%D is out of range\n",ii,j[ii]);
361     }
362   }
363 #endif
364 
365   b->j      = j;
366   b->i      = i;
367   b->values = values;
368 
369   b->nz               = i[B->m];
370   b->diag             = 0;
371   b->symmetric        = PETSC_FALSE;
372   b->freeaij          = PETSC_TRUE;
373 
374   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
375   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
376   PetscFunctionReturn(0);
377 }
378 EXTERN_C_END
379 
380 /*MC
381    MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
382    intended for use in constructing orderings and partitionings.
383 
384   Level: beginner
385 
386 .seealso: MatCreateMPIAdj
387 M*/
388 
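/*
   The sketch below is an illustration only (not part of the library) of how a
   MATMPIADJ matrix is typically handed to the MatPartitioning interface; the
   variable names and the surrounding setup are assumptions made for this example.

      Mat             adj;        built with MatCreateMPIAdj() or MatConvert()
      MatPartitioning part;
      IS              is;
      PetscErrorCode  ierr;

      ierr = MatPartitioningCreate(PETSC_COMM_WORLD,&part);CHKERRQ(ierr);
      ierr = MatPartitioningSetAdjacency(part,adj);CHKERRQ(ierr);
      ierr = MatPartitioningSetFromOptions(part);CHKERRQ(ierr);
      ierr = MatPartitioningApply(part,&is);CHKERRQ(ierr);
      ierr = ISDestroy(is);CHKERRQ(ierr);
      ierr = MatPartitioningDestroy(part);CHKERRQ(ierr);
*/
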
389 EXTERN_C_BEGIN
390 #undef __FUNCT__
391 #define __FUNCT__ "MatCreate_MPIAdj"
392 PetscErrorCode MatCreate_MPIAdj(Mat B)
393 {
394   Mat_MPIAdj     *b;
395   PetscErrorCode ierr;
396   PetscInt       ii;
397   PetscMPIInt    size,rank;
398 
399   PetscFunctionBegin;
400   ierr = MPI_Comm_size(B->comm,&size);CHKERRQ(ierr);
401   ierr = MPI_Comm_rank(B->comm,&rank);CHKERRQ(ierr);
402 
403   ierr                = PetscNew(Mat_MPIAdj,&b);CHKERRQ(ierr);
404   B->data             = (void*)b;
405   ierr                = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
406   B->factor           = 0;
407   B->lupivotthreshold = 1.0;
408   B->mapping          = 0;
409   B->assembled        = PETSC_FALSE;
410 
411   ierr = PetscSplitOwnership(B->comm,&B->m,&B->M);CHKERRQ(ierr);
412   B->n = B->N = PetscMax(B->N,B->n);
413 
414   /* the information in the maps duplicates the information computed below, eventually
415      we should remove the duplicate information that is not contained in the maps */
416   ierr = PetscMapCreateMPI(B->comm,B->m,B->M,&B->rmap);CHKERRQ(ierr);
417   /* we don't know the "local columns" so just use the row information :-(*/
418   ierr = PetscMapCreateMPI(B->comm,B->m,B->M,&B->cmap);CHKERRQ(ierr);
419 
420   ierr = PetscMalloc((size+1)*sizeof(PetscInt),&b->rowners);CHKERRQ(ierr);
421   ierr = PetscLogObjectMemory(B,(size+2)*sizeof(PetscInt)+sizeof(struct _p_Mat)+sizeof(Mat_MPIAdj));CHKERRQ(ierr);
422   ierr = MPI_Allgather(&B->m,1,MPIU_INT,b->rowners+1,1,MPIU_INT,B->comm);CHKERRQ(ierr);
423   b->rowners[0] = 0;
424   for (ii=2; ii<=size; ii++) {
425     b->rowners[ii] += b->rowners[ii-1];
426   }
427   b->rstart = b->rowners[rank];
428   b->rend   = b->rowners[rank+1];
429   ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAdjSetPreallocation_C",
430                                     "MatMPIAdjSetPreallocation_MPIAdj",
431                                      MatMPIAdjSetPreallocation_MPIAdj);CHKERRQ(ierr);
432   PetscFunctionReturn(0);
433 }
434 EXTERN_C_END
435 
436 #undef __FUNCT__
437 #define __FUNCT__ "MatMPIAdjSetPreallocation"
438 /*@C
439    MatMPIAdjSetPreallocation - Sets the arrays used to store the adjacency structure and optional weights of the matrix
440 
441    Collective on MPI_Comm
442 
443    Input Parameters:
444 +  A - the matrix
445 .  i - the indices into j for the start of each row
446 .  j - the column indices for each row (sorted for each row).
447        The indices in i and j start with zero (NOT with one).
448 -  values - [optional] edge weights
449 
450    Level: intermediate
451 
452 .seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
453 @*/
454 PetscErrorCode MatMPIAdjSetPreallocation(Mat B,PetscInt *i,PetscInt *j,PetscInt *values)
455 {
456   PetscErrorCode ierr,(*f)(Mat,PetscInt*,PetscInt*,PetscInt*);
457 
458   PetscFunctionBegin;
459   ierr = PetscObjectQueryFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C",(void (**)(void))&f);CHKERRQ(ierr);
460   if (f) {
461     ierr = (*f)(B,i,j,values);CHKERRQ(ierr);
462   }
463   PetscFunctionReturn(0);
464 }
465 
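/*
   A minimal usage sketch for MatMPIAdjSetPreallocation() (illustrative only,
   not part of the library): a MATMPIADJ matrix is created in two steps and
   given its CSR-style arrays.  The 3-vertex path graph (edges 0-1 and 1-2, no
   diagonal entries), the one-process sizes and the names ii,jj are assumptions
   made for this example; the arrays must come from PetscMalloc() because the
   matrix takes ownership of them and frees them in MatDestroy_MPIAdj().

      Mat            B;
      PetscInt       *ii,*jj;
      PetscErrorCode ierr;

      ierr = PetscMalloc(4*sizeof(PetscInt),&ii);CHKERRQ(ierr);
      ierr = PetscMalloc(4*sizeof(PetscInt),&jj);CHKERRQ(ierr);
      ii[0] = 0; ii[1] = 1; ii[2] = 3; ii[3] = 4;      row offsets, m+1 entries
      jj[0] = 1; jj[1] = 0; jj[2] = 2; jj[3] = 1;      column indices, sorted within each row
      ierr = MatCreate(PETSC_COMM_WORLD,3,3,PETSC_DETERMINE,3,&B);CHKERRQ(ierr);
      ierr = MatSetType(B,MATMPIADJ);CHKERRQ(ierr);
      ierr = MatMPIAdjSetPreallocation(B,ii,jj,PETSC_NULL);CHKERRQ(ierr);
*/
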
466 #undef __FUNCT__
467 #define __FUNCT__ "MatCreateMPIAdj"
468 /*@C
469    MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
470    The matrix does not have numerical values associated with it, but is
471    intended for ordering (to reduce bandwidth etc) and partitioning.
472 
473    Collective on MPI_Comm
474 
475    Input Parameters:
476 +  comm - MPI communicator
477 .  m - number of local rows
478 .  n - number of columns
479 .  i - the indices into j for the start of each row
480 .  j - the column indices for each row (sorted for each row).
481        The indices in i and j start with zero (NOT with one).
482 -  values -[optional] edge weights
483 
484    Output Parameter:
485 .  A - the matrix
486 
487    Level: intermediate
488 
489    Notes: This matrix object does not support most matrix operations, including
490    MatSetValues().
491    You must NOT free the i, values and j arrays yourself. PETSc will free them
492    when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
493    call from Fortran you need not create the arrays with PetscMalloc().
494    The arrays should not include the matrix diagonal entries.
495 
496    If you already have a matrix, you can create its adjacency matrix by a call
497    to MatConvert, specifying a type of MATMPIADJ.
498 
499    Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC
500 
501 .seealso: MatCreate(), MatConvert(), MatGetOrdering()
502 @*/
503 PetscErrorCode MatCreateMPIAdj(MPI_Comm comm,PetscInt m,PetscInt n,PetscInt *i,PetscInt *j,PetscInt *values,Mat *A)
504 {
505   PetscErrorCode ierr;
506 
507   PetscFunctionBegin;
508   ierr = MatCreate(comm,m,n,PETSC_DETERMINE,n,A);CHKERRQ(ierr);
509   ierr = MatSetType(*A,MATMPIADJ);CHKERRQ(ierr);
510   ierr = MatMPIAdjSetPreallocation(*A,i,j,values);CHKERRQ(ierr);
511   PetscFunctionReturn(0);
512 }
513 
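/*
   A companion sketch for MatCreateMPIAdj() itself (again illustrative only):
   the same assumed 3-vertex path graph as above, built with the single
   convenience call.  PETSc frees ii and jj when adj is destroyed, so the
   caller must not free them.

      Mat            adj;
      PetscInt       *ii,*jj;
      PetscErrorCode ierr;

      ierr = PetscMalloc(4*sizeof(PetscInt),&ii);CHKERRQ(ierr);
      ierr = PetscMalloc(4*sizeof(PetscInt),&jj);CHKERRQ(ierr);
      ii[0] = 0; ii[1] = 1; ii[2] = 3; ii[3] = 4;
      jj[0] = 1; jj[1] = 0; jj[2] = 2; jj[3] = 1;
      ierr = MatCreateMPIAdj(PETSC_COMM_WORLD,3,3,ii,jj,PETSC_NULL,&adj);CHKERRQ(ierr);
      ierr = MatDestroy(adj);CHKERRQ(ierr);
*/
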
514 EXTERN_C_BEGIN
515 #undef __FUNCT__
516 #define __FUNCT__ "MatConvertTo_MPIAdj"
517 PetscErrorCode MatConvertTo_MPIAdj(Mat A,MatType type,MatReuse reuse,Mat *newmat)
518 {
519   Mat               B;
520   PetscErrorCode    ierr;
521   PetscInt          i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
522   const PetscInt    *rj;
523   const PetscScalar *ra;
524   MPI_Comm          comm;
525 
526   PetscFunctionBegin;
527   ierr = MatGetSize(A,PETSC_NULL,&N);CHKERRQ(ierr);
528   ierr = MatGetLocalSize(A,&m,PETSC_NULL);CHKERRQ(ierr);
529   ierr = MatGetOwnershipRange(A,&rstart,PETSC_NULL);CHKERRQ(ierr);
530 
531   /* count the number of nonzeros per row */
532   for (i=0; i<m; i++) {
533     ierr   = MatGetRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
534     for (j=0; j<len; j++) {
535       if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
536     }
537     ierr   = MatRestoreRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
538     nzeros += len;
539   }
540 
541   /* malloc space for nonzeros */
542   ierr = PetscMalloc((nzeros+1)*sizeof(PetscInt),&a);CHKERRQ(ierr);
543   ierr = PetscMalloc((N+1)*sizeof(PetscInt),&ia);CHKERRQ(ierr);
544   ierr = PetscMalloc((nzeros+1)*sizeof(PetscInt),&ja);CHKERRQ(ierr);
545 
546   nzeros = 0;
547   ia[0]  = 0;
548   for (i=0; i<m; i++) {
549     ierr    = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
550     cnt     = 0;
551     for (j=0; j<len; j++) {
552       if (rj[j] != i+rstart) { /* if not diagonal */
553         a[nzeros+cnt]    = (PetscInt) PetscAbsScalar(ra[j]);
554         ja[nzeros+cnt++] = rj[j];
555       }
556     }
557     ierr    = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
558     nzeros += cnt;
559     ia[i+1] = nzeros;
560   }
561 
562   ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
563   ierr = MatCreate(comm,m,N,PETSC_DETERMINE,N,&B);CHKERRQ(ierr);
564   ierr = MatSetType(B,type);CHKERRQ(ierr);
565   ierr = MatMPIAdjSetPreallocation(B,ia,ja,a);CHKERRQ(ierr);
566 
567   if (reuse == MAT_REUSE_MATRIX) {
568     ierr = MatHeaderReplace(A,B);CHKERRQ(ierr);
569   } else {
570     *newmat = B;
571   }
572   PetscFunctionReturn(0);
573 }
574 EXTERN_C_END
575 
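/*
   A sketch of the conversion path mentioned in the MatCreateMPIAdj() manual
   page (illustrative assumptions only): an existing assembled matrix A is
   converted to MATMPIADJ, presumably arriving in MatConvertTo_MPIAdj() above,
   and the result can then be passed to a partitioner as sketched earlier.

      Mat            A;         an existing, assembled matrix
      Mat            adj;
      PetscErrorCode ierr;

      ierr = MatConvert(A,MATMPIADJ,MAT_INITIAL_MATRIX,&adj);CHKERRQ(ierr);
*/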