/*
    Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
*/
#include "src/mat/impls/adj/mpi/mpiadj.h"
#include "petscsys.h"

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIAdj_ASCII"
PetscErrorCode MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
{
  Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode    ierr;
  int               i,j,m = A->m;
  char              *name;
  PetscViewerFormat format;

  PetscFunctionBegin;
  ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
  ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
  if (format == PETSC_VIEWER_ASCII_INFO) {
    PetscFunctionReturn(0);
  } else if (format == PETSC_VIEWER_ASCII_MATLAB) {
    SETERRQ(PETSC_ERR_SUP,"Matlab format not supported");
  } else {
    ierr = PetscViewerASCIIUseTabs(viewer,PETSC_NO);CHKERRQ(ierr);
    for (i=0; i<m; i++) {
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"row %d:",i+a->rstart);CHKERRQ(ierr);
      for (j=a->i[i]; j<a->i[i+1]; j++) {
        ierr = PetscViewerASCIISynchronizedPrintf(viewer," %d ",a->j[j]);CHKERRQ(ierr);
      }
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
    }
    ierr = PetscViewerASCIIUseTabs(viewer,PETSC_YES);CHKERRQ(ierr);
  }
  ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIAdj"
PetscErrorCode MatView_MPIAdj(Mat A,PetscViewer viewer)
{
  PetscErrorCode ierr;
  PetscTruth     iascii;

  PetscFunctionBegin;
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&iascii);CHKERRQ(ierr);
  if (iascii) {
    ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
  } else {
    SETERRQ1(1,"Viewer type %s not supported by MPIAdj",((PetscObject)viewer)->type_name);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatDestroy_MPIAdj"
PetscErrorCode MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)mat->data;
  PetscErrorCode ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)mat,"Rows=%d, Cols=%d, NZ=%d",mat->m,mat->n,a->nz);
#endif
  if (a->diag) {ierr = PetscFree(a->diag);CHKERRQ(ierr);}
  if (a->freeaij) {
    ierr = PetscFree(a->i);CHKERRQ(ierr);
    ierr = PetscFree(a->j);CHKERRQ(ierr);
    if (a->values) {ierr = PetscFree(a->values);CHKERRQ(ierr);}
  }
  ierr = PetscFree(a->rowners);CHKERRQ(ierr);
  ierr = PetscFree(a);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetOption_MPIAdj"
PetscErrorCode MatSetOption_MPIAdj(Mat A,MatOption op)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;

  PetscFunctionBegin;
  switch (op) {
  case MAT_SYMMETRIC:
  case MAT_STRUCTURALLY_SYMMETRIC:
  case MAT_HERMITIAN:
    a->symmetric = PETSC_TRUE;
    break;
  case MAT_NOT_SYMMETRIC:
  case MAT_NOT_STRUCTURALLY_SYMMETRIC:
  case MAT_NOT_HERMITIAN:
    a->symmetric = PETSC_FALSE;
    break;
  case MAT_SYMMETRY_ETERNAL:
  case MAT_NOT_SYMMETRY_ETERNAL:
    break;
  default:
    PetscLogInfo(A,"MatSetOption_MPIAdj:Option ignored\n");
    break;
  }
  PetscFunctionReturn(0);
}

/*
     Adds diagonal pointers to sparse matrix structure.
*/

#undef __FUNCT__
#define __FUNCT__ "MatMarkDiagonal_MPIAdj"
PetscErrorCode MatMarkDiagonal_MPIAdj(Mat A)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj*)A->data;
  PetscErrorCode ierr;
  int            i,j,*diag,m = A->m;

  PetscFunctionBegin;
  ierr = PetscMalloc((m+1)*sizeof(int),&diag);CHKERRQ(ierr);
  PetscLogObjectMemory(A,(m+1)*sizeof(int));
  for (i=0; i<m; i++) {
    diag[i] = -1;   /* rows with no diagonal entry are marked, so the array is never left uninitialized */
    for (j=a->i[i]; j<a->i[i+1]; j++) {
      if (a->j[j] == i) {
        diag[i] = j;
        break;
      }
    }
  }
  a->diag = diag;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetRow_MPIAdj"
PetscErrorCode MatGetRow_MPIAdj(Mat A,int row,int *nz,int **idx,PetscScalar **v)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
  int        *itmp;

  PetscFunctionBegin;
  row -= a->rstart;

  if (row < 0 || row >= A->m) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");

  *nz = a->i[row+1] - a->i[row];
  if (v) *v = PETSC_NULL;
  if (idx) {
    itmp = a->j + a->i[row];
    if (*nz) {
      *idx = itmp;
    } else *idx = 0;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatRestoreRow_MPIAdj"
PetscErrorCode MatRestoreRow_MPIAdj(Mat A,int row,int *nz,int **idx,PetscScalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetBlockSize_MPIAdj"
PetscErrorCode MatGetBlockSize_MPIAdj(Mat A,int *bs)
{
  PetscFunctionBegin;
  *bs = 1;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatEqual_MPIAdj"
PetscErrorCode MatEqual_MPIAdj(Mat A,Mat B,PetscTruth* flg)
{
  Mat_MPIAdj     *a = (Mat_MPIAdj *)A->data,*b = (Mat_MPIAdj *)B->data;
  PetscErrorCode ierr;
  PetscTruth     flag;

  PetscFunctionBegin;
  /* if the matrix dimensions or the number of nonzeros differ, the matrices cannot be equal */
  if ((A->m != B->m) || (a->nz != b->nz)) {
    flag = PETSC_FALSE;
  } else {
    /* check whether the a->i arrays are the same */
    ierr = PetscMemcmp(a->i,b->i,(A->m+1)*sizeof(int),&flag);CHKERRQ(ierr);
    if (flag) {
      /* check whether the a->j arrays are the same */
      ierr = PetscMemcmp(a->j,b->j,(a->nz)*sizeof(int),&flag);CHKERRQ(ierr);
    }
  }

  /* all processes must agree for the matrices to be considered equal */
  ierr = MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,A->comm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetRowIJ_MPIAdj"
PetscErrorCode MatGetRowIJ_MPIAdj(Mat A,int oshift,PetscTruth symmetric,int *m,int *ia[],int *ja[],PetscTruth *done)
{
  PetscErrorCode ierr;
  int            size,i;
  Mat_MPIAdj     *a = (Mat_MPIAdj *)A->data;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(A->comm,&size);CHKERRQ(ierr);
  if (size > 1) {*done = PETSC_FALSE; PetscFunctionReturn(0);}
  *m    = A->m;
  *ia   = a->i;
  *ja   = a->j;
  *done = PETSC_TRUE;
  if (oshift) {
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]++;
    }
    for (i=0; i<=(*m); i++) (*ia)[i]++;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatRestoreRowIJ_MPIAdj"
PetscErrorCode MatRestoreRowIJ_MPIAdj(Mat A,int oshift,PetscTruth symmetric,int *m,int *ia[],int *ja[],PetscTruth *done)
{
  int        i;
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;

  PetscFunctionBegin;
  if (ia && a->i != *ia) SETERRQ(1,"ia passed back is not one obtained with MatGetRowIJ()");
  if (ja && a->j != *ja) SETERRQ(1,"ja passed back is not one obtained with MatGetRowIJ()");
  if (oshift) {
    for (i=0; i<=(*m); i++) (*ia)[i]--;
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]--;
    }
  }
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------*/
static struct _MatOps MatOps_Values = {0,
       MatGetRow_MPIAdj,
       MatRestoreRow_MPIAdj,
       0,
/* 4*/ 0,
       0,
       0,
       0,
       0,
       0,
/*10*/ 0,
       0,
       0,
       0,
       0,
/*15*/ 0,
       MatEqual_MPIAdj,
       0,
       0,
       0,
/*20*/ 0,
       0,
       0,
       MatSetOption_MPIAdj,
       0,
/*25*/ 0,
       0,
       0,
       0,
       0,
/*30*/ 0,
       0,
       0,
       0,
       0,
/*35*/ 0,
       0,
       0,
       0,
       0,
/*40*/ 0,
       0,
       0,
       0,
       0,
/*45*/ 0,
       0,
       0,
       0,
       0,
/*50*/ MatGetBlockSize_MPIAdj,
       MatGetRowIJ_MPIAdj,
       MatRestoreRowIJ_MPIAdj,
       0,
       0,
/*55*/ 0,
       0,
       0,
       0,
       0,
/*60*/ 0,
       MatDestroy_MPIAdj,
       MatView_MPIAdj,
       MatGetPetscMaps_Petsc,
       0,
/*65*/ 0,
       0,
       0,
       0,
       0,
/*70*/ 0,
       0,
       0,
       0,
       0,
/*75*/ 0,
       0,
       0,
       0,
       0,
/*80*/ 0,
       0,
       0,
       0,
/*85*/ 0
};

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjSetPreallocation_MPIAdj"
PetscErrorCode MatMPIAdjSetPreallocation_MPIAdj(Mat B,int *i,int *j,int *values)
{
  Mat_MPIAdj     *b = (Mat_MPIAdj *)B->data;
  PetscErrorCode ierr;
#if defined(PETSC_USE_BOPT_g)
  int            ii;
#endif

  PetscFunctionBegin;
  B->preallocated = PETSC_TRUE;
#if defined(PETSC_USE_BOPT_g)
  if (i[0] != 0) SETERRQ1(1,"First i[] index must be zero, instead it is %d\n",i[0]);
  for (ii=1; ii<B->m; ii++) {
    if (i[ii] < 0 || i[ii] < i[ii-1]) {
      SETERRQ4(1,"i[%d]=%d cannot be negative or less than i[%d]=%d",ii,i[ii],ii-1,i[ii-1]);
    }
  }
  for (ii=0; ii<i[B->m]; ii++) {
    if (j[ii] < 0 || j[ii] >= B->N) {
      SETERRQ2(1,"Column index %d (at position %d) is out of range",j[ii],ii);
    }
  }
#endif

  b->j      = j;
  b->i      = i;
  b->values = values;

  b->nz        = i[B->m];
  b->diag      = 0;
  b->symmetric = PETSC_FALSE;
  b->freeaij   = PETSC_TRUE;

  ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

/*MC
   MATMPIADJ - MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
   intended for use in constructing orderings and partitionings.

  Level: beginner

.seealso: MatCreateMPIAdj()
M*/

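/*
   Example (an illustrative sketch, not part of the original source): the typical use of a
   MATMPIADJ matrix is as the adjacency input of a MatPartitioning object.  The calls below
   follow the usual PETSc pattern; exact calling sequences may differ between PETSc versions,
   and comm/adj are placeholder names.

      Mat             adj;     -- created with MatCreateMPIAdj() or by MatConvert() to MATMPIADJ
      MatPartitioning part;
      IS              is;

      ierr = MatPartitioningCreate(comm,&part);CHKERRQ(ierr);
      ierr = MatPartitioningSetAdjacency(part,adj);CHKERRQ(ierr);
      ierr = MatPartitioningSetFromOptions(part);CHKERRQ(ierr);
      ierr = MatPartitioningApply(part,&is);CHKERRQ(ierr);     -- is[k] = new owner of local row k
      ierr = ISDestroy(is);CHKERRQ(ierr);
      ierr = MatPartitioningDestroy(part);CHKERRQ(ierr);
*/
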
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatCreate_MPIAdj"
PetscErrorCode MatCreate_MPIAdj(Mat B)
{
  Mat_MPIAdj     *b;
  PetscErrorCode ierr;
  int            ii,size,rank;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(B->comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(B->comm,&rank);CHKERRQ(ierr);

  ierr                = PetscNew(Mat_MPIAdj,&b);CHKERRQ(ierr);
  B->data             = (void*)b;
  ierr                = PetscMemzero(b,sizeof(Mat_MPIAdj));CHKERRQ(ierr);
  ierr                = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
  B->factor           = 0;
  B->lupivotthreshold = 1.0;
  B->mapping          = 0;
  B->assembled        = PETSC_FALSE;

  ierr = MPI_Allreduce(&B->m,&B->M,1,MPI_INT,MPI_SUM,B->comm);CHKERRQ(ierr);
  B->n = B->N;

  /* the information in the maps duplicates the information computed below; eventually
     we should remove the duplicate information that is not contained in the maps */
  ierr = PetscMapCreateMPI(B->comm,B->m,B->M,&B->rmap);CHKERRQ(ierr);
  /* we don't know the "local columns" so just use the row information :-( */
  ierr = PetscMapCreateMPI(B->comm,B->m,B->M,&B->cmap);CHKERRQ(ierr);

  ierr = PetscMalloc((size+1)*sizeof(int),&b->rowners);CHKERRQ(ierr);
  PetscLogObjectMemory(B,(size+2)*sizeof(int)+sizeof(struct _p_Mat)+sizeof(Mat_MPIAdj));
  ierr = MPI_Allgather(&B->m,1,MPI_INT,b->rowners+1,1,MPI_INT,B->comm);CHKERRQ(ierr);
  b->rowners[0] = 0;
  for (ii=2; ii<=size; ii++) {
    b->rowners[ii] += b->rowners[ii-1];
  }
  b->rstart = b->rowners[rank];
  b->rend   = b->rowners[rank+1];
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAdjSetPreallocation_C",
                                    "MatMPIAdjSetPreallocation_MPIAdj",
                                     MatMPIAdjSetPreallocation_MPIAdj);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjSetPreallocation"
/*@C
   MatMPIAdjSetPreallocation - Sets the arrays that define the nonzero (adjacency) structure of the matrix

   Collective on MPI_Comm

   Input Parameters:
+  B - the matrix
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Level: intermediate

.seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
@*/
PetscErrorCode MatMPIAdjSetPreallocation(Mat B,int *i,int *j,int *values)
{
  PetscErrorCode ierr,(*f)(Mat,int*,int*,int*);

  PetscFunctionBegin;
  ierr = PetscObjectQueryFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(B,i,j,values);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

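/*
   Example (an illustrative sketch, not part of the original source): the i/j arrays describe the
   adjacency structure in compressed-row form with zero-based indices.  For a process that owns two
   rows, with the first row connected to global columns 1 and 3 and the second row to column 0:

      int *ii,*jj;
      ierr  = PetscMalloc(3*sizeof(int),&ii);CHKERRQ(ierr);   -- m+1 = 3 row pointers
      ierr  = PetscMalloc(3*sizeof(int),&jj);CHKERRQ(ierr);   -- 3 nonzeros in total
      ii[0] = 0; ii[1] = 2; ii[2] = 3;
      jj[0] = 1; jj[1] = 3; jj[2] = 0;
      ierr  = MatCreate(comm,2,N,PETSC_DETERMINE,N,&B);CHKERRQ(ierr);
      ierr  = MatSetType(B,MATMPIADJ);CHKERRQ(ierr);
      ierr  = MatMPIAdjSetPreallocation(B,ii,jj,PETSC_NULL);CHKERRQ(ierr);

   The names comm, N, and B are placeholders; the arrays are handed over to the matrix and must
   not be freed by the caller.
*/
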
#undef __FUNCT__
#define __FUNCT__ "MatCreateMPIAdj"
/*@C
   MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
   The matrix does not have numerical values associated with it, but is
   intended for ordering (to reduce bandwidth etc) and partitioning.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of local rows
.  n - number of columns
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Output Parameter:
.  A - the matrix

   Level: intermediate

   Notes: This matrix object does not support most matrix operations, including
   MatSetValues().
   You must NOT free the i, j, and values arrays yourself. PETSc will free them
   when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
   call from Fortran you need not create the arrays with PetscMalloc().
   The adjacency structure should not include the matrix diagonal entries.

   If you already have a matrix, you can create its adjacency matrix by a call
   to MatConvert(), specifying a type of MATMPIADJ.

   Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC

.seealso: MatCreate(), MatConvert(), MatGetOrdering()
@*/
PetscErrorCode MatCreateMPIAdj(MPI_Comm comm,int m,int n,int *i,int *j,int *values,Mat *A)
{
  PetscErrorCode ierr;

  PetscFunctionBegin;
  ierr = MatCreate(comm,m,n,PETSC_DETERMINE,n,A);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATMPIADJ);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(*A,i,j,values);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

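/*
   Example (an illustrative sketch, not part of the original source): creating the same two-row
   adjacency structure as in the sketch above with the convenience routine.  Ownership of the
   PetscMalloc()'d arrays passes to the matrix, so they are freed when the matrix is destroyed.

      Mat adj;
      ierr = MatCreateMPIAdj(comm,2,N,ii,jj,PETSC_NULL,&adj);CHKERRQ(ierr);
      ...
      ierr = MatDestroy(adj);CHKERRQ(ierr);   -- also frees ii and jj

   Here comm, N, ii, and jj are the placeholder names used in the previous sketch.
*/
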
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatConvertTo_MPIAdj"
PetscErrorCode MatConvertTo_MPIAdj(Mat A,MatType type,Mat *newmat)
{
  Mat               B;
  PetscErrorCode    ierr;
  int               i,m,N,nzeros = 0,*ia,*ja,len,rstart,cnt,j,*a;
  const int         *rj;
  const PetscScalar *ra;
  MPI_Comm          comm;

  PetscFunctionBegin;
  ierr = MatGetSize(A,PETSC_NULL,&N);CHKERRQ(ierr);
  ierr = MatGetLocalSize(A,&m,PETSC_NULL);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(A,&rstart,PETSC_NULL);CHKERRQ(ierr);

  /* count the number of nonzeros per row */
  for (i=0; i<m; i++) {
    ierr = MatGetRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
    for (j=0; j<len; j++) {
      if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
    }
    ierr = MatRestoreRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
    nzeros += len;
  }

  /* malloc space for nonzeros; only m+1 row pointers are needed */
  ierr = PetscMalloc((nzeros+1)*sizeof(int),&a);CHKERRQ(ierr);
  ierr = PetscMalloc((m+1)*sizeof(int),&ia);CHKERRQ(ierr);
  ierr = PetscMalloc((nzeros+1)*sizeof(int),&ja);CHKERRQ(ierr);

  nzeros = 0;
  ia[0]  = 0;
  for (i=0; i<m; i++) {
    ierr = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    cnt  = 0;
    for (j=0; j<len; j++) {
      if (rj[j] != i+rstart) { /* if not diagonal */
        a[nzeros+cnt]    = (int) PetscAbsScalar(ra[j]);
        ja[nzeros+cnt++] = rj[j];
      }
    }
    ierr = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    nzeros += cnt;
    ia[i+1] = nzeros;
  }

  ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
  ierr = MatCreate(comm,m,N,PETSC_DETERMINE,N,&B);CHKERRQ(ierr);
  ierr = MatSetType(B,type);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(B,ia,ja,a);CHKERRQ(ierr);

  /* Fake support for "in-place" convert */
  if (*newmat == A) {
    ierr = MatDestroy(A);CHKERRQ(ierr);
  }
  *newmat = B;
  PetscFunctionReturn(0);
}
EXTERN_C_END