/*$Id: mpiadj.c,v 1.66 2001/08/07 03:02:59 balay Exp $*/

/*
    Defines the basic matrix operations for the ADJ adjacency list matrix data structure.
*/
#include "src/mat/impls/adj/mpi/mpiadj.h"
#include "petscsys.h"

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIAdj_ASCII"
int MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
{
  Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
  int               ierr,i,j,m = A->m;
  char              *name;
  PetscViewerFormat format;

  PetscFunctionBegin;
  ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
  ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
  if (format == PETSC_VIEWER_ASCII_INFO) {
    PetscFunctionReturn(0);
  } else if (format == PETSC_VIEWER_ASCII_MATLAB) {
    SETERRQ(PETSC_ERR_SUP,"MATLAB format not supported");
  } else {
    ierr = PetscViewerASCIIUseTabs(viewer,PETSC_NO);CHKERRQ(ierr);
    for (i=0; i<m; i++) {
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"row %d:",i+a->rstart);CHKERRQ(ierr);
      for (j=a->i[i]; j<a->i[i+1]; j++) {
        ierr = PetscViewerASCIISynchronizedPrintf(viewer," %d ",a->j[j]);CHKERRQ(ierr);
      }
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
    }
    ierr = PetscViewerASCIIUseTabs(viewer,PETSC_YES);CHKERRQ(ierr);
  }
  ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIAdj"
int MatView_MPIAdj(Mat A,PetscViewer viewer)
{
  int        ierr;
  PetscTruth isascii;

  PetscFunctionBegin;
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&isascii);CHKERRQ(ierr);
  if (isascii) {
    ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
  } else {
    SETERRQ1(1,"Viewer type %s not supported by MPIAdj",((PetscObject)viewer)->type_name);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatDestroy_MPIAdj"
int MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)mat->data;
  int        ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)mat,"Rows=%d, Cols=%d, NZ=%d",mat->m,mat->n,a->nz);
#endif
  if (a->diag) {ierr = PetscFree(a->diag);CHKERRQ(ierr);}
  if (a->freeaij) {
    ierr = PetscFree(a->i);CHKERRQ(ierr);
    ierr = PetscFree(a->j);CHKERRQ(ierr);
    if (a->values) {ierr = PetscFree(a->values);CHKERRQ(ierr);}
  }
  ierr = PetscFree(a->rowners);CHKERRQ(ierr);
  ierr = PetscFree(a);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetOption_MPIAdj"
int MatSetOption_MPIAdj(Mat A,MatOption op)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;

  PetscFunctionBegin;
  switch (op) {
  case MAT_STRUCTURALLY_SYMMETRIC:
    a->symmetric = PETSC_TRUE;
    break;
  default:
    PetscLogInfo(A,"MatSetOption_MPIAdj:Option ignored\n");
    break;
  }
  PetscFunctionReturn(0);
}

/*
     Adds diagonal pointers to sparse matrix structure.
*/

#undef __FUNCT__
#define __FUNCT__ "MatMarkDiagonal_MPIAdj"
int MatMarkDiagonal_MPIAdj(Mat A)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
  int        i,j,*diag,m = A->m,ierr;

  PetscFunctionBegin;
  ierr = PetscMalloc((m+1)*sizeof(int),&diag);CHKERRQ(ierr);
  PetscLogObjectMemory(A,(m+1)*sizeof(int));
  /* note: rows that contain no entry with column index equal to the row index leave diag[i] unset */
  for (i=0; i<A->m; i++) {
    for (j=a->i[i]; j<a->i[i+1]; j++) {
      if (a->j[j] == i) {
        diag[i] = j;
        break;
      }
    }
  }
  a->diag = diag;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetRow_MPIAdj"
int MatGetRow_MPIAdj(Mat A,int row,int *nz,int **idx,PetscScalar **v)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
  int        *itmp;

  PetscFunctionBegin;
  row -= a->rstart;

  if (row < 0 || row >= A->m) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");

  *nz = a->i[row+1] - a->i[row];
  if (v) *v = PETSC_NULL;
  if (idx) {
    itmp = a->j + a->i[row];
    if (*nz) *idx = itmp;
    else     *idx = 0;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatRestoreRow_MPIAdj"
int MatRestoreRow_MPIAdj(Mat A,int row,int *nz,int **idx,PetscScalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetBlockSize_MPIAdj"
int MatGetBlockSize_MPIAdj(Mat A,int *bs)
{
  PetscFunctionBegin;
  *bs = 1;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatEqual_MPIAdj"
int MatEqual_MPIAdj(Mat A,Mat B,PetscTruth* flg)
{
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data,*b = (Mat_MPIAdj *)B->data;
  int        ierr;
  PetscTruth flag;

  PetscFunctionBegin;
  /* if the local dimensions or the number of nonzeros differ, the matrices cannot be equal */
  if ((A->m != B->m) || (a->nz != b->nz)) {
    flag = PETSC_FALSE;
  } else {
    /* compare the row pointer arrays a->i and b->i */
    ierr = PetscMemcmp(a->i,b->i,(A->m+1)*sizeof(int),&flag);CHKERRQ(ierr);
    /* compare the column index arrays a->j and b->j only if the row pointers matched */
    if (flag) {
      ierr = PetscMemcmp(a->j,b->j,(a->nz)*sizeof(int),&flag);CHKERRQ(ierr);
    }
  }

  /* the matrices are equal only if every process finds its local parts equal */
  ierr = MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,A->comm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetRowIJ_MPIAdj"
int MatGetRowIJ_MPIAdj(Mat A,int oshift,PetscTruth symmetric,int *m,int *ia[],int *ja[],PetscTruth *done)
{
  int        ierr,size,i;
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(A->comm,&size);CHKERRQ(ierr);
  if (size > 1) {*done = PETSC_FALSE; PetscFunctionReturn(0);}
  *m    = A->m;
  *ia   = a->i;
  *ja   = a->j;
  *done = PETSC_TRUE;
  if (oshift) {
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]++;
    }
    for (i=0; i<=(*m); i++) (*ia)[i]++;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatRestoreRowIJ_MPIAdj"
int MatRestoreRowIJ_MPIAdj(Mat A,int oshift,PetscTruth symmetric,int *m,int *ia[],int *ja[],PetscTruth *done)
{
  int        i;
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;

  PetscFunctionBegin;
  if (ia && a->i != *ia) SETERRQ(1,"ia passed back is not one obtained with MatGetRowIJ()");
  if (ja && a->j != *ja) SETERRQ(1,"ja passed back is not one obtained with MatGetRowIJ()");
  if (oshift) {
    for (i=0; i<=(*m); i++) (*ia)[i]--;
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]--;
    }
  }
  PetscFunctionReturn(0);
}

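/*
   Example (illustrative sketch, not part of the original source): accessing the
   compressed row structure of a sequential MPIAdj matrix through the public
   MatGetRowIJ()/MatRestoreRowIJ() interface. With oshift = 1 the indices are
   temporarily converted to 1-based (Fortran-style) numbering and must be
   restored with the same shift; on more than one process the routine simply
   sets done = PETSC_FALSE and returns.

      int        n,*ia,*ja,ierr;
      PetscTruth done;

      ierr = MatGetRowIJ(A,1,PETSC_FALSE,&n,&ia,&ja,&done);CHKERRQ(ierr);
      if (done) {
        ... use the 1-based ia[] and ja[] arrays, e.g. pass them to a Fortran
        ... ordering routine
        ierr = MatRestoreRowIJ(A,1,PETSC_FALSE,&n,&ia,&ja,&done);CHKERRQ(ierr);
      }
*/
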
/* -------------------------------------------------------------------*/
static struct _MatOps MatOps_Values = {0,
       MatGetRow_MPIAdj,
       MatRestoreRow_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatEqual_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       MatSetOption_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatGetBlockSize_MPIAdj,
       MatGetRowIJ_MPIAdj,
       MatRestoreRowIJ_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatDestroy_MPIAdj,
       MatView_MPIAdj,
       MatGetPetscMaps_Petsc};

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjSetPreallocation_MPIAdj"
int MatMPIAdjSetPreallocation_MPIAdj(Mat B,int *i,int *j,int *values)
{
  Mat_MPIAdj *b = (Mat_MPIAdj *)B->data;
  int        ierr;
#if defined(PETSC_USE_BOPT_g)
  int        ii;
#endif

  PetscFunctionBegin;
  B->preallocated = PETSC_TRUE;
#if defined(PETSC_USE_BOPT_g)
  if (i[0] != 0) SETERRQ1(1,"First i[] index must be zero, instead it is %d",i[0]);
  for (ii=1; ii<=B->m; ii++) {
    if (i[ii] < 0 || i[ii] < i[ii-1]) {
      SETERRQ4(1,"i[%d]=%d index is out of range: i[%d]=%d",ii,i[ii],ii-1,i[ii-1]);
    }
  }
  for (ii=0; ii<i[B->m]; ii++) {
    if (j[ii] < 0 || j[ii] >= B->N) {
      SETERRQ2(1,"Column index j[%d]=%d is out of range",ii,j[ii]);
    }
  }
#endif

  b->j      = j;
  b->i      = i;
  b->values = values;

  b->nz               = i[B->m];
  b->diag             = 0;
  b->symmetric        = PETSC_FALSE;
  b->freeaij          = PETSC_TRUE;

  ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

/*MC
   MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
   intended for use in constructing orderings and partitionings.

  Level: beginner

.seealso: MatCreateMPIAdj()
M*/

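/*
   Example (illustrative sketch, not part of the original source): a typical use
   of a MATMPIADJ matrix is as the adjacency input to the MatPartitioning
   interface; the sketch assumes the standard MatPartitioningCreate(),
   MatPartitioningSetAdjacency(), MatPartitioningApply() routines.

      Mat             adj;        already assembled MATMPIADJ matrix
      MatPartitioning part;
      IS              is;
      int             ierr;

      ierr = MatPartitioningCreate(comm,&part);CHKERRQ(ierr);
      ierr = MatPartitioningSetAdjacency(part,adj);CHKERRQ(ierr);
      ierr = MatPartitioningSetFromOptions(part);CHKERRQ(ierr);
      ierr = MatPartitioningApply(part,&is);CHKERRQ(ierr);
      ierr = MatPartitioningDestroy(part);CHKERRQ(ierr);
*/
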
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatCreate_MPIAdj"
int MatCreate_MPIAdj(Mat B)
{
  Mat_MPIAdj *b;
  int        ii,ierr,size,rank;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(B->comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(B->comm,&rank);CHKERRQ(ierr);

  ierr                = PetscNew(Mat_MPIAdj,&b);CHKERRQ(ierr);
  B->data             = (void*)b;
  ierr                = PetscMemzero(b,sizeof(Mat_MPIAdj));CHKERRQ(ierr);
  ierr                = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
  B->factor           = 0;
  B->lupivotthreshold = 1.0;
  B->mapping          = 0;
  B->assembled        = PETSC_FALSE;

  ierr = MPI_Allreduce(&B->m,&B->M,1,MPI_INT,MPI_SUM,B->comm);CHKERRQ(ierr);
  B->n = B->N;

  /* the information in the maps duplicates the information computed below, eventually
     we should remove the duplicate information that is not contained in the maps */
  ierr = PetscMapCreateMPI(B->comm,B->m,B->M,&B->rmap);CHKERRQ(ierr);
  /* we don't know the "local columns" so just use the row information :-( */
  ierr = PetscMapCreateMPI(B->comm,B->m,B->M,&B->cmap);CHKERRQ(ierr);

  ierr = PetscMalloc((size+1)*sizeof(int),&b->rowners);CHKERRQ(ierr);
  PetscLogObjectMemory(B,(size+2)*sizeof(int)+sizeof(struct _p_Mat)+sizeof(Mat_MPIAdj));
  ierr = MPI_Allgather(&B->m,1,MPI_INT,b->rowners+1,1,MPI_INT,B->comm);CHKERRQ(ierr);
  b->rowners[0] = 0;
  for (ii=2; ii<=size; ii++) {
    b->rowners[ii] += b->rowners[ii-1];
  }
  b->rstart = b->rowners[rank];
  b->rend   = b->rowners[rank+1];
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAdjSetPreallocation_C",
                                    "MatMPIAdjSetPreallocation_MPIAdj",
                                     MatMPIAdjSetPreallocation_MPIAdj);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjSetPreallocation"
/*@C
   MatMPIAdjSetPreallocation - Sets the arrays used for storing the adjacency
   structure (row pointers, column indices, and optional edge weights).

   Collective on MPI_Comm

   Input Parameters:
+  B - the matrix
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Level: intermediate

.seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
@*/
int MatMPIAdjSetPreallocation(Mat B,int *i,int *j,int *values)
{
  int ierr,(*f)(Mat,int*,int*,int*);

  PetscFunctionBegin;
  ierr = PetscObjectQueryFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(B,i,j,values);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

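/*
   Example (illustrative sketch, not part of the original source): the row and
   column arrays for the 4-vertex path graph

        0 -- 1 -- 2 -- 3

   stored on a single process, excluding the diagonal, are

      i[] = {0, 1, 3, 5, 6}      row pointers; i[0] = 0, i[4] = 6 = total nonzeros
      j[] = {1, 0,2, 1,3, 2}     neighbors of vertices 0,1,2,3 (0-based, sorted per row)

   Both arrays (and the optional values array) must be obtained with PetscMalloc()
   because the matrix takes ownership of them and frees them when it is destroyed.
*/
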
#undef __FUNCT__
#define __FUNCT__ "MatCreateMPIAdj"
/*@C
   MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
   The matrix does not have numerical values associated with it, but is
   intended for ordering (to reduce bandwidth etc) and partitioning.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of local rows
.  n - number of columns
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Output Parameter:
.  A - the matrix

   Level: intermediate

   Notes: This matrix object does not support most matrix operations, including
   MatSetValues().
   You must NOT free the i, j, and values arrays yourself. PETSc will free them
   when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
   call from Fortran you need not allocate the arrays with PetscMalloc().
   The arrays should not include the matrix diagonal entries.

   If you already have a matrix, you can create its adjacency matrix by a call
   to MatConvert(), specifying a type of MATMPIADJ.

   Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC

.seealso: MatCreate(), MatConvert(), MatGetOrdering()
@*/
int MatCreateMPIAdj(MPI_Comm comm,int m,int n,int *i,int *j,int *values,Mat *A)
{
  int        ierr;

  PetscFunctionBegin;
  ierr = MatCreate(comm,m,n,PETSC_DETERMINE,n,A);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATMPIADJ);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(*A,i,j,values);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

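/*
   Example (illustrative sketch, not part of the original source): creating a
   one-process adjacency matrix for the triangle graph 0-1, 1-2, 2-0. The i and
   j arrays are allocated with PetscMalloc() because MatCreateMPIAdj() takes
   ownership of them; the caller must not free them.

      Mat adj;
      int *ii,*jj,ierr;

      ierr  = PetscMalloc(4*sizeof(int),&ii);CHKERRQ(ierr);
      ierr  = PetscMalloc(6*sizeof(int),&jj);CHKERRQ(ierr);
      ii[0] = 0; ii[1] = 2; ii[2] = 4; ii[3] = 6;
      jj[0] = 1; jj[1] = 2;              neighbors of vertex 0
      jj[2] = 0; jj[3] = 2;              neighbors of vertex 1
      jj[4] = 0; jj[5] = 1;              neighbors of vertex 2

      ierr = MatCreateMPIAdj(PETSC_COMM_SELF,3,3,ii,jj,PETSC_NULL,&adj);CHKERRQ(ierr);
      ...
      ierr = MatDestroy(adj);CHKERRQ(ierr);
*/
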
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatConvertTo_MPIAdj"
int MatConvertTo_MPIAdj(Mat A,MatType type,Mat *B)
{
  int          i,ierr,m,N,nzeros = 0,*ia,*ja,*rj,len,rstart,cnt,j,*a;
  PetscScalar  *ra;
  MPI_Comm     comm;

  PetscFunctionBegin;
  ierr = MatGetSize(A,PETSC_NULL,&N);CHKERRQ(ierr);
  ierr = MatGetLocalSize(A,&m,PETSC_NULL);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(A,&rstart,PETSC_NULL);CHKERRQ(ierr);

  /* count the number of nonzeros per row */
  for (i=0; i<m; i++) {
    ierr = MatGetRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
    for (j=0; j<len; j++) {
      if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
    }
    ierr    = MatRestoreRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
    nzeros += len;
  }

  /* malloc space for nonzeros */
  ierr = PetscMalloc((nzeros+1)*sizeof(int),&a);CHKERRQ(ierr);
  ierr = PetscMalloc((N+1)*sizeof(int),&ia);CHKERRQ(ierr);
  ierr = PetscMalloc((nzeros+1)*sizeof(int),&ja);CHKERRQ(ierr);

  nzeros = 0;
  ia[0]  = 0;
  for (i=0; i<m; i++) {
    ierr = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    cnt  = 0;
    for (j=0; j<len; j++) {
      if (rj[j] != i+rstart) { /* if not diagonal */
        a[nzeros+cnt]    = (int) PetscAbsScalar(ra[j]);
        ja[nzeros+cnt++] = rj[j];
      }
    }
    ierr    = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    nzeros += cnt;
    ia[i+1] = nzeros;
  }

  ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
  ierr = MatCreateMPIAdj(comm,m,N,ia,ja,a,B);CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
EXTERN_C_END
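
/*
   Example (illustrative sketch, not part of the original source): obtaining the
   adjacency structure of an existing assembled matrix by conversion, as noted in
   the MatCreateMPIAdj() documentation above; the conversion drops the diagonal
   and uses the absolute values of the entries as edge weights.

      Mat A;          existing assembled matrix (e.g. AIJ)
      Mat adj;
      int ierr;

      ierr = MatConvert(A,MATMPIADJ,&adj);CHKERRQ(ierr);
      ...
      ierr = MatDestroy(adj);CHKERRQ(ierr);
*/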