/*$Id: mpiadj.c,v 1.66 2001/08/07 03:02:59 balay Exp $*/

/*
    Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
*/
#include "src/mat/impls/adj/mpi/mpiadj.h"
#include "petscsys.h"

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIAdj_ASCII"
int MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
{
  Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
  int               ierr,i,j,m = A->m;
  char              *name;
  PetscViewerFormat format;

  PetscFunctionBegin;
  ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
  ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
  if (format == PETSC_VIEWER_ASCII_INFO) {
    PetscFunctionReturn(0);
  } else if (format == PETSC_VIEWER_ASCII_MATLAB) {
    SETERRQ(PETSC_ERR_SUP,"Matlab format not supported");
  } else {
    ierr = PetscViewerASCIIUseTabs(viewer,PETSC_NO);CHKERRQ(ierr);
    for (i=0; i<m; i++) {
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"row %d:",i+a->rstart);CHKERRQ(ierr);
      for (j=a->i[i]; j<a->i[i+1]; j++) {
        ierr = PetscViewerASCIISynchronizedPrintf(viewer," %d ",a->j[j]);CHKERRQ(ierr);
      }
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
    }
    ierr = PetscViewerASCIIUseTabs(viewer,PETSC_YES);CHKERRQ(ierr);
  }
  ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIAdj"
int MatView_MPIAdj(Mat A,PetscViewer viewer)
{
  int        ierr;
  PetscTruth isascii;

  PetscFunctionBegin;
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&isascii);CHKERRQ(ierr);
  if (isascii) {
    ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
  } else {
    SETERRQ1(1,"Viewer type %s not supported by MPIAdj",((PetscObject)viewer)->type_name);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatDestroy_MPIAdj"
int MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)mat->data;
  int        ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)mat,"Rows=%d, Cols=%d, NZ=%d",mat->m,mat->n,a->nz);
#endif
  if (a->diag) {ierr = PetscFree(a->diag);CHKERRQ(ierr);}
  if (a->freeaij) {
    ierr = PetscFree(a->i);CHKERRQ(ierr);
    ierr = PetscFree(a->j);CHKERRQ(ierr);
    if (a->values) {ierr = PetscFree(a->values);CHKERRQ(ierr);}
  }
  ierr = PetscFree(a->rowners);CHKERRQ(ierr);
  ierr = PetscFree(a);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetOption_MPIAdj"
int MatSetOption_MPIAdj(Mat A,MatOption op)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;

  PetscFunctionBegin;
  switch (op) {
  case MAT_STRUCTURALLY_SYMMETRIC:
    a->symmetric = PETSC_TRUE;
    break;
  default:
    PetscLogInfo(A,"MatSetOption_MPIAdj:Option ignored\n");
    break;
  }
  PetscFunctionReturn(0);
}

/*
     Adds diagonal pointers to the sparse matrix structure.
*/

#undef __FUNCT__
#define __FUNCT__ "MatMarkDiagonal_MPIAdj"
int MatMarkDiagonal_MPIAdj(Mat A)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
  int        i,j,*diag,m = A->m,ierr;

  PetscFunctionBegin;
  ierr = PetscMalloc((m+1)*sizeof(int),&diag);CHKERRQ(ierr);
  PetscLogObjectMemory(A,(m+1)*sizeof(int));
  for (i=0; i<A->m; i++) {
    for (j=a->i[i]; j<a->i[i+1]; j++) {
      if (a->j[j] == i) {
        diag[i] = j;
        break;
      }
    }
  }
  a->diag = diag;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetRow_MPIAdj"
int MatGetRow_MPIAdj(Mat A,int row,int *nz,int **idx,PetscScalar **v)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
  int        *itmp;

  PetscFunctionBegin;
  row -= a->rstart;

  if (row < 0 || row >= A->m) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");

  *nz = a->i[row+1] - a->i[row];
  if (v) *v = PETSC_NULL;
  if (idx) {
    itmp = a->j + a->i[row];
    if (*nz) {
      *idx = itmp;
    } else {
      *idx = 0;
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatRestoreRow_MPIAdj"
int MatRestoreRow_MPIAdj(Mat A,int row,int *nz,int **idx,PetscScalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetBlockSize_MPIAdj"
int MatGetBlockSize_MPIAdj(Mat A,int *bs)
{
  PetscFunctionBegin;
  *bs = 1;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatEqual_MPIAdj"
int MatEqual_MPIAdj(Mat A,Mat B,PetscTruth* flg)
{
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data,*b = (Mat_MPIAdj *)B->data;
  int         ierr;
  PetscTruth  flag;

  PetscFunctionBegin;
  /* the matrices cannot be equal if the dimensions or the number of nonzeros differ */
  if ((A->m != B->m) || (a->nz != b->nz)) {
    flag = PETSC_FALSE;
  } else {
    /* check that the row offsets a->i are the same */
    ierr = PetscMemcmp(a->i,b->i,(A->m+1)*sizeof(int),&flag);CHKERRQ(ierr);
    if (flag) {
      /* check that the column indices a->j are the same */
      ierr = PetscMemcmp(a->j,b->j,(a->nz)*sizeof(int),&flag);CHKERRQ(ierr);
    }
  }

  /* the matrices are equal only if they are equal on every process */
  ierr = MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,A->comm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetRowIJ_MPIAdj"
int MatGetRowIJ_MPIAdj(Mat A,int oshift,PetscTruth symmetric,int *m,int *ia[],int *ja[],PetscTruth *done)
{
  int        ierr,size,i;
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(A->comm,&size);CHKERRQ(ierr);
  if (size > 1) {*done = PETSC_FALSE; PetscFunctionReturn(0);}
  *m    = A->m;
  *ia   = a->i;
  *ja   = a->j;
  *done = PETSC_TRUE;
  if (oshift) {
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]++;
    }
    for (i=0; i<=(*m); i++) (*ia)[i]++;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatRestoreRowIJ_MPIAdj"
int MatRestoreRowIJ_MPIAdj(Mat A,int oshift,PetscTruth symmetric,int *m,int *ia[],int *ja[],PetscTruth *done)
{
  int        i;
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;

  PetscFunctionBegin;
  if (ia && a->i != *ia) SETERRQ(1,"ia passed back is not one obtained with MatGetRowIJ()");
  if (ja && a->j != *ja) SETERRQ(1,"ja passed back is not one obtained with MatGetRowIJ()");
  if (oshift) {
    for (i=0; i<=(*m); i++) (*ia)[i]--;
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]--;
    }
  }
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------*/
static struct _MatOps MatOps_Values = {0,
       MatGetRow_MPIAdj,
       MatRestoreRow_MPIAdj,
       0,
/* 4*/ 0,
       0,
       0,
       0,
       0,
       0,
/*10*/ 0,
       0,
       0,
       0,
       0,
/*15*/ 0,
       MatEqual_MPIAdj,
       0,
       0,
       0,
/*20*/ 0,
       0,
       0,
       MatSetOption_MPIAdj,
       0,
/*25*/ 0,
       0,
       0,
       0,
       0,
/*30*/ 0,
       0,
       0,
       0,
       0,
/*35*/ 0,
       0,
       0,
       0,
       0,
/*40*/ 0,
       0,
       0,
       0,
       0,
/*45*/ 0,
       0,
       0,
       0,
       0,
/*50*/ MatGetBlockSize_MPIAdj,
       MatGetRowIJ_MPIAdj,
       MatRestoreRowIJ_MPIAdj,
       0,
       0,
/*55*/ 0,
       0,
       0,
       0,
       0,
/*60*/ 0,
       MatDestroy_MPIAdj,
       MatView_MPIAdj,
       MatGetPetscMaps_Petsc,
       0,
/*65*/ 0,
       0,
       0,
       0,
       0,
/*70*/ 0,
       0,
       0,
       0,
       0,
/*75*/ 0,
       0,
       0,
       0,
       0,
/*80*/ 0,
       0,
       0,
       0,
       0,
/*85*/ 0
};

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjSetPreallocation_MPIAdj"
int MatMPIAdjSetPreallocation_MPIAdj(Mat B,int *i,int *j,int *values)
{
  Mat_MPIAdj *b = (Mat_MPIAdj *)B->data;
  int        ierr;
#if defined(PETSC_USE_BOPT_g)
  int        ii;
#endif

  PetscFunctionBegin;
  B->preallocated = PETSC_TRUE;
#if defined(PETSC_USE_BOPT_g)
  if (i[0] != 0) SETERRQ1(PETSC_ERR_ARG_OUTOFRANGE,"First i[] index must be zero, instead it is %d\n",i[0]);
  for (ii=1; ii<B->m; ii++) {
    if (i[ii] < 0 || i[ii] < i[ii-1]) {
      SETERRQ4(PETSC_ERR_ARG_OUTOFRANGE,"i[%d]=%d cannot be negative or less than i[%d]=%d",ii,i[ii],ii-1,i[ii-1]);
    }
  }
  for (ii=0; ii<i[B->m]; ii++) {
    if (j[ii] < 0 || j[ii] >= B->N) {
      SETERRQ2(PETSC_ERR_ARG_OUTOFRANGE,"Column index j[%d]=%d out of range",ii,j[ii]);
    }
  }
#endif

  b->j      = j;
  b->i      = i;
  b->values = values;

  b->nz               = i[B->m];
  b->diag             = 0;
  b->symmetric        = PETSC_FALSE;
  b->freeaij          = PETSC_TRUE;

  ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

/*MC
   MATMPIADJ = "mpiadj" - A matrix type to be used for distributed adjacency matrices,
   intended for use in constructing orderings and partitionings; a typical partitioning
   use is sketched in the comment below.

  Level: beginner

.seealso: MatCreateMPIAdj()
M*/

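/*
   The sketch below is not compiled as part of this file; it illustrates how a MATMPIADJ
   matrix is typically consumed by the partitioning interface. It assumes a previously
   created adjacency matrix adj (for example from MatCreateMPIAdj()) living on the
   communicator comm, and uses the MatPartitioning routines. MatPartitioningApply()
   returns an index set giving, for each local row of adj, the process it is assigned to.

     MatPartitioning part;
     IS              is;
     int             ierr;

     ierr = MatPartitioningCreate(comm,&part);CHKERRQ(ierr);
     ierr = MatPartitioningSetAdjacency(part,adj);CHKERRQ(ierr);
     ierr = MatPartitioningSetFromOptions(part);CHKERRQ(ierr);
     ierr = MatPartitioningApply(part,&is);CHKERRQ(ierr);

   after the index set has been used, the objects are freed with

     ierr = ISDestroy(is);CHKERRQ(ierr);
     ierr = MatPartitioningDestroy(part);CHKERRQ(ierr);
*/
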
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatCreate_MPIAdj"
int MatCreate_MPIAdj(Mat B)
{
  Mat_MPIAdj *b;
  int        ii,ierr,size,rank;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(B->comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(B->comm,&rank);CHKERRQ(ierr);

  ierr                = PetscNew(Mat_MPIAdj,&b);CHKERRQ(ierr);
  B->data             = (void*)b;
  ierr                = PetscMemzero(b,sizeof(Mat_MPIAdj));CHKERRQ(ierr);
  ierr                = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
  B->factor           = 0;
  B->lupivotthreshold = 1.0;
  B->mapping          = 0;
  B->assembled        = PETSC_FALSE;

  ierr = MPI_Allreduce(&B->m,&B->M,1,MPI_INT,MPI_SUM,B->comm);CHKERRQ(ierr);
  B->n = B->N;

  /* the information in the maps duplicates the information computed below; eventually
     we should remove the duplicate information that is not contained in the maps */
  ierr = PetscMapCreateMPI(B->comm,B->m,B->M,&B->rmap);CHKERRQ(ierr);
  /* we don't know the "local columns" so just use the row information :-( */
  ierr = PetscMapCreateMPI(B->comm,B->m,B->M,&B->cmap);CHKERRQ(ierr);

  ierr = PetscMalloc((size+1)*sizeof(int),&b->rowners);CHKERRQ(ierr);
  PetscLogObjectMemory(B,(size+2)*sizeof(int)+sizeof(struct _p_Mat)+sizeof(Mat_MPIAdj));
  ierr = MPI_Allgather(&B->m,1,MPI_INT,b->rowners+1,1,MPI_INT,B->comm);CHKERRQ(ierr);
  b->rowners[0] = 0;
  for (ii=2; ii<=size; ii++) {
    b->rowners[ii] += b->rowners[ii-1];
  }
  b->rstart = b->rowners[rank];
  b->rend   = b->rowners[rank+1];
  ierr = PetscObjectComposeFunctionDynamic((PetscObject)B,"MatMPIAdjSetPreallocation_C",
                                    "MatMPIAdjSetPreallocation_MPIAdj",
                                     MatMPIAdjSetPreallocation_MPIAdj);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjSetPreallocation"
/*@C
   MatMPIAdjSetPreallocation - Sets the arrays used for storing the adjacency structure
   (and optional edge weights) of the matrix

   Collective on MPI_Comm

   Input Parameters:
+  B - the matrix
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Notes:
   A small usage sketch appears in the comment following this routine.

   Level: intermediate

.seealso: MatCreate(), MatCreateMPIAdj(), MatSetValues()
@*/
int MatMPIAdjSetPreallocation(Mat B,int *i,int *j,int *values)
{
  int ierr,(*f)(Mat,int*,int*,int*);

  PetscFunctionBegin;
  ierr = PetscObjectQueryFunction((PetscObject)B,"MatMPIAdjSetPreallocation_C",(void (**)(void))&f);CHKERRQ(ierr);
  if (f) {
    ierr = (*f)(B,i,j,values);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

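/*
   The sketch below is not compiled as part of this file; it shows the adjacency arrays for a
   small graph and the two-step creation path MatCreate()/MatSetType()/MatMPIAdjSetPreallocation().
   It assumes a single process; ii and jj are illustrative names, and both arrays must be obtained
   with PetscMalloc() because the matrix takes ownership of them and frees them when it is destroyed.

   Adjacency of the triangle graph on vertices 0,1,2 (no diagonal entries):
     row 0: 1 2
     row 1: 0 2
     row 2: 0 1

     Mat A;
     int ierr,*ii,*jj;

     ierr  = PetscMalloc(4*sizeof(int),&ii);CHKERRQ(ierr);
     ierr  = PetscMalloc(6*sizeof(int),&jj);CHKERRQ(ierr);
     ii[0] = 0; ii[1] = 2; ii[2] = 4; ii[3] = 6;
     jj[0] = 1; jj[1] = 2; jj[2] = 0; jj[3] = 2; jj[4] = 0; jj[5] = 1;

     ierr = MatCreate(PETSC_COMM_SELF,3,3,PETSC_DETERMINE,3,&A);CHKERRQ(ierr);
     ierr = MatSetType(A,MATMPIADJ);CHKERRQ(ierr);
     ierr = MatMPIAdjSetPreallocation(A,ii,jj,PETSC_NULL);CHKERRQ(ierr);

   when the matrix is no longer needed, MatDestroy(A) also frees ii and jj.
*/
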
#undef __FUNCT__
#define __FUNCT__ "MatCreateMPIAdj"
/*@C
   MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
   The matrix does not have numerical values associated with it, but is
   intended for ordering (to reduce bandwidth etc) and partitioning.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of local rows
.  n - number of columns
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Output Parameter:
.  A - the matrix

   Level: intermediate

   Notes: This matrix object does not support most matrix operations, including
   MatSetValues().
   You must NOT free the i, j, and values arrays yourself. PETSc will free them
   when the matrix is destroyed; you must allocate them with PetscMalloc(). If you
   call from Fortran you need not create the arrays with PetscMalloc().
   The adjacency structure should not include the matrix diagonal entries.

   If you already have a matrix, you can create its adjacency matrix by a call
   to MatConvert(), specifying a type of MATMPIADJ.

   Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC

   A small usage sketch appears in the comment following this routine.

.seealso: MatCreate(), MatConvert(), MatGetOrdering()
@*/
int MatCreateMPIAdj(MPI_Comm comm,int m,int n,int *i,int *j,int *values,Mat *A)
{
  int        ierr;

  PetscFunctionBegin;
  ierr = MatCreate(comm,m,n,PETSC_DETERMINE,n,A);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATMPIADJ);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(*A,i,j,values);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

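/*
   The sketch below is not compiled as part of this file; it shows the single-call creation of
   the same triangle-graph adjacency matrix with MatCreateMPIAdj(), reusing the ii and jj arrays
   laid out in the sketch above, and the alternative MatConvert() path mentioned in the notes.
   C denotes an assumed, previously assembled matrix, and the three-argument MatConvert() calling
   sequence is the one provided by this release.

     Mat adj;
     int ierr;

     ierr = MatCreateMPIAdj(PETSC_COMM_SELF,3,3,ii,jj,PETSC_NULL,&adj);CHKERRQ(ierr);

   or, starting from an existing matrix C,

     ierr = MatConvert(C,MATMPIADJ,&adj);CHKERRQ(ierr);
*/
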
EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatConvertTo_MPIAdj"
int MatConvertTo_MPIAdj(Mat A,MatType type,Mat *B)
{
  int          i,ierr,m,N,nzeros = 0,*ia,*ja,*rj,len,rstart,cnt,j,*a;
  PetscScalar  *ra;
  MPI_Comm     comm;

  PetscFunctionBegin;
  ierr = MatGetSize(A,PETSC_NULL,&N);CHKERRQ(ierr);
  ierr = MatGetLocalSize(A,&m,PETSC_NULL);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(A,&rstart,PETSC_NULL);CHKERRQ(ierr);

  /* count the number of nonzeros per row */
  for (i=0; i<m; i++) {
    ierr   = MatGetRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
    for (j=0; j<len; j++) {
      if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
    }
    ierr   = MatRestoreRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
    nzeros += len;
  }

  /* malloc space for nonzeros */
  ierr = PetscMalloc((nzeros+1)*sizeof(int),&a);CHKERRQ(ierr);
  ierr = PetscMalloc((m+1)*sizeof(int),&ia);CHKERRQ(ierr);
  ierr = PetscMalloc((nzeros+1)*sizeof(int),&ja);CHKERRQ(ierr);

  nzeros = 0;
  ia[0]  = 0;
  for (i=0; i<m; i++) {
    ierr    = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    cnt     = 0;
    for (j=0; j<len; j++) {
      if (rj[j] != i+rstart) { /* if not diagonal */
        a[nzeros+cnt]    = (int) PetscAbsScalar(ra[j]);
        ja[nzeros+cnt++] = rj[j];
      }
    }
    ierr    = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    nzeros += cnt;
    ia[i+1] = nzeros;
  }

  ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
  ierr = MatCreateMPIAdj(comm,m,N,ia,ja,a,B);CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
EXTERN_C_END