/*$Id: mpiadj.c,v 1.61 2001/06/21 21:16:58 bsmith Exp buschelm $*/

/*
    Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
*/
#include "petscsys.h"
#include "src/mat/impls/adj/mpi/mpiadj.h"

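
/*
    Storage layout used throughout this file (an illustrative sketch of how the
    Mat_MPIAdj fields are used below, not text from the original source): each
    process stores its local rows of the adjacency graph in compressed-row form,

       a->i[r] .. a->i[r+1]-1   positions of row r's entries in a->j
       a->j[]                   column indices, 0-based, sorted within each row,
                                with no diagonal entries
       a->values[]              optional edge weights (may be PETSC_NULL)
       a->nz                    number of local entries, equal to a->i[m]
       a->rstart, a->rend       global indices of the first and one-past-last
                                local row; a->rowners[] holds these for all ranks

    For example, the two-row local graph {row 0 -> 1,2; row 1 -> 0,2} is stored
    as a->i = {0,2,4} and a->j = {1,2,0,2}.
*/
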
#undef __FUNCT__
#define __FUNCT__ "MatView_MPIAdj_ASCII"
int MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
{
  Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
  int               ierr,i,j,m = A->m;
  char              *name;
  PetscViewerFormat format;

  PetscFunctionBegin;
  ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
  ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
  if (format == PETSC_VIEWER_ASCII_INFO) {
    PetscFunctionReturn(0);
  } else if (format == PETSC_VIEWER_ASCII_MATLAB) {
    SETERRQ(PETSC_ERR_SUP,"Matlab format not supported");
  } else {
    ierr = PetscViewerASCIIUseTabs(viewer,PETSC_NO);CHKERRQ(ierr);
    for (i=0; i<m; i++) {
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"row %d:",i+a->rstart);CHKERRQ(ierr);
      for (j=a->i[i]; j<a->i[i+1]; j++) {
        ierr = PetscViewerASCIISynchronizedPrintf(viewer," %d ",a->j[j]);CHKERRQ(ierr);
      }
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
    }
    ierr = PetscViewerASCIIUseTabs(viewer,PETSC_YES);CHKERRQ(ierr);
  }
  ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIAdj"
int MatView_MPIAdj(Mat A,PetscViewer viewer)
{
  int        ierr;
  PetscTruth isascii;

  PetscFunctionBegin;
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&isascii);CHKERRQ(ierr);
  if (isascii) {
    ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
  } else {
    SETERRQ1(1,"Viewer type %s not supported by MPIAdj",((PetscObject)viewer)->type_name);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatDestroy_MPIAdj"
int MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)mat->data;
  int        ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)mat,"Rows=%d, Cols=%d, NZ=%d",mat->m,mat->n,a->nz);
#endif
  if (a->diag) {ierr = PetscFree(a->diag);CHKERRQ(ierr);}
  if (a->freeaij) {
    ierr = PetscFree(a->i);CHKERRQ(ierr);
    ierr = PetscFree(a->j);CHKERRQ(ierr);
    if (a->values) {ierr = PetscFree(a->values);CHKERRQ(ierr);}
  }
  ierr = PetscFree(a->rowners);CHKERRQ(ierr);
  ierr = PetscFree(a);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetOption_MPIAdj"
int MatSetOption_MPIAdj(Mat A,MatOption op)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;

  PetscFunctionBegin;
  switch (op) {
  case MAT_STRUCTURALLY_SYMMETRIC:
    a->symmetric = PETSC_TRUE;
    break;
  default:
    PetscLogInfo(A,"MatSetOption_MPIAdj:Option ignored\n");
    break;
  }
  PetscFunctionReturn(0);
}

/*
     Adds diagonal pointers to sparse matrix structure.
*/

#undef __FUNCT__
#define __FUNCT__ "MatMarkDiagonal_MPIAdj"
int MatMarkDiagonal_MPIAdj(Mat A)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
  int        i,j,*diag,m = A->m,ierr;

  PetscFunctionBegin;
  ierr = PetscMalloc((m+1)*sizeof(int),&diag);CHKERRQ(ierr);
  PetscLogObjectMemory(A,(m+1)*sizeof(int));
  for (i=0; i<A->m; i++) {
    diag[i] = -1;  /* stays -1 for rows that have no stored diagonal entry */
    for (j=a->i[i]; j<a->i[i+1]; j++) {
      if (a->j[j] == i) {
        diag[i] = j;
        break;
      }
    }
  }
  a->diag = diag;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetOwnershipRange_MPIAdj"
int MatGetOwnershipRange_MPIAdj(Mat A,int *m,int *n)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
  PetscFunctionBegin;
  if (m) *m = a->rstart;
  if (n) *n = a->rend;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetRow_MPIAdj"
int MatGetRow_MPIAdj(Mat A,int row,int *nz,int **idx,Scalar **v)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
  int        *itmp;

  PetscFunctionBegin;
  row -= a->rstart;

  if (row < 0 || row >= A->m) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");

  *nz = a->i[row+1] - a->i[row];
  if (v) *v = PETSC_NULL;
  if (idx) {
    itmp = a->j + a->i[row];
    if (*nz) {
      *idx = itmp;
    } else {
      *idx = PETSC_NULL;
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatRestoreRow_MPIAdj"
int MatRestoreRow_MPIAdj(Mat A,int row,int *nz,int **idx,Scalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetBlockSize_MPIAdj"
int MatGetBlockSize_MPIAdj(Mat A,int *bs)
{
  PetscFunctionBegin;
  *bs = 1;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatEqual_MPIAdj"
int MatEqual_MPIAdj(Mat A,Mat B,PetscTruth* flg)
{
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data,*b = (Mat_MPIAdj *)B->data;
  int        ierr;
  PetscTruth flag;

  PetscFunctionBegin;
  ierr = PetscTypeCompare((PetscObject)B,MATMPIADJ,&flag);CHKERRQ(ierr);
  if (!flag) SETERRQ(PETSC_ERR_ARG_INCOMP,"Matrices must be same type");

  /* the matrices differ if the local dimensions or the number of nonzeros differ */
  if ((A->m != B->m) || (a->nz != b->nz)) {
    flag = PETSC_FALSE;
  } else {
    /* compare the row pointers a->i */
    ierr = PetscMemcmp(a->i,b->i,(A->m+1)*sizeof(int),&flag);CHKERRQ(ierr);
    if (flag) {
      /* compare the column indices a->j */
      ierr = PetscMemcmp(a->j,b->j,(a->nz)*sizeof(int),&flag);CHKERRQ(ierr);
    }
  }

  ierr = MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,A->comm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetRowIJ_MPIAdj"
int MatGetRowIJ_MPIAdj(Mat A,int oshift,PetscTruth symmetric,int *m,int **ia,int **ja,PetscTruth *done)
{
  int        ierr,size,i;
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(A->comm,&size);CHKERRQ(ierr);
  if (size > 1) {*done = PETSC_FALSE; PetscFunctionReturn(0);}
  *m    = A->m;
  *ia   = a->i;
  *ja   = a->j;
  *done = PETSC_TRUE;
  if (oshift) {
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]++;
    }
    for (i=0; i<=(*m); i++) (*ia)[i]++;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatRestoreRowIJ_MPIAdj"
int MatRestoreRowIJ_MPIAdj(Mat A,int oshift,PetscTruth symmetric,int *m,int **ia,int **ja,PetscTruth *done)
{
  int        i;
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;

  PetscFunctionBegin;
  if (ia && a->i != *ia) SETERRQ(1,"ia passed back is not one obtained with MatGetRowIJ()");
  if (ja && a->j != *ja) SETERRQ(1,"ja passed back is not one obtained with MatGetRowIJ()");
  if (oshift) {
    for (i=0; i<=(*m); i++) (*ia)[i]--;
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]--;
    }
  }
  PetscFunctionReturn(0);
}
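
/*
    Illustrative usage of the two routines above (a sketch, not from the
    original source): a single-process caller that needs 1-based (Fortran-style)
    indexing pairs the calls and passes oshift = 1, e.g.

       int        n,*ia,*ja;
       PetscTruth done;

       ierr = MatGetRowIJ(A,1,PETSC_FALSE,&n,&ia,&ja,&done);CHKERRQ(ierr);
       if (done) {
         ... use ia[] and ja[], whose indices now start at 1 ...
       }
       ierr = MatRestoreRowIJ(A,1,PETSC_FALSE,&n,&ia,&ja,&done);CHKERRQ(ierr);

    MatRestoreRowIJ() must be called with the same oshift so the arrays are
    shifted back, since they point directly at the matrix's own storage.
*/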

/* -------------------------------------------------------------------*/
static struct _MatOps MatOps_Values = {0,
       MatGetRow_MPIAdj,
       MatRestoreRow_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatEqual_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       MatSetOption_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatGetOwnershipRange_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatGetBlockSize_MPIAdj,
       MatGetRowIJ_MPIAdj,
       MatRestoreRowIJ_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatDestroy_MPIAdj,
       MatView_MPIAdj,
       MatGetMaps_Petsc};


EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatCreate_MPIAdj"
int MatCreate_MPIAdj(Mat B)
{
  Mat_MPIAdj *b;
  int        ii,ierr,size,rank;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(B->comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(B->comm,&rank);CHKERRQ(ierr);

  ierr                = PetscNew(Mat_MPIAdj,&b);CHKERRQ(ierr);
  B->data             = (void*)b;
  ierr                = PetscMemzero(b,sizeof(Mat_MPIAdj));CHKERRQ(ierr);
  ierr                = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
  B->factor           = 0;
  B->lupivotthreshold = 1.0;
  B->mapping          = 0;
  B->assembled        = PETSC_FALSE;

  ierr = MPI_Allreduce(&B->m,&B->M,1,MPI_INT,MPI_SUM,B->comm);CHKERRQ(ierr);
  B->n = B->N;

  /* the information in the maps duplicates the information computed below; eventually
     we should remove the duplicate information that is not contained in the maps */
  ierr = MapCreateMPI(B->comm,B->m,B->M,&B->rmap);CHKERRQ(ierr);
  /* we don't know the "local columns" so just use the row information :-( */
  ierr = MapCreateMPI(B->comm,B->m,B->M,&B->cmap);CHKERRQ(ierr);

  ierr = PetscMalloc((size+1)*sizeof(int),&b->rowners);CHKERRQ(ierr);
  PetscLogObjectMemory(B,(size+2)*sizeof(int)+sizeof(struct _p_Mat)+sizeof(Mat_MPIAdj));
  ierr = MPI_Allgather(&B->m,1,MPI_INT,b->rowners+1,1,MPI_INT,B->comm);CHKERRQ(ierr);
  b->rowners[0] = 0;
  for (ii=2; ii<=size; ii++) {
    b->rowners[ii] += b->rowners[ii-1];
  }
  b->rstart = b->rowners[rank];
  b->rend   = b->rowners[rank+1];

  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjSetPreallocation"
int MatMPIAdjSetPreallocation(Mat B,int *i,int *j,int *values)
{
  Mat_MPIAdj *b = (Mat_MPIAdj *)B->data;
  int        ierr;
#if defined(PETSC_USE_BOPT_g)
  int        ii;
#endif

  PetscFunctionBegin;
  B->preallocated = PETSC_TRUE;
#if defined(PETSC_USE_BOPT_g)
  if (i[0] != 0) SETERRQ1(1,"First i[] index must be zero, instead it is %d\n",i[0]);
  for (ii=1; ii<B->m; ii++) {
    if (i[ii] < 0 || i[ii] < i[ii-1]) {
      SETERRQ4(1,"i[%d]=%d cannot be negative or smaller than i[%d]=%d",ii,i[ii],ii-1,i[ii-1]);
    }
  }
  for (ii=0; ii<i[B->m]; ii++) {
    if (j[ii] < 0 || j[ii] >= B->N) {
      SETERRQ2(1,"Column index out of range: j[%d] = %d",ii,j[ii]);
    }
  }
#endif

  b->j      = j;
  b->i      = i;
  b->values = values;

  b->nz        = i[B->m];
  b->diag      = 0;
  b->symmetric = PETSC_FALSE;
  b->freeaij   = PETSC_TRUE;

  ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatCreateMPIAdj"
/*@C
   MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
   The matrix does not have numerical values associated with it, but is
   intended for ordering (to reduce bandwidth etc) and partitioning.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of local rows
.  n - number of global columns
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Output Parameter:
.  A - the matrix

   Level: intermediate

   Notes: This matrix object does not support most matrix operations, including
   MatSetValues().
   You must NOT free the i, j, and values arrays yourself; PETSc will free them
   when the matrix is destroyed. You must allocate them with PetscMalloc()
   (when calling from Fortran the arrays need not be allocated with PetscMalloc()).
   The arrays should not include the matrix diagonal entries.

   Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC

.seealso: MatCreate(), MatCreateSeqAdj(), MatGetOrdering()
@*/
int MatCreateMPIAdj(MPI_Comm comm,int m,int n,int *i,int *j,int *values,Mat *A)
{
  int        ierr;

  PetscFunctionBegin;
  ierr = MatCreate(comm,m,n,PETSC_DETERMINE,n,A);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATMPIADJ);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(*A,i,j,values);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
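
/*
    Example usage (an illustrative sketch, not part of the original source):
    create the adjacency matrix of the triangle graph {0 -> 1,2; 1 -> 0,2;
    2 -> 0,1} on one process, with no edge weights.  The ii and jj arrays are
    obtained from PetscMalloc() because the matrix takes ownership of them and
    frees them when it is destroyed.

       int ierr,*ii,*jj;
       Mat A;

       ierr  = PetscMalloc(4*sizeof(int),&ii);CHKERRQ(ierr);
       ierr  = PetscMalloc(6*sizeof(int),&jj);CHKERRQ(ierr);
       ii[0] = 0; ii[1] = 2; ii[2] = 4; ii[3] = 6;    row pointers
       jj[0] = 1; jj[1] = 2;                          row 0: sorted columns, no diagonal
       jj[2] = 0; jj[3] = 2;                          row 1
       jj[4] = 0; jj[5] = 1;                          row 2
       ierr  = MatCreateMPIAdj(PETSC_COMM_WORLD,3,3,ii,jj,PETSC_NULL,&A);CHKERRQ(ierr);
       ierr  = MatView(A,PETSC_VIEWER_STDOUT_WORLD);CHKERRQ(ierr);
       ierr  = MatDestroy(A);CHKERRQ(ierr);           this also frees ii and jj
*/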

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatConvertTo_MPIAdj"
int MatConvertTo_MPIAdj(Mat A,MatType type,Mat *B)
{
  int      i,ierr,m,N,nzeros = 0,*ia,*ja,*rj,len,rstart,cnt,j,*a;
  Scalar   *ra;
  MPI_Comm comm;

  PetscFunctionBegin;
  ierr = MatGetSize(A,PETSC_NULL,&N);CHKERRQ(ierr);
  ierr = MatGetLocalSize(A,&m,PETSC_NULL);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(A,&rstart,PETSC_NULL);CHKERRQ(ierr);

  /* count the number of nonzeros per row */
  for (i=0; i<m; i++) {
    ierr   = MatGetRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
    for (j=0; j<len; j++) {
      if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
    }
    ierr   = MatRestoreRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
    nzeros += len;
  }

  /* malloc space for nonzeros */
  ierr = PetscMalloc((nzeros+1)*sizeof(int),&a);CHKERRQ(ierr);
  ierr = PetscMalloc((N+1)*sizeof(int),&ia);CHKERRQ(ierr);
  ierr = PetscMalloc((nzeros+1)*sizeof(int),&ja);CHKERRQ(ierr);

  nzeros = 0;
  ia[0]  = 0;
  for (i=0; i<m; i++) {
    ierr    = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    cnt     = 0;
    for (j=0; j<len; j++) {
      if (rj[j] != i+rstart) { /* if not diagonal */
        a[nzeros+cnt]    = (int) PetscAbsScalar(ra[j]);
        ja[nzeros+cnt++] = rj[j];
      }
    }
    ierr    = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    nzeros += cnt;
    ia[i+1] = nzeros;
  }

  ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
  ierr = MatCreateMPIAdj(comm,m,N,ia,ja,a,B);CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
EXTERN_C_END