/*$Id: mpiadj.c,v 1.48 2000/09/28 21:11:35 bsmith Exp bsmith $*/

/*
    Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
*/
#include "petscsys.h"
#include "src/mat/impls/adj/mpi/mpiadj.h"

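/*
    Prints each locally owned row as "row r: c0 c1 ..." by walking the CSR arrays
    a->i (row pointers) and a->j (global column indices).
*/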
#undef __FUNC__
#define __FUNC__ /*<a name=""></a>*/"MatView_MPIAdj_ASCII"
int MatView_MPIAdj_ASCII(Mat A,Viewer viewer)
{
  Mat_MPIAdj  *a = (Mat_MPIAdj*)A->data;
  int         ierr,i,j,m = A->m, format;
  char        *outputname;

  PetscFunctionBegin;
  ierr = ViewerGetOutputname(viewer,&outputname);CHKERRQ(ierr);
  ierr = ViewerGetFormat(viewer,&format);CHKERRQ(ierr);
  if (format == VIEWER_FORMAT_ASCII_INFO) {
    PetscFunctionReturn(0);
  } else {
    ierr = ViewerASCIIUseTabs(viewer,PETSC_NO);CHKERRQ(ierr);
    for (i=0; i<m; i++) {
      ierr = ViewerASCIISynchronizedPrintf(viewer,"row %d:",i+a->rstart);CHKERRQ(ierr);
      for (j=a->i[i]; j<a->i[i+1]; j++) {
        ierr = ViewerASCIISynchronizedPrintf(viewer," %d ",a->j[j]);CHKERRQ(ierr);
      }
      ierr = ViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
    }
    ierr = ViewerASCIIUseTabs(viewer,PETSC_YES);CHKERRQ(ierr);
  }
  ierr = ViewerFlush(viewer);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNC__
#define __FUNC__ /*<a name=""></a>*/"MatView_MPIAdj"
int MatView_MPIAdj(Mat A,Viewer viewer)
{
  int        ierr;
  PetscTruth isascii;

  PetscFunctionBegin;
  ierr = PetscTypeCompare((PetscObject)viewer,ASCII_VIEWER,&isascii);CHKERRQ(ierr);
  if (isascii) {
    ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
  } else {
    SETERRQ1(1,"Viewer type %s not supported by MPIAdj",((PetscObject)viewer)->type_name);
  }
  PetscFunctionReturn(0);
}

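/*
    Frees the adjacency structure; the CSR arrays themselves are freed only
    when the matrix owns them (a->freeaij is set).
*/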
#undef __FUNC__
#define __FUNC__ /*<a name=""></a>*/"MatDestroy_MPIAdj"
int MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)mat->data;
  int        ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_LOG)
  PLogObjectState((PetscObject)mat,"Rows=%d, Cols=%d, NZ=%d",mat->m,mat->n,a->nz);
#endif
  if (a->diag) {ierr = PetscFree(a->diag);CHKERRQ(ierr);}
  if (a->freeaij) {
    ierr = PetscFree(a->i);CHKERRQ(ierr);
    ierr = PetscFree(a->j);CHKERRQ(ierr);
    if (a->values) {ierr = PetscFree(a->values);CHKERRQ(ierr);}
  }
  ierr = PetscFree(a->rowners);CHKERRQ(ierr);
  ierr = PetscFree(a);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

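/*
    Only MAT_STRUCTURALLY_SYMMETRIC is meaningful for an adjacency matrix;
    every other option is ignored (a message is logged via PLogInfo).
*/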
#undef __FUNC__
#define __FUNC__ /*<a name=""></a>*/"MatSetOption_MPIAdj"
int MatSetOption_MPIAdj(Mat A,MatOption op)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;

  PetscFunctionBegin;
  if (op == MAT_STRUCTURALLY_SYMMETRIC) {
    a->symmetric = PETSC_TRUE;
  } else {
    PLogInfo(A,"MatSetOption_MPIAdj:Option ignored\n");
  }
  PetscFunctionReturn(0);
}


/*
     Adds diagonal pointers to sparse matrix structure.
*/

#undef __FUNC__
#define __FUNC__ /*<a name=""></a>*/"MatMarkDiagonal_MPIAdj"
int MatMarkDiagonal_MPIAdj(Mat A)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
  int        i,j,*diag,m = A->m;

  PetscFunctionBegin;
  diag = (int*)PetscMalloc((m+1)*sizeof(int));CHKPTRQ(diag);
  PLogObjectMemory(A,(m+1)*sizeof(int));
  for (i=0; i<m; i++) {
    diag[i] = -1;                        /* mark rows that have no diagonal entry */
    for (j=a->i[i]; j<a->i[i+1]; j++) {
      if (a->j[j] == i + a->rstart) {    /* a->j[] holds global column indices */
        diag[i] = j;
        break;
      }
    }
  }
  a->diag = diag;
  PetscFunctionReturn(0);
}

#undef __FUNC__
#define __FUNC__ /*<a name=""></a>*/"MatGetOwnershipRange_MPIAdj"
int MatGetOwnershipRange_MPIAdj(Mat A,int *m,int *n)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
  PetscFunctionBegin;
  if (m) *m = a->rstart;
  if (n) *n = a->rend;
  PetscFunctionReturn(0);
}

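/*
    MatGetRow returns a pointer directly into the shared a->j storage (no copy
    is made) and no numerical values, so MatRestoreRow has nothing to clean up.
*/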
#undef __FUNC__
#define __FUNC__ /*<a name=""></a>*/"MatGetRow_MPIAdj"
int MatGetRow_MPIAdj(Mat A,int row,int *nz,int **idx,Scalar **v)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
  int        *itmp;

  PetscFunctionBegin;
  row -= a->rstart;

  if (row < 0 || row >= A->m) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");

  *nz = a->i[row+1] - a->i[row];
  if (v) *v = PETSC_NULL;
  if (idx) {
    itmp = a->j + a->i[row];
    if (*nz) {
      *idx = itmp;
    }
    else *idx = 0;
  }
  PetscFunctionReturn(0);
}

#undef __FUNC__
#define __FUNC__ /*<a name=""></a>*/"MatRestoreRow_MPIAdj"
int MatRestoreRow_MPIAdj(Mat A,int row,int *nz,int **idx,Scalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

#undef __FUNC__
#define __FUNC__ /*<a name=""></a>*/"MatGetBlockSize_MPIAdj"
int MatGetBlockSize_MPIAdj(Mat A,int *bs)
{
  PetscFunctionBegin;
  *bs = 1;
  PetscFunctionReturn(0);
}


#undef __FUNC__
#define __FUNC__ /*<a name=""></a>*/"MatEqual_MPIAdj"
int MatEqual_MPIAdj(Mat A,Mat B,PetscTruth* flg)
{
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data,*b = (Mat_MPIAdj *)B->data;
  int         ierr;
  PetscTruth  flag;

  PetscFunctionBegin;
  ierr = PetscTypeCompare((PetscObject)B,MATMPIADJ,&flag);CHKERRQ(ierr);
  if (!flag) SETERRQ(PETSC_ERR_ARG_INCOMP,"Matrices must be same type");

  /* the matrices differ if their dimensions or numbers of nonzeros differ */
  if ((A->m != B->m) || (a->nz != b->nz)) {
    flag = PETSC_FALSE;
  }

  /* check that the row pointers a->i match */
  if (flag) {
    ierr = PetscMemcmp(a->i,b->i,(A->m+1)*sizeof(int),&flag);CHKERRQ(ierr);
  }

  /* check that the column indices a->j match */
  if (flag) {
    ierr = PetscMemcmp(a->j,b->j,(a->nz)*sizeof(int),&flag);CHKERRQ(ierr);
  }

  ierr = MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,A->comm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}


/* -------------------------------------------------------------------*/
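/*
     Only the operations needed for ordering and partitioning are provided;
     all other slots in the function table are left 0 (unsupported).
*/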
static struct _MatOps MatOps_Values = {0,
       MatGetRow_MPIAdj,
       MatRestoreRow_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatEqual_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       MatSetOption_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatGetOwnershipRange_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatGetBlockSize_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatDestroy_MPIAdj,
       MatView_MPIAdj,
       MatGetMaps_Petsc};


EXTERN_C_BEGIN
#undef __FUNC__
#define __FUNC__ /*<a name=""></a>*/"MatCreate_MPIAdj"
int MatCreate_MPIAdj(Mat B)
{
  Mat_MPIAdj *b;
  int        ii,ierr,size,rank;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(B->comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(B->comm,&rank);CHKERRQ(ierr);

  B->data             = (void*)(b = PetscNew(Mat_MPIAdj));CHKPTRQ(b);
  ierr                = PetscMemzero(b,sizeof(Mat_MPIAdj));CHKERRQ(ierr);
  ierr                = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
  B->factor           = 0;
  B->lupivotthreshold = 1.0;
  B->mapping          = 0;
  B->assembled        = PETSC_FALSE;

  ierr = MPI_Allreduce(&B->m,&B->M,1,MPI_INT,MPI_SUM,B->comm);CHKERRQ(ierr);
  B->n = B->N;

  /* the information in the maps duplicates the information computed below, eventually
     we should remove the duplicate information that is not contained in the maps */
  ierr = MapCreateMPI(B->comm,B->m,B->M,&B->rmap);CHKERRQ(ierr);
  /* we don't know the "local columns" so just use the row information :-(*/
  ierr = MapCreateMPI(B->comm,B->m,B->M,&B->cmap);CHKERRQ(ierr);

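  /* gather every process's local row count, then turn the counts into a running
     sum so that rows rowners[rank] .. rowners[rank+1]-1 are owned locally */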
  b->rowners = (int*)PetscMalloc((size+1)*sizeof(int));CHKPTRQ(b->rowners);
  PLogObjectMemory(B,(size+2)*sizeof(int)+sizeof(struct _p_Mat)+sizeof(Mat_MPIAdj));
  ierr = MPI_Allgather(&B->m,1,MPI_INT,b->rowners+1,1,MPI_INT,B->comm);CHKERRQ(ierr);
  b->rowners[0] = 0;
  for (ii=2; ii<=size; ii++) {
    b->rowners[ii] += b->rowners[ii-1];
  }
  b->rstart = b->rowners[rank];
  b->rend   = b->rowners[rank+1];

  PetscFunctionReturn(0);
}
EXTERN_C_END

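/*
    Attaches the user-provided CSR arrays (i, j, and optional values) to the
    matrix without copying them; with BOPT=g the row pointers and column
    indices are also sanity-checked.
*/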
#undef __FUNC__
#define __FUNC__ /*<a name=""></a>*/"MatMPIAdjSetPreallocation"
int MatMPIAdjSetPreallocation(Mat B,int *i,int *j,int *values)
{
  Mat_MPIAdj *b = (Mat_MPIAdj *)B->data;
  int        ierr;
#if defined(PETSC_USE_BOPT_g)
  int        ii;
#endif

  PetscFunctionBegin;
  B->preallocated = PETSC_TRUE;
#if defined(PETSC_USE_BOPT_g)
  if (i[0] != 0) SETERRQ1(1,"First i[] index must be zero, instead it is %d",i[0]);
  for (ii=1; ii<B->m; ii++) {
    if (i[ii] < 0 || i[ii] < i[ii-1]) {
      SETERRQ4(1,"i[%d]=%d cannot be negative or smaller than i[%d]=%d",ii,i[ii],ii-1,i[ii-1]);
    }
  }
  for (ii=0; ii<i[B->m]; ii++) {
    if (j[ii] < 0 || j[ii] >= B->N) {
      SETERRQ2(1,"Column index j[%d]=%d is out of range",ii,j[ii]);
    }
  }
334   }
335 #endif
336 
337   b->j      = j;
338   b->i      = i;
339   b->values = values;
340 
341   b->nz               = i[B->m];
342   b->diag             = 0;
343   b->symmetric        = PETSC_FALSE;
344   b->freeaij          = PETSC_TRUE;
345 
346   ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
347   ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
348   PetscFunctionReturn(0);
349 }
350 
#undef __FUNC__
#define __FUNC__ /*<a name=""></a>*/"MatCreateMPIAdj"
/*@C
   MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
   The matrix does not have numerical values associated with it, but is
   intended for ordering (to reduce bandwidth etc) and partitioning.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of local rows
.  n - number of columns
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Output Parameter:
.  A - the matrix

   Level: intermediate

   Notes: This matrix object does not support most matrix operations, including
   MatSetValues().
   You must NOT free the i, j, and values arrays yourself; PETSc will free them
   when the matrix is destroyed. They must be allocated with PetscMalloc() (when
   calling from Fortran, however, you need not allocate them with PetscMalloc()).
   The adjacency list should not include the matrix diagonal entries.

   Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC

.seealso: MatCreate(), MatCreateSeqAdj(), MatGetOrdering()
@*/
int MatCreateMPIAdj(MPI_Comm comm,int m,int n,int *i,int *j,int *values,Mat *A)
{
  int        ierr;

  PetscFunctionBegin;
  ierr = MatCreate(comm,m,n,PETSC_DETERMINE,n,A);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATMPIADJ);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(*A,i,j,values);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
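
/*
    Example (a minimal sketch added for illustration, not part of the PETSc
    manual pages): building the adjacency structure of a 4-vertex path graph
    0-1-2-3 on a single process and creating the matrix.  The arrays are
    allocated with PetscMalloc() because PETSc frees them in MatDestroy().

      int ierr,*ii,*jj;
      Mat adj;

      ii = (int*)PetscMalloc(5*sizeof(int));CHKPTRQ(ii);
      jj = (int*)PetscMalloc(6*sizeof(int));CHKPTRQ(jj);
      ii[0] = 0; ii[1] = 1; ii[2] = 3; ii[3] = 5; ii[4] = 6;
      jj[0] = 1;                 row 0: neighbor 1
      jj[1] = 0; jj[2] = 2;      row 1: neighbors 0,2
      jj[3] = 1; jj[4] = 3;      row 2: neighbors 1,3
      jj[5] = 2;                 row 3: neighbor 2
      ierr = MatCreateMPIAdj(PETSC_COMM_SELF,4,4,ii,jj,PETSC_NULL,&adj);CHKERRQ(ierr);
*/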

EXTERN_C_BEGIN
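/*
    Builds an MPIAdj matrix from the nonzero structure of an existing matrix:
    each row is extracted with MatGetRow(), diagonal entries are dropped, and
    the absolute value of each remaining entry (truncated to int) is kept as
    an edge weight.
*/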
#undef __FUNC__
#define __FUNC__ /*<a name=""></a>*/"MatConvertTo_MPIAdj"
int MatConvertTo_MPIAdj(Mat A,MatType type,Mat *B)
{
  int      i,ierr,m,N,nzeros = 0,*ia,*ja,*rj,len,rstart,cnt,j,*a;
  Scalar   *ra;
  MPI_Comm comm;

  PetscFunctionBegin;
  ierr = MatGetSize(A,PETSC_NULL,&N);CHKERRQ(ierr);
  ierr = MatGetLocalSize(A,&m,PETSC_NULL);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(A,&rstart,PETSC_NULL);CHKERRQ(ierr);

  /* count the number of nonzeros per row */
  for (i=0; i<m; i++) {
    ierr   = MatGetRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
    for (j=0; j<len; j++) {
      if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
    }
    ierr   = MatRestoreRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
    nzeros += len;
  }

  /* malloc space for nonzeros */
  a  = (int*)PetscMalloc((nzeros+1)*sizeof(int));CHKPTRQ(a);
  ia = (int*)PetscMalloc((N+1)*sizeof(int));CHKPTRQ(ia);
  ja = (int*)PetscMalloc((nzeros+1)*sizeof(int));CHKPTRQ(ja);

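  /* second pass: copy the off-diagonal column indices and use the absolute
     value of each entry (truncated to int) as the edge weight */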
  nzeros = 0;
  ia[0]  = 0;
  for (i=0; i<m; i++) {
    ierr    = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    cnt     = 0;
    for (j=0; j<len; j++) {
      if (rj[j] != i+rstart) { /* if not diagonal */
        a[nzeros+cnt]    = (int) PetscAbsScalar(ra[j]);
        ja[nzeros+cnt++] = rj[j];
      }
    }
    ierr    = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    nzeros += cnt;
    ia[i+1] = nzeros;
  }

  ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
  ierr = MatCreateMPIAdj(comm,m,N,ia,ja,a,B);CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
EXTERN_C_END