/*$Id: mpiadj.c,v 1.50 2000/10/30 03:28:19 bsmith Exp bsmith $*/

/*
    Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
*/
#include "petscsys.h"
#include "src/mat/impls/adj/mpi/mpiadj.h"

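/*
    The adjacency structure is stored in compressed-row (CSR-like) form:
    for local row r, a->i[r] .. a->i[r+1]-1 index into a->j, which holds the
    zero-based global column indices of that row. As an illustrative sketch
    (this 3-row graph is made up, not taken from any PETSc example), the
    adjacency list

        row 0: 1 2
        row 1: 0
        row 2: 0 1

    would be stored as  i[] = {0,2,3,5}  and  j[] = {1,2,0,0,1}.
*/
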
#undef __FUNC__
#define __FUNC__ /*<a name=""></a>*/"MatView_MPIAdj_ASCII"
int MatView_MPIAdj_ASCII(Mat A,Viewer viewer)
{
  Mat_MPIAdj  *a = (Mat_MPIAdj*)A->data;
  int         ierr,i,j,m = A->m, format;
  char        *outputname;

  PetscFunctionBegin;
  ierr = ViewerGetOutputname(viewer,&outputname);CHKERRQ(ierr);
  ierr = ViewerGetFormat(viewer,&format);CHKERRQ(ierr);
  if (format == VIEWER_FORMAT_ASCII_INFO) {
    PetscFunctionReturn(0);
  } else if (format == VIEWER_FORMAT_ASCII_MATLAB) {
    SETERRQ(PETSC_ERR_SUP,"Matlab format not supported");
  } else {
    ierr = ViewerASCIIUseTabs(viewer,PETSC_NO);CHKERRQ(ierr);
    for (i=0; i<m; i++) {
      ierr = ViewerASCIISynchronizedPrintf(viewer,"row %d:",i+a->rstart);CHKERRQ(ierr);
      for (j=a->i[i]; j<a->i[i+1]; j++) {
        ierr = ViewerASCIISynchronizedPrintf(viewer," %d ",a->j[j]);CHKERRQ(ierr);
      }
      ierr = ViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
    }
    ierr = ViewerASCIIUseTabs(viewer,PETSC_YES);CHKERRQ(ierr);
  }
  ierr = ViewerFlush(viewer);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNC__
#define __FUNC__ /*<a name=""></a>*/"MatView_MPIAdj"
int MatView_MPIAdj(Mat A,Viewer viewer)
{
  int        ierr;
  PetscTruth isascii;

  PetscFunctionBegin;
  ierr = PetscTypeCompare((PetscObject)viewer,ASCII_VIEWER,&isascii);CHKERRQ(ierr);
  if (isascii) {
    ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
  } else {
    SETERRQ1(1,"Viewer type %s not supported by MPIAdj",((PetscObject)viewer)->type_name);
  }
  PetscFunctionReturn(0);
}

#undef __FUNC__
#define __FUNC__ /*<a name=""></a>*/"MatDestroy_MPIAdj"
int MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)mat->data;
  int        ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_LOG)
  PLogObjectState((PetscObject)mat,"Rows=%d, Cols=%d, NZ=%d",mat->m,mat->n,a->nz);
#endif
  if (a->diag) {ierr = PetscFree(a->diag);CHKERRQ(ierr);}
  if (a->freeaij) {
    ierr = PetscFree(a->i);CHKERRQ(ierr);
    ierr = PetscFree(a->j);CHKERRQ(ierr);
    if (a->values) {ierr = PetscFree(a->values);CHKERRQ(ierr);}
  }
  ierr = PetscFree(a->rowners);CHKERRQ(ierr);
  ierr = PetscFree(a);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNC__
#define __FUNC__ /*<a name=""></a>*/"MatSetOption_MPIAdj"
int MatSetOption_MPIAdj(Mat A,MatOption op)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;

  PetscFunctionBegin;
  if (op == MAT_STRUCTURALLY_SYMMETRIC) {
    a->symmetric = PETSC_TRUE;
  } else {
    PLogInfo(A,"MatSetOption_MPIAdj:Option ignored\n");
  }
  PetscFunctionReturn(0);
}

/*
     Adds diagonal pointers to sparse matrix structure.
*/

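/*
    After MatMarkDiagonal_MPIAdj(), a->diag[r] holds the position within a->j[]
    of the diagonal entry of local row r, or -1 if that row has no diagonal entry.
*/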
#undef __FUNC__
#define __FUNC__ /*<a name=""></a>*/"MatMarkDiagonal_MPIAdj"
int MatMarkDiagonal_MPIAdj(Mat A)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
  int        i,j,*diag,m = A->m;

  PetscFunctionBegin;
  diag = (int*)PetscMalloc((m+1)*sizeof(int));CHKPTRQ(diag);
  PLogObjectMemory(A,(m+1)*sizeof(int));
  for (i=0; i<m; i++) {
    diag[i] = -1;  /* flag rows with no diagonal entry; adjacency lists normally exclude the diagonal */
    for (j=a->i[i]; j<a->i[i+1]; j++) {
      if (a->j[j] == i + a->rstart) {  /* a->j[] holds global column indices */
        diag[i] = j;
        break;
      }
    }
  }
  a->diag = diag;
  PetscFunctionReturn(0);
}

#undef __FUNC__
#define __FUNC__ /*<a name=""></a>*/"MatGetOwnershipRange_MPIAdj"
int MatGetOwnershipRange_MPIAdj(Mat A,int *m,int *n)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
  PetscFunctionBegin;
  if (m) *m = a->rstart;
  if (n) *n = a->rend;
  PetscFunctionReturn(0);
}

#undef __FUNC__
#define __FUNC__ /*<a name=""></a>*/"MatGetRow_MPIAdj"
int MatGetRow_MPIAdj(Mat A,int row,int *nz,int **idx,Scalar **v)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
  int        *itmp;

  PetscFunctionBegin;
  row -= a->rstart;

  if (row < 0 || row >= A->m) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");

  *nz = a->i[row+1] - a->i[row];
  if (v) *v = PETSC_NULL;
  if (idx) {
    itmp = a->j + a->i[row];
    if (*nz) {
      *idx = itmp;
    } else {
      *idx = PETSC_NULL;
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNC__
#define __FUNC__ /*<a name=""></a>*/"MatRestoreRow_MPIAdj"
int MatRestoreRow_MPIAdj(Mat A,int row,int *nz,int **idx,Scalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

#undef __FUNC__
#define __FUNC__ /*<a name=""></a>*/"MatGetBlockSize_MPIAdj"
int MatGetBlockSize_MPIAdj(Mat A,int *bs)
{
  PetscFunctionBegin;
  *bs = 1;
  PetscFunctionReturn(0);
}

#undef __FUNC__
#define __FUNC__ /*<a name=""></a>*/"MatEqual_MPIAdj"
int MatEqual_MPIAdj(Mat A,Mat B,PetscTruth* flg)
{
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data,*b = (Mat_MPIAdj *)B->data;
  int         ierr;
  PetscTruth  flag;

  PetscFunctionBegin;
  ierr = PetscTypeCompare((PetscObject)B,MATMPIADJ,&flag);CHKERRQ(ierr);
  if (!flag) SETERRQ(PETSC_ERR_ARG_INCOMP,"Matrices must be same type");

  /* the matrices differ if the local dimensions or numbers of nonzeros are not equal */
  if ((A->m != B->m) || (a->nz != b->nz)) {
    flag = PETSC_FALSE;
  } else {
    /* ... or if the row offsets a->i differ */
    ierr = PetscMemcmp(a->i,b->i,(A->m+1)*sizeof(int),&flag);CHKERRQ(ierr);
    /* ... or if the column indices a->j differ */
    if (flag) {
      ierr = PetscMemcmp(a->j,b->j,(a->nz)*sizeof(int),&flag);CHKERRQ(ierr);
    }
  }

  /* all processes must agree for the matrices to be equal */
  ierr = MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,A->comm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNC__
#define __FUNC__ /*<a name="MatGetRowIJ_MPIAdj"></a>*/"MatGetRowIJ_MPIAdj"
int MatGetRowIJ_MPIAdj(Mat A,int oshift,PetscTruth symmetric,int *m,int **ia,int **ja,PetscTruth *done)
{
  int        ierr,size;
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(A->comm,&size);CHKERRQ(ierr);
  if (size > 1) {*done = PETSC_FALSE; PetscFunctionReturn(0);}
  if (oshift)   {*done = PETSC_FALSE; PetscFunctionReturn(0);}
  *m    = A->m;
  *ia   = a->i;
  *ja   = a->j;
  *done = PETSC_TRUE;
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------*/
static struct _MatOps MatOps_Values = {0,
       MatGetRow_MPIAdj,
       MatRestoreRow_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatEqual_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       MatSetOption_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatGetOwnershipRange_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatGetBlockSize_MPIAdj,
       MatGetRowIJ_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatDestroy_MPIAdj,
       MatView_MPIAdj,
       MatGetMaps_Petsc};

EXTERN_C_BEGIN
#undef __FUNC__
#define __FUNC__ /*<a name=""></a>*/"MatCreate_MPIAdj"
int MatCreate_MPIAdj(Mat B)
{
  Mat_MPIAdj *b;
  int        ii,ierr,size,rank;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(B->comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(B->comm,&rank);CHKERRQ(ierr);

  B->data             = (void*)(b = PetscNew(Mat_MPIAdj));CHKPTRQ(b);
  ierr                = PetscMemzero(b,sizeof(Mat_MPIAdj));CHKERRQ(ierr);
  ierr                = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
  B->factor           = 0;
  B->lupivotthreshold = 1.0;
  B->mapping          = 0;
  B->assembled        = PETSC_FALSE;

  ierr = MPI_Allreduce(&B->m,&B->M,1,MPI_INT,MPI_SUM,B->comm);CHKERRQ(ierr);
  B->n = B->N;

  /* the information in the maps duplicates the information computed below, eventually
     we should remove the duplicate information that is not contained in the maps */
  ierr = MapCreateMPI(B->comm,B->m,B->M,&B->rmap);CHKERRQ(ierr);
  /* we don't know the "local columns" so just use the row information :-(*/
  ierr = MapCreateMPI(B->comm,B->m,B->M,&B->cmap);CHKERRQ(ierr);

  b->rowners = (int*)PetscMalloc((size+1)*sizeof(int));CHKPTRQ(b->rowners);
  PLogObjectMemory(B,(size+2)*sizeof(int)+sizeof(struct _p_Mat)+sizeof(Mat_MPIAdj));
  ierr = MPI_Allgather(&B->m,1,MPI_INT,b->rowners+1,1,MPI_INT,B->comm);CHKERRQ(ierr);
  b->rowners[0] = 0;
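  /* convert the per-process row counts gathered above into a prefix sum:
     after this loop b->rowners[p] is the first global row owned by process p */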
  for (ii=2; ii<=size; ii++) {
    b->rowners[ii] += b->rowners[ii-1];
  }
  b->rstart = b->rowners[rank];
  b->rend   = b->rowners[rank+1];

  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNC__
#define __FUNC__ /*<a name=""></a>*/"MatMPIAdjSetPreallocation"
int MatMPIAdjSetPreallocation(Mat B,int *i,int *j,int *values)
{
  Mat_MPIAdj *b = (Mat_MPIAdj *)B->data;
  int        ierr;
#if defined(PETSC_USE_BOPT_g)
  int        ii;
#endif

  PetscFunctionBegin;
  B->preallocated = PETSC_TRUE;
#if defined(PETSC_USE_BOPT_g)
  if (i[0] != 0) SETERRQ1(1,"First i[] index must be zero, instead it is %d\n",i[0]);
  for (ii=1; ii<=B->m; ii++) {
    if (i[ii] < 0 || i[ii] < i[ii-1]) {
      SETERRQ4(1,"i[%d]=%d cannot be less than i[%d]=%d",ii,i[ii],ii-1,i[ii-1]);
    }
  }
  for (ii=0; ii<i[B->m]; ii++) {
    if (j[ii] < 0 || j[ii] >= B->N) {
      SETERRQ2(1,"Column index %d at position %d is out of range\n",j[ii],ii);
    }
  }
#endif

  b->j      = j;
  b->i      = i;
  b->values = values;

  b->nz        = i[B->m];
  b->diag      = 0;
  b->symmetric = PETSC_FALSE;
  b->freeaij   = PETSC_TRUE;

  ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNC__
#define __FUNC__ /*<a name=""></a>*/"MatCreateMPIAdj"
/*@C
   MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
   The matrix does not have numerical values associated with it, but is
   intended for ordering (to reduce bandwidth etc) and partitioning.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of local rows
.  n - number of columns
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Output Parameter:
.  A - the matrix

   Level: intermediate

   Notes: This matrix object does not support most matrix operations, including
   MatSetValues(). The arrays should not include the matrix diagonal entries.

   You must NOT free the i, j, and values arrays yourself; PETSc will free them
   when the matrix is destroyed. They must be allocated with PetscMalloc() (when
   calling from Fortran this requirement does not apply).

   Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC

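   Example usage - a minimal sketch of the local arrays one process might pass
   (a made-up 4-node graph where this process owns rows 0 and 1; assumes Mat A
   and int ierr are declared):
.vb
      int *ii,*jj;
      ii    = (int*)PetscMalloc(3*sizeof(int));
      jj    = (int*)PetscMalloc(4*sizeof(int));
      ii[0] = 0; ii[1] = 2; ii[2] = 4;   /* rows 0 and 1 each have two entries */
      jj[0] = 1; jj[1] = 3;              /* row 0 is adjacent to columns 1 and 3 */
      jj[2] = 0; jj[3] = 2;              /* row 1 is adjacent to columns 0 and 2 */
      ierr  = MatCreateMPIAdj(PETSC_COMM_WORLD,2,4,ii,jj,PETSC_NULL,&A);CHKERRQ(ierr);
.ve
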
.seealso: MatCreate(), MatCreateSeqAdj(), MatGetOrdering()
@*/
int MatCreateMPIAdj(MPI_Comm comm,int m,int n,int *i,int *j,int *values,Mat *A)
{
  int        ierr;

  PetscFunctionBegin;
  ierr = MatCreate(comm,m,n,PETSC_DETERMINE,n,A);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATMPIADJ);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(*A,i,j,values);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

EXTERN_C_BEGIN
#undef __FUNC__
#define __FUNC__ /*<a name=""></a>*/"MatConvertTo_MPIAdj"
int MatConvertTo_MPIAdj(Mat A,MatType type,Mat *B)
{
  int      i,ierr,m,N,nzeros = 0,*ia,*ja,*rj,len,rstart,cnt,j,*a;
  Scalar   *ra;
  MPI_Comm comm;

  PetscFunctionBegin;
  ierr = MatGetSize(A,PETSC_NULL,&N);CHKERRQ(ierr);
  ierr = MatGetLocalSize(A,&m,PETSC_NULL);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(A,&rstart,PETSC_NULL);CHKERRQ(ierr);

  /* count the number of nonzeros per row */
  for (i=0; i<m; i++) {
    ierr    = MatGetRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
    nzeros += len;
    for (j=0; j<len; j++) {
      if (rj[j] == i+rstart) {nzeros--; break;}    /* don't count the diagonal */
    }
    ierr = MatRestoreRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
  }

  /* malloc space for nonzeros */
  a  = (int*)PetscMalloc((nzeros+1)*sizeof(int));CHKPTRQ(a);
  ia = (int*)PetscMalloc((m+1)*sizeof(int));CHKPTRQ(ia);  /* one row offset per local row, plus one */
  ja = (int*)PetscMalloc((nzeros+1)*sizeof(int));CHKPTRQ(ja);

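  /* second pass: copy the off-diagonal column indices, using the absolute values
     of the matrix entries (truncated to int) as the edge weights */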
  nzeros = 0;
  ia[0]  = 0;
  for (i=0; i<m; i++) {
    ierr    = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    cnt     = 0;
    for (j=0; j<len; j++) {
      if (rj[j] != i+rstart) { /* if not diagonal */
        a[nzeros+cnt]    = (int)PetscAbsScalar(ra[j]);
        ja[nzeros+cnt++] = rj[j];
      }
    }
    ierr    = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    nzeros += cnt;
    ia[i+1] = nzeros;
  }

  ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
  ierr = MatCreateMPIAdj(comm,m,N,ia,ja,a,B);CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
EXTERN_C_END