/*$Id: mpiadj.c,v 1.66 2001/08/07 03:02:59 balay Exp $*/

/*
    Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
*/
#include "petscsys.h"
#include "src/mat/impls/adj/mpi/mpiadj.h"

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIAdj_ASCII"
int MatView_MPIAdj_ASCII(Mat A,PetscViewer viewer)
{
  Mat_MPIAdj        *a = (Mat_MPIAdj*)A->data;
  int               ierr,i,j,m = A->m;
  char              *name;
  PetscViewerFormat format;

  PetscFunctionBegin;
  ierr = PetscObjectGetName((PetscObject)A,&name);CHKERRQ(ierr);
  ierr = PetscViewerGetFormat(viewer,&format);CHKERRQ(ierr);
  if (format == PETSC_VIEWER_ASCII_INFO) {
    PetscFunctionReturn(0);
  } else if (format == PETSC_VIEWER_ASCII_MATLAB) {
    SETERRQ(PETSC_ERR_SUP,"Matlab format not supported");
  } else {
    ierr = PetscViewerASCIIUseTabs(viewer,PETSC_NO);CHKERRQ(ierr);
    for (i=0; i<m; i++) {
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"row %d:",i+a->rstart);CHKERRQ(ierr);
      for (j=a->i[i]; j<a->i[i+1]; j++) {
        ierr = PetscViewerASCIISynchronizedPrintf(viewer," %d ",a->j[j]);CHKERRQ(ierr);
      }
      ierr = PetscViewerASCIISynchronizedPrintf(viewer,"\n");CHKERRQ(ierr);
    }
    ierr = PetscViewerASCIIUseTabs(viewer,PETSC_YES);CHKERRQ(ierr);
  }
  ierr = PetscViewerFlush(viewer);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatView_MPIAdj"
int MatView_MPIAdj(Mat A,PetscViewer viewer)
{
  int        ierr;
  PetscTruth isascii;

  PetscFunctionBegin;
  ierr = PetscTypeCompare((PetscObject)viewer,PETSC_VIEWER_ASCII,&isascii);CHKERRQ(ierr);
  if (isascii) {
    ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
  } else {
    SETERRQ1(1,"Viewer type %s not supported by MPIAdj",((PetscObject)viewer)->type_name);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatDestroy_MPIAdj"
int MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)mat->data;
  int        ierr;

  PetscFunctionBegin;
#if defined(PETSC_USE_LOG)
  PetscLogObjectState((PetscObject)mat,"Rows=%d, Cols=%d, NZ=%d",mat->m,mat->n,a->nz);
#endif
  if (a->diag) {ierr = PetscFree(a->diag);CHKERRQ(ierr);}
  if (a->freeaij) {
    ierr = PetscFree(a->i);CHKERRQ(ierr);
    ierr = PetscFree(a->j);CHKERRQ(ierr);
    if (a->values) {ierr = PetscFree(a->values);CHKERRQ(ierr);}
  }
  ierr = PetscFree(a->rowners);CHKERRQ(ierr);
  ierr = PetscFree(a);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatSetOption_MPIAdj"
int MatSetOption_MPIAdj(Mat A,MatOption op)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;

  PetscFunctionBegin;
  switch (op) {
  case MAT_STRUCTURALLY_SYMMETRIC:
    a->symmetric = PETSC_TRUE;
    break;
  case MAT_USE_SINGLE_PRECISION_SOLVES:
  default:
    PetscLogInfo(A,"MatSetOption_MPIAdj:Option ignored\n");
    break;
  }
  PetscFunctionReturn(0);
}


/*
     Adds diagonal pointers to sparse matrix structure.
*/

#undef __FUNCT__
#define __FUNCT__ "MatMarkDiagonal_MPIAdj"
int MatMarkDiagonal_MPIAdj(Mat A)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
  int        i,j,*diag,m = A->m,ierr;

  PetscFunctionBegin;
  ierr = PetscMalloc((m+1)*sizeof(int),&diag);CHKERRQ(ierr);
  PetscLogObjectMemory(A,(m+1)*sizeof(int));
  /* zero the array so that rows with no diagonal entry have a well-defined value */
  ierr = PetscMemzero(diag,(m+1)*sizeof(int));CHKERRQ(ierr);
  for (i=0; i<m; i++) {
    for (j=a->i[i]; j<a->i[i+1]; j++) {
      if (a->j[j] == i) {
        diag[i] = j;
        break;
      }
    }
  }
  a->diag = diag;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetRow_MPIAdj"
int MatGetRow_MPIAdj(Mat A,int row,int *nz,int **idx,PetscScalar **v)
{
  Mat_MPIAdj *a = (Mat_MPIAdj*)A->data;
  int        *itmp;

  PetscFunctionBegin;
  row -= a->rstart;

  if (row < 0 || row >= A->m) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,"Row out of range");

  *nz = a->i[row+1] - a->i[row];
  if (v) *v = PETSC_NULL;
  if (idx) {
    itmp = a->j + a->i[row];
    if (*nz) *idx = itmp;
    else     *idx = 0;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatRestoreRow_MPIAdj"
int MatRestoreRow_MPIAdj(Mat A,int row,int *nz,int **idx,PetscScalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatGetBlockSize_MPIAdj"
int MatGetBlockSize_MPIAdj(Mat A,int *bs)
{
  PetscFunctionBegin;
  *bs = 1;
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "MatEqual_MPIAdj"
int MatEqual_MPIAdj(Mat A,Mat B,PetscTruth* flg)
{
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data,*b = (Mat_MPIAdj *)B->data;
  int         ierr;
  PetscTruth  flag;

  PetscFunctionBegin;
  ierr = PetscTypeCompare((PetscObject)B,MATMPIADJ,&flag);CHKERRQ(ierr);
  if (!flag) SETERRQ(PETSC_ERR_ARG_INCOMP,"Matrices must be same type");

  /* if the matrix dimensions or the number of nonzeros differ, the matrices cannot be equal */
  if ((A->m != B->m) || (a->nz != b->nz)) {
    flag = PETSC_FALSE;
  } else {
    /* compare the row pointers and, only if those match, the column indices */
    ierr = PetscMemcmp(a->i,b->i,(A->m+1)*sizeof(int),&flag);CHKERRQ(ierr);
    if (flag) {
      ierr = PetscMemcmp(a->j,b->j,(a->nz)*sizeof(int),&flag);CHKERRQ(ierr);
    }
  }

  /* the matrices are equal only if they are equal on every process */
  ierr = MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,A->comm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

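/*
   Gives access to the raw CSR arrays that describe the adjacency structure;
   only supported on a single process.  When oshift is nonzero the arrays are
   shifted in place to 1-based indexing (as expected by Fortran and by some
   external packages); MatRestoreRowIJ_MPIAdj() undoes the shift.
*/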
#undef __FUNCT__
#define __FUNCT__ "MatGetRowIJ_MPIAdj"
int MatGetRowIJ_MPIAdj(Mat A,int oshift,PetscTruth symmetric,int *m,int **ia,int **ja,PetscTruth *done)
{
  int        ierr,size,i;
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(A->comm,&size);CHKERRQ(ierr);
  if (size > 1) {*done = PETSC_FALSE; PetscFunctionReturn(0);}
  *m    = A->m;
  *ia   = a->i;
  *ja   = a->j;
  *done = PETSC_TRUE;
  if (oshift) {
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]++;
    }
    for (i=0; i<=(*m); i++) (*ia)[i]++;
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatRestoreRowIJ_MPIAdj"
int MatRestoreRowIJ_MPIAdj(Mat A,int oshift,PetscTruth symmetric,int *m,int **ia,int **ja,PetscTruth *done)
{
  int        i;
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data;

  PetscFunctionBegin;
  if (ia && a->i != *ia) SETERRQ(1,"ia passed back is not one obtained with MatGetRowIJ()");
  if (ja && a->j != *ja) SETERRQ(1,"ja passed back is not one obtained with MatGetRowIJ()");
  if (oshift) {
    for (i=0; i<=(*m); i++) (*ia)[i]--;
    for (i=0; i<(*ia)[*m]; i++) {
      (*ja)[i]--;
    }
  }
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------*/
static struct _MatOps MatOps_Values = {0,
       MatGetRow_MPIAdj,
       MatRestoreRow_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatEqual_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       MatSetOption_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatGetBlockSize_MPIAdj,
       MatGetRowIJ_MPIAdj,
       MatRestoreRowIJ_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatDestroy_MPIAdj,
       MatView_MPIAdj,
       MatGetPetscMaps_Petsc};


EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "MatCreate_MPIAdj"
int MatCreate_MPIAdj(Mat B)
{
  Mat_MPIAdj *b;
  int        ii,ierr,size,rank;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(B->comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(B->comm,&rank);CHKERRQ(ierr);

  ierr                = PetscNew(Mat_MPIAdj,&b);CHKERRQ(ierr);
  B->data             = (void*)b;
  ierr                = PetscMemzero(b,sizeof(Mat_MPIAdj));CHKERRQ(ierr);
  ierr                = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
  B->factor           = 0;
  B->lupivotthreshold = 1.0;
  B->mapping          = 0;
  B->assembled        = PETSC_FALSE;

  ierr = MPI_Allreduce(&B->m,&B->M,1,MPI_INT,MPI_SUM,B->comm);CHKERRQ(ierr);
  B->n = B->N;

  /* the information in the maps duplicates the information computed below, eventually
     we should remove the duplicate information that is not contained in the maps */
  ierr = PetscMapCreateMPI(B->comm,B->m,B->M,&B->rmap);CHKERRQ(ierr);
  /* we don't know the "local columns" so just use the row information :-(*/
  ierr = PetscMapCreateMPI(B->comm,B->m,B->M,&B->cmap);CHKERRQ(ierr);

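  /*
     rowners[p] .. rowners[p+1]-1 are the global rows owned by process p:
     gather every process's local row count, then turn the counts into a
     running sum
  */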
  ierr = PetscMalloc((size+1)*sizeof(int),&b->rowners);CHKERRQ(ierr);
  PetscLogObjectMemory(B,(size+2)*sizeof(int)+sizeof(struct _p_Mat)+sizeof(Mat_MPIAdj));
  ierr = MPI_Allgather(&B->m,1,MPI_INT,b->rowners+1,1,MPI_INT,B->comm);CHKERRQ(ierr);
  b->rowners[0] = 0;
  for (ii=2; ii<=size; ii++) {
    b->rowners[ii] += b->rowners[ii-1];
  }
  b->rstart = b->rowners[rank];
  b->rend   = b->rowners[rank+1];

  PetscFunctionReturn(0);
}
EXTERN_C_END

#undef __FUNCT__
#define __FUNCT__ "MatMPIAdjSetPreallocation"
int MatMPIAdjSetPreallocation(Mat B,int *i,int *j,int *values)
{
  Mat_MPIAdj *b = (Mat_MPIAdj *)B->data;
  int        ierr;
#if defined(PETSC_USE_BOPT_g)
  int        ii;
#endif

  PetscFunctionBegin;
  B->preallocated = PETSC_TRUE;
#if defined(PETSC_USE_BOPT_g)
  if (i[0] != 0) SETERRQ1(1,"First i[] index must be zero, instead it is %d\n",i[0]);
  for (ii=1; ii<B->m; ii++) {
    if (i[ii] < 0 || i[ii] < i[ii-1]) {
      SETERRQ4(1,"i[%d]=%d index is out of range: i[%d]=%d",ii,i[ii],ii-1,i[ii-1]);
    }
  }
  for (ii=0; ii<i[B->m]; ii++) {
    if (j[ii] < 0 || j[ii] >= B->N) {
      SETERRQ2(1,"Column index j[%d]=%d out of range\n",ii,j[ii]);
    }
  }
#endif

  b->j      = j;
  b->i      = i;
  b->values = values;

  b->nz               = i[B->m];
  b->diag             = 0;
  b->symmetric        = PETSC_FALSE;
  b->freeaij          = PETSC_TRUE;

  ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "MatCreateMPIAdj"
/*@C
   MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
   The matrix does not have numerical values associated with it, but is
   intended for ordering (to reduce bandwidth, etc.) and partitioning.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator
.  m - number of local rows
.  n - number of columns
.  i - the indices into j for the start of each row
.  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).
-  values - [optional] edge weights

   Output Parameter:
.  A - the matrix

   Level: intermediate

   Notes: This matrix object does not support most matrix operations,
   including MatSetValues().
   You must NOT free the i, j, and values arrays yourself; PETSc will free them
   when the matrix is destroyed. They must be allocated with PetscMalloc(). If you
   call from Fortran, however, you need not create the arrays with PetscMalloc().
   The arrays should not include the matrix diagonal.
   A small sketch of how to build the i and j arrays follows this routine.

   Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC

.seealso: MatCreate(), MatCreateSeqAdj(), MatGetOrdering()
@*/
int MatCreateMPIAdj(MPI_Comm comm,int m,int n,int *i,int *j,int *values,Mat *A)
{
  int        ierr;

  PetscFunctionBegin;
  ierr = MatCreate(comm,m,n,PETSC_DETERMINE,n,A);CHKERRQ(ierr);
  ierr = MatSetType(*A,MATMPIADJ);CHKERRQ(ierr);
  ierr = MatMPIAdjSetPreallocation(*A,i,j,values);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
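
/*
   Example (a sketch, not part of the library): building the adjacency matrix
   of the path graph 0-1-2-3 on one process.  Row k lists the neighbors of
   vertex k, 0-based and with no diagonal entry, so the row pointers are
   {0,1,3,5,6} and the column indices are {1, 0,2, 1,3, 2}.  The arrays are
   obtained with PetscMalloc() because the matrix takes ownership of them
   and frees them when it is destroyed:

      int ierr,*ii,*jj;
      Mat A;

      ierr = PetscMalloc(5*sizeof(int),&ii);CHKERRQ(ierr);
      ierr = PetscMalloc(6*sizeof(int),&jj);CHKERRQ(ierr);
      ii[0] = 0; ii[1] = 1; ii[2] = 3; ii[3] = 5; ii[4] = 6;
      jj[0] = 1;
      jj[1] = 0; jj[2] = 2;
      jj[3] = 1; jj[4] = 3;
      jj[5] = 2;
      ierr = MatCreateMPIAdj(PETSC_COMM_SELF,4,4,ii,jj,PETSC_NULL,&A);CHKERRQ(ierr);
      ierr = MatDestroy(A);CHKERRQ(ierr);
*/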

EXTERN_C_BEGIN
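/*
   Builds an MPIAdj matrix from an assembled numerical matrix: diagonal
   entries are dropped, and each remaining entry contributes its absolute
   value, truncated to an int, as the edge weight.
*/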
#undef __FUNCT__
#define __FUNCT__ "MatConvertTo_MPIAdj"
int MatConvertTo_MPIAdj(Mat A,MatType type,Mat *B)
{
  int          i,ierr,m,N,nzeros = 0,*ia,*ja,*rj,len,rstart,cnt,j,*a;
  PetscScalar  *ra;
  MPI_Comm     comm;

  PetscFunctionBegin;
  ierr = MatGetSize(A,PETSC_NULL,&N);CHKERRQ(ierr);
  ierr = MatGetLocalSize(A,&m,PETSC_NULL);CHKERRQ(ierr);
  ierr = MatGetOwnershipRange(A,&rstart,PETSC_NULL);CHKERRQ(ierr);

  /* count the number of nonzeros per row */
  for (i=0; i<m; i++) {
    ierr   = MatGetRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
    for (j=0; j<len; j++) {
      if (rj[j] == i+rstart) {len--; break;}    /* don't count diagonal */
    }
    ierr   = MatRestoreRow(A,i+rstart,&len,&rj,PETSC_NULL);CHKERRQ(ierr);
    nzeros += len;
  }

  /* malloc space for nonzeros */
  ierr = PetscMalloc((nzeros+1)*sizeof(int),&a);CHKERRQ(ierr);
  ierr = PetscMalloc((m+1)*sizeof(int),&ia);CHKERRQ(ierr);   /* one row pointer per local row, plus one */
  ierr = PetscMalloc((nzeros+1)*sizeof(int),&ja);CHKERRQ(ierr);

  nzeros = 0;
  ia[0]  = 0;
  for (i=0; i<m; i++) {
    ierr    = MatGetRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    cnt     = 0;
    for (j=0; j<len; j++) {
      if (rj[j] != i+rstart) { /* if not diagonal */
        a[nzeros+cnt]    = (int) PetscAbsScalar(ra[j]);
        ja[nzeros+cnt++] = rj[j];
      }
    }
    ierr    = MatRestoreRow(A,i+rstart,&len,&rj,&ra);CHKERRQ(ierr);
    nzeros += cnt;
    ia[i+1] = nzeros;
  }

  ierr = PetscObjectGetComm((PetscObject)A,&comm);CHKERRQ(ierr);
  ierr = MatCreateMPIAdj(comm,m,N,ia,ja,a,B);CHKERRQ(ierr);

  PetscFunctionReturn(0);
}
EXTERN_C_END