xref: /petsc/src/mat/impls/adj/mpi/mpiadj.c (revision 0f5bd95cca42d693ada6d048329ab39533680180)
#ifdef PETSC_RCS_HEADER
static char vcid[] = "$Id: mpiadj.c,v 1.28 1999/10/01 21:21:33 bsmith Exp bsmith $";
#endif

/*
    Defines the basic matrix operations for the ADJ adjacency list matrix data-structure.
*/
#include "sys.h"
#include "src/mat/impls/adj/mpi/mpiadj.h"

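/*
    The adjacency information for the locally owned rows is stored in compressed
    sparse row (CSR) form: a->i[k] is the offset into a->j at which row k begins,
    so row k has the column indices a->j[a->i[k]] ... a->j[a->i[k+1]-1].

    As a small illustrative example (the data below is made up), a process owning
    three rows with

        row 0: neighbors 1 3
        row 1: neighbors 0 2
        row 2: neighbors 1 3

    would hold

        a->i = {0,2,4,6}
        a->j = {1,3, 0,2, 1,3}

    with all indices zero based and sorted within each row.
*/
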
#undef __FUNC__
#define __FUNC__ "MatView_MPIAdj_ASCII"
extern int MatView_MPIAdj_ASCII(Mat A,Viewer viewer)
{
  Mat_MPIAdj  *a = (Mat_MPIAdj *) A->data;
  int         ierr, i,j, m = a->m,  format;
  FILE        *fd;
  char        *outputname;
  MPI_Comm    comm = A->comm;

  PetscFunctionBegin;
  ierr = ViewerASCIIGetPointer(viewer,&fd);CHKERRQ(ierr);
  ierr = ViewerGetOutputname(viewer,&outputname);CHKERRQ(ierr);
  ierr = ViewerGetFormat(viewer,&format);CHKERRQ(ierr);
  if (format == VIEWER_FORMAT_ASCII_INFO) {
    PetscFunctionReturn(0);
  } else {
    for ( i=0; i<m; i++ ) {
      ierr = PetscSynchronizedFPrintf(comm,fd,"row %d:",i+a->rstart);CHKERRQ(ierr);
      for ( j=a->i[i]; j<a->i[i+1]; j++ ) {
        ierr = PetscSynchronizedFPrintf(comm,fd," %d ",a->j[j]);CHKERRQ(ierr);
      }
      ierr = PetscSynchronizedFPrintf(comm,fd,"\n");CHKERRQ(ierr);
    }
  }
  ierr = PetscSynchronizedFlush(comm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNC__
#define __FUNC__ "MatView_MPIAdj"
int MatView_MPIAdj(Mat A,Viewer viewer)
{
  int         ierr;
  int         isascii;

  PetscFunctionBegin;
  isascii = PetscTypeCompare(viewer,ASCII_VIEWER);
  if (isascii) {
    ierr = MatView_MPIAdj_ASCII(A,viewer);CHKERRQ(ierr);
  } else {
    SETERRQ1(1,1,"Viewer type %s not supported by MPIAdj",((PetscObject)viewer)->type_name);
  }
  PetscFunctionReturn(0);
}

#undef __FUNC__
#define __FUNC__ "MatDestroy_MPIAdj"
int MatDestroy_MPIAdj(Mat mat)
{
  Mat_MPIAdj *a = (Mat_MPIAdj *) mat->data;
  int        ierr;

  PetscFunctionBegin;

  if (mat->mapping) {
    ierr = ISLocalToGlobalMappingDestroy(mat->mapping);CHKERRQ(ierr);
  }
  if (mat->bmapping) {
    ierr = ISLocalToGlobalMappingDestroy(mat->bmapping);CHKERRQ(ierr);
  }
  if (mat->rmap) {
    ierr = MapDestroy(mat->rmap);CHKERRQ(ierr);
  }
  if (mat->cmap) {
    ierr = MapDestroy(mat->cmap);CHKERRQ(ierr);
  }

#if defined(PETSC_USE_LOG)
  PLogObjectState((PetscObject)mat,"Rows=%d, Cols=%d, NZ=%d",mat->m,mat->n,a->nz);
#endif
  if (a->diag) {ierr = PetscFree(a->diag);CHKERRQ(ierr);}
  ierr = PetscFree(a->i);CHKERRQ(ierr);
  ierr = PetscFree(a->j);CHKERRQ(ierr);
  ierr = PetscFree(a->rowners);CHKERRQ(ierr);
  ierr = PetscFree(a);CHKERRQ(ierr);

  PLogObjectDestroy(mat);
  PetscHeaderDestroy(mat);
  PetscFunctionReturn(0);
}


#undef __FUNC__
#define __FUNC__ "MatSetOption_MPIAdj"
int MatSetOption_MPIAdj(Mat A,MatOption op)
{
  Mat_MPIAdj *a = (Mat_MPIAdj *) A->data;

  PetscFunctionBegin;
  if (op == MAT_STRUCTURALLY_SYMMETRIC) {
    a->symmetric = PETSC_TRUE;
  } else {
    PLogInfo(A,"MatSetOption_MPIAdj:Option ignored\n");
  }
  PetscFunctionReturn(0);
}


/*
     Adds diagonal pointers to the sparse matrix structure: diag[i] holds the
     offset into a->j of the diagonal entry of row i.
*/

#undef __FUNC__
#define __FUNC__ "MatMarkDiag_MPIAdj"
int MatMarkDiag_MPIAdj(Mat A)
{
  Mat_MPIAdj *a = (Mat_MPIAdj *) A->data;
  int        i,j, *diag, m = a->m;

  PetscFunctionBegin;
  diag = (int *) PetscMalloc( (m+1)*sizeof(int));CHKPTRQ(diag);
  PLogObjectMemory(A,(m+1)*sizeof(int));
  for ( i=0; i<a->m; i++ ) {
    for ( j=a->i[i]; j<a->i[i+1]; j++ ) {
      if (a->j[j] == i) {
        diag[i] = j;
        break;
      }
    }
  }
  a->diag = diag;
  PetscFunctionReturn(0);
}
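
/*
    As an illustrative example of what MatMarkDiag_MPIAdj() computes: for the CSR
    structure

        a->i = {0,3,5},  a->j = {0,2,4, 1,3}

    row 0 finds its diagonal entry (column 0) at position 0 of a->j, and row 1
    finds its diagonal entry (column 1) at position 3, so a->diag = {0,3}.  A row
    with no diagonal entry leaves its slot in diag[] unset.
*/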

#undef __FUNC__
#define __FUNC__ "MatGetSize_MPIAdj"
int MatGetSize_MPIAdj(Mat A,int *m,int *n)
{
  PetscFunctionBegin;
  if (m) *m = A->M;
  if (n) *n = A->N;
  PetscFunctionReturn(0);
}

#undef __FUNC__
#define __FUNC__ "MatGetLocalSize_MPIAdj"
int MatGetLocalSize_MPIAdj(Mat A,int *m,int *n)
{
  Mat_MPIAdj *a = (Mat_MPIAdj *) A->data;
  PetscFunctionBegin;
  if (m) *m = a->m;
  if (n) *n = A->N;
  PetscFunctionReturn(0);
}

#undef __FUNC__
#define __FUNC__ "MatGetOwnershipRange_MPIAdj"
int MatGetOwnershipRange_MPIAdj(Mat A,int *m,int *n)
{
  Mat_MPIAdj *a = (Mat_MPIAdj *) A->data;
  PetscFunctionBegin;
  *m = a->rstart; *n = a->rend;
  PetscFunctionReturn(0);
}

#undef __FUNC__
#define __FUNC__ "MatGetRow_MPIAdj"
int MatGetRow_MPIAdj(Mat A,int row,int *nz,int **idx,Scalar **v)
{
  Mat_MPIAdj *a = (Mat_MPIAdj *) A->data;
  int        *itmp;

  PetscFunctionBegin;
  row -= a->rstart;

  if (row < 0 || row >= a->m) SETERRQ(PETSC_ERR_ARG_OUTOFRANGE,0,"Row out of range");

  *nz = a->i[row+1] - a->i[row];
  if (v) *v = PETSC_NULL;
  if (idx) {
    itmp = a->j + a->i[row];
    if (*nz) {
      *idx = itmp;
    } else {
      *idx = 0;
    }
  }
  PetscFunctionReturn(0);
}

#undef __FUNC__
#define __FUNC__ "MatRestoreRow_MPIAdj"
int MatRestoreRow_MPIAdj(Mat A,int row,int *nz,int **idx,Scalar **v)
{
  PetscFunctionBegin;
  PetscFunctionReturn(0);
}

#undef __FUNC__
#define __FUNC__ "MatGetBlockSize_MPIAdj"
int MatGetBlockSize_MPIAdj(Mat A, int *bs)
{
  PetscFunctionBegin;
  *bs = 1;
  PetscFunctionReturn(0);
}


#undef __FUNC__
#define __FUNC__ "MatEqual_MPIAdj"
int MatEqual_MPIAdj(Mat A,Mat B, PetscTruth* flg)
{
  Mat_MPIAdj *a = (Mat_MPIAdj *)A->data, *b = (Mat_MPIAdj *)B->data;
  int         ierr;
  PetscTruth  flag;

  PetscFunctionBegin;
  if (B->type != MATMPIADJ) SETERRQ(PETSC_ERR_ARG_INCOMP,0,"Matrices must be same type");

  /* If the matrix dimensions or the number of nonzeros differ, the matrices are not equal */
  if ((a->m != b->m) || (a->nz != b->nz)) {
    flag = PETSC_FALSE;
  } else {
    /* compare the row offsets a->i */
    ierr = PetscMemcmp(a->i,b->i,(a->m+1)*sizeof(int),&flag);CHKERRQ(ierr);
    if (flag) {
      /* compare the column indices a->j */
      ierr = PetscMemcmp(a->j,b->j,(a->nz)*sizeof(int),&flag);CHKERRQ(ierr);
    }
  }

  ierr = MPI_Allreduce(&flag,flg,1,MPI_INT,MPI_LAND,A->comm);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}


/* -------------------------------------------------------------------*/
static struct _MatOps MatOps_Values = {0,
       MatGetRow_MPIAdj,
       MatRestoreRow_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatEqual_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       MatSetOption_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       MatGetSize_MPIAdj,
       MatGetLocalSize_MPIAdj,
       MatGetOwnershipRange_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatGetBlockSize_MPIAdj,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       0,
       MatGetMaps_Petsc};


#undef __FUNC__
#define __FUNC__ "MatCreateMPIAdj"
/*@C
   MatCreateMPIAdj - Creates a sparse matrix representing an adjacency list.
   The matrix does not have numerical values associated with it, but is
   intended for ordering (to reduce bandwidth, etc.) and partitioning.

   Collective on MPI_Comm

   Input Parameters:
+  comm - MPI communicator, set to PETSC_COMM_SELF
.  m - number of local rows
.  n - number of columns
.  i - the indices into j for the start of each row
-  j - the column indices for each row (sorted for each row).
       The indices in i and j start with zero (NOT with one).

   Output Parameter:
.  A - the matrix

   Level: intermediate

   Notes: This matrix object does not support most matrix operations, including
   MatSetValues().
   You must NOT free the i and j arrays yourself. PETSc will free them
   when the matrix is destroyed.

   Possible values for MatSetOption() - MAT_STRUCTURALLY_SYMMETRIC

.seealso: MatCreate(), MatCreateSeqAdj(), MatGetOrdering()
@*/
int MatCreateMPIAdj(MPI_Comm comm,int m,int n,int *i,int *j, Mat *A)
{
  Mat        B;
  Mat_MPIAdj *b;
  int        ii,ierr, flg,size,rank;

  PetscFunctionBegin;
  ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
  ierr = MPI_Comm_rank(comm,&rank);CHKERRQ(ierr);

  *A                  = 0;
  PetscHeaderCreate(B,_p_Mat,struct _MatOps,MAT_COOKIE,MATMPIADJ,"Mat",comm,MatDestroy,MatView);
  PLogObjectCreate(B);
  B->data             = (void *) (b = PetscNew(Mat_MPIAdj));CHKPTRQ(b);
  ierr                = PetscMemzero(b,sizeof(Mat_MPIAdj));CHKERRQ(ierr);
  ierr                = PetscMemcpy(B->ops,&MatOps_Values,sizeof(struct _MatOps));CHKERRQ(ierr);
  B->ops->destroy     = MatDestroy_MPIAdj;
  B->ops->view        = MatView_MPIAdj;
  B->factor           = 0;
  B->lupivotthreshold = 1.0;
  B->mapping          = 0;
  B->assembled        = PETSC_FALSE;

  b->m = m; B->m = m;
  ierr = MPI_Allreduce(&m,&B->M,1,MPI_INT,MPI_SUM,comm);CHKERRQ(ierr);
  B->n = n; B->N = n;

  /* the information in the maps duplicates the information computed below, eventually
     we should remove the duplicate information that is not contained in the maps */
  ierr = MapCreateMPI(comm,m,B->M,&B->rmap);CHKERRQ(ierr);
  /* we don't know the "local columns" so just use the row information :-( */
  ierr = MapCreateMPI(comm,m,B->M,&B->cmap);CHKERRQ(ierr);

  b->rowners = (int *) PetscMalloc((size+1)*sizeof(int));CHKPTRQ(b->rowners);
  PLogObjectMemory(B,(size+2)*sizeof(int)+sizeof(struct _p_Mat)+sizeof(Mat_MPIAdj));
  ierr = MPI_Allgather(&m,1,MPI_INT,b->rowners+1,1,MPI_INT,comm);CHKERRQ(ierr);
  b->rowners[0] = 0;
  for ( ii=2; ii<=size; ii++ ) {
    b->rowners[ii] += b->rowners[ii-1];
  }
  b->rstart = b->rowners[rank];
  b->rend   = b->rowners[rank+1];

  b->j  = j;
  b->i  = i;

  b->nz               = i[m];
  b->diag             = 0;
  b->symmetric        = PETSC_FALSE;

  *A = B;

  ierr = OptionsHasName(PETSC_NULL,"-help", &flg);CHKERRQ(ierr);
  if (flg) {ierr = MatPrintHelp(B);CHKERRQ(ierr); }
  ierr = MatAssemblyBegin(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  ierr = MatAssemblyEnd(B,MAT_FINAL_ASSEMBLY);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
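
/*
    A minimal usage sketch (illustrative only; comm is whatever communicator the
    caller is using, and the numbers are made up): each process supplies its local
    rows of the adjacency graph in CSR form.  The row and column arrays should be
    allocated with PetscMalloc(), since the matrix takes ownership of them and
    releases them with PetscFree() when it is destroyed.

        Mat A;
        int ierr,*ii,*jj;

        ii    = (int *) PetscMalloc(3*sizeof(int));CHKPTRQ(ii);
        jj    = (int *) PetscMalloc(4*sizeof(int));CHKPTRQ(jj);
        ii[0] = 0; ii[1] = 2; ii[2] = 4;
        jj[0] = 1; jj[1] = 3; jj[2] = 0; jj[3] = 2;
        ierr  = MatCreateMPIAdj(comm,2,4,ii,jj,&A);CHKERRQ(ierr);
        ...
        ierr  = MatDestroy(A);CHKERRQ(ierr);
*/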