xref: /petsc/src/ksp/pc/impls/redistribute/redistribute.c (revision 7d5fd1e4d9337468ad3f05b65b7facdcd2dfd2a4)

/*
  Defines the PCREDISTRIBUTE preconditioner: rows of the matrix that contain only a
  diagonal entry are eliminated, and the remaining (coupled) rows are redistributed
  evenly across the processes; an inner KSP then solves the reduced, load-balanced system.
*/
#include <petsc/private/pcimpl.h>     /*I "petscksp.h" I*/
#include <petscksp.h>

typedef struct {
  KSP         ksp;            /* inner solver for the redistributed (reduced) system */
  Vec         x,b;            /* solution and right-hand side of the reduced system */
  VecScatter  scatter;        /* moves entries between the original and reduced vectors */
  IS          is;             /* global indices of the rows kept in the reduced system */
  PetscInt    dcnt,*drows;    /* the local rows that have only a diagonal entry */
  PetscScalar *diag;          /* inverses of the diagonal entries of those rows */
  Vec         work;
} PC_Redistribute;
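
/*
  Editor's note, sketching the algebra that PCApply_Redistribute() below implements.
  Split the unknowns into the diagonal-only rows D and the remaining rows R (the block
  names are illustrative only, they do not appear in the code). Because the D rows
  contain only their diagonal entry, A_DR = 0 and the system is block lower triangular:

     [ A_DD   0   ] [x_D]   [b_D]        x_D solved pointwise:      x_D = A_DD^{-1} b_D
     [ A_RD  A_RR ] [x_R] = [b_R]   =>   x_R solved by inner KSP:   A_RR x_R = b_R - A_RD x_D

  with A_DD diagonal. The stored red->diag holds A_DD^{-1}, red->scatter extracts the
  R rows, and red->ksp solves the reduced system on the load-balanced distribution.
*/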

static PetscErrorCode PCView_Redistribute(PC pc,PetscViewer viewer)
{
  PC_Redistribute *red = (PC_Redistribute*)pc->data;
  PetscErrorCode  ierr;
  PetscBool       iascii,isstring;
  PetscInt        ncnt,N;

  PetscFunctionBegin;
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscObjectTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    ierr = MPIU_Allreduce(&red->dcnt,&ncnt,1,MPIU_INT,MPI_SUM,PetscObjectComm((PetscObject)pc));CHKERRMPI(ierr);
    ierr = MatGetSize(pc->pmat,&N,NULL);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer,"    Number rows eliminated %D Percentage rows eliminated %g\n",ncnt,100.0*((PetscReal)ncnt)/((PetscReal)N));CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer,"  Redistribute preconditioner: \n");CHKERRQ(ierr);
    ierr = KSPView(red->ksp,viewer);CHKERRQ(ierr);
  } else if (isstring) {
    ierr = PetscViewerStringSPrintf(viewer," Redistribute preconditioner");CHKERRQ(ierr);
    ierr = KSPView(red->ksp,viewer);CHKERRQ(ierr);
  }
  PetscFunctionReturn(0);
}

static PetscErrorCode PCSetUp_Redistribute(PC pc)
{
  PC_Redistribute          *red = (PC_Redistribute*)pc->data;
  PetscErrorCode           ierr;
  MPI_Comm                 comm;
  PetscInt                 rstart,rend,i,nz,cnt,*rows,ncnt,dcnt,*drows;
  PetscLayout              map,nmap;
  PetscMPIInt              size,tag,n;
  PETSC_UNUSED PetscMPIInt imdex;
  PetscInt                 *source = NULL;
  PetscMPIInt              *sizes = NULL,nrecvs;
  PetscInt                 j,nsends;
  PetscInt                 *owner = NULL,*starts = NULL,count,slen;
  PetscInt                 *rvalues,*svalues,recvtotal;
  PetscMPIInt              *onodes1,*olengths1;
  MPI_Request              *send_waits = NULL,*recv_waits = NULL;
  MPI_Status               recv_status,*send_status;
  Vec                      tvec,diag;
  Mat                      tmat;
  const PetscScalar        *d,*values;
  const PetscInt           *cols;

  PetscFunctionBegin;
  if (pc->setupcalled) {
    ierr = KSPGetOperators(red->ksp,NULL,&tmat);CHKERRQ(ierr);
    ierr = MatCreateSubMatrix(pc->pmat,red->is,red->is,MAT_REUSE_MATRIX,&tmat);CHKERRQ(ierr);
    ierr = KSPSetOperators(red->ksp,tmat,tmat);CHKERRQ(ierr);
  } else {
    PetscInt NN;

    ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
    ierr = MPI_Comm_size(comm,&size);CHKERRMPI(ierr);
    ierr = PetscObjectGetNewTag((PetscObject)pc,&tag);CHKERRQ(ierr);

    /* count the rows on this process that have an off-diagonal nonzero; the row structure
       of pc->pmat is analyzed since the reduced matrix is extracted from pc->pmat */
    ierr = MatGetOwnershipRange(pc->pmat,&rstart,&rend);CHKERRQ(ierr);
    cnt  = 0;
    for (i=rstart; i<rend; i++) {
      ierr = MatGetRow(pc->pmat,i,&nz,&cols,&values);CHKERRQ(ierr);
      for (PetscInt j=0; j<nz; j++) {
        if (values[j] != 0 && cols[j] != i) {
          cnt++;
          break;
        }
      }
      ierr = MatRestoreRow(pc->pmat,i,&nz,&cols,&values);CHKERRQ(ierr);
    }
    ierr = PetscMalloc1(cnt,&rows);CHKERRQ(ierr);
    ierr = PetscMalloc1(rend - rstart - cnt,&drows);CHKERRQ(ierr);

    /* list the non-diagonal rows (global numbering) and the diagonal-only rows (local numbering) on this process */
    cnt = 0; dcnt = 0;
    for (i=rstart; i<rend; i++) {
      PetscBool diagonly = PETSC_TRUE;
      ierr = MatGetRow(pc->pmat,i,&nz,&cols,&values);CHKERRQ(ierr);
      for (PetscInt j=0; j<nz; j++) {
        if (values[j] != 0 && cols[j] != i) {
          diagonly = PETSC_FALSE;
          break;
        }
      }
      if (!diagonly) rows[cnt++] = i;
      else drows[dcnt++] = i - rstart;
      ierr = MatRestoreRow(pc->pmat,i,&nz,&cols,&values);CHKERRQ(ierr);
    }

    /* create PetscLayout for the non-diagonal rows in their current distribution */
    ierr   = PetscLayoutCreate(comm,&map);CHKERRQ(ierr);
    ierr   = PetscLayoutSetLocalSize(map,cnt);CHKERRQ(ierr);
    ierr   = PetscLayoutSetBlockSize(map,1);CHKERRQ(ierr);
    ierr   = PetscLayoutSetUp(map);CHKERRQ(ierr);
    rstart = map->rstart;
    rend   = map->rend;

    /* create PetscLayout for the load-balanced distribution of the non-diagonal rows */
    ierr = PetscLayoutCreate(comm,&nmap);CHKERRQ(ierr);
    ierr = MPIU_Allreduce(&cnt,&ncnt,1,MPIU_INT,MPI_SUM,comm);CHKERRMPI(ierr);
    ierr = PetscLayoutSetSize(nmap,ncnt);CHKERRQ(ierr);
    ierr = PetscLayoutSetBlockSize(nmap,1);CHKERRQ(ierr);
    ierr = PetscLayoutSetUp(nmap);CHKERRQ(ierr);

    ierr = MatGetSize(pc->pmat,&NN,NULL);CHKERRQ(ierr);
    ierr = PetscInfo2(pc,"Number of diagonal rows eliminated %D, percentage eliminated %g\n",NN-ncnt,100.0*((PetscReal)(NN-ncnt))/((PetscReal)(NN)));CHKERRQ(ierr);

    if (size > 1) {
      /* The following code assumes MPI can send messages to itself, which is not supported
         by MPI-uni, hence the size 1 case is handled separately below */
      /*
        This code is adapted from VecScatterCreate_PtoS(); it determines which rows need
        to be moved where in order to load balance the non-diagonal rows
      */
      /* count the number of contributors to each processor */
      ierr   = PetscMalloc2(size,&sizes,cnt,&owner);CHKERRQ(ierr);
      ierr   = PetscArrayzero(sizes,size);CHKERRQ(ierr);
      j      = 0;
      nsends = 0;
      for (i=rstart; i<rend; i++) {
        if (i < nmap->range[j]) j = 0;
        for (; j<size; j++) {
          if (i < nmap->range[j+1]) {
            if (!sizes[j]++) nsends++;
            owner[i-rstart] = j;
            break;
          }
        }
      }
      /* inform the other processors of the number of messages and the maximum length */
      ierr      = PetscGatherNumberOfMessages(comm,NULL,sizes,&nrecvs);CHKERRQ(ierr);
      ierr      = PetscGatherMessageLengths(comm,nsends,nrecvs,sizes,&onodes1,&olengths1);CHKERRQ(ierr);
      ierr      = PetscSortMPIIntWithArray(nrecvs,onodes1,olengths1);CHKERRQ(ierr);
      recvtotal = 0; for (i=0; i<nrecvs; i++) recvtotal += olengths1[i];

      /* post receives: rvalues holds the global indices of the rows this process will own; count is the running offset into rvalues */
      ierr  = PetscMalloc3(recvtotal,&rvalues,nrecvs,&source,nrecvs,&recv_waits);CHKERRQ(ierr);
      count = 0;
      for (i=0; i<nrecvs; i++) {
        ierr   = MPI_Irecv((rvalues+count),olengths1[i],MPIU_INT,onodes1[i],tag,comm,recv_waits+i);CHKERRMPI(ierr);
        count += olengths1[i];
      }

      /* do the sends: starts[i] gives the starting index in svalues of the entries going to the ith processor */
      ierr      = PetscMalloc3(cnt,&svalues,nsends,&send_waits,size,&starts);CHKERRQ(ierr);
      starts[0] = 0;
      for (i=1; i<size; i++) starts[i] = starts[i-1] + sizes[i-1];
      for (i=0; i<cnt; i++)  svalues[starts[owner[i]]++] = rows[i];
      for (i=0; i<cnt; i++)  rows[i] = rows[i] - rstart;
      red->drows = drows;
      red->dcnt  = dcnt;
      ierr       = PetscFree(rows);CHKERRQ(ierr);

      starts[0] = 0;
      for (i=1; i<size; i++) starts[i] = starts[i-1] + sizes[i-1];
      count = 0;
      for (i=0; i<size; i++) {
        if (sizes[i]) {
          ierr = MPI_Isend(svalues+starts[i],sizes[i],MPIU_INT,i,tag,comm,send_waits+count++);CHKERRMPI(ierr);
        }
      }

      /* wait on the receives */
      count = nrecvs;
      slen  = 0;
      while (count) {
        ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRMPI(ierr);
        /* unpack receives into our local space */
        ierr  = MPI_Get_count(&recv_status,MPIU_INT,&n);CHKERRMPI(ierr);
        slen += n;
        count--;
      }
      if (slen != recvtotal) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Total message lengths %D not expected %D",slen,recvtotal);
      ierr = ISCreateGeneral(comm,slen,rvalues,PETSC_COPY_VALUES,&red->is);CHKERRQ(ierr);

      /* free all work space */
      ierr = PetscFree(olengths1);CHKERRQ(ierr);
      ierr = PetscFree(onodes1);CHKERRQ(ierr);
      ierr = PetscFree3(rvalues,source,recv_waits);CHKERRQ(ierr);
      ierr = PetscFree2(sizes,owner);CHKERRQ(ierr);
      if (nsends) {   /* wait on the sends */
        ierr = PetscMalloc1(nsends,&send_status);CHKERRQ(ierr);
        ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRMPI(ierr);
        ierr = PetscFree(send_status);CHKERRQ(ierr);
      }
      ierr = PetscFree3(svalues,send_waits,starts);CHKERRQ(ierr);
    } else {
      ierr = ISCreateGeneral(comm,cnt,rows,PETSC_OWN_POINTER,&red->is);CHKERRQ(ierr);
      red->drows = drows;
      red->dcnt  = dcnt;
      slen = cnt;
    }
    ierr = PetscLayoutDestroy(&map);CHKERRQ(ierr);
    ierr = PetscLayoutDestroy(&nmap);CHKERRQ(ierr);

    ierr = VecCreateMPI(comm,slen,PETSC_DETERMINE,&red->b);CHKERRQ(ierr);
    ierr = VecDuplicate(red->b,&red->x);CHKERRQ(ierr);
    ierr = MatCreateVecs(pc->pmat,&tvec,NULL);CHKERRQ(ierr);
    ierr = VecScatterCreate(tvec,red->is,red->b,NULL,&red->scatter);CHKERRQ(ierr);
    ierr = VecDestroy(&tvec);CHKERRQ(ierr);
    ierr = MatCreateSubMatrix(pc->pmat,red->is,red->is,MAT_INITIAL_MATRIX,&tmat);CHKERRQ(ierr);
    ierr = KSPSetOperators(red->ksp,tmat,tmat);CHKERRQ(ierr);
    ierr = MatDestroy(&tmat);CHKERRQ(ierr);
  }

  /* store the inverted diagonal entries of the rows that are solved directly */
  ierr = PetscFree(red->diag);CHKERRQ(ierr);
  ierr = PetscMalloc1(red->dcnt,&red->diag);CHKERRQ(ierr);
  ierr = MatCreateVecs(pc->pmat,&diag,NULL);CHKERRQ(ierr);
  ierr = MatGetDiagonal(pc->pmat,diag);CHKERRQ(ierr);
  ierr = VecGetArrayRead(diag,&d);CHKERRQ(ierr);
  for (i=0; i<red->dcnt; i++) red->diag[i] = 1.0/d[red->drows[i]];
  ierr = VecRestoreArrayRead(diag,&d);CHKERRQ(ierr);
  ierr = VecDestroy(&diag);CHKERRQ(ierr);
  ierr = KSPSetUp(red->ksp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
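
/*
  Editor's note: a small worked example of the load balancing performed by
  PCSetUp_Redistribute() above (the numbers here are illustrative only). Suppose two
  processes each own 100 rows, and after eliminating the diagonal-only rows rank 0
  keeps 90 coupled rows while rank 1 keeps 10. The balanced layout nmap then assigns
  50 coupled rows to each rank, so rank 0 sends the global indices of its last 40
  coupled rows to rank 1; red->is on each rank ends up listing the 50 original
  (unpermuted) global row indices that the rank owns in the reduced system.
*/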

static PetscErrorCode PCApply_Redistribute(PC pc,Vec b,Vec x)
{
  PC_Redistribute   *red = (PC_Redistribute*)pc->data;
  PetscErrorCode    ierr;
  PetscInt          dcnt   = red->dcnt,i;
  const PetscInt    *drows = red->drows;
  PetscScalar       *xwork;
  const PetscScalar *bwork,*diag = red->diag;

  PetscFunctionBegin;
  if (!red->work) {
    ierr = VecDuplicate(b,&red->work);CHKERRQ(ierr);
  }
  /* compute the rows of the solution that have diagonal entries only */
  ierr = VecSet(x,0.0);CHKERRQ(ierr);         /* x = diag(A)^{-1} b */
  ierr = VecGetArray(x,&xwork);CHKERRQ(ierr);
  ierr = VecGetArrayRead(b,&bwork);CHKERRQ(ierr);
  for (i=0; i<dcnt; i++) xwork[drows[i]] = diag[i]*bwork[drows[i]];
  ierr = PetscLogFlops(dcnt);CHKERRQ(ierr);
  ierr = VecRestoreArray(x,&xwork);CHKERRQ(ierr);
  ierr = VecRestoreArrayRead(b,&bwork);CHKERRQ(ierr);
  /* update the right-hand side for the reduced system, with the diagonal rows (and corresponding columns) removed */
  ierr = MatMult(pc->pmat,x,red->work);CHKERRQ(ierr);
  ierr = VecAYPX(red->work,-1.0,b);CHKERRQ(ierr);   /* red->work = b - A x */

  ierr = VecScatterBegin(red->scatter,red->work,red->b,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatter,red->work,red->b,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = KSPSolve(red->ksp,red->b,red->x);CHKERRQ(ierr);
  ierr = KSPCheckSolve(red->ksp,pc,red->x);CHKERRQ(ierr);
  ierr = VecScatterBegin(red->scatter,red->x,x,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatter,red->x,x,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCDestroy_Redistribute(PC pc)
{
  PC_Redistribute *red = (PC_Redistribute*)pc->data;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  ierr = VecScatterDestroy(&red->scatter);CHKERRQ(ierr);
  ierr = ISDestroy(&red->is);CHKERRQ(ierr);
  ierr = VecDestroy(&red->b);CHKERRQ(ierr);
  ierr = VecDestroy(&red->x);CHKERRQ(ierr);
  ierr = KSPDestroy(&red->ksp);CHKERRQ(ierr);
  ierr = VecDestroy(&red->work);CHKERRQ(ierr);
  ierr = PetscFree(red->drows);CHKERRQ(ierr);
  ierr = PetscFree(red->diag);CHKERRQ(ierr);
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

static PetscErrorCode PCSetFromOptions_Redistribute(PetscOptionItems *PetscOptionsObject,PC pc)
{
  PetscErrorCode  ierr;
  PC_Redistribute *red = (PC_Redistribute*)pc->data;

  PetscFunctionBegin;
  ierr = KSPSetFromOptions(red->ksp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

/*@
   PCRedistributeGetKSP - Gets the KSP created by the PCREDISTRIBUTE preconditioner

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  innerksp - the inner KSP

   Level: advanced

@*/
PetscErrorCode  PCRedistributeGetKSP(PC pc,KSP *innerksp)
{
  PC_Redistribute *red = (PC_Redistribute*)pc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(innerksp,2);
  *innerksp = red->ksp;
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------------------*/
/*MC
     PCREDISTRIBUTE - Redistributes a matrix for load balancing, removing the rows that contain only a diagonal entry, and then applies a KSP to the reduced matrix

     Options for the inner KSP and its PC can be set with -redistribute_ksp_xxx <values> and -redistribute_pc_xxx <values>

     Notes:
     Usually run this with -ksp_type preonly

     If you have used MatZeroRows() to eliminate (for example, Dirichlet) boundary conditions for a symmetric problem then you can use, for example, -ksp_type preonly
     -pc_type redistribute -redistribute_ksp_type cg -redistribute_pc_type bjacobi -redistribute_sub_pc_type icc to take advantage of the symmetry.

     This does NOT call a partitioner to reorder rows to lower communication; the ordering of the rows in the original matrix and the redistributed matrix is the same.

     Developer Notes:
     Should add an option to this preconditioner to use a partitioner to redistribute the rows to lower communication.

   Level: intermediate

.seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PCRedistributeGetKSP()
M*/
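
/*
  Editor's sketch of typical usage from code, equivalent to the command line options
  described above. All calls are standard PETSc API; the matrix A and the vectors b
  and x are assumed to be already assembled:

    KSP ksp,innerksp;
    PC  pc;

    ierr = KSPCreate(PETSC_COMM_WORLD,&ksp);CHKERRQ(ierr);
    ierr = KSPSetOperators(ksp,A,A);CHKERRQ(ierr);
    ierr = KSPSetType(ksp,KSPPREONLY);CHKERRQ(ierr);        // outer solve is just the PC application
    ierr = KSPGetPC(ksp,&pc);CHKERRQ(ierr);
    ierr = PCSetType(pc,PCREDISTRIBUTE);CHKERRQ(ierr);
    ierr = PCRedistributeGetKSP(pc,&innerksp);CHKERRQ(ierr);
    ierr = KSPSetType(innerksp,KSPCG);CHKERRQ(ierr);        // e.g. CG on the reduced, balanced system
    ierr = KSPSetFromOptions(ksp);CHKERRQ(ierr);
    ierr = KSPSolve(ksp,b,x);CHKERRQ(ierr);
*/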

PETSC_EXTERN PetscErrorCode PCCreate_Redistribute(PC pc)
{
  PetscErrorCode  ierr;
  PC_Redistribute *red;
  const char      *prefix;

  PetscFunctionBegin;
  ierr     = PetscNewLog(pc,&red);CHKERRQ(ierr);
  pc->data = (void*)red;

  pc->ops->apply          = PCApply_Redistribute;
  pc->ops->applytranspose = NULL;
  pc->ops->setup          = PCSetUp_Redistribute;
  pc->ops->destroy        = PCDestroy_Redistribute;
  pc->ops->setfromoptions = PCSetFromOptions_Redistribute;
  pc->ops->view           = PCView_Redistribute;

  ierr = KSPCreate(PetscObjectComm((PetscObject)pc),&red->ksp);CHKERRQ(ierr);
  ierr = KSPSetErrorIfNotConverged(red->ksp,pc->erroriffailure);CHKERRQ(ierr);
  ierr = PetscObjectIncrementTabLevel((PetscObject)red->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
  ierr = PetscLogObjectParent((PetscObject)pc,(PetscObject)red->ksp);CHKERRQ(ierr);
  ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
  ierr = KSPSetOptionsPrefix(red->ksp,prefix);CHKERRQ(ierr);
  ierr = KSPAppendOptionsPrefix(red->ksp,"redistribute_");CHKERRQ(ierr);
  PetscFunctionReturn(0);
}