#define PETSCKSP_DLL

/*
  This file defines a preconditioner that removes the rows (and matching columns) of the
  matrix that contain only a diagonal entry, redistributes the remaining rows evenly
  across the processes, and solves that load-balanced subproblem with a KSP.
*/
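/*
   Typical usage, as described in the PCREDISTRIBUTE manual page at the end of this
   file (a sketch; any inner KSP/PC combination may be substituted):
       -ksp_type preonly -pc_type redistribute -redistribute_ksp_type cg -redistribute_pc_type bjacobi
*/
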
#include "private/pcimpl.h"     /*I "petscksp.h" I*/
#include "petscksp.h"

typedef struct {
  KSP          ksp;           /* inner solver for the redistributed (load-balanced) system */
  Vec          x,b;           /* solution and right hand side in the redistributed layout */
  VecScatter   scatter;       /* maps between the original layout and the redistributed one */
  IS           is;            /* global numbers of the rows kept in the redistributed system */
  PetscInt     dcnt,*drows;   /* number of, and local indices of, the rows that have only a diagonal entry */
  PetscScalar  *diag;         /* inverses of the diagonal entries of those rows */
  Vec          work;          /* full-size work vector used to form the reduced right hand side */
} PC_Redistribute;

#undef __FUNCT__
#define __FUNCT__ "PCView_Redistribute"
static PetscErrorCode PCView_Redistribute(PC pc,PetscViewer viewer)
{
  PC_Redistribute *red = (PC_Redistribute*)pc->data;
  PetscErrorCode  ierr;
  PetscBool       iascii,isstring;
  PetscInt        ncnt,N;

  PetscFunctionBegin;
  ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERASCII,&iascii);CHKERRQ(ierr);
  ierr = PetscTypeCompare((PetscObject)viewer,PETSCVIEWERSTRING,&isstring);CHKERRQ(ierr);
  if (iascii) {
    ierr = MPI_Allreduce(&red->dcnt,&ncnt,1,MPIU_INT,MPI_SUM,((PetscObject)pc)->comm);CHKERRQ(ierr);
    ierr = MatGetSize(pc->pmat,&N,PETSC_NULL);CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer,"    Number of rows eliminated %D, percentage of rows eliminated %g\n",ncnt,100.0*((PetscReal)ncnt)/((PetscReal)N));CHKERRQ(ierr);
    ierr = PetscViewerASCIIPrintf(viewer,"  Redistribute preconditioner: \n");CHKERRQ(ierr);
    ierr = KSPView(red->ksp,viewer);CHKERRQ(ierr);
  } else if (isstring) {
    ierr = PetscViewerStringSPrintf(viewer," Redistribute preconditioner");CHKERRQ(ierr);
    ierr = KSPView(red->ksp,viewer);CHKERRQ(ierr);
  } else {
    SETERRQ1(((PetscObject)pc)->comm,PETSC_ERR_SUP,"Viewer type %s not supported for PC redistribute",((PetscObject)viewer)->type_name);
  }
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetUp_Redistribute"
static PetscErrorCode PCSetUp_Redistribute(PC pc)
{
  PC_Redistribute   *red = (PC_Redistribute*)pc->data;
  PetscErrorCode    ierr;
  MPI_Comm          comm;
  PetscInt          rstart,rend,i,nz,cnt,*rows,ncnt,dcnt,*drows;
  PetscLayout       map,nmap;
  PetscMPIInt       size,imdex,tag,n;
  PetscInt          *source = PETSC_NULL;
  PetscMPIInt       *nprocs = PETSC_NULL,nrecvs;
  PetscInt          j,nsends;
  PetscInt          *owner = PETSC_NULL,*starts = PETSC_NULL,count,slen;
  PetscInt          *rvalues,*svalues,recvtotal;
  PetscMPIInt       *onodes1,*olengths1;
  MPI_Request       *send_waits = PETSC_NULL,*recv_waits = PETSC_NULL;
  MPI_Status        recv_status,*send_status;
  Vec               tvec,diag;
  Mat               tmat;
  const PetscScalar *d;

  PetscFunctionBegin;
  if (pc->setupcalled) {
    ierr = KSPGetOperators(red->ksp,PETSC_NULL,&tmat,PETSC_NULL);CHKERRQ(ierr);
    ierr = MatGetSubMatrix(pc->pmat,red->is,red->is,MAT_REUSE_MATRIX,&tmat);CHKERRQ(ierr);
    ierr = KSPSetOperators(red->ksp,tmat,tmat,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
  } else {
    PetscInt NN;

    ierr = PetscObjectGetComm((PetscObject)pc,&comm);CHKERRQ(ierr);
    ierr = MPI_Comm_size(comm,&size);CHKERRQ(ierr);
    ierr = PetscObjectGetNewTag((PetscObject)pc,&tag);CHKERRQ(ierr);

    /* count non-diagonal rows on process */
    ierr = MatGetOwnershipRange(pc->mat,&rstart,&rend);CHKERRQ(ierr);
    cnt  = 0;
    for (i=rstart; i<rend; i++) {
      ierr = MatGetRow(pc->mat,i,&nz,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr);
      if (nz > 1) cnt++;
      ierr = MatRestoreRow(pc->mat,i,&nz,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr);
    }
    ierr = PetscMalloc(cnt*sizeof(PetscInt),&rows);CHKERRQ(ierr);
    ierr = PetscMalloc((rend - rstart - cnt)*sizeof(PetscInt),&drows);CHKERRQ(ierr);

    /* list the non-diagonal rows (global numbering) and the diagonal-only rows (local numbering) */
    cnt  = 0; dcnt = 0;
    for (i=rstart; i<rend; i++) {
      ierr = MatGetRow(pc->mat,i,&nz,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr);
      if (nz > 1) rows[cnt++] = i;
      else drows[dcnt++] = i - rstart;
      ierr = MatRestoreRow(pc->mat,i,&nz,PETSC_NULL,PETSC_NULL);CHKERRQ(ierr);
    }

    /* create PetscLayout for non-diagonal rows on each process */
    ierr = PetscLayoutCreate(comm,&map);CHKERRQ(ierr);
    ierr = PetscLayoutSetLocalSize(map,cnt);CHKERRQ(ierr);
    ierr = PetscLayoutSetBlockSize(map,1);CHKERRQ(ierr);
    ierr = PetscLayoutSetUp(map);CHKERRQ(ierr);
    rstart = map->rstart;
    rend   = map->rend;

    /* create PetscLayout for load-balanced non-diagonal rows on each process */
    ierr = PetscLayoutCreate(comm,&nmap);CHKERRQ(ierr);
    ierr = MPI_Allreduce(&cnt,&ncnt,1,MPIU_INT,MPI_SUM,comm);CHKERRQ(ierr);
    ierr = PetscLayoutSetSize(nmap,ncnt);CHKERRQ(ierr);
    ierr = PetscLayoutSetBlockSize(nmap,1);CHKERRQ(ierr);
    ierr = PetscLayoutSetUp(nmap);CHKERRQ(ierr);
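    /* nmap now assigns each process an (approximately) equal share of the ncnt kept rows */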

    ierr = MatGetSize(pc->pmat,&NN,PETSC_NULL);CHKERRQ(ierr);
    ierr = PetscInfo2(pc,"Number of diagonal rows eliminated %D, percentage eliminated %g\n",NN-ncnt,100.0*((PetscReal)(NN-ncnt))/((PetscReal)(NN)));CHKERRQ(ierr);
    /*
        This code is taken from VecScatterCreate_PtoS();
        it determines which rows need to be moved where in order
        to load balance the non-diagonal rows.
    */
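    /*
       The exchange follows the usual rendezvous pattern: count how many of this
       process's kept rows belong to each process under nmap, exchange the message
       counts and lengths, post the receives, then send the global row indices.
    */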
    /*  count number of contributors to each processor */
    ierr = PetscMalloc2(size,PetscMPIInt,&nprocs,cnt,PetscInt,&owner);CHKERRQ(ierr);
    ierr = PetscMemzero(nprocs,size*sizeof(PetscMPIInt));CHKERRQ(ierr);
    j      = 0;
    nsends = 0;
    for (i=rstart; i<rend; i++) {
      if (i < nmap->range[j]) j = 0;
      for (; j<size; j++) {
        if (i < nmap->range[j+1]) {
          if (!nprocs[j]++) nsends++;
          owner[i-rstart] = j;
          break;
        }
      }
    }
    /* inform other processors of number of messages and max length */
    ierr = PetscGatherNumberOfMessages(comm,PETSC_NULL,nprocs,&nrecvs);CHKERRQ(ierr);
    ierr = PetscGatherMessageLengths(comm,nsends,nrecvs,nprocs,&onodes1,&olengths1);CHKERRQ(ierr);
    ierr = PetscSortMPIIntWithArray(nrecvs,onodes1,olengths1);CHKERRQ(ierr);
    recvtotal = 0; for (i=0; i<nrecvs; i++) recvtotal += olengths1[i];
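    /* recvtotal is the number of kept rows this process will own in the balanced layout */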

    /* post receives: rvalues - rows I will own; count - running offset into rvalues */
    ierr = PetscMalloc3(recvtotal,PetscInt,&rvalues,nrecvs,PetscInt,&source,nrecvs,MPI_Request,&recv_waits);CHKERRQ(ierr);
    count  = 0;
    for (i=0; i<nrecvs; i++) {
      ierr  = MPI_Irecv((rvalues+count),olengths1[i],MPIU_INT,onodes1[i],tag,comm,recv_waits+i);CHKERRQ(ierr);
      count += olengths1[i];
    }

    /* do sends:
       1) starts[i] gives the starting index in svalues for stuff going to
       the ith processor
    */
    ierr = PetscMalloc3(cnt,PetscInt,&svalues,nsends,MPI_Request,&send_waits,size,PetscInt,&starts);CHKERRQ(ierr);
    starts[0]  = 0;
    for (i=1; i<size; i++) { starts[i] = starts[i-1] + nprocs[i-1];}
    for (i=0; i<cnt; i++) {
      svalues[starts[owner[i]]++] = rows[i];
    }
    for (i=0; i<cnt; i++) rows[i] = rows[i] - rstart;
    red->drows = drows;
    red->dcnt  = dcnt;
    ierr = PetscFree(rows);CHKERRQ(ierr);

    starts[0] = 0;
    for (i=1; i<size; i++) { starts[i] = starts[i-1] + nprocs[i-1];}
    count = 0;
    for (i=0; i<size; i++) {
      if (nprocs[i]) {
        ierr = MPI_Isend(svalues+starts[i],nprocs[i],MPIU_INT,i,tag,comm,send_waits+count++);CHKERRQ(ierr);
      }
    }

    /* wait on receives; each receive was posted directly at its offset in rvalues,
       so the data is already in place and only the lengths need to be tallied */
    count  = nrecvs;
    slen   = 0;
    while (count) {
      ierr = MPI_Waitany(nrecvs,recv_waits,&imdex,&recv_status);CHKERRQ(ierr);
      ierr = MPI_Get_count(&recv_status,MPIU_INT,&n);CHKERRQ(ierr);
      slen += n;
      count--;
    }
    if (slen != recvtotal) SETERRQ2(PETSC_COMM_SELF,PETSC_ERR_PLIB,"Total message length %D not as expected %D",slen,recvtotal);

    /* red->is lists, in global numbering, the rows this process owns in the balanced layout */
    ierr = ISCreateGeneral(comm,slen,rvalues,PETSC_COPY_VALUES,&red->is);CHKERRQ(ierr);

    /* free up all work space */
    ierr = PetscFree(olengths1);CHKERRQ(ierr);
    ierr = PetscFree(onodes1);CHKERRQ(ierr);
    ierr = PetscFree3(rvalues,source,recv_waits);CHKERRQ(ierr);
    ierr = PetscFree2(nprocs,owner);CHKERRQ(ierr);
    if (nsends) {   /* wait on sends */
      ierr = PetscMalloc(nsends*sizeof(MPI_Status),&send_status);CHKERRQ(ierr);
      ierr = MPI_Waitall(nsends,send_waits,send_status);CHKERRQ(ierr);
      ierr = PetscFree(send_status);CHKERRQ(ierr);
    }
    ierr = PetscFree3(svalues,send_waits,starts);CHKERRQ(ierr);
    ierr = PetscLayoutDestroy(map);CHKERRQ(ierr);
    ierr = PetscLayoutDestroy(nmap);CHKERRQ(ierr);

    ierr = VecCreateMPI(comm,slen,PETSC_DETERMINE,&red->b);CHKERRQ(ierr);
    ierr = VecDuplicate(red->b,&red->x);CHKERRQ(ierr);
    ierr = MatGetVecs(pc->pmat,&tvec,PETSC_NULL);CHKERRQ(ierr);
    ierr = VecScatterCreate(tvec,red->is,red->b,PETSC_NULL,&red->scatter);CHKERRQ(ierr);
    ierr = VecDestroy(tvec);CHKERRQ(ierr);
    ierr = MatGetSubMatrix(pc->pmat,red->is,red->is,MAT_INITIAL_MATRIX,&tmat);CHKERRQ(ierr);
    ierr = KSPSetOperators(red->ksp,tmat,tmat,SAME_NONZERO_PATTERN);CHKERRQ(ierr);
    ierr = MatDestroy(tmat);CHKERRQ(ierr);
  }

  /* get the diagonal portion of the matrix and store the inverses of the entries
     for the eliminated rows; PCApply_Redistribute() then only needs multiplications */
  ierr = PetscMalloc(red->dcnt*sizeof(PetscScalar),&red->diag);CHKERRQ(ierr);
  ierr = MatGetVecs(pc->pmat,&diag,PETSC_NULL);CHKERRQ(ierr);
  ierr = MatGetDiagonal(pc->pmat,diag);CHKERRQ(ierr);
  ierr = VecGetArrayRead(diag,&d);CHKERRQ(ierr);
  for (i=0; i<red->dcnt; i++) {
    red->diag[i] = 1.0/d[red->drows[i]];
  }
  ierr = VecRestoreArrayRead(diag,&d);CHKERRQ(ierr);
  ierr = VecDestroy(diag);CHKERRQ(ierr);
  ierr = KSPSetUp(red->ksp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}


#undef __FUNCT__
#define __FUNCT__ "PCApply_Redistribute"
static PetscErrorCode PCApply_Redistribute(PC pc,Vec b,Vec x)
{
  PC_Redistribute   *red = (PC_Redistribute*)pc->data;
  PetscErrorCode    ierr;
  PetscInt          dcnt = red->dcnt,i;
  const PetscInt    *drows = red->drows;
  PetscScalar       *xwork;
  const PetscScalar *bwork,*diag = red->diag;

  PetscFunctionBegin;
  if (!red->work) {
    ierr = VecDuplicate(b,&red->work);CHKERRQ(ierr);
  }
  /* compute the rows of the solution that have diagonal entries only: x = diag(A)^{-1} b there */
  ierr = VecSet(x,0.0);CHKERRQ(ierr);
  ierr = VecGetArray(x,&xwork);CHKERRQ(ierr);
  ierr = VecGetArrayRead(b,&bwork);CHKERRQ(ierr);
  for (i=0; i<dcnt; i++) {
    xwork[drows[i]] = diag[i]*bwork[drows[i]];
  }
  ierr = PetscLogFlops(dcnt);CHKERRQ(ierr);
  ierr = VecRestoreArray(x,&xwork);CHKERRQ(ierr);
  ierr = VecRestoreArrayRead(b,&bwork);CHKERRQ(ierr);
  /* update the right hand side for the reduced system with the diagonal rows (and corresponding columns) removed */
  ierr = MatMult(pc->pmat,x,red->work);CHKERRQ(ierr);
  ierr = VecAYPX(red->work,-1.0,b);CHKERRQ(ierr);   /* red->work = b - A x */

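  /* on the kept rows red->work now holds b_s - A_sd x_d, the right hand side of the reduced
     system; scatter it to the balanced layout, solve there, and scatter the solution back into x */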
  ierr = VecScatterBegin(red->scatter,red->work,red->b,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatter,red->work,red->b,INSERT_VALUES,SCATTER_FORWARD);CHKERRQ(ierr);
  ierr = KSPSolve(red->ksp,red->b,red->x);CHKERRQ(ierr);
  ierr = VecScatterBegin(red->scatter,red->x,x,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  ierr = VecScatterEnd(red->scatter,red->x,x,INSERT_VALUES,SCATTER_REVERSE);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCDestroy_Redistribute"
static PetscErrorCode PCDestroy_Redistribute(PC pc)
{
  PC_Redistribute *red = (PC_Redistribute*)pc->data;
  PetscErrorCode  ierr;

  PetscFunctionBegin;
  if (red->scatter)  {ierr = VecScatterDestroy(red->scatter);CHKERRQ(ierr);}
  if (red->is)       {ierr = ISDestroy(red->is);CHKERRQ(ierr);}
  if (red->b)        {ierr = VecDestroy(red->b);CHKERRQ(ierr);}
  if (red->x)        {ierr = VecDestroy(red->x);CHKERRQ(ierr);}
  if (red->ksp)      {ierr = KSPDestroy(red->ksp);CHKERRQ(ierr);}
  if (red->work)     {ierr = VecDestroy(red->work);CHKERRQ(ierr);}
  ierr = PetscFree(red->drows);CHKERRQ(ierr);
  ierr = PetscFree(red->diag);CHKERRQ(ierr);
  ierr = PetscFree(pc->data);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCSetFromOptions_Redistribute"
static PetscErrorCode PCSetFromOptions_Redistribute(PC pc)
{
  PetscErrorCode  ierr;
  PC_Redistribute *red = (PC_Redistribute*)pc->data;

  PetscFunctionBegin;
  ierr = KSPSetFromOptions(red->ksp);CHKERRQ(ierr);
  PetscFunctionReturn(0);
}

#undef __FUNCT__
#define __FUNCT__ "PCRedistributeGetKSP"
/*@
   PCRedistributeGetKSP - Gets the KSP created by the PCREDISTRIBUTE preconditioner

   Not Collective

   Input Parameter:
.  pc - the preconditioner context

   Output Parameter:
.  innerksp - the inner KSP

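   Example usage (a sketch; the inner KSP can be configured like any other KSP):
.vb
   KSP inner;
   ierr = PCRedistributeGetKSP(pc,&inner);CHKERRQ(ierr);
   ierr = KSPSetType(inner,KSPCG);CHKERRQ(ierr);
.ve
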
   Level: advanced

.keywords: PC, redistribute solve
@*/
PetscErrorCode  PCRedistributeGetKSP(PC pc,KSP *innerksp)
{
  PC_Redistribute *red = (PC_Redistribute*)pc->data;

  PetscFunctionBegin;
  PetscValidHeaderSpecific(pc,PC_CLASSID,1);
  PetscValidPointer(innerksp,2);
  *innerksp = red->ksp;
  PetscFunctionReturn(0);
}

/* -------------------------------------------------------------------------------------*/
/*MC
     PCREDISTRIBUTE - Redistributes a matrix for load balancing, removing the rows that contain only a diagonal entry, and then applies a KSP to the reduced matrix

     Options for the inner solve can be set with -redistribute_ksp_xxx <values> and -redistribute_pc_xxx <values>

     Notes: Usually run this with -ksp_type preonly

     If you have used MatZeroRows() to eliminate (for example, Dirichlet) boundary conditions for a symmetric problem, then you can use, for example, -ksp_type preonly
     -pc_type redistribute -redistribute_ksp_type cg -redistribute_pc_type bjacobi -redistribute_sub_pc_type icc to take advantage of the symmetry.
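
     For that symmetric case the full command line might look like this (a sketch; "ex1" stands in for your executable):
.vb
     ./ex1 -ksp_type preonly -pc_type redistribute -redistribute_ksp_type cg -redistribute_pc_type bjacobi -redistribute_sub_pc_type icc
.ve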

   Level: intermediate

.seealso:  PCCreate(), PCSetType(), PCType (for list of available types), PCRedistributeGetKSP()
M*/

EXTERN_C_BEGIN
#undef __FUNCT__
#define __FUNCT__ "PCCreate_Redistribute"
PetscErrorCode  PCCreate_Redistribute(PC pc)
{
  PetscErrorCode  ierr;
  PC_Redistribute *red;
  const char      *prefix;

  PetscFunctionBegin;
  ierr = PetscNewLog(pc,PC_Redistribute,&red);CHKERRQ(ierr);
  pc->data            = (void*)red;

  pc->ops->apply           = PCApply_Redistribute;
  pc->ops->applytranspose  = 0;
  pc->ops->setup           = PCSetUp_Redistribute;
  pc->ops->destroy         = PCDestroy_Redistribute;
  pc->ops->setfromoptions  = PCSetFromOptions_Redistribute;
  pc->ops->view            = PCView_Redistribute;

  ierr = KSPCreate(((PetscObject)pc)->comm,&red->ksp);CHKERRQ(ierr);
  ierr = PetscObjectIncrementTabLevel((PetscObject)red->ksp,(PetscObject)pc,1);CHKERRQ(ierr);
  ierr = PetscLogObjectParent(pc,red->ksp);CHKERRQ(ierr);
  ierr = PCGetOptionsPrefix(pc,&prefix);CHKERRQ(ierr);
  ierr = KSPSetOptionsPrefix(red->ksp,prefix);CHKERRQ(ierr);
  ierr = KSPAppendOptionsPrefix(red->ksp,"redistribute_");CHKERRQ(ierr);
  PetscFunctionReturn(0);
}
EXTERN_C_END
369